diff --git a/ci/colorize_cppcheck_results.py b/ci/colorize_cppcheck_results.py index 0c52f9a156e..e74f9bdcf2a 100755 --- a/ci/colorize_cppcheck_results.py +++ b/ci/colorize_cppcheck_results.py @@ -4,25 +4,25 @@ def colorize(lines): def bold(s): - return '\x1b[1m{}\x1b[0m'.format(s) + return "\x1b[1m{}\x1b[0m".format(s) def red(s): - return '\x1b[31m{}\x1b[0m'.format(s) + return "\x1b[31m{}\x1b[0m".format(s) def green(s): - return '\x1b[32m{}\x1b[0m'.format(s) + return "\x1b[32m{}\x1b[0m".format(s) def yellow(s): - return '\x1b[33m{}\x1b[0m'.format(s) + return "\x1b[33m{}\x1b[0m".format(s) def blue(s): - return '\x1b[34m{}\x1b[0m'.format(s) + return "\x1b[34m{}\x1b[0m".format(s) def magenta(s): # purple - return '\x1b[35m{}\x1b[0m'.format(s) + return "\x1b[35m{}\x1b[0m".format(s) def cyan(s): - return '\x1b[36m{}\x1b[0m'.format(s) + return "\x1b[36m{}\x1b[0m".format(s) def format_severity(txt, severity): """ @@ -37,7 +37,7 @@ def format_severity(txt, severity): return red(txt) if severity == "warning": return yellow(txt) - if severity == 'style': + if severity == "style": return blue(txt) if severity == "performance": return cyan(txt) @@ -50,9 +50,9 @@ def format_severity(txt, severity): return txt - re_message = re.compile(r'\[(?P<file>.*):(?P<line>.*?)\]:' - r'\((?P<severity>.*?)\),\[(?P<id>.*?)\],' - r'(?P<message>.*)') + re_message = re.compile( + r"\[(?P<file>.*):(?P<line>.*?)\]:" r"\((?P<severity>.*?)\),\[(?P<id>.*?)\]," r"(?P<message>.*)" + ) colored_lines = [] matched_messages = [] @@ -68,10 +68,9 @@ def format_severity(txt, severity): else: colored_lines.append(red(line)) - severity_order = ['error', 'warning', 'performance', 'portability', - 'style', 'information', 'debug', 'none'] + severity_order = ["error", "warning", "performance", "portability", "style", "information", "debug", "none"] - counter = Counter(d['severity'] for d in matched_messages) + counter = Counter(d["severity"] for d in matched_messages) summary_line = "\n\n==========================================\n" summary_line += " {}:\n".format(bold(red("CPPCHECK Summary"))) summary_line += "------------------------------------------" @@ -88,61 +87,59 @@ def format_severity(txt, severity): summary_line += "\n==========================================\n\n" - n_errors = counter['error'] + n_errors = counter["error"] # if n_errors: # summary_line += red("{} Errors".format(n_errors)) # else: # summary_line = green("No Errors") - n_warnings = counter['warning'] + n_warnings = counter["warning"] # if n_warnings: # summary_line += yellow("{} Warnings".format(n_warnings)) # else: # summary_line = green("No Warnings") - n_styles = counter['style'] - n_performances = counter['performance'] - n_portabilities = counter['portability'] + n_styles = counter["style"] + n_performances = counter["performance"] + n_portabilities = counter["portability"] # n_informations = counter['information'] # n_debugs = counter['debug'] # Start by sorting by filename - matched_messages.sort(key=lambda d: d['file']) - matched_messages.sort(key=lambda d: severity_order.index(d['severity'])) + matched_messages.sort(key=lambda d: d["file"]) + matched_messages.sort(key=lambda d: severity_order.index(d["severity"])) # Now sort by the severity we cared about for d in matched_messages: - - f = d['file'] - line = d['line'] - severity = d['severity'] - iid = d['id'] - message = d['message'] + f = d["file"] + line = d["line"] + severity = d["severity"] + iid = d["id"] + message = d["message"] colored_lines.append( - "[{f}:{line}]:({severity}),[{i}],{message}" - .format(f=magenta(f), # format_severity(f, severity),
line=green(line), - severity=format_severity(severity, severity), - i=bold(iid), - message=message)) + "[{f}:{line}]:({severity}),[{i}],{message}".format( + f=magenta(f), # format_severity(f, severity), + line=green(line), + severity=format_severity(severity, severity), + i=bold(iid), + message=message, + ) + ) - return (colored_lines, summary_line, n_errors, n_warnings, - n_performances, n_portabilities, n_styles) + return (colored_lines, summary_line, n_errors, n_warnings, n_performances, n_portabilities, n_styles) -if __name__ == '__main__': - with open('cppcheck.txt', 'r') as f: +if __name__ == "__main__": + with open("cppcheck.txt", "r") as f: content = f.read() lines = content.splitlines() - (colored_lines, summary_line, n_errors, n_warnings, - n_performances, n_portabilities, n_styles) = colorize(lines) + (colored_lines, summary_line, n_errors, n_warnings, n_performances, n_portabilities, n_styles) = colorize(lines) print(summary_line) # sys.stdout.writelines(colored_lines) print("\n".join(colored_lines)) - n_tot = (n_errors + n_warnings + n_performances - + n_portabilities + n_styles) + n_tot = n_errors + n_warnings + n_performances + n_portabilities + n_styles if n_tot > 0: exit(1) diff --git a/developer/conan/Bump_deps_vendor_conan_dependencies.ipynb b/developer/conan/Bump_deps_vendor_conan_dependencies.ipynb index 87d07cf8edc..85efbb41909 100644 --- a/developer/conan/Bump_deps_vendor_conan_dependencies.ipynb +++ b/developer/conan/Bump_deps_vendor_conan_dependencies.ipynb @@ -56,114 +56,112 @@ "outputs": [], "source": [ "class PkgInfo:\n", - " \n", " @staticmethod\n", " def from_metadata(metadata_path):\n", - " \n", " name, version, user, channel = p.relative_to(CONAN_CACHE).parent.parts\n", - " \n", - " with open(p, 'r') as f:\n", + "\n", + " with open(p, \"r\") as f:\n", " data = json.load(f)\n", - " revision = data['recipe']['revision']\n", - " \n", + " revision = data[\"recipe\"][\"revision\"]\n", + "\n", " return PkgInfo(name=name, version=version, user=user, channel=channel, revision=revision)\n", "\n", " @staticmethod\n", " def from_str(reference):\n", - " n, revision = reference.split('#')\n", - " if '@' in n:\n", - " name_version, user_channel = n.split('@')\n", + " n, revision = reference.split(\"#\")\n", + " if \"@\" in n:\n", + " name_version, user_channel = n.split(\"@\")\n", " else:\n", " name_version = n\n", - " user_channel = ''\n", - " \n", - " name, version = name_version.split('/')\n", + " user_channel = \"\"\n", + "\n", + " name, version = name_version.split(\"/\")\n", " if user_channel:\n", - " user, channel = user_channel.split('/')\n", + " user, channel = user_channel.split(\"/\")\n", " else:\n", " user, channel = (None, None)\n", "\n", " return PkgInfo(name=name, version=version, user=user, channel=channel, revision=revision)\n", - " \n", + "\n", " def __init__(self, name, version, user, channel, revision):\n", " self.name = name\n", " self.version = version\n", " self.user = None\n", - " if user is not None and user != '_':\n", + " if user is not None and user != \"_\":\n", " self.user = user\n", " self.channel = None\n", - " if channel is not None and channel != '_':\n", + " if channel is not None and channel != \"_\":\n", " self.channel = channel\n", - " \n", + "\n", " self.revision = revision\n", "\n", - " self.remote = 'conancenter'\n", - " if self.name == 'ruby_installer':\n", - " self.remote = 'bincrafters'\n", - " elif self.name == 'openstudio_ruby':\n", - " self.remote = 'nrel'\n", - " \n", - " \n", - " def search_packages(self, verbose=True, 
skip_shared=False, local_cache=False, arch_only=None, compiler_version_only=None):\n", + " self.remote = \"conancenter\"\n", + " if self.name == \"ruby_installer\":\n", + " self.remote = \"bincrafters\"\n", + " elif self.name == \"openstudio_ruby\":\n", + " self.remote = \"nrel\"\n", + "\n", + " def search_packages(\n", + " self, verbose=True, skip_shared=False, local_cache=False, arch_only=None, compiler_version_only=None\n", + " ):\n", " \"\"\"Filters out packages (such as Windows MSVC 15)\n", - " \n", + "\n", " Args:\n", " ------\n", - " \n", + "\n", " * skip_shared (bool): Don't keep the shared ones\n", " * local_cache (bool, default False): if True, will search your cache. Otherwise will look in self.remote\n", - " \n", + "\n", " * arch_only (None or str): if specified, will keep only this arch (eg: 'x86')\n", " * compiler_version_only (None or str): if specified, will keep only this compiler.version (eg: '17')\n", "\n", " Example with boost:\n", " --------------------\n", - " \n", + "\n", " pkg_info = PkgInfo(name='boost', version=\"1.79.0\", user=None, channel=None, revision='f664bfe40e2245fa9baf1c742591d582')\n", - " \n", + "\n", " # Download everything\n", " pkg_info.download_all()\n", - " \n", + "\n", " !du -sh /Users/julien/.conan/bump_deps/boost/1.79.0/_/_/package/\n", " 21G boost/1.79.0/_/_/package/\n", "\n", " # Filter, but keep shared=True ones\n", " pkg_info.cleanup_skipped_packages(skip_shared=False)\n", - " \n", + "\n", " !du -sh /Users/julien/.conan/bump_deps/boost/1.79.0/_/_/package/\n", " 11G boost/1.79.0/_/_/package/\n", - " \n", + "\n", " # Remove the shared=True ones\n", " pkg_info.cleanup_skipped_packages(skip_shared=False)\n", - " \n", + "\n", " !du -sh /Users/julien/.conan/bump_deps/boost/1.79.0/_/_/package/\n", " 6.6G boost/1.79.0/_/_/package/\n", " \"\"\"\n", - " json_p = Path(f'{self.name}.json')\n", + " json_p = Path(f\"{self.name}.json\")\n", " args = [\"conan\", \"search\", \"--json\", str(json_p)]\n", " if not local_cache:\n", " args += [\"-r\", self.remote]\n", " args += [self.reference()]\n", " if verbose:\n", " print(args)\n", - " subprocess.check_call(args,\n", - " stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL)\n", - " with open(json_p, 'r') as f:\n", + " subprocess.check_call(args, stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL)\n", + " with open(json_p, \"r\") as f:\n", " data = json.load(f)\n", - " json_p.unlink(missing_ok=False) # remove tmp json\n", - " \n", - " packages = data['results'][0]['items'][0]['packages']\n", + " json_p.unlink(missing_ok=False) # remove tmp json\n", + "\n", + " packages = data[\"results\"][0][\"items\"][0][\"packages\"]\n", " keep_packages = []\n", " skipped_packages = []\n", " for p in packages:\n", - " settings = p['settings']\n", - " os_ = settings.get('os', None)\n", - " compiler_version = settings.get('compiler.version', None)\n", - " compiler = settings.get('compiler', None)\n", - " libcxx = settings.get('compiler.libcxx', None)\n", - " \n", + " settings = p[\"settings\"]\n", + " os_ = settings.get(\"os\", None)\n", + " compiler_version = settings.get(\"compiler.version\", None)\n", + " compiler = settings.get(\"compiler\", None)\n", + " libcxx = settings.get(\"compiler.libcxx\", None)\n", + "\n", " if arch_only is not None:\n", - " arch = settings.get('arch', None)\n", + " arch = settings.get(\"arch\", None)\n", " if arch not in [arch_only, None]:\n", " if verbose:\n", " print(f\"Skipping package with arch {arch} for os {os_}, {compiler=} for pkg {self.name}\")\n", @@ -172,53 +170,54 @@ " if 
compiler_version_only is not None:\n", " if compiler_version not in [compiler_version_only, None]:\n", " if verbose:\n", - " print(f\"Skipping package with compiler.version {compiler_version} for os {os_}, {compiler=} for pkg {self.name}\")\n", + " print(\n", + " f\"Skipping package with compiler.version {compiler_version} for os {os_}, {compiler=} for pkg {self.name}\"\n", + " )\n", " skipped_packages.append(p)\n", " continue\n", - " \n", - " is_shared = p['options'].get('shared', None) == 'True'\n", + "\n", + " is_shared = p[\"options\"].get(\"shared\", None) == \"True\"\n", " if is_shared and skip_shared:\n", " if verbose:\n", " print(f\"Skipping SHARED package for os {os_}, {compiler=} for pkg {self.name}\")\n", " skipped_packages.append(p)\n", " continue\n", - " \n", - " if os_ == 'Windows':\n", - " if compiler_version not in ['16', '17', None]:\n", + "\n", + " if os_ == \"Windows\":\n", + " if compiler_version not in [\"16\", \"17\", None]:\n", " if verbose:\n", " print(f\"Skipping Windows {compiler_version=} for pkg {self.name}\")\n", " skipped_packages.append(p)\n", " continue\n", - " \n", - " runtime = settings.get('compiler.runtime', None)\n", - " if runtime not in ['MD', 'MDd', None]:\n", + "\n", + " runtime = settings.get(\"compiler.runtime\", None)\n", + " if runtime not in [\"MD\", \"MDd\", None]:\n", " if verbose:\n", " print(f\"Skipping Windows {runtime=} for pkg {self.name}\")\n", " skipped_packages.append(p)\n", " continue\n", - " elif os_ == 'Linux':\n", - " \n", - " if compiler not in ['gcc', 'clang', None]:\n", + " elif os_ == \"Linux\":\n", + " if compiler not in [\"gcc\", \"clang\", None]:\n", " if verbose:\n", " print(f\"Skipping Linux {compiler=} for pkg {self.name}\")\n", " skipped_packages.append(p)\n", " continue\n", - " \n", - " if libcxx not in ['libstdc++11', 'libc++', None]:\n", + "\n", + " if libcxx not in [\"libstdc++11\", \"libc++\", None]:\n", " if verbose:\n", " print(f\"Skipping Linux {libcxx=} for pkg {self.name} with ({compiler=})\")\n", " skipped_packages.append(p)\n", " continue\n", - " \n", - " if compiler == 'gcc':\n", - " if compiler_version not in ['7', '8', '9', '10', '11', '12', None]:\n", + "\n", + " if compiler == \"gcc\":\n", + " if compiler_version not in [\"7\", \"8\", \"9\", \"10\", \"11\", \"12\", None]:\n", " if verbose:\n", " print(f\"Skipping Linux gcc {compiler_version=} for pkg {self.name}\")\n", " skipped_packages.append(p)\n", " continue\n", - " \n", - " elif os_ == 'Macos':\n", - " if libcxx not in ['libc++', None]:\n", + "\n", + " elif os_ == \"Macos\":\n", + " if libcxx not in [\"libc++\", None]:\n", " if verbose:\n", " print(f\"Skipping Macos {libcxx=} for pkg {self.name} with ({compiler=})\")\n", " skipped_packages.append(p)\n", @@ -229,86 +228,95 @@ " print(\"Unknown os: {os_}\")\n", " skipped_packages.append(p)\n", " continue\n", - " \n", + "\n", " keep_packages.append(p)\n", - " \n", + "\n", " return keep_packages, skipped_packages\n", "\n", " def download_all(self):\n", " subprocess.check_call(\n", - " ['conan', 'download', '-r', self.remote, self.reference()],\n", + " [\"conan\", \"download\", \"-r\", self.remote, self.reference()],\n", " # stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL\n", " )\n", - " \n", + "\n", " def download_specific_packages(self):\n", - " \"\"\"Filters out the stuff we don't need by calling `search_packages`\n", - " \"\"\"\n", + " \"\"\"Filters out the stuff we don't need by calling `search_packages`\"\"\"\n", " packages, _ = self.search_packages()\n", - " \n", + "\n", " for p_dict in 
packages:\n", " print(p_dict)\n", - " pkg_id = p_dict['id']\n", + " pkg_id = p_dict[\"id\"]\n", " subprocess.check_call(\n", - " ['conan', 'download', '-r', self.remote, f\"{self.reference()}:{pkg_id}\"],\n", - " #stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL\n", + " [\"conan\", \"download\", \"-r\", self.remote, f\"{self.reference()}:{pkg_id}\"],\n", + " # stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL\n", " )\n", - " \n", + "\n", " def upload_to_nrel(self):\n", " subprocess.check_call(\n", - " ['conan', 'upload', '-r', 'nrel', '--all', '--parallel',\n", - " '--no-overwrite', 'all', self.reference()],\n", + " [\"conan\", \"upload\", \"-r\", \"nrel\", \"--all\", \"--parallel\", \"--no-overwrite\", \"all\", self.reference()],\n", " # stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL\n", " )\n", - " \n", + "\n", " def upload_specific_packages_to_nrel(self, arch_only=None, compiler_version_only=None):\n", " \"\"\"Filters out the stuff we don't need by calling `search_packages(local_cache=True)`\n", " And upload only the packages that matches\n", " \"\"\"\n", " if arch_only is None and compiler_version_only is None:\n", " raise ValueError(\"Provide at least one filter!\")\n", - " packages, _ = self.search_packages(arch_only=arch_only, compiler_version_only=compiler_version_only, local_cache=True)\n", - " \n", + " packages, _ = self.search_packages(\n", + " arch_only=arch_only, compiler_version_only=compiler_version_only, local_cache=True\n", + " )\n", + "\n", " for p_dict in packages:\n", " print(p_dict)\n", - " pkg_id = p_dict['id']\n", - " args = ['conan', 'upload', '-r', 'nrel', '--all', '--parallel',\n", - " '--no-overwrite', 'all', f\"{self.reference()}:{pkg_id}\"]\n", + " pkg_id = p_dict[\"id\"]\n", + " args = [\n", + " \"conan\",\n", + " \"upload\",\n", + " \"-r\",\n", + " \"nrel\",\n", + " \"--all\",\n", + " \"--parallel\",\n", + " \"--no-overwrite\",\n", + " \"all\",\n", + " f\"{self.reference()}:{pkg_id}\",\n", + " ]\n", " print(\" \".join(args))\n", " subprocess.check_call(\n", " args,\n", - " #stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL\n", + " # stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL\n", " )\n", - " \n", + "\n", " def package_dir(self):\n", " p = CONAN_CACHE / f\"{self.name}/{self.version}\"\n", " if self.user is not None:\n", " p /= f\"{self.user}/{self.channel}\"\n", " else:\n", " p /= \"_/_\"\n", - " p /= 'package'\n", + " p /= \"package\"\n", " return p\n", - " \n", + "\n", " def cleanup_skipped_packages(self, remote=None, skip_shared=False, arch_only=None):\n", " \"\"\"if remote is none, cleans up your local cache\"\"\"\n", " _, skipped_packages = self.search_packages(skip_shared=skip_shared, arch_only=arch_only)\n", " for p_dict in skipped_packages:\n", - " pkg_id = p_dict['id']\n", - " cmd_args = ['conan', 'remove', '-f', self.reference(), '-p', pkg_id]\n", + " pkg_id = p_dict[\"id\"]\n", + " cmd_args = [\"conan\", \"remove\", \"-f\", self.reference(), \"-p\", pkg_id]\n", " if remote is not None:\n", - " cmd_args += ['-r', remote]\n", - " \n", + " cmd_args += [\"-r\", remote]\n", + "\n", " subprocess.run(cmd_args)\n", - " \n", + "\n", " def reference(self):\n", " s = f\"{self.name}/{self.version}@\"\n", " if self.user is not None:\n", " s += f\"{self.user}/{self.channel}\"\n", " s += f\"#{self.revision}\"\n", " return s\n", - " \n", + "\n", " def __repr__(self):\n", " return self.reference()\n", - " \n", + "\n", " def __eq__(self, other):\n", " return self.reference() == other.reference()" ] @@ -352,10 +360,10 @@ "metadata": {}, 
"outputs": [], "source": [ - "CONAN_CACHE = Path('~/.conan/bump_deps/').expanduser()\n", + "CONAN_CACHE = Path(\"~/.conan/bump_deps/\").expanduser()\n", "\n", "pkg_infos = []\n", - "for p in CONAN_CACHE.glob('**/metadata.json'):\n", + "for p in CONAN_CACHE.glob(\"**/metadata.json\"):\n", " pkg_infos.append(PkgInfo.from_metadata(p))\n", "pkg_infos.sort(key=lambda p: p.name)" ] @@ -429,7 +437,7 @@ " \"termcap/1.3.1@#733491d29bb22e81d06edaf78c6727c9\",\n", " \"tinygltf/2.5.0@#c8b2aca9505e86312bb42aa0e1c639ec\",\n", " \"websocketpp/0.8.2@#3fd704c4c5388d9c08b11af86f79f616\",\n", - " \"zlib/1.2.12@#3b9e037ae1c615d045a06c67d88491ae\"\n", + " \"zlib/1.2.12@#3b9e037ae1c615d045a06c67d88491ae\",\n", "]\n", "old_pkgs = [PkgInfo.from_str(x) for x in old_refs]" ] @@ -496,8 +504,8 @@ "metadata": {}, "outputs": [], "source": [ - "conan_lock = '/Users/julien/Software/Others/OS-build-bump/conan.lock'\n", - "with open(conan_lock, 'r') as f:\n", + "conan_lock = \"/Users/julien/Software/Others/OS-build-bump/conan.lock\"\n", + "with open(conan_lock, \"r\") as f:\n", " conan_lock_data = json.load(f)" ] }, @@ -519,11 +527,11 @@ "outputs": [], "source": [ "pkg_infos_lock = []\n", - "for k, node in conan_lock_data['graph_lock']['nodes'].items():\n", - " if not 'ref' in node:\n", + "for k, node in conan_lock_data[\"graph_lock\"][\"nodes\"].items():\n", + " if not \"ref\" in node:\n", " print(f\"{k=} has no ref (it's node 0, that's normal)\")\n", - " continue \n", - " pkg_infos_lock.append(PkgInfo.from_str(reference=node['ref']))" + " continue\n", + " pkg_infos_lock.append(PkgInfo.from_str(reference=node[\"ref\"]))" ] }, { @@ -574,15 +582,15 @@ "outputs": [], "source": [ "for pkg_info in pkg_infos:\n", - " if pkg_info.name == 'openstudio_ruby':\n", + " if pkg_info.name == \"openstudio_ruby\":\n", " continue\n", "\n", " print(pkg_info.name)\n", - " \n", + "\n", " # Filter before downloading:\n", " # keep_packages, skip_packages = pkg_info.search_packages()\n", " # pkg_info.download_specific_packages()\n", - " \n", + "\n", " # download_all has the benefit of running in parallel... 
so it's faster provided you have a good connection\n", " # We'll clean it up later\n", " pkg_info.download_all()\n", @@ -607,7 +615,7 @@ "outputs": [], "source": [ "for pkg_info in pkg_infos:\n", - " if pkg_info.name == 'openstudio_ruby':\n", + " if pkg_info.name == \"openstudio_ruby\":\n", " continue\n", "\n", " print(pkg_info.name)\n", @@ -631,7 +639,7 @@ "outputs": [], "source": [ "for pkg_info in pkg_infos:\n", - " if pkg_info.name == 'openstudio_ruby':\n", + " if pkg_info.name == \"openstudio_ruby\":\n", " continue\n", " print(pkg_info.name)\n", " pkg_info.upload_to_nrel()\n", @@ -692,7 +700,7 @@ "outputs": [], "source": [ "# Not sure whether I should upload those or not...?\n", - "extra_win_build_requires = ['autoconf', 'automake', 'msys2', 'nasm', 'strawberryperl', 'winflexbison']" + "extra_win_build_requires = [\"autoconf\", \"automake\", \"msys2\", \"nasm\", \"strawberryperl\", \"winflexbison\"]" ] }, { @@ -739,8 +747,9 @@ "source": [ "for pkg_info in pkg_infos_regular:\n", " keep_packages, _ = pkg_info.search_packages(\n", - " verbose=False, local_cache=True,\n", - " compiler_version_only='17',\n", + " verbose=False,\n", + " local_cache=True,\n", + " compiler_version_only=\"17\",\n", " # arch_only='x86'\n", " )\n", " if not keep_packages:\n", @@ -763,7 +772,7 @@ " # continue\n", " pkg_info.upload_specific_packages_to_nrel(\n", " # arch_only='x86',\n", - " compiler_version_only='17'\n", + " compiler_version_only=\"17\"\n", " )\n", " done.append(pkg_info)" ] diff --git a/developer/conan/dependabot_conan.py b/developer/conan/dependabot_conan.py index 301080df51f..9bec5fb7365 100644 --- a/developer/conan/dependabot_conan.py +++ b/developer/conan/dependabot_conan.py @@ -15,17 +15,10 @@ ) -def locate_conan_files( - base_dir: Path, include_cmake_files: Optional[bool] = False -) -> List[Path]: - - files = list(base_dir.glob("**/conanfile.py")) + list( - base_dir.glob("**/conanfile.txt") - ) +def locate_conan_files(base_dir: Path, include_cmake_files: Optional[bool] = False) -> List[Path]: + files = list(base_dir.glob("**/conanfile.py")) + list(base_dir.glob("**/conanfile.txt")) if include_cmake_files: - cmake_files = list(base_dir.glob("**/*.cmake")) + list( - base_dir.glob("**/CMakeLists.txt") - ) + cmake_files = list(base_dir.glob("**/*.cmake")) + list(base_dir.glob("**/CMakeLists.txt")) for cmake_file in cmake_files: print(f"reading {cmake_file}") with open(cmake_file, "r") as f: @@ -45,9 +38,7 @@ class RemoteInfo: # r'\[Verify SSL: (?P<verify_ssl>(?:True|False))\]' # ) - RE_REMOTE = re.compile( - r"(?P<name>\w+): (?P<url>.+) \[Verify SSL: (?P<verify_ssl>(?:True|False))\]" - ) + RE_REMOTE = re.compile(r"(?P<name>\w+): (?P<url>.+) \[Verify SSL: (?P<verify_ssl>(?:True|False))\]") @classmethod def from_conan_str(cls, line: str): @@ -88,7 +79,6 @@ def __init__( channel: Optional[str], rev: Optional[str], ): - self.package = package self.version = version self.user = user @@ -138,29 +128,20 @@ def _lookup_all_v(self, remote: RemoteInfo) -> List[str]: # print(r.decode().splitlines()) # Filter out the cci.DATE stuff - known_versions = [ - x for x in r.decode().splitlines() if "cci." not in x - ] + known_versions = [x for x in r.decode().splitlines() if "cci."
not in x] known_versions = [ m.version for x in known_versions - if (m := PackageInfo.from_str(x)) is not None - and ((self.user is None) == (m.user is None)) + if (m := PackageInfo.from_str(x)) is not None and ((self.user is None) == (m.user is None)) ] # Filter prereleases - known_versions = list( - filter( - lambda v: not version.parse(v).is_prerelease, known_versions - ) - ) + known_versions = list(filter(lambda v: not version.parse(v).is_prerelease, known_versions)) # Force a version if self.force_version: - known_versions = [ - x for x in known_versions if self.force_version in x - ] + known_versions = [x for x in known_versions if self.force_version in x] known_versions.sort(key=lambda v: version.parse(v)) return known_versions @@ -182,30 +163,21 @@ def _lookup_last_v(self, remotes: List[RemoteInfo]) -> str: # print(f"{query} found in {remote}") found = True last_v_str = known_versions[-1] - if version.parse(last_v_str) >= version.parse( - self.last_known_version - ): + if version.parse(last_v_str) >= version.parse(self.last_known_version): # print(f"FOUND {last_v_str} in {remote}") self.last_known_version = last_v_str self.last_known_v_remote = remote if not found: - raise ValueError( - f"Could not find {query} in any of the remotes: {remotes}" - ) + raise ValueError(f"Could not find {query} in any of the remotes: {remotes}") return self.last_known_version - def _lookup_all_revs_for_version( - self, version: Optional[str] = None - ) -> str: + def _lookup_all_revs_for_version(self, version: Optional[str] = None) -> str: if version is None: version = self.version query = self._lookup_query(version=version) - cmd = ( - f"conan search -r {self.last_known_v_remote.name} " - f"{query} -rev --raw" - ) + cmd = f"conan search -r {self.last_known_v_remote.name} " f"{query} -rev --raw" print(cmd) r = subprocess.check_output(shlex.split(cmd)) @@ -229,12 +201,8 @@ def check_updates(self, remotes: List[RemoteInfo]) -> bool: cur_v = version.parse(self.version) if self.rev: - self.last_revs_info = self._lookup_all_revs_for_version( - version=self.last_known_version - ) - self.last_known_rev = max( - self.last_revs_info, key=lambda k: self.last_revs_info[k] - ) + self.last_revs_info = self._lookup_all_revs_for_version(version=self.last_known_version) + self.last_known_rev = max(self.last_revs_info, key=lambda k: self.last_revs_info[k]) if cur_v > last_v: # Not expected, though that might be possible @@ -244,16 +212,12 @@ def check_updates(self, remotes: List[RemoteInfo]) -> bool: elif cur_v == last_v: if self.rev is None: print( - f"\n:white_check_mark: [green]Package {self} is using " - "the latest version and has no revision[/]" + f"\n:white_check_mark: [green]Package {self} is using " "the latest version and has no revision[/]" ) # No-op! return False elif self.rev == self.last_known_rev: - print( - f"\n:white_check_mark: [green]Package {self} is using " - "the latest version and revision[/]" - ) + print(f"\n:white_check_mark: [green]Package {self} is using " "the latest version and revision[/]") # No-op! 
return False else: @@ -305,9 +269,7 @@ def update_all_remotes_known(cls, force_update: Optional[bool] = False): cls.all_remotes_known = [] r = subprocess.check_output(shlex.split("conan remote list")) for line in r.decode().splitlines(): - cls.all_remotes_known.append( - RemoteInfo.from_conan_str(line=line) - ) + cls.all_remotes_known.append(RemoteInfo.from_conan_str(line=line)) print(f"Found {len(cls.all_remotes_known)} remotes:") for remote in cls.all_remotes_known: if cls.__conan_center_url in remote.url: @@ -315,10 +277,7 @@ def update_all_remotes_known(cls, force_update: Optional[bool] = False): print(remote) if cls.conan_center is None: - raise ValueError( - "Could not find any remote for conancenter: " - f"{cls.__conan_center_url}" - ) + raise ValueError("Could not find any remote for conancenter: " f"{cls.__conan_center_url}") def __init__(self, filepath: Path): if not filepath.exists(): @@ -372,19 +331,13 @@ def __lookup_package_updates(self): self.need_updates += 1 def update_conanfile(self) -> bool: - print( - "\n:crossed_fingers: [bold yellow]Checking " - f"{self.filepath} for updates[/]" - ) + print("\n:crossed_fingers: [bold yellow]Checking " f"{self.filepath} for updates[/]") self.__lookup_package_updates() if self.need_updates == 0: print("\n:+1: [bold green]Everything up to date[/]") return False - print( - "\n\n:fire: :fire_engine: " - f"[bold cyan]{self.need_updates} packages need updates[/]\n" - ) + print("\n\n:fire: :fire_engine: " f"[bold cyan]{self.need_updates} packages need updates[/]\n") with open(self.filepath, "r") as f: content = f.read() @@ -402,20 +355,15 @@ def update_conanfile(self) -> bool: if __name__ == "__main__": - base_dir = Path(__file__).resolve().parent.parent.parent conanfile = base_dir / "ConanInstall.cmake" conanfileupdater = ConanFileUpdater(filepath=conanfile) - conanfileupdater.flag_package_to_check_in_all_remotes( - package_name="openstudio_ruby" - ) + conanfileupdater.flag_package_to_check_in_all_remotes(package_name="openstudio_ruby") - conanfileupdater.force_package_version( - package_name="openssl", version_contains="1.1.1" - ) + conanfileupdater.force_package_version(package_name="openssl", version_contains="1.1.1") if conanfileupdater.update_conanfile(): exit(1) diff --git a/developer/msvc/Visualizers/concat_natvis.py b/developer/msvc/Visualizers/concat_natvis.py index 476e6756a15..c621768a418 100644 --- a/developer/msvc/Visualizers/concat_natvis.py +++ b/developer/msvc/Visualizers/concat_natvis.py @@ -4,24 +4,24 @@ vscode-cpptools expects only one visualizerFile currently """ -import xml.etree.ElementTree as ET import glob as gb +import xml.etree.ElementTree as ET -OUT_FILENAME = 'all_concat.natvis' +OUT_FILENAME = "all_concat.natvis" def concat_all_natvis_files(): """ Main function """ - natvis_files = gb.glob('*.natvis') + natvis_files = gb.glob("*.natvis") if OUT_FILENAME in natvis_files: natvis_files.remove(OUT_FILENAME) # In order to avoid the ns0 prefix the default namespace should be set - schema = 'http://schemas.microsoft.com/vstudio/debugger/natvis/2010' + schema = "http://schemas.microsoft.com/vstudio/debugger/natvis/2010" # before reading the XML data. 
- ET.register_namespace('', schema) + ET.register_namespace("", schema) # Open the file first found print("Opening first: {}".format(natvis_files[0])) @@ -44,16 +44,17 @@ def concat_all_natvis_files(): final_n_child = len(root) if final_n_child == total_n_child: - print("\nOK: Started with {}, Ended with {} children as " - "expected".format(ori_n_child, final_n_child)) + print("\nOK: Started with {}, Ended with {} children as " "expected".format(ori_n_child, final_n_child)) else: - print("\nProblem: Started with {}, Ended with {} children, " - "expected {}".format(ori_n_child, final_n_child, total_n_child)) + print( + "\nProblem: Started with {}, Ended with {} children, " + "expected {}".format(ori_n_child, final_n_child, total_n_child) + ) print("\nSaving to {}".format(OUT_FILENAME)) - tree.write(OUT_FILENAME, encoding='utf-8', xml_declaration=True) + tree.write(OUT_FILENAME, encoding="utf-8", xml_declaration=True) # If run from a terminal -if __name__ == '__main__': +if __name__ == "__main__": concat_all_natvis_files() diff --git a/developer/python/CSharp_Partial_Classes_Helper.ipynb b/developer/python/CSharp_Partial_Classes_Helper.ipynb index 46bde8c5501..932f32e14a1 100644 --- a/developer/python/CSharp_Partial_Classes_Helper.ipynb +++ b/developer/python/CSharp_Partial_Classes_Helper.ipynb @@ -28,8 +28,8 @@ } ], "source": [ - "ROOT_DIR = Path('../../').resolve()\n", - "MODEL_DIR = ROOT_DIR / 'src/model'\n", + "ROOT_DIR = Path(\"../../\").resolve()\n", + "MODEL_DIR = ROOT_DIR / \"src/model\"\n", "MODEL_DIR" ] }, @@ -53,12 +53,12 @@ " self.name = name\n", " self.is_unique = is_unique\n", " self.swig_file = swig_file\n", - " \n", + "\n", " def __repr__(self):\n", " s = \"\"\n", " if self.is_unique:\n", " s = \", Unique\"\n", - " return f\"{self.name}({self.swig_file}{s})\"\n" + " return f\"{self.name}({self.swig_file}{s})\"" ] }, { @@ -88,21 +88,21 @@ ], "source": [ "swig_entries = []\n", - "for swig_file in MODEL_DIR.glob('*.i'):\n", + "for swig_file in MODEL_DIR.glob(\"*.i\"):\n", " swig_file_name = swig_file.name\n", - " if swig_file_name in ['Model_Common_Include.i', 'Model.i']:\n", + " if swig_file_name in [\"Model_Common_Include.i\", \"Model.i\"]:\n", " continue\n", " print(swig_file)\n", - " with open(swig_file, 'r') as f:\n", + " with open(swig_file, \"r\") as f:\n", " content = f.read()\n", " lines = content.splitlines()\n", " for line in lines:\n", - " line = line.split('//')[0].strip()\n", - " if 'UNIQUEMODELOBJECT_TEMPLATES' in line:\n", - " name = line.split('(')[1].split(')')[0].strip()\n", + " line = line.split(\"//\")[0].strip()\n", + " if \"UNIQUEMODELOBJECT_TEMPLATES\" in line:\n", + " name = line.split(\"(\")[1].split(\")\")[0].strip()\n", " swig_entries.append(SwigEntry(name=name, is_unique=True, swig_file=swig_file_name))\n", - " elif 'MODELOBJECT_TEMPLATES' in line:\n", - " name = line.split('(')[1].split(')')[0].strip()\n", + " elif \"MODELOBJECT_TEMPLATES\" in line:\n", + " name = line.split(\"(\")[1].split(\")\")[0].strip()\n", " swig_entries.append(SwigEntry(name=name, is_unique=False, swig_file=swig_file_name))" ] }, @@ -705,9 +705,9 @@ "metadata": {}, "outputs": [], "source": [ - "swig_file = 'ModelGeometry.i'\n", - "swig_file = 'ModelAirflow.i'\n", - "swig_file = 'ModelSimulation.i'" + "swig_file = \"ModelGeometry.i\"\n", + "swig_file = \"ModelAirflow.i\"\n", + "swig_file = \"ModelSimulation.i\"" ] }, { @@ -761,8 +761,8 @@ ], "source": [ "found_names = [x.name for x in swig_entries if x.is_unique and x.swig_file == swig_file]\n", - "if swig_file == 
'ModelSimulation.i':\n", - " found_names.insert(0, 'RunPeriod')\n", + "if swig_file == \"ModelSimulation.i\":\n", + " found_names.insert(0, \"RunPeriod\")\n", "found_names" ] }, @@ -784,7 +784,7 @@ } ], "source": [ - "csharp_namespace = \"OpenStudio\" + swig_file.replace('.i', '')\n", + "csharp_namespace = \"OpenStudio\" + swig_file.replace(\".i\", \"\")\n", "csharp_namespace" ] }, @@ -945,25 +945,31 @@ } ], "source": [ - "print(\"\"\"#if defined SWIGCSHARP || defined(SWIGJAVA)\n", + "print(\n", + " \"\"\"#if defined SWIGCSHARP || defined(SWIGJAVA)\n", "\n", " %inline {\n", " namespace openstudio {\n", - " namespace model {\"\"\")\n", + " namespace model {\"\"\"\n", + ")\n", "\n", "for name in found_names:\n", " getter_name = name[0].lower() + name[1:]\n", - " print(f'''\n", + " print(\n", + " f\"\"\"\n", " boost::optional<{name}> {getter_name}(const openstudio::model::Model& model) {{\n", " return model.{getter_name}();\n", - " }}''')\n", + " }}\"\"\"\n", + " )\n", "\n", - "print(\"\"\"\n", + "print(\n", + " \"\"\"\n", " }\n", " }\n", " } // %inline\n", " \n", - "#endif // defined SWIGCSHARP\"\"\")" + "#endif // defined SWIGCSHARP\"\"\"\n", + ")" ] }, { @@ -1121,7 +1127,8 @@ } ], "source": [ - "print(\"\"\"\n", + "print(\n", + " \"\"\"\n", "#if defined(SWIGCSHARP)\n", " //%pragma(csharp) imclassimports=%{\n", " %pragma(csharp) moduleimports=%{\n", @@ -1129,19 +1136,24 @@ " using System;\n", " using System.Runtime.InteropServices;\n", "\n", - " public partial class Model : Workspace {\"\"\")\n", + " public partial class Model : Workspace {\"\"\"\n", + ")\n", "\n", "for name in found_names:\n", " getter_name = name[0].lower() + name[1:]\n", - " print(f\"\"\"\n", + " print(\n", + " f\"\"\"\n", " public Optional{name} {getter_name}() {{\n", " return OpenStudio.{csharp_namespace}.{getter_name}(this);\n", - " }}\"\"\")\n", + " }}\"\"\"\n", + " )\n", "\n", - "print(\"\"\"\n", + "print(\n", + " \"\"\"\n", " } // partial class Model\n", " %} // pragma\n", - "#endif // defined(SWIGCSHARP)\"\"\")" + "#endif // defined(SWIGCSHARP)\"\"\"\n", + ")" ] }, { diff --git a/developer/python/Check_Correct_IDD_curve_refs_between_ep_and_os.ipynb b/developer/python/Check_Correct_IDD_curve_refs_between_ep_and_os.ipynb index b0c98fd1ed4..0bf2bf2531b 100644 --- a/developer/python/Check_Correct_IDD_curve_refs_between_ep_and_os.ipynb +++ b/developer/python/Check_Correct_IDD_curve_refs_between_ep_and_os.ipynb @@ -8,6 +8,7 @@ "outputs": [], "source": [ "import openstudio\n", + "\n", "# pip install fuzzywuzzy[speedup]\n", "from fuzzywuzzy import process" ] @@ -30,7 +31,7 @@ "metadata": {}, "outputs": [], "source": [ - "ep_idd_path = '../resources/energyplus/ProposedEnergy+.idd'\n", + "ep_idd_path = \"../resources/energyplus/ProposedEnergy+.idd\"\n", "ep_idd = openstudio.IddFile.load(openstudio.toPath(ep_idd_path)).get()" ] }, @@ -41,7 +42,7 @@ "metadata": {}, "outputs": [], "source": [ - "os_idd_path = '../resources/model/OpenStudio.idd'\n", + "os_idd_path = \"../resources/model/OpenStudio.idd\"\n", "os_idd = openstudio.IddFile.load(openstudio.toPath(os_idd_path)).get()" ] }, @@ -69,8 +70,8 @@ } ], "source": [ - "curve_objects = [x for x in os_idd.objects() if ('Curve:' in x.name()) or ('Table:') in x.name()]\n", - "set([x for curve in curve_objects for x in curve.references() if not 'UniqueNames' in x])" + "curve_objects = [x for x in os_idd.objects() if (\"Curve:\" in x.name()) or (\"Table:\") in x.name()]\n", + "set([x for curve in curve_objects for x in curve.references() if not \"UniqueNames\" in x])" ] }, { @@ 
-80,13 +81,15 @@ "metadata": {}, "outputs": [], "source": [ - "CURVE_REFS = {'AllCurves',\n", - " 'BivariateFunctions',\n", - " 'MultivariateFunctions',\n", - " 'QuadvariateFunctions',\n", - " 'QuintvariateFunctions',\n", - " 'TrivariateFunctions',\n", - " 'UnivariateFunctions'}" + "CURVE_REFS = {\n", + " \"AllCurves\",\n", + " \"BivariateFunctions\",\n", + " \"MultivariateFunctions\",\n", + " \"QuadvariateFunctions\",\n", + " \"QuintvariateFunctions\",\n", + " \"TrivariateFunctions\",\n", + " \"UnivariateFunctions\",\n", + "}" ] }, { @@ -124,32 +127,31 @@ " obj_name = obj.name()\n", " if any(ref in CURVE_REFS for ref in obj.objectLists()):\n", " # print(obj_name)\n", - " \n", - " ep_obj_name = obj_name.replace('OS:', '')\n", + "\n", + " ep_obj_name = obj_name.replace(\"OS:\", \"\")\n", " o_ep_obj = ep_idd.getObject(ep_obj_name)\n", " if not o_ep_obj.is_initialized():\n", " print(f\"failed to locate in EP IDD for {obj_name}\")\n", " continue\n", - " \n", - " ep_obj = o_ep_obj.get() \n", + "\n", + " ep_obj = o_ep_obj.get()\n", " ep_obj_field_names = [ep_obj.getField(i).get().name() for i in range(ep_obj.numFields())]\n", - " \n", + "\n", " for idx in obj.objectListFields():\n", - " \n", " field = obj.getField(idx).get()\n", " field_name = props = field.name()\n", "\n", " props = field.properties()\n", - " refs = set([x for x in props.objectLists if not 'AllCurve' in x])\n", + " refs = set([x for x in props.objectLists if not \"AllCurve\" in x])\n", " if not any(ref in CURVE_REFS for ref in refs):\n", " continue\n", "\n", " # print(f\" * {field_name}: {refs}\")\n", - " \n", + "\n", " ep_field_name, score = process.extractOne(field_name, ep_obj_field_names)\n", " if score < 80:\n", " print(f\"Bad score for {obj_name} - {ep_field_name}: {score}\")\n", - " \n", + "\n", " else:\n", " # print(f\"{score} - {field_name} - {ep_field_name}\")\n", " pass\n", @@ -157,10 +159,10 @@ " ep_field = ep_obj.getField(ep_field_idx).get()\n", " ep_refs = set([x for x in ep_field.properties().objectLists])\n", " extra_os = refs - ep_refs\n", - " \n", + "\n", " if extra_os:\n", " print(f\"WRONG extra OS {obj_name} - {field_name}: {refs=}, {ep_refs=}, {ep_field_name=}\")\n", - " \n", + "\n", " extra_ep = ep_refs - refs\n", " if extra_ep:\n", " print(f\"WRONG extra EP {obj_name} - {field_name}: {refs=}, {ep_refs=}\")" diff --git a/developer/python/Check_FuelType_Methods.ipynb b/developer/python/Check_FuelType_Methods.ipynb index 7e503b964f7..49e1c9b31ee 100644 --- a/developer/python/Check_FuelType_Methods.ipynb +++ b/developer/python/Check_FuelType_Methods.ipynb @@ -20,8 +20,8 @@ "metadata": {}, "outputs": [], "source": [ - "ROOT_DIR = Path('.').absolute().parent.parent\n", - "MODEL_DIR = ROOT_DIR / 'src' / 'model'" + "ROOT_DIR = Path(\".\").absolute().parent.parent\n", + "MODEL_DIR = ROOT_DIR / \"src\" / \"model\"" ] }, { @@ -31,7 +31,7 @@ "metadata": {}, "outputs": [], "source": [ - "cpp_files = sorted(list(MODEL_DIR.glob('*.cpp')))" + "cpp_files = sorted(list(MODEL_DIR.glob(\"*.cpp\")))" ] }, { @@ -46,17 +46,20 @@ " self.is_simple = is_simple\n", " self.simple_return = simple_return\n", " self.func_body = func_body\n", + "\n", " def __repr__(self):\n", " if self.is_simple:\n", " return self.simple_return\n", " else:\n", " return \"Complex\"\n", + "\n", " def __str__(self):\n", " if self.is_simple:\n", " return self.simple_return\n", " else:\n", " return f\"Complex: {func_body}\"\n", "\n", + "\n", "class ClassInfo:\n", " def __init__(self, class_name: str):\n", " self.class_name = class_name\n", @@ -64,11 +67,11 
@@ " self.cooling_fuel_types = None\n", " self.heating_fuel_types = None\n", " self.appg_heating_fuel_types = None\n", - " \n", + "\n", " @staticmethod\n", " def header():\n", " return [\"Class_Name\", \"ComponentType\", \"coolingFuelTypes\", \"heatingFuelTypes\", \"appGHeatingFuelTypes\"]\n", - " \n", + "\n", " def __repr__(self):\n", " return f\"{self.class_name},{self.component_type.__repr__()},{self.cooling_fuel_types.__repr__()},{self.heating_fuel_types.__repr__()},{self.appg_heating_fuel_types.__repr__()}\"" ] @@ -80,127 +83,118 @@ "metadata": {}, "outputs": [], "source": [ - "RE_SIMPLE_COMPTYPE = re.compile(r'return ComponentType::(Cooling|Heating|None);')\n", - "RE_SIMPLE_FUELTYPE = re.compile(r'return (.*);')\n", + "RE_SIMPLE_COMPTYPE = re.compile(r\"return ComponentType::(Cooling|Heating|None);\")\n", + "RE_SIMPLE_FUELTYPE = re.compile(r\"return (.*);\")\n", "\n", - "RE_INSERT_FUELTYPE = re.compile(r'result.insert\\(FuelType::(.*?)\\);')\n", - "RE_INSERT_CONVERTFUELTYPE = re.compile(r'result.insert\\(FuelType\\(.*?\\)\\);')\n", + "RE_INSERT_FUELTYPE = re.compile(r\"result.insert\\(FuelType::(.*?)\\);\")\n", + "RE_INSERT_CONVERTFUELTYPE = re.compile(r\"result.insert\\(FuelType\\(.*?\\)\\);\")\n", "\n", - "RE_INSERT_FUELTYPE_APPG = re.compile(r'result.insert\\(AppGFuelType::(.*?)\\);')\n", - "RE_INSERT_CONVERTFUELTYPE_APPG = re.compile(r'result.insert\\(convertFuelTypeToAppG\\(.*?\\)\\);')\n", + "RE_INSERT_FUELTYPE_APPG = re.compile(r\"result.insert\\(AppGFuelType::(.*?)\\);\")\n", + "RE_INSERT_CONVERTFUELTYPE_APPG = re.compile(r\"result.insert\\(convertFuelTypeToAppG\\(.*?\\)\\);\")\n", "\n", "\n", - "\n", - "RE_GET_AIRLOOP_HEATING = re.compile(r'\\s*if \\(auto a_ = airLoopHVAC\\(\\)\\) {\\s*return a_->heatingFuelTypes\\(\\);\\s*}\\s*return {};\\s*',\n", - " re.DOTALL)\n", - "RE_GET_AIRLOOP_COOLING = re.compile(r'\\s*if \\(auto a_ = airLoopHVAC\\(\\)\\) {\\s*return a_->coolingFuelTypes\\(\\);\\s*}\\s*return {};\\s*',\n", - " re.DOTALL)\n", - "RE_GET_AIRLOOP_APPG = re.compile(r'\\s*if \\(auto a_ = airLoopHVAC\\(\\)\\) {\\s*return a_->appGHeatingFuelTypes\\(\\);\\s*}\\s*return {};\\s*',\n", - " re.DOTALL)\n", + "RE_GET_AIRLOOP_HEATING = re.compile(\n", + " r\"\\s*if \\(auto a_ = airLoopHVAC\\(\\)\\) {\\s*return a_->heatingFuelTypes\\(\\);\\s*}\\s*return {};\\s*\", re.DOTALL\n", + ")\n", + "RE_GET_AIRLOOP_COOLING = re.compile(\n", + " r\"\\s*if \\(auto a_ = airLoopHVAC\\(\\)\\) {\\s*return a_->coolingFuelTypes\\(\\);\\s*}\\s*return {};\\s*\", re.DOTALL\n", + ")\n", + "RE_GET_AIRLOOP_APPG = re.compile(\n", + " r\"\\s*if \\(auto a_ = airLoopHVAC\\(\\)\\) {\\s*return a_->appGHeatingFuelTypes\\(\\);\\s*}\\s*return {};\\s*\", re.DOTALL\n", + ")\n", "\n", "\n", - "RE_GET_PLANTLOOP_HEATING = re.compile(r'\\s*if \\(auto p_ = plantLoop\\(\\)\\) {\\s*return p_->heatingFuelTypes\\(\\);\\s*}\\s*return {};\\s*',\n", - " re.DOTALL)\n", - "RE_GET_PLANTLOOP_COOLING = re.compile(r'\\s*if \\(auto p_ = plantLoop\\(\\)\\) {\\s*return p_->coolingFuelTypes\\(\\);\\s*}\\s*return {};\\s*',\n", - " re.DOTALL)\n", - "RE_GET_PLANTLOOP_APPG = re.compile(r'\\s*if \\(auto p_ = plantLoop\\(\\)\\) {\\s*return p_->appGHeatingFuelTypes\\(\\);\\s*}\\s*return {};\\s*',\n", - " re.DOTALL)\n", + "RE_GET_PLANTLOOP_HEATING = re.compile(\n", + " r\"\\s*if \\(auto p_ = plantLoop\\(\\)\\) {\\s*return p_->heatingFuelTypes\\(\\);\\s*}\\s*return {};\\s*\", re.DOTALL\n", + ")\n", + "RE_GET_PLANTLOOP_COOLING = re.compile(\n", + " r\"\\s*if \\(auto p_ = plantLoop\\(\\)\\) {\\s*return p_->coolingFuelTypes\\(\\);\\s*}\\s*return 
{};\\s*\", re.DOTALL\n", + ")\n", + "RE_GET_PLANTLOOP_APPG = re.compile(\n", + " r\"\\s*if \\(auto p_ = plantLoop\\(\\)\\) {\\s*return p_->appGHeatingFuelTypes\\(\\);\\s*}\\s*return {};\\s*\", re.DOTALL\n", + ")\n", "\n", "RE_INSERT_AIRLOOP_HEATING = re.compile(\n", - " r'\\s*if \\(auto a_ = airLoopHVAC\\(\\)\\) {\\s*for \\(auto ft : a_->heatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*if \\(auto a_ = airLoopHVAC\\(\\)\\) {\\s*for \\(auto ft : a_->heatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\\s*}\",\n", + " re.DOTALL,\n", ")\n", "\n", "RE_INSERT_AIRLOOP_COOLING = re.compile(\n", - " r'\\s*if \\(auto a_ = airLoopHVAC\\(\\)\\) {\\s*for \\(auto ft : a_->coolingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*if \\(auto a_ = airLoopHVAC\\(\\)\\) {\\s*for \\(auto ft : a_->coolingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\\s*}\",\n", + " re.DOTALL,\n", ")\n", "\n", "RE_INSERT_AIRLOOP_APPG = re.compile(\n", - " r'\\s*if \\(auto a_ = airLoopHVAC\\(\\)\\) {\\s*for \\(auto ft : a_->appGHeatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*if \\(auto a_ = airLoopHVAC\\(\\)\\) {\\s*for \\(auto ft : a_->appGHeatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\\s*}\",\n", + " re.DOTALL,\n", ")\n", "\n", "\n", "RE_INSERT_PLANTLOOP_HEATING = re.compile(\n", - " r'\\s*if \\(auto p_ = plantLoop\\(\\)\\) {\\s*for \\(auto ft : p_->heatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*if \\(auto p_ = plantLoop\\(\\)\\) {\\s*for \\(auto ft : p_->heatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\\s*}\",\n", + " re.DOTALL,\n", ")\n", "\n", "RE_INSERT_PLANTLOOP_COOLING = re.compile(\n", - " r'\\s*if \\(auto p_ = plantLoop\\(\\)\\) {\\s*for \\(auto ft : p_->coolingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*if \\(auto p_ = plantLoop\\(\\)\\) {\\s*for \\(auto ft : p_->coolingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\\s*}\",\n", + " re.DOTALL,\n", ")\n", "\n", "RE_INSERT_PLANTLOOP_APPG = re.compile(\n", - " r'\\s*if \\(auto p_ = plantLoop\\(\\)\\) {\\s*for \\(auto ft : p_->appGHeatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*if \\(auto p_ = plantLoop\\(\\)\\) {\\s*for \\(auto ft : p_->appGHeatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\\s*}\",\n", + " re.DOTALL,\n", ")\n", "\n", "RE_INSERT_REHEAT_COIL = re.compile(\n", - " r'\\s*for \\(auto ft : reheatCoil\\(\\).heatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*for \\(auto ft : reheatCoil\\(\\).heatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\", re.DOTALL\n", ")\n", "\n", "RE_INSERT_REHEAT_COIL_APPG = re.compile(\n", - " r'\\s*for \\(auto ft : reheatCoil\\(\\).appGHeatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*for \\(auto ft : reheatCoil\\(\\).appGHeatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\", re.DOTALL\n", ")\n", "\n", "\n", - "\n", "RE_INSERT_HEAT_COIL = re.compile(\n", - " r'\\s*for \\(auto ft : heatingCoil\\(\\).heatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*for \\(auto ft : heatingCoil\\(\\).heatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\", re.DOTALL\n", ")\n", "\n", "RE_INSERT_OPT_HEAT_COIL = re.compile(\n", - " r'\\s*for \\(auto ft : 
hc_->heatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*for \\(auto ft : hc_->heatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\", re.DOTALL\n", ")\n", "\n", "RE_INSERT_HEAT_COIL_APPG = re.compile(\n", - " r'\\s*for \\(auto ft : heatingCoil\\(\\).appGHeatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*for \\(auto ft : heatingCoil\\(\\).appGHeatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\", re.DOTALL\n", ")\n", "\n", "RE_INSERT_OPT_HEAT_COIL_APPG = re.compile(\n", - " r'\\s*for \\(auto ft : hc_->appGHeatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*for \\(auto ft : hc_->appGHeatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\", re.DOTALL\n", ")\n", "\n", "\n", "RE_INSERT_SUPHC_COIL = re.compile(\n", - " r'\\s*for \\(auto ft : supplementalHeatingCoil\\(\\).heatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*for \\(auto ft : supplementalHeatingCoil\\(\\).heatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\", re.DOTALL\n", ")\n", "\n", "RE_INSERT_OPT_SUPHC_COIL = re.compile(\n", - " r'\\s*for \\(auto ft : supHC_->heatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*for \\(auto ft : supHC_->heatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\", re.DOTALL\n", ")\n", "\n", "RE_INSERT_SUPHC_COIL_APPG = re.compile(\n", - " r'\\s*for \\(auto ft : supplementalHeatingCoil\\(\\).appGHeatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*for \\(auto ft : supplementalHeatingCoil\\(\\).appGHeatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\", re.DOTALL\n", ")\n", "\n", "RE_INSERT_OPT_SUPHC_COIL_APPG = re.compile(\n", - " r'\\s*for \\(auto ft : supHC_->appGHeatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*for \\(auto ft : supHC_->appGHeatingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\", re.DOTALL\n", ")\n", "\n", "\n", - "\n", "RE_INSERT_COOL_COIL = re.compile(\n", - " r'\\s*for \\(auto ft : coolingCoil\\(\\).coolingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*for \\(auto ft : coolingCoil\\(\\).coolingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\", re.DOTALL\n", ")\n", "\n", "\n", "RE_INSERT_OPT_COOL_COIL = re.compile(\n", - " r'\\s*for \\(auto ft : cc_->coolingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}',\n", - " re.DOTALL\n", + " r\"\\s*for \\(auto ft : cc_->coolingFuelTypes\\(\\)\\) {\\s*result.insert\\(ft\\);\\s*}\", re.DOTALL\n", ")" ] }, @@ -212,13 +206,13 @@ "outputs": [], "source": [ "def get_func_body(lines, i):\n", - " c = lines[i].count('{') - lines[i].count('}')\n", + " c = lines[i].count(\"{\") - lines[i].count(\"}\")\n", " k = i\n", " while c != 0:\n", " k += 1\n", - " c += lines[k].count('{') - lines[k].count('}')\n", - " func_body = \"\\n\".join(lines[i:k+1])\n", - " func_body = func_body[func_body.find('{')+1:func_body.rfind('}')].strip()\n", + " c += lines[k].count(\"{\") - lines[k].count(\"}\")\n", + " func_body = \"\\n\".join(lines[i : k + 1])\n", + " func_body = func_body[func_body.find(\"{\") + 1 : func_body.rfind(\"}\")].strip()\n", " return k, func_body" ] }, @@ -232,41 +226,40 @@ "def parse_coolingFuelTypeFuncBod(func_body: str):\n", " if m := RE_SIMPLE_FUELTYPE.match(func_body):\n", " fuel_type = m.groups()[0].strip()\n", - " if fuel_type == '{}':\n", - " fuel_type = ''\n", - " 
elif fuel_type.startswith('{') and fuel_type.endswith('}'):\n", - " fuel_type = fuel_type.replace('FuelType::', '')\n", - " if ',' in fuel_type:\n", + " if fuel_type == \"{}\":\n", + " fuel_type = \"\"\n", + " elif fuel_type.startswith(\"{\") and fuel_type.endswith(\"}\"):\n", + " fuel_type = fuel_type.replace(\"FuelType::\", \"\")\n", + " if \",\" in fuel_type:\n", " fuel_type = f'\"{fuel_type}\"'\n", " else:\n", " fuel_type = fuel_type[1:-1]\n", " return ComponentTypeInfo(is_simple=True, simple_return=fuel_type, func_body=func_body)\n", - " \n", + "\n", " if RE_GET_AIRLOOP_COOLING.match(func_body):\n", - " return ComponentTypeInfo(is_simple=True, simple_return='airLoopHVAC_->coolingFuelTypes()', func_body=func_body)\n", - " \n", + " return ComponentTypeInfo(is_simple=True, simple_return=\"airLoopHVAC_->coolingFuelTypes()\", func_body=func_body)\n", + "\n", " if RE_GET_PLANTLOOP_COOLING.match(func_body):\n", - " return ComponentTypeInfo(is_simple=True, simple_return='plantLoop_->coolingFuelTypes()', func_body=func_body)\n", - " \n", + " return ComponentTypeInfo(is_simple=True, simple_return=\"plantLoop_->coolingFuelTypes()\", func_body=func_body)\n", + "\n", " s = []\n", " if m := RE_INSERT_FUELTYPE.search(func_body):\n", " s.append(m.groups()[0])\n", " if RE_INSERT_CONVERTFUELTYPE.search(func_body):\n", - " s.append('fuelType()')\n", + " s.append(\"fuelType()\")\n", " if RE_INSERT_AIRLOOP_COOLING.search(func_body):\n", - " s.append('airLoopHVAC_->coolingFuelTypes()')\n", + " s.append(\"airLoopHVAC_->coolingFuelTypes()\")\n", " if RE_INSERT_PLANTLOOP_COOLING.search(func_body):\n", - " s.append('plantLoop_->coolingFuelTypes()')\n", + " s.append(\"plantLoop_->coolingFuelTypes()\")\n", " if RE_INSERT_COOL_COIL.search(func_body):\n", - " s.append('cc.coolingFuelTypes()')\n", + " s.append(\"cc.coolingFuelTypes()\")\n", " if RE_INSERT_OPT_COOL_COIL.search(func_body):\n", - " s.append('cc_->coolingFuelTypes()')\n", - " \n", + " s.append(\"cc_->coolingFuelTypes()\")\n", "\n", " if s:\n", " # print(s)\n", " return ComponentTypeInfo(is_simple=True, simple_return=\" + \".join(s), func_body=func_body)\n", - " \n", + "\n", " return ComponentTypeInfo(is_simple=False, func_body=func_body)" ] }, @@ -300,7 +293,7 @@ " return {};\n", " }\n", "\"\"\"\n", - "func_body = 'return {FuelType::Electricity, FuelType::OtherFuel_1}; // TODO: is that right?'\n", + "func_body = \"return {FuelType::Electricity, FuelType::OtherFuel_1}; // TODO: is that right?\"\n", "parse_coolingFuelTypeFuncBod(func_body)" ] }, @@ -314,92 +307,97 @@ "def parse_heatingFuelTypeFuncBod(func_body: str):\n", " if m := RE_SIMPLE_FUELTYPE.match(func_body):\n", " fuel_type = m.groups()[0].strip()\n", - " if fuel_type == '{}':\n", - " fuel_type = ''\n", - " elif fuel_type.startswith('{') and fuel_type.endswith('}'):\n", - " fuel_type = fuel_type.replace('FuelType::', '')\n", - " if ',' in fuel_type:\n", + " if fuel_type == \"{}\":\n", + " fuel_type = \"\"\n", + " elif fuel_type.startswith(\"{\") and fuel_type.endswith(\"}\"):\n", + " fuel_type = fuel_type.replace(\"FuelType::\", \"\")\n", + " if \",\" in fuel_type:\n", " fuel_type = f'\"{fuel_type}\"'\n", " else:\n", " fuel_type = fuel_type[1:-1]\n", " return ComponentTypeInfo(is_simple=True, simple_return=fuel_type)\n", - " \n", + "\n", " if RE_GET_AIRLOOP_HEATING.match(func_body):\n", - " return ComponentTypeInfo(is_simple=True, simple_return='airLoopHVAC_->heatingFuelTypes()')\n", - " \n", + " return ComponentTypeInfo(is_simple=True, simple_return=\"airLoopHVAC_->heatingFuelTypes()\")\n", + 
"\n", " if RE_GET_PLANTLOOP_HEATING.match(func_body):\n", - " return ComponentTypeInfo(is_simple=True, simple_return='plantLoop_->heatingFuelTypes()')\n", - " \n", + " return ComponentTypeInfo(is_simple=True, simple_return=\"plantLoop_->heatingFuelTypes()\")\n", + "\n", " s = []\n", " if m := RE_INSERT_FUELTYPE.search(func_body):\n", " s.append(m.groups()[0])\n", " if RE_INSERT_CONVERTFUELTYPE.search(func_body):\n", - " s.append('fuelType()')\n", - " \n", + " s.append(\"fuelType()\")\n", + "\n", " if RE_INSERT_AIRLOOP_HEATING.search(func_body):\n", - " s.append('airLoopHVAC_->heatingFuelTypes()')\n", + " s.append(\"airLoopHVAC_->heatingFuelTypes()\")\n", " if RE_INSERT_PLANTLOOP_HEATING.search(func_body):\n", - " s.append('airLoopHVAC_->heatingFuelTypes()')\n", + " s.append(\"airLoopHVAC_->heatingFuelTypes()\")\n", " if RE_INSERT_HEAT_COIL.search(func_body):\n", - " s.append('hc.heatingFuelTypes()')\n", + " s.append(\"hc.heatingFuelTypes()\")\n", " if RE_INSERT_OPT_HEAT_COIL.search(func_body):\n", - " s.append('hc_->heatingFuelTypes()')\n", + " s.append(\"hc_->heatingFuelTypes()\")\n", " if RE_INSERT_REHEAT_COIL.search(func_body):\n", - " s.append('reheatCoil.heatingFuelTypes()')\n", + " s.append(\"reheatCoil.heatingFuelTypes()\")\n", " if RE_INSERT_SUPHC_COIL.search(func_body):\n", - " s.append('supHC.heatingFuelTypes()')\n", + " s.append(\"supHC.heatingFuelTypes()\")\n", " if RE_INSERT_OPT_SUPHC_COIL.search(func_body):\n", - " s.append('supHC_->heatingFuelTypes()')\n", + " s.append(\"supHC_->heatingFuelTypes()\")\n", " if s:\n", " # print(s)\n", " return ComponentTypeInfo(is_simple=True, simple_return=\" + \".join(s))\n", - " \n", + "\n", " return ComponentTypeInfo(is_simple=False, func_body=func_body)\n", "\n", + "\n", "def parse_appGHeatingFuelTypeFuncBod(func_body: str):\n", " if m := RE_SIMPLE_FUELTYPE.match(func_body):\n", " fuel_type = m.groups()[0].strip()\n", - " if fuel_type == '{}':\n", - " fuel_type = ''\n", - " elif fuel_type.startswith('{') and fuel_type.endswith('}'):\n", - " fuel_type = fuel_type.replace('AppGFuelType::', '')\n", - " if ',' in fuel_type:\n", + " if fuel_type == \"{}\":\n", + " fuel_type = \"\"\n", + " elif fuel_type.startswith(\"{\") and fuel_type.endswith(\"}\"):\n", + " fuel_type = fuel_type.replace(\"AppGFuelType::\", \"\")\n", + " if \",\" in fuel_type:\n", " fuel_type = f'\"{fuel_type}\"'\n", " else:\n", " fuel_type = fuel_type[1:-1]\n", " return ComponentTypeInfo(is_simple=True, simple_return=fuel_type, func_body=func_body)\n", - " \n", + "\n", " if RE_GET_AIRLOOP_APPG.match(func_body):\n", - " return ComponentTypeInfo(is_simple=True, simple_return='airLoopHVAC_->appGHeatingFuelTypes()', func_body=func_body)\n", - " \n", + " return ComponentTypeInfo(\n", + " is_simple=True, simple_return=\"airLoopHVAC_->appGHeatingFuelTypes()\", func_body=func_body\n", + " )\n", + "\n", " if RE_GET_PLANTLOOP_APPG.match(func_body):\n", - " return ComponentTypeInfo(is_simple=True, simple_return='plantLoop_->appGHeatingFuelTypes()', func_body=func_body)\n", - " \n", + " return ComponentTypeInfo(\n", + " is_simple=True, simple_return=\"plantLoop_->appGHeatingFuelTypes()\", func_body=func_body\n", + " )\n", + "\n", " s = []\n", - " \n", + "\n", " if m := RE_INSERT_FUELTYPE_APPG.search(func_body):\n", " s.append(m.groups()[0])\n", " if RE_INSERT_CONVERTFUELTYPE_APPG.search(func_body):\n", - " s.append('fuelType()')\n", - " \n", + " s.append(\"fuelType()\")\n", + "\n", " if RE_INSERT_AIRLOOP_APPG.search(func_body):\n", - " 
s.append('airLoopHVAC_->appGHeatingFuelTypes()')\n", + " s.append(\"airLoopHVAC_->appGHeatingFuelTypes()\")\n", " if RE_INSERT_PLANTLOOP_APPG.search(func_body):\n", - " s.append('airLoopHVAC_->appGHeatingFuelTypes()')\n", + " s.append(\"airLoopHVAC_->appGHeatingFuelTypes()\")\n", " if RE_INSERT_HEAT_COIL_APPG.search(func_body):\n", - " s.append('hc.appGHeatingFuelTypes()')\n", + " s.append(\"hc.appGHeatingFuelTypes()\")\n", " if RE_INSERT_OPT_HEAT_COIL_APPG.search(func_body):\n", - " s.append('hc_->appGHeatingFuelTypes()')\n", + " s.append(\"hc_->appGHeatingFuelTypes()\")\n", " if RE_INSERT_REHEAT_COIL_APPG.search(func_body):\n", - " s.append('reheatCoil.appGHeatingFuelTypes()')\n", + " s.append(\"reheatCoil.appGHeatingFuelTypes()\")\n", " if RE_INSERT_SUPHC_COIL_APPG.search(func_body):\n", - " s.append('supHC.appGHeatingFuelTypes()')\n", + " s.append(\"supHC.appGHeatingFuelTypes()\")\n", " if RE_INSERT_OPT_SUPHC_COIL_APPG.search(func_body):\n", - " s.append('supHC_->appGHeatingFuelTypes()')\n", + " s.append(\"supHC_->appGHeatingFuelTypes()\")\n", " if s:\n", " # print(s)\n", " return ComponentTypeInfo(is_simple=True, simple_return=\" + \".join(s), func_body=func_body)\n", - " \n", + "\n", " return ComponentTypeInfo(is_simple=False, func_body=func_body)" ] }, @@ -500,33 +498,33 @@ "source": [ "class_infos = {}\n", "for cpp_file in cpp_files:\n", - " class_name = cpp_file.name.replace('.cpp', '')\n", + " class_name = cpp_file.name.replace(\".cpp\", \"\")\n", "\n", - " with open(cpp_file, 'r') as f:\n", + " with open(cpp_file, \"r\") as f:\n", " content = f.read()\n", - " if not 'Impl::componentType()' in content:\n", + " if not \"Impl::componentType()\" in content:\n", " continue\n", - " \n", + "\n", " lines = content.splitlines()\n", - " \n", + "\n", " class_info = ClassInfo(class_name=class_name)\n", "\n", " i = 0\n", " while i < len(lines):\n", - " if '::componentType' in lines[i]:\n", + " if \"::componentType\" in lines[i]:\n", " i, func_body = get_func_body(lines, i)\n", " if m := RE_SIMPLE_COMPTYPE.match(func_body):\n", " class_info.component_type = ComponentTypeInfo(is_simple=True, simple_return=m.groups()[0])\n", " else:\n", " class_info.component_type = ComponentTypeInfo(is_simple=False, func_body=func_body)\n", - " elif '::coolingFuelTypes' in lines[i]:\n", + " elif \"::coolingFuelTypes\" in lines[i]:\n", " i, func_body = get_func_body(lines, i)\n", " class_info.cooling_fuel_types = parse_coolingFuelTypeFuncBod(func_body=func_body)\n", "\n", - " elif '::heatingFuelTypes' in lines[i]:\n", + " elif \"::heatingFuelTypes\" in lines[i]:\n", " i, func_body = get_func_body(lines, i)\n", " class_info.heating_fuel_types = parse_heatingFuelTypeFuncBod(func_body=func_body)\n", - " elif '::appGHeatingFuelTypes' in lines[i]:\n", + " elif \"::appGHeatingFuelTypes\" in lines[i]:\n", " i, func_body = get_func_body(lines, i)\n", " class_info.appg_heating_fuel_types = parse_appGHeatingFuelTypeFuncBod(func_body=func_body)\n", " i += 1\n", @@ -551,7 +549,7 @@ } ], "source": [ - "class_infos['ZoneHVACPackagedTerminalHeatPump']" + "class_infos[\"ZoneHVACPackagedTerminalHeatPump\"]" ] }, { @@ -700,7 +698,7 @@ "for k, c in class_infos.items():\n", " if c.cooling_fuel_types.is_simple:\n", " continue\n", - " \n", + "\n", " print(f\"{c.class_name}\\n==============\\n{c.cooling_fuel_types.func_body}\\n\\n\")" ] }, @@ -790,7 +788,7 @@ "for k, c in class_infos.items():\n", " if c.heating_fuel_types.is_simple:\n", " continue\n", - " \n", + "\n", " 
print(f\"{c.class_name}\\n==============\\n{c.heating_fuel_types.func_body}\\n\\n\")" ] }, @@ -880,7 +878,7 @@ "for k, c in class_infos.items():\n", " if c.appg_heating_fuel_types.is_simple:\n", " continue\n", - " \n", + "\n", " print(f\"{c.class_name}\\n==============\\n{c.appg_heating_fuel_types.func_body}\\n\\n\")" ] }, @@ -902,9 +900,9 @@ "lines = [\",\".join(ClassInfo.header())]\n", "for k, c in class_infos.items():\n", " lines.append(c.__repr__())\n", - " \n", - "with open('FuelTypes.csv', 'w') as f:\n", - " f.write(\"\\n\".join(lines) + '\\n')" + "\n", + "with open(\"FuelTypes.csv\", \"w\") as f:\n", + " f.write(\"\\n\".join(lines) + \"\\n\")" ] } ], diff --git a/developer/python/Modernize.ipynb b/developer/python/Modernize.ipynb index 1eb29381016..4c8d92a7fd5 100644 --- a/developer/python/Modernize.ipynb +++ b/developer/python/Modernize.ipynb @@ -33,7 +33,7 @@ } ], "source": [ - "ROOT_DIR = Path('.').absolute().parent.parent\n", + "ROOT_DIR = Path(\".\").absolute().parent.parent\n", "ROOT_DIR" ] }, @@ -44,7 +44,7 @@ "metadata": {}, "outputs": [], "source": [ - "ROOT_DIR = Path('/Users/julien/Software/Others/OpenStudio')" + "ROOT_DIR = Path(\"/Users/julien/Software/Others/OpenStudio\")" ] }, { @@ -54,8 +54,8 @@ "metadata": {}, "outputs": [], "source": [ - "#cpp_files = gb.glob(str(ROOT_DIR / 'src' / '**' / '*.cpp'), recursive=True)\n", - "cpp_files = list(ROOT_DIR.glob('src/**/*.cpp'))" + "# cpp_files = gb.glob(str(ROOT_DIR / 'src' / '**' / '*.cpp'), recursive=True)\n", + "cpp_files = list(ROOT_DIR.glob(\"src/**/*.cpp\"))" ] }, { @@ -73,7 +73,7 @@ "metadata": {}, "outputs": [], "source": [ - "#cpp_files = [ '/Users/julien/Software/Others/OpenStudio/src/model/test/SubSurface_GTest.cpp',]" + "# cpp_files = [ '/Users/julien/Software/Others/OpenStudio/src/model/test/SubSurface_GTest.cpp',]" ] }, { @@ -83,10 +83,10 @@ "metadata": {}, "outputs": [], "source": [ - "RE_VERTICES = re.compile(r'^(\\s*)(?:std::vector|Point3dVector) (\\w+);')\n", - "RE_VERTICES_CLEAR = re.compile(r'^(\\s*)(\\w+).clear\\(\\);')\n", - "RE_PUSHBACK = re.compile(r'\\w+.push_back\\(Point3d\\(\\s*(-?\\d+(?:\\.\\d+)?),\\s*(-?\\d+(?:\\.\\d+)?),\\s*(-?\\d+(?:\\.\\d+)?)\\)\\);')\n", - "RE_PUSHBACK = re.compile(r'\\w+.push_back\\(Point3d\\(\\s*([^,]+),\\s*([^,]+),\\s*([^,]+)\\)\\);')" + "RE_VERTICES = re.compile(r\"^(\\s*)(?:std::vector|Point3dVector) (\\w+);\")\n", + "RE_VERTICES_CLEAR = re.compile(r\"^(\\s*)(\\w+).clear\\(\\);\")\n", + "RE_PUSHBACK = re.compile(r\"\\w+.push_back\\(Point3d\\(\\s*(-?\\d+(?:\\.\\d+)?),\\s*(-?\\d+(?:\\.\\d+)?),\\s*(-?\\d+(?:\\.\\d+)?)\\)\\);\")\n", + "RE_PUSHBACK = re.compile(r\"\\w+.push_back\\(Point3d\\(\\s*([^,]+),\\s*([^,]+),\\s*([^,]+)\\)\\);\")" ] }, { @@ -97,7 +97,7 @@ "outputs": [], "source": [ "for cpp_file in cpp_files:\n", - " with open(cpp_file, 'r') as f:\n", + " with open(cpp_file, \"r\") as f:\n", " content = f.read()\n", " lines = content.splitlines()\n", " done_lines = []\n", @@ -113,35 +113,34 @@ " else:\n", " spacing = m12.groups()[0]\n", " print(i, line)\n", - " j = i+1\n", + " j = i + 1\n", " pts = []\n", - " while (m2 := RE_PUSHBACK.search(lines[j])):\n", + " while m2 := RE_PUSHBACK.search(lines[j]):\n", " anyDone = True\n", " pts.append(m2.groups())\n", " done_lines.append(j)\n", " j += 1\n", " if pts:\n", " if m11:\n", - " new_lines.append(line.replace(';', ' {'))\n", + " new_lines.append(line.replace(\";\", \" {\"))\n", " else:\n", - " new_lines.append(line.replace('.clear();', ' = {'))\n", + " new_lines.append(line.replace(\".clear();\", \" = {\"))\n", " for k, pt 
in enumerate(pts):\n", - " new_line = spacing + ' {{{}, {}, {}}}'.format(*pt)\n", + " new_line = spacing + \" {{{}, {}, {}}}\".format(*pt)\n", " # if k != (len(pts) - 1):\n", - " new_line += ','\n", + " new_line += \",\"\n", " new_lines.append(new_line)\n", "\n", - " new_lines.append(spacing + '};')\n", + " new_lines.append(spacing + \"};\")\n", " else:\n", " for k in range(i, j):\n", " new_lines.append(lines[i])\n", " else:\n", " new_lines.append(line)\n", - " \n", + "\n", " if anyDone:\n", - " with open(cpp_file, 'w') as f:\n", - " f.write(\"\\n\".join(new_lines) + '\\n')\n", - " " + " with open(cpp_file, \"w\") as f:\n", + " f.write(\"\\n\".join(new_lines) + \"\\n\")" ] }, { @@ -159,7 +158,7 @@ "metadata": {}, "outputs": [], "source": [ - "p_method_name = re.compile(r'(\\s*)bool\\s+(.*?_Impl::)(set.*?)(\\(\\s*(std::string).*)')\n", + "p_method_name = re.compile(r\"(\\s*)bool\\s+(.*?_Impl::)(set.*?)(\\(\\s*(std::string).*)\")\n", "\n", "all_dict = {}\n", "changed_dict = {}\n", @@ -169,33 +168,34 @@ "n_tot = 0\n", "\n", "for file in cpp_files:\n", - " \n", " # Check number of method changed for consistency between cpp / hpp/ Impl\n", " # Stores the actual line of function names for checking\n", " cpp_impl_methods = []\n", - " \n", + "\n", " class_name = os.path.splitext(os.path.basename(file))[0]\n", - " with open(file, 'r') as f:\n", + " with open(file, \"r\") as f:\n", " ori_lines = f.read().splitlines()\n", "\n", " lines = deepcopy(ori_lines)\n", " # Might as well strip the trailing (right) whitespace too\n", " lines = [x.rstrip() for x in lines]\n", - " \n", + "\n", " found_lines = []\n", " for i, line in enumerate(lines):\n", " # Check that the method name is setXXX\n", " m3 = p_method_name.match(lines[i])\n", - " \n", + "\n", " if m3:\n", " method_name = m3.groups()[2]\n", " # print(i, m3.groups())\n", " # Go until the end of the function\n", - " lines[i] = \"{}bool {}{}{}\".format(m3.groups()[0],\n", - " m3.groups()[1],\n", - " m3.groups()[2],\n", - " m3.groups()[3].replace('std::string', 'const std::string&'))\n", - " \n", + " lines[i] = \"{}bool {}{}{}\".format(\n", + " m3.groups()[0],\n", + " m3.groups()[1],\n", + " m3.groups()[2],\n", + " m3.groups()[3].replace(\"std::string\", \"const std::string&\"),\n", + " )\n", + "\n", " found_lines.append(i)\n", " cpp_impl_methods.append(line)\n", " if not class_name in changed_dict.keys():\n", @@ -203,46 +203,43 @@ " else:\n", " changed_dict[class_name].append(method_name)\n", "\n", - " \n", " if found_lines:\n", - " \n", " # Make the change to the Public implemention in the cpp file\n", " cpp_public_methods = []\n", - " p_getimpl = re.compile(r'(\\s*)(getImpl.*)')\n", + " p_getimpl = re.compile(r\"(\\s*)(getImpl.*)\")\n", "\n", " for method_name in changed_dict[class_name]:\n", - " p_public = re.compile(r'(\\s*)bool\\s*({}::{}\\s*\\(\\s*(std::string).*)'.format(class_name, method_name))\n", + " p_public = re.compile(r\"(\\s*)bool\\s*({}::{}\\s*\\(\\s*(std::string).*)\".format(class_name, method_name))\n", " for i, line in enumerate(lines):\n", " m = p_public.match(line)\n", " if m:\n", " found_lines.append(i)\n", - " lines[i] = lines[i].replace('std::string', 'const std::string&')\n", + " lines[i] = lines[i].replace(\"std::string\", \"const std::string&\")\n", " cpp_public_methods.append(line)\n", "\n", - " \n", - " all_dict[file] = {'ori': ori_lines, 'new': lines}\n", + " all_dict[file] = {\"ori\": ori_lines, \"new\": lines}\n", " # print('{}: {}'.format(file, found_lines))\n", " # if len(found_lines)%2 != 0:\n", " # print(\"Something 
probably went wrong\")\n",
-    "        \n",
+    "\n",
     "        # Write the cpp file\n",
-    "        with open(file, 'w') as f:\n",
+    "        with open(file, \"w\") as f:\n",
     "            f.write(\"\\n\".join(lines) + \"\\n\")\n",
-    "        \n",
+    "\n",
     "        # Change it in the hpp files\n",
-    "        \n",
-    "        file_hpp = os.path.join(base_dir, '{}.hpp'.format(class_name))\n",
-    "        file_impl_hpp = os.path.join(base_dir, '{}_Impl.hpp'.format(class_name))\n",
-    "        \n",
+    "\n",
+    "        file_hpp = os.path.join(base_dir, \"{}.hpp\".format(class_name))\n",
+    "        file_impl_hpp = os.path.join(base_dir, \"{}_Impl.hpp\".format(class_name))\n",
+    "\n",
     "        hpp_impl_methods = []\n",
     "        hpp_public_methods = []\n",
-    "        \n",
+    "\n",
     "        for file_header in [file_hpp, file_impl_hpp]:\n",
     "            write_needed = False\n",
-    "            with open(file_header, 'r') as f:\n",
+    "            with open(file_header, \"r\") as f:\n",
     "                lines = f.read().splitlines()\n",
     "            for method_name in changed_dict[class_name]:\n",
-    "                p = re.compile('(\s*)bool\s*({}\s*\(\s*(std::string).*)'.format(method_name))\n",
+    "                p = re.compile(\"(\s*)bool\s*({}\s*\(\s*(std::string).*)\".format(method_name))\n",
     "\n",
     "                # Might as well strip the trailing (right) whitespace too\n",
     "                lines = [x.rstrip() for x in lines]\n",
@@ -252,29 +249,27 @@
     "                    if m:\n",
     "                        is_found = True\n",
     "                        write_needed = True\n",
-    "                        lines[i] = lines[i].replace('std::string', 'const std::string&')\n",
+    "                        lines[i] = lines[i].replace(\"std::string\", \"const std::string&\")\n",
     "                        if file_header == file_hpp:\n",
     "                            hpp_public_methods.append(line)\n",
     "                        else:\n",
     "                            hpp_impl_methods.append(line)\n",
     "                        n_tot += 1\n",
     "            if not is_found:\n",
-    "                print('{}: {} not found'.format(file, method_name))\n",
-    "        \n",
-    "        \n",
+    "                print(\"{}: {} not found\".format(file, method_name))\n",
+    "\n",
     "        if write_needed:\n",
-    "            with open(file_header, 'w') as f:\n",
+    "            with open(file_header, \"w\") as f:\n",
     "                f.write(\"\\n\".join(lines) + \"\\n\")\n",
-    "        \n",
-    "        \n",
+    "\n",
     "        if len(cpp_impl_methods) != len(hpp_impl_methods):\n",
-    "            warnings.warn(\"Not maching for {} in Impl: Cpp={}, Hpp=\"\n",
-    "                          \"{}\".format(file, cpp_impl_methods,\n",
-    "                                      hpp_impl_methods))\n",
+    "            warnings.warn(\n",
+    "                \"Not matching for {} in Impl: Cpp={}, Hpp=\" \"{}\".format(file, cpp_impl_methods, hpp_impl_methods)\n",
+    "            )\n",
     "        if len(cpp_public_methods) != len(hpp_impl_methods):\n",
-    "            warnings.warn(\"Not maching for {} in Public: Cpp={}, Hpp=\"\n",
-    "                          \"{}\".format(file, cpp_public_methods,\n",
-    "                                      hpp_impl_methods))"
+    "            warnings.warn(\n",
+    "                \"Not matching for {} in Public: Cpp={}, Hpp=\" \"{}\".format(file, cpp_public_methods, hpp_impl_methods)\n",
+    "            )"
   ]
  },
  {
@@ -292,8 +287,8 @@
   "metadata": {},
   "outputs": [],
   "source": [
-    "impl_hpp_files = list(ROOT_DIR.glob('src/**/*_Impl.hpp'))\n",
-    "hpp_files = list(ROOT_DIR.glob('src/**/*.hpp'))\n",
+    "impl_hpp_files = list(ROOT_DIR.glob(\"src/**/*_Impl.hpp\"))\n",
+    "hpp_files = list(ROOT_DIR.glob(\"src/**/*.hpp\"))\n",
     "print(len(impl_hpp_files), len(hpp_files))\n",
     "hpp_files = list(set(hpp_files) - set(impl_hpp_files))\n",
     "print(len(impl_hpp_files), len(hpp_files))"
   ]
  },
  {
@@ -306,9 +301,9 @@
   "metadata": {},
   "outputs": [],
   "source": [
-    "re_destructors = re.compile(r'~(\\w+)\\(\\)')\n",
-    "re_destructor_cpp_non_trivial = re.compile(r'(\\w+)::~\\1()\\(\\)\\s*{\\s*(\\w+.*)\\s*}')\n",
-    "re_destructor_cpp_trivial = re.compile(r'(\\w+)::~\\1()\\(\\)\\s*{\\s*}')"
+    "re_destructors = re.compile(r\"~(\\w+)\\(\\)\")\n",
+    "re_destructor_cpp_non_trivial = re.compile(r\"(\\w+)::~\\1()\\(\\)\\s*{\\s*(\\w+.*)\\s*}\")\n",
+    "re_destructor_cpp_trivial = 
re.compile(r\"(\\w+)::~\\1()\\(\\)\\s*{\\s*}\")" ] }, { @@ -328,11 +323,11 @@ "source": [ "for cpp_file in cpp_files:\n", " class_name = cpp_file.stem\n", - " with open(cpp_file, 'r') as f:\n", + " with open(cpp_file, \"r\") as f:\n", " content = f.read()\n", - " \n", + "\n", " for x in re_destructor_cpp_non_trivial.finditer(content):\n", - " print(f'{cpp_file}: {x.groups()}')" + " print(f\"{cpp_file}: {x.groups()}\")" ] }, { @@ -350,7 +345,7 @@ "metadata": {}, "outputs": [], "source": [ - "re_destructor_cpp_trivial = re.compile(r'^[\\t ]*(\\w+)::~\\1()\\(\\)\\s*{\\s*}\\s*?$[\\r\\n]', re.MULTILINE)" + "re_destructor_cpp_trivial = re.compile(r\"^[\\t ]*(\\w+)::~\\1()\\(\\)\\s*{\\s*}\\s*?$[\\r\\n]\", re.MULTILINE)" ] }, { @@ -363,53 +358,53 @@ "outputs": [], "source": [ "for cpp_file in cpp_files:\n", - "#for cpp_file in [Path('/Users/julien/Software/Others/OpenStudio/src/airflow/contam/PrjAirflowElements.cpp')]:\n", + " # for cpp_file in [Path('/Users/julien/Software/Others/OpenStudio/src/airflow/contam/PrjAirflowElements.cpp')]:\n", " class_name = cpp_file.stem\n", - " with open(cpp_file, 'r') as f:\n", + " with open(cpp_file, \"r\") as f:\n", " content = f.read()\n", " found_dtors = []\n", " for x in re_destructor_cpp_trivial.finditer(content):\n", " found_dtors.append(x.groups()[0])\n", " if found_dtors:\n", - " #print(cpp_file, found_dtors)\n", - " with open(cpp_file, 'w') as f:\n", - " f.write(re_destructor_cpp_trivial.sub('', content))\n", - " \n", - " hpp_file = cpp_file.with_suffix('.hpp')\n", - " impl_file = cpp_file.with_stem(cpp_file.stem + \"_Impl\").with_suffix('.hpp')\n", - " impl_dtors = [x for x in found_dtors if '_Impl' in x]\n", + " # print(cpp_file, found_dtors)\n", + " with open(cpp_file, \"w\") as f:\n", + " f.write(re_destructor_cpp_trivial.sub(\"\", content))\n", + "\n", + " hpp_file = cpp_file.with_suffix(\".hpp\")\n", + " impl_file = cpp_file.with_stem(cpp_file.stem + \"_Impl\").with_suffix(\".hpp\")\n", + " impl_dtors = [x for x in found_dtors if \"_Impl\" in x]\n", " dtors = list(set(found_dtors) - set(impl_dtors))\n", " if impl_dtors:\n", - " with open(impl_file, 'r') as f:\n", + " with open(impl_file, \"r\") as f:\n", " content = f.read()\n", " lines = content.splitlines()\n", " new_lines = []\n", " for i, line in enumerate(lines):\n", " found = False\n", " for dtor in impl_dtors:\n", - " if f'~{dtor}();' in line:\n", + " if f\"~{dtor}();\" in line:\n", " found = True\n", - " new_lines.append(line.replace(';', ' = default;'))\n", + " new_lines.append(line.replace(\";\", \" = default;\"))\n", " if not found:\n", - " new_lines.append(line) \n", - " with open(impl_file, 'w') as f:\n", - " f.write('\\n'.join(new_lines) + '\\n')\n", - " \n", - " if dtors: \n", - " with open(hpp_file, 'r') as f:\n", + " new_lines.append(line)\n", + " with open(impl_file, \"w\") as f:\n", + " f.write(\"\\n\".join(new_lines) + \"\\n\")\n", + "\n", + " if dtors:\n", + " with open(hpp_file, \"r\") as f:\n", " content = f.read()\n", " lines = content.splitlines()\n", " new_lines = []\n", " for i, line in enumerate(lines):\n", " found = False\n", " for dtor in dtors:\n", - " if f'~{dtor}();' in line:\n", + " if f\"~{dtor}();\" in line:\n", " found = True\n", - " new_lines.append(line.replace(';', ' = default;'))\n", + " new_lines.append(line.replace(\";\", \" = default;\"))\n", " if not found:\n", " new_lines.append(line)\n", - " with open(hpp_file, 'w') as f:\n", - " f.write('\\n'.join(new_lines) + '\\n')" + " with open(hpp_file, \"w\") as f:\n", + " f.write(\"\\n\".join(new_lines) + 
\"\\n\")" ] }, { @@ -427,23 +422,21 @@ "metadata": {}, "outputs": [], "source": [ - "hpp_files = list(ROOT_DIR.glob('src/**/*.hpp'))\n", - "re_destructor_hpp_trivial = re.compile(r'^([\\t ]*(?:virtual *)?~\\w+\\(\\))(\\s*?{\\s*?}\\s*?;*)(.*?)$', re.MULTILINE)\n", + "hpp_files = list(ROOT_DIR.glob(\"src/**/*.hpp\"))\n", + "re_destructor_hpp_trivial = re.compile(r\"^([\\t ]*(?:virtual *)?~\\w+\\(\\))(\\s*?{\\s*?}\\s*?;*)(.*?)$\", re.MULTILINE)\n", "\n", "for hpp_file in hpp_files:\n", - " with open(hpp_file, 'r') as f:\n", + " with open(hpp_file, \"r\") as f:\n", " content = f.read()\n", - " \n", - "\n", "\n", " found_dtors = []\n", " for x in re_destructor_hpp_trivial.finditer(content):\n", - " #print(x.groups())\n", + " # print(x.groups())\n", " found_dtors.append(x.groups()[1])\n", " if found_dtors:\n", - " #print(hpp_file, found_dtors)\n", - " with open(hpp_file, 'w') as f:\n", - " f.write(re_destructor_hpp_trivial.sub(r'\\1 = default;\\3', content))" + " # print(hpp_file, found_dtors)\n", + " with open(hpp_file, \"w\") as f:\n", + " f.write(re_destructor_hpp_trivial.sub(r\"\\1 = default;\\3\", content))" ] }, { @@ -461,11 +454,11 @@ "metadata": {}, "outputs": [], "source": [ - "cpp_model_files = list(ROOT_DIR.glob('src/model/*.cpp'))\n", + "cpp_model_files = list(ROOT_DIR.glob(\"src/model/*.cpp\"))\n", "\n", - "re_normal_return = re.compile(r'^(\\s*)return\\s*(\\w+);(.*)$')\n", + "re_normal_return = re.compile(r\"^(\\s*)return\\s*(\\w+);(.*)$\")\n", "\n", - "re_init_return = re.compile(r'(\\w+)\\s+' + class_name + r'\\s+=\\s+(\\w+)::clone\\(model\\)(?:.cast<(\\w+)>\\(\\);)')" + "re_init_return = re.compile(r\"(\\w+)\\s+\" + class_name + r\"\\s+=\\s+(\\w+)::clone\\(model\\)(?:.cast<(\\w+)>\\(\\);)\")" ] }, { @@ -476,51 +469,55 @@ "outputs": [], "source": [ "for cpp_file in cpp_model_files:\n", - "#for cpp_file in [Path('/Users/julien/Software/Others/OpenStudio/src/model/AirLoopHVACUnitaryHeatPumpAirToAirMultiSpeed.cpp')]:\n", - " \n", + " # for cpp_file in [Path('/Users/julien/Software/Others/OpenStudio/src/model/AirLoopHVACUnitaryHeatPumpAirToAirMultiSpeed.cpp')]:\n", + "\n", " class_name = cpp_file.stem\n", - " with open(cpp_file, 'r') as f:\n", + " with open(cpp_file, \"r\") as f:\n", " content = f.read()\n", - " if not f'{class_name}_Impl::clone(Model model)' in content:\n", - " #print(f'{class_name} has no clone override')\n", + " if not f\"{class_name}_Impl::clone(Model model)\" in content:\n", + " # print(f'{class_name} has no clone override')\n", " continue\n", - " \n", + "\n", " lines = content.splitlines()\n", - " \n", + "\n", " i = 0\n", - " while (f'{class_name}_Impl::clone(Model model)' not in lines[i]):\n", + " while f\"{class_name}_Impl::clone(Model model)\" not in lines[i]:\n", " i += 1\n", - " \n", + "\n", " k = i + 1\n", - " while ('return ' not in lines[k]):\n", + " while \"return \" not in lines[k]:\n", " k += 1\n", - " \n", - " #print(i, k)\n", - " #print(lines[k+1])\n", - " \n", - " if (m := re_normal_return.search(lines[k])):\n", + "\n", + " # print(i, k)\n", + " # print(lines[k+1])\n", + "\n", + " if m := re_normal_return.search(lines[k]):\n", " return_init_space, return_var, trailing_chars = m.groups()\n", - " #print(return_var)\n", + " # print(return_var)\n", " found = False\n", - " #re_init_return = re.compile(r'^(\\s*)(\\w+)\\s+(' + return_var + r')\\s+=\\s+(\\w+::clone\\(model\\))(.cast<(\\w+)>\\(\\);.*)$')\n", - " re_init_return = re.compile(r'^(\\s*)(\\w+)\\s+(' + return_var + r')\\s+=\\s+(\\w+::clone\\(model\\))(.optionalCast<(\\w+)>\\(\\).get();.*)$')\n", 
+ " # re_init_return = re.compile(r'^(\\s*)(\\w+)\\s+(' + return_var + r')\\s+=\\s+(\\w+::clone\\(model\\))(.cast<(\\w+)>\\(\\);.*)$')\n", + " re_init_return = re.compile(\n", + " r\"^(\\s*)(\\w+)\\s+(\" + return_var + r\")\\s+=\\s+(\\w+::clone\\(model\\))(.optionalCast<(\\w+)>\\(\\).get();.*)$\"\n", + " )\n", "\n", " for j in range(i, k):\n", - " if (m := re_init_return.match(lines[j])):\n", + " if m := re_init_return.match(lines[j]):\n", " found = True\n", " initial_space, vartype, varname, clone_method, cast_method, cast_type = m.groups()\n", - " #print(m.groups())\n", - " if cast_type != 'ModelObject':\n", - " lines[j] = f'{initial_space}auto {varname} = {clone_method}{cast_method}'\n", - " lines[k] = f'{return_init_space}return std::move({return_var});{trailing_chars}'\n", + " # print(m.groups())\n", + " if cast_type != \"ModelObject\":\n", + " lines[j] = f\"{initial_space}auto {varname} = {clone_method}{cast_method}\"\n", + " lines[k] = f\"{return_init_space}return std::move({return_var});{trailing_chars}\"\n", " print(lines[j])\n", " print(lines[k])\n", - " #with open(cpp_file, 'w') as f:\n", + " # with open(cpp_file, 'w') as f:\n", " # f.write('\\n'.join(lines) + '\\n')\n", " if not found:\n", - " re_init_return = re.compile(r'^(\\s*)(\\w+)\\s+(' + return_var + r')\\s+=\\s+(\\w+::clone\\(model\\))(.cast<(\\w+)>\\(\\);.*)$', re.MULTILINE)\n", + " re_init_return = re.compile(\n", + " r\"^(\\s*)(\\w+)\\s+(\" + return_var + r\")\\s+=\\s+(\\w+::clone\\(model\\))(.cast<(\\w+)>\\(\\);.*)$\", re.MULTILINE\n", + " )\n", "\n", - " if (m := re_init_return.search('\\n'.join(lines[i:k+1]))):\n", + " if m := re_init_return.search(\"\\n\".join(lines[i : k + 1])):\n", " print(m.groups())" ] }, @@ -544,29 +541,29 @@ ], "source": [ "for cpp_file in cpp_model_files:\n", - "#for cpp_file in [Path('/Users/julien/Software/Others/OpenStudio/src/model/AirLoopHVACUnitaryHeatPumpAirToAirMultiSpeed.cpp')]:\n", - " \n", + " # for cpp_file in [Path('/Users/julien/Software/Others/OpenStudio/src/model/AirLoopHVACUnitaryHeatPumpAirToAirMultiSpeed.cpp')]:\n", + "\n", " class_name = cpp_file.stem\n", - " with open(cpp_file, 'r') as f:\n", + " with open(cpp_file, \"r\") as f:\n", " content = f.read()\n", - " if not f'{class_name}_Impl::clone(Model model)' in content:\n", - " #print(f'{class_name} has no clone override')\n", + " if not f\"{class_name}_Impl::clone(Model model)\" in content:\n", + " # print(f'{class_name} has no clone override')\n", " continue\n", - " \n", + "\n", " lines = content.splitlines()\n", - " \n", + "\n", " i = 0\n", - " while (f'{class_name}_Impl::clone(Model model)' not in lines[i]):\n", + " while f\"{class_name}_Impl::clone(Model model)\" not in lines[i]:\n", " i += 1\n", - " \n", + "\n", " k = i + 1\n", - " while ('return ' not in lines[k]):\n", + " while \"return \" not in lines[k]:\n", " k += 1\n", - " \n", - " #print(i, k)\n", - " #print(lines[k+1])\n", - " \n", - " if (m := re_normal_return.search(lines[k])):\n", + "\n", + " # print(i, k)\n", + " # print(lines[k+1])\n", + "\n", + " if m := re_normal_return.search(lines[k]):\n", " return_init_space, return_var, trailing_chars = m.groups()\n", " print(cpp_file, lines[k])" ] @@ -638,36 +635,35 @@ "metadata": {}, "outputs": [], "source": [ - "model_impl_hpp_files = list(ROOT_DIR.glob('src/**/*_Impl.hpp'))\n", - "model_hpp_files = list(ROOT_DIR.glob('src/model/*.hpp'))\n", + "model_impl_hpp_files = list(ROOT_DIR.glob(\"src/**/*_Impl.hpp\"))\n", + "model_hpp_files = list(ROOT_DIR.glob(\"src/model/*.hpp\"))\n", 
"print(len(model_impl_hpp_files), len(model_hpp_files))\n", "model_hpp_files = list(set(model_hpp_files) - set(model_impl_hpp_files))\n", "print(len(model_impl_hpp_files), len(model_hpp_files))\n", "\n", "\n", - "re_destructor_hpp_trivial = re.compile(r'^([\\t ]*)((?:virtual *)?~(\\w+)\\(\\)\\s*=\\s*default;*.*?)$', re.MULTILINE)\n", + "re_destructor_hpp_trivial = re.compile(r\"^([\\t ]*)((?:virtual *)?~(\\w+)\\(\\)\\s*=\\s*default;*.*?)$\", re.MULTILINE)\n", "for hpp_file in model_hpp_files:\n", - " with open(hpp_file, 'r') as f:\n", + " with open(hpp_file, \"r\") as f:\n", " content = f.read()\n", - " \n", + "\n", " lines = content.splitlines()\n", " new_lines = []\n", " for line in lines:\n", - " if (m := re_destructor_hpp_trivial.match(line)):\n", + " if m := re_destructor_hpp_trivial.match(line):\n", " init_space, dtor_signature, class_name = m.groups()\n", " new_lines.append(line)\n", - " new_lines.append(f'{init_space}// Default the copy and move operators because the virtual dtor is explicit')\n", + " new_lines.append(f\"{init_space}// Default the copy and move operators because the virtual dtor is explicit\")\n", " new_lines.append(f\"{init_space}{class_name}(const {class_name}& other) = default;\")\n", " new_lines.append(f\"{init_space}{class_name}({class_name}&& other) = default;\")\n", " new_lines.append(f\"{init_space}{class_name}& operator=(const {class_name}&) = default;\")\n", " new_lines.append(f\"{init_space}{class_name}& operator=({class_name}&&) = default;\")\n", - " \n", + "\n", " else:\n", " new_lines.append(line)\n", "\n", - " with open(hpp_file, 'w') as f:\n", - " f.write('\\n'.join(new_lines) + '\\n')\n", - " " + " with open(hpp_file, \"w\") as f:\n", + " f.write(\"\\n\".join(new_lines) + \"\\n\")" ] }, { @@ -685,8 +681,8 @@ "metadata": {}, "outputs": [], "source": [ - "concrete_modelobject_file = ROOT_DIR / 'src/model/ConcreteModelObjects.hpp'\n", - "with open(concrete_modelobject_file, 'r') as f:\n", + "concrete_modelobject_file = ROOT_DIR / \"src/model/ConcreteModelObjects.hpp\"\n", + "with open(concrete_modelobject_file, \"r\") as f:\n", " content = f.read()\n", "lines = content.splitlines()" ] @@ -699,13 +695,13 @@ "outputs": [], "source": [ "concrete_classes = []\n", - "concrete_impl_classes = [] # for QA/QC\n", + "concrete_impl_classes = [] # for QA/QC\n", "for line in lines:\n", - " line = line.split('//')[0].strip()\n", - " if '#include' in line:\n", - " c = line.split('\"')[1].replace('.hpp', '')\n", - " if '_Impl' in c:\n", - " concrete_impl_classes.append(c.replace('_Impl', ''))\n", + " line = line.split(\"//\")[0].strip()\n", + " if \"#include\" in line:\n", + " c = line.split('\"')[1].replace(\".hpp\", \"\")\n", + " if \"_Impl\" in c:\n", + " concrete_impl_classes.append(c.replace(\"_Impl\", \"\"))\n", " else:\n", " concrete_classes.append(c)" ] @@ -780,7 +776,7 @@ "metadata": {}, "outputs": [], "source": [ - "lookfors = ['getModelObjects<', 'getModelObjectsByName<', 'getModelObjectByName<']" + "lookfors = [\"getModelObjects<\", \"getModelObjectsByName<\", \"getModelObjectByName<\"]" ] }, { @@ -790,8 +786,8 @@ "metadata": {}, "outputs": [], "source": [ - "re_getobject_call = re.compile(r'(getModelObjects|getModelObjectsByName|getModelObjectByName)<(.*?)>')\n", - "line = ' std::vector constructionBases = model.getModelObjects();'" + "re_getobject_call = re.compile(r\"(getModelObjects|getModelObjectsByName|getModelObjectByName)<(.*?)>\")\n", + "line = \" std::vector constructionBases = model.getModelObjects();\"" ] }, { @@ -2397,42 +2393,42 @@ } ], 
"source": [ - "cpp_files = list(ROOT_DIR.glob('src/**/*.cpp'))\n", + "cpp_files = list(ROOT_DIR.glob(\"src/**/*.cpp\"))\n", "for cpp_file in cpp_files:\n", - "#for cpp_file in [Path('/Users/julien/Software/Others/OpenStudio/src/model/AirLoopHVACUnitaryHeatPumpAirToAirMultiSpeed.cpp')]:\n", - " \n", + " # for cpp_file in [Path('/Users/julien/Software/Others/OpenStudio/src/model/AirLoopHVACUnitaryHeatPumpAirToAirMultiSpeed.cpp')]:\n", + "\n", " class_name = cpp_file.stem\n", - " with open(cpp_file, 'r') as f:\n", + " with open(cpp_file, \"r\") as f:\n", " content = f.read()\n", " if not any([x in content for x in lookfors]):\n", - " #print(f'{cpp_file.relative_to(ROOT_DIR)} has no getObject')\n", + " # print(f'{cpp_file.relative_to(ROOT_DIR)} has no getObject')\n", " continue\n", - " \n", - " #found_calls = []\n", - " #for x in re_getobject_call.finditer(content):\n", + "\n", + " # found_calls = []\n", + " # for x in re_getobject_call.finditer(content):\n", " # found_calls.append(x.groups())\n", - " #if not found_calls:\n", + " # if not found_calls:\n", " # #print(f'{cpp_file.relative_to(ROOT_DIR)} has no getObject')\n", " # continue\n", - " \n", - " print(f'{cpp_file.relative_to(ROOT_DIR)} has call to getObject template')\n", + "\n", + " print(f\"{cpp_file.relative_to(ROOT_DIR)} has call to getObject template\")\n", " print(found_calls)\n", - " \n", + "\n", " lines = content.splitlines()\n", " new_lines = []\n", " for i, line in enumerate(lines):\n", - " if (m := re_getobject_call.search(line)):\n", + " if m := re_getobject_call.search(line):\n", " template, t = m.groups()\n", - " type_clean = t.split('::')[-1]\n", + " type_clean = t.split(\"::\")[-1]\n", " if type_clean in concrete_classes:\n", - " new_lines.append(line.replace(template, template.replace('getModelObject', 'getConcreteModelObject')))\n", + " new_lines.append(line.replace(template, template.replace(\"getModelObject\", \"getConcreteModelObject\")))\n", " print(i, template, type_clean)\n", " continue\n", - " \n", + "\n", " new_lines.append(line)\n", - " \n", - " with open(cpp_file, 'w') as f:\n", - " f.write('\\n'.join(new_lines) + '\\n')" + "\n", + " with open(cpp_file, \"w\") as f:\n", + " f.write(\"\\n\".join(new_lines) + \"\\n\")" ] }, { @@ -2957,7 +2953,7 @@ ], "source": [ "for concrete_class in concrete_classes:\n", - " print(f'template <> struct is_concrete_model_mobject<{concrete_class}> : std::true_type {{}};')" + " print(f\"template <> struct is_concrete_model_mobject<{concrete_class}> : std::true_type {{}};\")" ] }, { diff --git a/developer/python/run_benchmark.py b/developer/python/run_benchmark.py index b423920a442..60de3b25278 100755 --- a/developer/python/run_benchmark.py +++ b/developer/python/run_benchmark.py @@ -30,18 +30,17 @@ run_benchmark.py --prefix ori --quiet --all """ -from docopt import docopt +import glob as gb import re import shlex import stat import subprocess import typing - from pathlib import Path -import glob as gb -import pandas as pd -import matplotlib.pyplot as plt +import matplotlib.pyplot as plt +import pandas as pd +from docopt import docopt ROOT_DIR = Path(__file__).parent.absolute() @@ -52,17 +51,14 @@ def get_branch_and_sha() -> [str, str]: the CMakeCache.txt has the branch and sha, but it may be outdated (if cmake didn't rerun explicitly) """ - with open(ROOT_DIR / 'CMakeCache.txt', 'r') as f: + with open(ROOT_DIR / "CMakeCache.txt", "r") as f: content = f.read() - source_dir = re.search(r'CMAKE_HOME_DIRECTORY:INTERNAL=(.*)', - content).groups()[0] + source_dir = 
re.search(r"CMAKE_HOME_DIRECTORY:INTERNAL=(.*)", content).groups()[0] - cmd_branch = shlex.split(f'git --git-dir={source_dir}/.git ' - 'rev-parse --abbrev-ref HEAD') + cmd_branch = shlex.split(f"git --git-dir={source_dir}/.git " "rev-parse --abbrev-ref HEAD") branch = subprocess.check_output(cmd_branch).decode().strip() - cmd_sha = shlex.split(f'git --git-dir={source_dir}/.git ' - 'log --pretty=format:"%h" -n 1') + cmd_sha = shlex.split(f"git --git-dir={source_dir}/.git " 'log --pretty=format:"%h" -n 1') sha = subprocess.check_output(cmd_sha).decode().strip() return branch, sha @@ -75,30 +71,33 @@ def infer_products_dir() -> Path: -------- * products_dir (pathlib.Path): the path to the Products directory """ - matches = gb.glob(str(ROOT_DIR / '**/Products/benchmark'), recursive=True) + matches = gb.glob(str(ROOT_DIR / "**/Products/benchmark"), recursive=True) matches = [x for x in matches if Path(x).is_dir()] if len(matches) == 0: raise IOError( "Couldn't locate build dir, looked everywhere for a 'Products'" - "directory but came up empty. Have you built the project?") + "directory but came up empty. Have you built the project?" + ) if len(matches) > 1: - print("Found multiple 'Products' directories, returning the first =" - f"'{matches[0]}'" - f"Other potential matches: {matches[1:]}") + print( + "Found multiple 'Products' directories, returning the first =" + f"'{matches[0]}'" + f"Other potential matches: {matches[1:]}" + ) return Path(matches[0]) def is_executable(path: Path) -> bool: mode = path.stat().st_mode executable = stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH - return (mode & executable) + return mode & executable def is_benchmark(path_exe: Path) -> bool: - cmd = shlex.split(f'{path_exe} --help') + cmd = shlex.split(f"{path_exe} --help") try: output = subprocess.check_output(cmd) - return output.decode('utf-8').strip().lower().startswith('benchmark') + return output.decode("utf-8").strip().lower().startswith("benchmark") except subprocess.CalledProcessError: pass except OSError: @@ -118,7 +117,7 @@ def list_benchmarks(products_dir: Path) -> list[str]: bench_names: the names of the executables """ - matches = products_dir.glob('*') + matches = products_dir.glob("*") matches = filter(is_executable, matches) matches = filter(is_benchmark, matches) return matches @@ -140,27 +139,27 @@ def find_skip_rows(results_file: Path) -> int: skiprows (int): the number of lines to skip when read_csv is called """ skiprows = 0 - search_str = 'name,iterations,' - with open(results_file, 'r') as f: + search_str = "name,iterations," + with open(results_file, "r") as f: content = f.read() if search_str not in content: return None lines = content.splitlines() - while (search_str not in lines[skiprows]): + while search_str not in lines[skiprows]: skiprows += 1 return skiprows -def run_bench(bench_exe: Path, - skip_run: bool = False, - stacked: bool = False, - prefix: typing.Optional[str] = None, - quiet: bool = False): - +def run_bench( + bench_exe: Path, + skip_run: bool = False, + stacked: bool = False, + prefix: typing.Optional[str] = None, + quiet: bool = False, +): if not bench_exe.exists(): - msg = (f"Couldn't find a benchmark executable named {bench_exe} " - f"in {products_dir}. ") + msg = f"Couldn't find a benchmark executable named {bench_exe} " f"in {products_dir}. 
" msg += format_list_benchmark(list_benchmarks(products_dir)) raise ValueError(msg) @@ -174,49 +173,48 @@ def run_bench(bench_exe: Path, results_file = results_dir / f"{bench_name}.csv" if not skip_run: - cmd = shlex.split( - f'{bench_exe} --benchmark_out_format=csv' - f' --benchmark_out="{results_file}"') + cmd = shlex.split(f"{bench_exe} --benchmark_out_format=csv" f' --benchmark_out="{results_file}"') try: subprocess.check_call(cmd) except subprocess.CalledProcessError: - print(f'ERROR RUNNING {bench_exe}') + print(f"ERROR RUNNING {bench_exe}") return # Prepend branch + Sha branch, sha = get_branch_and_sha() - with open(results_file, 'r') as original: + with open(results_file, "r") as original: data = original.read() - with open(results_file, 'w') as modified: - modified.write(f'Git Branch: {branch}\n') - modified.write(f'Git SHA: {sha}\n') + with open(results_file, "w") as modified: + modified.write(f"Git Branch: {branch}\n") + modified.write(f"Git SHA: {sha}\n") modified.write(data) - df = pd.read_csv(results_file, - skiprows=find_skip_rows(results_file), - index_col=0) + df = pd.read_csv(results_file, skiprows=find_skip_rows(results_file), index_col=0) - if df['error_occurred'].notnull().any(): + if df["error_occurred"].notnull().any(): print("Some benchmarks seem to have failed...") print(f"{df.index[df['error_occurred'].notnull()].values}") print("continuining anyways") # Skip bigO/RMS if any - df = df.loc[df['iterations'].notnull()] + df = df.loc[df["iterations"].notnull()] # Convert everything in ms - for col in ['real_time', 'cpu_time']: - df[col] = (df[[col, 'time_unit']].apply( - lambda row: pd.to_timedelta(arg=row[0], unit=row[1]), axis=1) - .dt.total_seconds() * 1e3) + for col in ["real_time", "cpu_time"]: + df[col] = ( + df[[col, "time_unit"]] + .apply(lambda row: pd.to_timedelta(arg=row[0], unit=row[1]), axis=1) + .dt.total_seconds() + * 1e3 + ) if stacked: - df['real_time'] = df['real_time'] - df['cpu_time'] + df["real_time"] = df["real_time"] - df["cpu_time"] fig, ax = plt.subplots(figsize=(16, 9)) - df[['cpu_time', 'real_time']].plot(kind='barh', stacked=stacked, ax=ax) - time_units = df['time_unit'].unique() + df[["cpu_time", "real_time"]].plot(kind="barh", stacked=stacked, ax=ax) + time_units = df["time_unit"].unique() ax.set_title(f"{bench_name} [ms]") if len(time_units) == 1: ax.set_ylabel(time_units[0]) @@ -231,36 +229,40 @@ def run_bench(bench_exe: Path, fig.savefig(results_dir / f"{bench_name}.png", dpi=150) -if __name__ == '__main__': +if __name__ == "__main__": arguments = docopt(__doc__) # print(arguments) # exit(0) root_dir = Path(__file__).parent.absolute() products_dir = None - if arguments['--products_dir']: - products_dir = Path(arguments['--products_dir']) + if arguments["--products_dir"]: + products_dir = Path(arguments["--products_dir"]) else: products_dir = infer_products_dir() if not products_dir.exists(): raise IOError(f"Products directory {products_dir} does not exist") - if arguments['--list']: + if arguments["--list"]: benches = list_benchmarks(products_dir) benches = map(lambda p: p.relative_to(products_dir), benches) print(format_list_benchmark(benches)) exit(0) benches = [] - if arguments['--all']: + if arguments["--all"]: benches = list_benchmarks(products_dir) else: - bench_name = arguments['BENCH_NAME'] + bench_name = arguments["BENCH_NAME"] bench_exe = products_dir / bench_name benches = [bench_exe] for bench_exe in benches: - run_bench(bench_exe=bench_exe, skip_run=arguments['--skip-run'], - stacked=arguments['--stacked'], 
prefix=arguments['--prefix'], - quiet=arguments['--quiet']) + run_bench( + bench_exe=bench_exe, + skip_run=arguments["--skip-run"], + stacked=arguments["--stacked"], + prefix=arguments["--prefix"], + quiet=arguments["--quiet"], + ) diff --git a/python/module/find_pypi_tag.py b/python/module/find_pypi_tag.py index c2e70ec5fc3..287fbc4ae50 100644 --- a/python/module/find_pypi_tag.py +++ b/python/module/find_pypi_tag.py @@ -3,45 +3,45 @@ # See also https://openstudio.net/license ######################################################################################################################## +import argparse import os import re +from typing import List + import requests from packaging import version -import argparse -from typing import List -REPO_ROOT = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../..') +REPO_ROOT = os.path.join(os.path.dirname(os.path.realpath(__file__)), "../..") def parse_pypi_version(pypi: bool = False): if pypi: - response = requests.get('https://pypi.org/pypi/openstudio/json') + response = requests.get("https://pypi.org/pypi/openstudio/json") else: - response = requests.get('https://test.pypi.org/pypi/openstudio/json') + response = requests.get("https://test.pypi.org/pypi/openstudio/json") response.raise_for_status() data = response.json() - releases = [version.parse(v) for v in data['releases'].keys()] + releases = [version.parse(v) for v in data["releases"].keys()] return releases def parse_cmake_version_info(): - - with open(os.path.join(REPO_ROOT, 'CMakeLists.txt'), 'r') as f: + with open(os.path.join(REPO_ROOT, "CMakeLists.txt"), "r") as f: content = f.read() no_comments_lines = [] for line in content.splitlines(): - line_cleaned = line.strip().split('#')[0] + line_cleaned = line.strip().split("#")[0] if line_cleaned: no_comments_lines.append(line_cleaned) content = "\n".join(no_comments_lines) - m = re.search(r'project\(OpenStudio VERSION (\d+\.\d+\.\d+)\)', content) - v = '' + m = re.search(r"project\(OpenStudio VERSION (\d+\.\d+\.\d+)\)", content) + v = "" if m: v = m.groups()[0] - m = re.search(r'set\(PROJECT_VERSION_PRERELEASE \"(.*?)\"\)', content) - pre_release = '' + m = re.search(r"set\(PROJECT_VERSION_PRERELEASE \"(.*?)\"\)", content) + pre_release = "" if m: pre_release = m.groups()[0].strip() if pre_release: @@ -50,9 +50,7 @@ def parse_cmake_version_info(): return version.Version(v) -def compute_appropriate_version(current_v: version.Version, - releases: List[version.Version], - current: bool = False): +def compute_appropriate_version(current_v: version.Version, releases: List[version.Version], current: bool = False): """ Args: ------ @@ -68,8 +66,7 @@ def compute_appropriate_version(current_v: version.Version, is_offical = True # Start by filtering out the stuff that does not match the base version - matched_releases = [v for v in releases - if v.base_version == current_v.base_version] + matched_releases = [v for v in releases if v.base_version == current_v.base_version] if not is_pre_release: # Filter out prereleases @@ -80,14 +77,12 @@ def compute_appropriate_version(current_v: version.Version, # If we're a pre-release, we only match prerelease with the same pre # identifier (eg: 'a', 'b', 'rc') pre_iden, pre_v = current_v.pre - matched_releases = [v for v in matched_releases - if v.is_prerelease and v.pre[0] == pre_iden] - if pre_iden == 'rc': + matched_releases = [v for v in matched_releases if v.is_prerelease and v.pre[0] == pre_iden] + if pre_iden == "rc": # Treat rc as official is_offical = True # I match on the pre_v too - 
matched_releases = [v for v in matched_releases - if v.pre[1] == pre_v] + matched_releases = [v for v in matched_releases if v.pre[1] == pre_v] new_v = current_v.base_version if matched_releases: @@ -98,7 +93,7 @@ def compute_appropriate_version(current_v: version.Version, post_v = max_v.post if not post_v: if not current: - new_v += 'post0' + new_v += "post0" elif current: new_v += f"post{post_v}" else: @@ -122,26 +117,19 @@ def compute_appropriate_version(current_v: version.Version, return new_v -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="Find the right version from pypi/testpypi") +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Find the right version from pypi/testpypi") - parser.add_argument("--pypi", default=False, - action='store_true', - help="Check pypi instead of testpypi") + parser.add_argument("--pypi", default=False, action="store_true", help="Check pypi instead of testpypi") # This is more for testing purposes... - parser.add_argument("--current", default=False, - action='store_true', - help="Check current version (no incrementing +1)") + parser.add_argument( + "--current", default=False, action="store_true", help="Check current version (no incrementing +1)" + ) args = parser.parse_args() current_v = parse_cmake_version_info() releases = parse_pypi_version(pypi=args.pypi) - new_v = compute_appropriate_version( - current_v=current_v, - releases=releases, - current=args.current - ) + new_v = compute_appropriate_version(current_v=current_v, releases=releases, current=args.current) print(new_v, end="") diff --git a/python/module/openstudio.py b/python/module/openstudio.py index 164c36ba110..80e8353ea44 100644 --- a/python/module/openstudio.py +++ b/python/module/openstudio.py @@ -4,16 +4,6 @@ ######################################################################################################################## if __package__ or "." in __name__: - - from . import openstudioairflow as airflow - from . import openstudioenergyplus as energyplus - from . import openstudioepjson as epjson - from . import openstudiogbxml as gbxml - from . import openstudiogltf as gltf - from . import openstudioisomodel as isomodel - from . import openstudiomeasure as measure - - from . import openstudiomodel as model # These are already included in the `model` namespace via Model.i # from . import openstudiomodelcore as modelcore # from . import openstudiomodelgenerators as modelgenerators @@ -27,7 +17,14 @@ # from . import openstudiomodelplantequipmentoperationscheme as modelplantequipmentoperationscheme # from . import openstudiomodelstraightcomponent as modelstraightcomponent # from . import openstudiomodelzonehvac as momodelzonehvac - + from . import openstudioairflow as airflow + from . import openstudioenergyplus as energyplus + from . import openstudioepjson as epjson + from . import openstudiogbxml as gbxml + from . import openstudiogltf as gltf + from . import openstudioisomodel as isomodel + from . import openstudiomeasure as measure + from . import openstudiomodel as model from . import openstudioosversion as osversion from . import openstudioradiance as radiance from . 
import openstudiosdd as sdd @@ -35,10 +32,11 @@ from .openstudioutilitiesbcl import * from .openstudioutilitiescore import * from .openstudioutilitiesdata import * + from .openstudioutilitiesfiletypes import * from .openstudioutilitiesgeometry import * from .openstudioutilitiesidd import * from .openstudioutilitiesidf import * - # from .openstudioutilitiesfiletypes import * + # from .openstudioutilitiesplot import * from .openstudioutilitiessql import * from .openstudioutilitiestime import * @@ -46,7 +44,6 @@ from .openstudioutilitiesxml import * else: - import openstudioairflow as airflow import openstudioenergyplus as energyplus import openstudioepjson as epjson @@ -54,8 +51,8 @@ import openstudiogltf as gltf import openstudioisomodel as isomodel import openstudiomeasure as measure - import openstudiomodel as model + # These are already included in the `model` namespace via Model.i # import openstudiomodelcore as modelcore # import openstudiomodelgenerators as modelgenerators @@ -69,7 +66,6 @@ # import openstudiomodelplantequipmentoperationscheme as modelplantequipmentoperationscheme # import openstudiomodelstraightcomponent as modelstraightcomponent # import openstudiomodelzonehvac as momodelzonehvac - import openstudioosversion as osversion import openstudioradiance as radiance import openstudiosdd as sdd @@ -77,10 +73,11 @@ from openstudioutilitiesbcl import * from openstudioutilitiescore import * from openstudioutilitiesdata import * + from openstudioutilitiesfiletypes import * from openstudioutilitiesgeometry import * from openstudioutilitiesidd import * from openstudioutilitiesidf import * - # from openstudioutilitiesfiletypes import * + # from openstudioutilitiesplot import * from openstudioutilitiessql import * from openstudioutilitiestime import * diff --git a/python/module/setup.py b/python/module/setup.py index a0fe018a7bb..ab9d18d0d6a 100644 --- a/python/module/setup.py +++ b/python/module/setup.py @@ -3,17 +3,17 @@ # See also https://openstudio.net/license ######################################################################################################################## -from setuptools import setup, find_packages -from setuptools.dist import Distribution -from setuptools.command.install import install import os import platform +from setuptools import find_packages, setup +from setuptools.command.install import install +from setuptools.dist import Distribution + try: from wheel.bdist_wheel import bdist_wheel as _bdist_wheel class bdist_wheel(_bdist_wheel): - def finalize_options(self): _bdist_wheel.finalize_options(self) # Mark us as not a pure python package, so that the wheel is marked @@ -23,9 +23,8 @@ def finalize_options(self): def get_tag(self): # Setting it up to build generic wheels. python, abi, plat = _bdist_wheel.get_tag(self) - print("Original: (python, abi, plat) = " - "({}, {}, {})".format(python, abi, plat)) - if platform.system() != 'Windows': + print("Original: (python, abi, plat) = " "({}, {}, {})".format(python, abi, plat)) + if platform.system() != "Windows": # There is no ABI incompatibility on Unix. # On windows, there is... 
since we need to actually link # to Python37.dll for eg @@ -39,28 +38,29 @@ def get_tag(self): # I tested that building on 3.8 will work for install on 3.9 # on both Ubuntu and macOS though - python = 'py3' - if abi != 'cp39': - abi = 'none' + python = "py3" + if abi != "cp39": + abi = "none" # Our bindings won't be compatible with all distributions, # BUT pypi will refuse the upload if we do not replace # "Binary wheel 'openstudio-3.1.0rc3-py3-none-linux_x86_64.whl' # has an unsupported platform tag 'linux_x86_64'" - if 'aarch64' in plat: - plat = plat.lower().replace('linux', 'manylinux2014') + if "aarch64" in plat: + plat = plat.lower().replace("linux", "manylinux2014") else: - plat = plat.lower().replace('linux', 'manylinux1') - plat = plat.lower().replace('darwin_x86_64', 'macosx_10_6_intel') - if plat[:3] == 'mac': - if 'arm64' in plat: - plat = 'macosx_11_0_arm64' + plat = plat.lower().replace("linux", "manylinux1") + plat = plat.lower().replace("darwin_x86_64", "macosx_10_6_intel") + if plat[:3] == "mac": + if "arm64" in plat: + plat = "macosx_11_0_arm64" else: # We don't use a fat binary ('intel' = both i386 and x86_64) # but we set the platform to old one in the hope that it'll # work for all - plat = 'macosx_10_9_x86_64' + plat = "macosx_10_9_x86_64" return python, abi, plat + except ImportError: bdist_wheel = None @@ -79,26 +79,25 @@ def finalize_options(self): self.install_lib = self.install_platlib -with open(os.path.join('@PROJECT_SOURCE_DIR@', 'README.md'), - encoding='utf-8') as f: +with open(os.path.join("@PROJECT_SOURCE_DIR@", "README.md"), encoding="utf-8") as f: long_description = f.read() dev_release = "a2" setup( - name='openstudio', + name="openstudio", # eg 3.1.0rc3 # version='@OpenStudio_VERSION@'.replace('-', '') + dev_release, - version='@PYPI_VERSION@', - description='OpenStudio python bindings.', + version="@PYPI_VERSION@", + description="OpenStudio python bindings.", long_description=long_description, - long_description_content_type='text/markdown', - url='https://github.com/NREL/OpenStudio', - author='Alliance for Sustainable Energy, LLC, and other contributors', - author_email='openstudio@nrel.gov', - maintainer='Julien Marrec', - maintainer_email='contact@effibem.com', - license='BSD3', + long_description_content_type="text/markdown", + url="https://github.com/NREL/OpenStudio", + author="Alliance for Sustainable Energy, LLC, and other contributors", + author_email="openstudio@nrel.gov", + maintainer="Julien Marrec", + maintainer_email="contact@effibem.com", + license="BSD3", project_urls={ "Bug Tracker": "https://github.com/NREL/OpenStudio/issues", "Documentation": "https://openstudio.net/", @@ -107,31 +106,30 @@ def finalize_options(self): platforms="any", python_requires=">=3.7.1", classifiers=[ - 'Development Status :: 4 - Beta', - 'Environment :: Console', - 'Intended Audience :: Science/Research', - 'Topic :: Scientific/Engineering', - 'License :: OSI Approved :: BSD License', + "Development Status :: 4 - Beta", + "Environment :: Console", + "Intended Audience :: Science/Research", + "Topic :: Scientific/Engineering", + "License :: OSI Approved :: BSD License", "Programming Language :: Python", "Programming Language :: Python :: 3", - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", ], - keywords='openstudio py3 setuptools development', - 
packages=find_packages(include=['openstudio']), + keywords="openstudio py3 setuptools development", + packages=find_packages(include=["openstudio"]), package_data={ "openstudio": [ "*.so", - "*.pyd", "*.dll", - "*.dylib", "*.bundle", + "*.pyd", + "*.dll", + "*.dylib", + "*.bundle", # "*.lib", "*.exp", ], }, install_requires=[], - cmdclass={ - 'bdist_wheel': bdist_wheel, - 'install': InstallPlatlib - }, + cmdclass={"bdist_wheel": bdist_wheel, "install": InstallPlatlib}, distclass=BinaryDistribution, ) diff --git a/python/module/test_find_pypi_tag.py b/python/module/test_find_pypi_tag.py index 34b42f8461f..1a4732bd30f 100644 --- a/python/module/test_find_pypi_tag.py +++ b/python/module/test_find_pypi_tag.py @@ -3,22 +3,22 @@ # See also https://openstudio.net/license ######################################################################################################################## -import unittest import sys +import unittest + from packaging import version -sys.path.append('..') + +sys.path.append("..") from find_pypi_tag import compute_appropriate_version -class TestFindTestPyPiTag(unittest.TestCase): +class TestFindTestPyPiTag(unittest.TestCase): def compare(self, releases: list[str], v: str, expected_v: str): """ Test helper, to avoid repeating myself """ new_v = compute_appropriate_version( - current_v=version.parse(v), - releases=[version.parse(v) for v in releases], - current=False + current_v=version.parse(v), releases=[version.parse(v) for v in releases], current=False ) self.assertEqual(version.parse(expected_v), new_v) @@ -31,54 +31,39 @@ def test_when_not_exist_no_prereleasetag(self): def test_when_exist_with_prereleasetag(self): self.compare(releases=["3.2.1"], v="3.2.1-alpha", expected_v="3.2.1a0") - self.compare(releases=["3.2.1", "3.2.1a0"], - v="3.2.1-alpha", expected_v="3.2.1a1") + self.compare(releases=["3.2.1", "3.2.1a0"], v="3.2.1-alpha", expected_v="3.2.1a1") - self.compare(releases=["3.2.1", "3.2.1a0", "3.2.1a1"], - v="3.2.1-alpha", expected_v="3.2.1a2") + self.compare(releases=["3.2.1", "3.2.1a0", "3.2.1a1"], v="3.2.1-alpha", expected_v="3.2.1a2") # Different prerelease tag - self.compare(releases=["3.2.1", "3.2.1a0", "3.2.1a1"], - v="3.2.1-beta", expected_v="3.2.1b0") + self.compare(releases=["3.2.1", "3.2.1a0", "3.2.1a1"], v="3.2.1-beta", expected_v="3.2.1b0") - self.compare(releases=["3.2.1", "3.2.1a0", "3.2.1a1", "3.2.1b0"], - v="3.2.1-beta", expected_v="3.2.1b1") + self.compare(releases=["3.2.1", "3.2.1a0", "3.2.1a1", "3.2.1b0"], v="3.2.1-beta", expected_v="3.2.1b1") - self.compare(releases=["3.2.1", "3.2.1a20", "3.2.1b0", "3.2.1b10"], - v="3.2.1-beta", expected_v="3.2.1b11") + self.compare(releases=["3.2.1", "3.2.1a20", "3.2.1b0", "3.2.1b10"], v="3.2.1-beta", expected_v="3.2.1b11") def test_with_rctag(self): - - self.compare(releases=["3.2.0", "3.2.1a1"], - v='3.2.1-rc1', expected_v="3.2.1rc1") + self.compare(releases=["3.2.0", "3.2.1a1"], v="3.2.1-rc1", expected_v="3.2.1rc1") # Here I actually expect a post release to be appended - self.compare(releases=["3.2.0", "3.2.1a1", "3.2.1rc1"], - v='3.2.1-rc1', expected_v="3.2.1rc1.post0") + self.compare(releases=["3.2.0", "3.2.1a1", "3.2.1rc1"], v="3.2.1-rc1", expected_v="3.2.1rc1.post0") - self.compare(releases=["3.2.0", "3.2.1a1", "3.2.1rc1"], - v='3.2.1-rc2', expected_v="3.2.1rc2") + self.compare(releases=["3.2.0", "3.2.1a1", "3.2.1rc1"], v="3.2.1-rc2", expected_v="3.2.1rc2") # Now we release official - self.compare(releases=["3.2.0", "3.2.1a1", "3.2.1rc1"], - v='3.2.1', expected_v="3.2.1") + 
self.compare(releases=["3.2.0", "3.2.1a1", "3.2.1rc1"], v="3.2.1", expected_v="3.2.1") def test_when_exist_no_prereleasetag(self): - - self.compare(releases=["3.2.1", "3.2.1a1"], - v='3.2.1', expected_v="3.2.1post0") + self.compare(releases=["3.2.1", "3.2.1a1"], v="3.2.1", expected_v="3.2.1post0") class TestFindTestPyPiTagCurrent(unittest.TestCase): - def compare(self, releases: list[str], v: str, expected_v: str): """ Test helper, to avoid repeating myself """ new_v = compute_appropriate_version( - current_v=version.parse(v), - releases=[version.parse(v) for v in releases], - current=True + current_v=version.parse(v), releases=[version.parse(v) for v in releases], current=True ) self.assertEqual(version.parse(expected_v), new_v) @@ -91,39 +76,27 @@ def test_when_not_exist_no_prereleasetag(self): def test_when_exist_with_prereleasetag(self): self.compare(releases=["3.2.1"], v="3.2.1-alpha", expected_v="3.2.1a0") - self.compare(releases=["3.2.1", "3.2.1a0"], - v="3.2.1-alpha", expected_v="3.2.1a0") + self.compare(releases=["3.2.1", "3.2.1a0"], v="3.2.1-alpha", expected_v="3.2.1a0") - self.compare(releases=["3.2.1", "3.2.1a0", "3.2.1a1"], - v="3.2.1-alpha", expected_v="3.2.1a1") + self.compare(releases=["3.2.1", "3.2.1a0", "3.2.1a1"], v="3.2.1-alpha", expected_v="3.2.1a1") # Different prerelease tag - self.compare(releases=["3.2.1", "3.2.1a0", "3.2.1a1"], - v="3.2.1-beta", expected_v="3.2.1b0") + self.compare(releases=["3.2.1", "3.2.1a0", "3.2.1a1"], v="3.2.1-beta", expected_v="3.2.1b0") - self.compare(releases=["3.2.1", "3.2.1a0", "3.2.1a1", "3.2.1b0"], - v="3.2.1-beta", expected_v="3.2.1b0") + self.compare(releases=["3.2.1", "3.2.1a0", "3.2.1a1", "3.2.1b0"], v="3.2.1-beta", expected_v="3.2.1b0") - self.compare(releases=["3.2.1", "3.2.1a20", "3.2.1b0", "3.2.1b10"], - v="3.2.1-beta", expected_v="3.2.1b10") + self.compare(releases=["3.2.1", "3.2.1a20", "3.2.1b0", "3.2.1b10"], v="3.2.1-beta", expected_v="3.2.1b10") def test_with_rctag(self): - - self.compare(releases=["3.2.0", "3.2.1a1"], - v='3.2.1-rc1', expected_v="3.2.1rc1") + self.compare(releases=["3.2.0", "3.2.1a1"], v="3.2.1-rc1", expected_v="3.2.1rc1") # Here I actually expect a post release to be appended - self.compare(releases=["3.2.0", "3.2.1a1", "3.2.1rc1"], - v='3.2.1-rc1', expected_v="3.2.1rc1") + self.compare(releases=["3.2.0", "3.2.1a1", "3.2.1rc1"], v="3.2.1-rc1", expected_v="3.2.1rc1") - self.compare(releases=["3.2.0", "3.2.1a1", "3.2.1rc1"], - v='3.2.1-rc2', expected_v="3.2.1rc2") + self.compare(releases=["3.2.0", "3.2.1a1", "3.2.1rc1"], v="3.2.1-rc2", expected_v="3.2.1rc2") # Now we release official - self.compare(releases=["3.2.0", "3.2.1a1", "3.2.1rc1"], - v='3.2.1', expected_v="3.2.1") + self.compare(releases=["3.2.0", "3.2.1a1", "3.2.1rc1"], v="3.2.1", expected_v="3.2.1") def test_when_exist_no_prereleasetag(self): - - self.compare(releases=["3.2.1", "3.2.1a1"], - v='3.2.1', expected_v="3.2.1") + self.compare(releases=["3.2.1", "3.2.1a1"], v="3.2.1", expected_v="3.2.1") diff --git a/python/testpath.py b/python/testpath.py index 11e6b4b8766..81a06f6ea86 100644 --- a/python/testpath.py +++ b/python/testpath.py @@ -4,13 +4,12 @@ ######################################################################################################################## import openstudio + # import openstudio_dynamic as openstudio # workspace test workspace = openstudio.Workspace() -zone = workspace.addObject( - openstudio.IdfObject(openstudio.IddObjectType("Zone")) -) +zone = 
workspace.addObject(openstudio.IdfObject(openstudio.IddObjectType("Zone"))) zone = zone.get() zone.setName("New Zone") diff --git a/resources/Examples/compact_osw/measures/IncreaseRoofRValuePython/measure.py b/resources/Examples/compact_osw/measures/IncreaseRoofRValuePython/measure.py index 81edef77520..94b4d5542c5 100644 --- a/resources/Examples/compact_osw/measures/IncreaseRoofRValuePython/measure.py +++ b/resources/Examples/compact_osw/measures/IncreaseRoofRValuePython/measure.py @@ -1,9 +1,9 @@ -import openstudio import typing +import openstudio -class IncreaseInsulationRValueForRoofsByPercentagePython(openstudio.measure.ModelMeasure): +class IncreaseInsulationRValueForRoofsByPercentagePython(openstudio.measure.ModelMeasure): def name(self): """ Return the human readable name. @@ -37,24 +37,25 @@ def arguments(self, model: typing.Optional[openstudio.model.Model] = None): return args - def run(self, - model: openstudio.model.Model, - runner: openstudio.measure.OSRunner, - user_arguments: openstudio.measure.OSArgumentMap): + def run( + self, + model: openstudio.model.Model, + runner: openstudio.measure.OSRunner, + user_arguments: openstudio.measure.OSArgumentMap, + ): """ define what happens when the measure is run """ super().run(model, runner, user_arguments) # Do **NOT** remove this line - if not(runner.validateUserArguments(self.arguments(model), - user_arguments)): + if not (runner.validateUserArguments(self.arguments(model), user_arguments)): return False r_value = runner.getDoubleArgumentValue("r_value", user_arguments) # set limit for minimum insulation. This is used to limit input and for inferring insulation layer in construction. min_expected_r_value_ip = 1 # ip units - min_expected_r_value_si = openstudio.convert(min_expected_r_value_ip, "ft^2*h*R/Btu","m^2*K/W").get() + min_expected_r_value_si = openstudio.convert(min_expected_r_value_ip, "ft^2*h*R/Btu", "m^2*K/W").get() # check the R-value for reasonableness if r_value < -100: @@ -89,17 +90,19 @@ def run(self, for exterior_surface_construction in exterior_surface_constructions: # unit conversion of roof insulation from SI units (M^2*K/W) to IP units (ft^2*h*R/Btu) initial_conductance_ip = openstudio.convert( - 1.0 / exterior_surface_construction.thermalConductance().get(), - "m^2*K/W", "ft^2*h*R/Btu").get() - initial_string.append( - f"{exterior_surface_construction.nameString()} (R-{initial_conductance_ip:.1f})" - ) + 1.0 / exterior_surface_construction.thermalConductance().get(), "m^2*K/W", "ft^2*h*R/Btu" + ).get() + initial_string.append(f"{exterior_surface_construction.nameString()} (R-{initial_conductance_ip:.1f})") - runner.registerInitialCondition(f"The building had {len(initial_string)} roof constructions: {', '.join(sorted(initial_string))}.") + runner.registerInitialCondition( + f"The building had {len(initial_string)} roof constructions: {', '.join(sorted(initial_string))}." 
+ ) # hashes to track constructions and materials made by the measure, to avoid duplicates constructions_hash_old_new = {} - constructions_hash_new_old = {} # used to get netArea of new construction and then cost objects of construction it replaced + constructions_hash_new_old = ( + {} + ) # used to get netArea of new construction and then cost objects of construction it replaced materials_hash = {} # array and counter for new constructions that are made, used for reporting final condition @@ -123,7 +126,9 @@ def run(self, thermal_resistance_values.append(construction_layer_r_value) if max(thermal_resistance_values) <= min_expected_r_value_si: - runner.registerWarning(f"Construction '{exterior_surface_construction.nameString()}' does not appear to have an insulation layer and was not altered.") + runner.registerWarning( + f"Construction '{exterior_surface_construction.nameString()}' does not appear to have an insulation layer and was not altered." + ) continue # clone the construction @@ -136,12 +141,14 @@ def run(self, # push to hashes constructions_hash_old_new[exterior_surface_construction.nameString()] = final_construction - constructions_hash_new_old[final_construction.nameString()] = exterior_surface_construction #push the object to hash key vs. name + constructions_hash_new_old[ + final_construction.nameString() + ] = exterior_surface_construction # push the object to hash key vs. name # find already cloned insulation material and link to construction target_material = max_thermal_resistance_material found_material = False - for orig,new in materials_hash.items(): + for orig, new in materials_hash.items(): if target_material.nameString() == orig: new_material = new materials_hash[max_thermal_resistance_material.nameString()] = new_material @@ -158,7 +165,9 @@ def run(self, materials_hash[max_thermal_resistance_material.nameString()] = new_material final_construction.eraseLayer(max_thermal_resistance_material_index) final_construction.insertLayer(max_thermal_resistance_material_index, new_material) - runner.registerInfo(f"For construction'{final_construction.nameString()}', material '{new_material.nameString()}' was altered.") + runner.registerInfo( + f"For construction'{final_construction.nameString()}', material '{new_material.nameString()}' was altered." 
+ ) # edit insulation material new_material_matt = new_material.to_Material() @@ -170,13 +179,16 @@ def run(self, new_material_massless = new_material.to_MasslessOpaqueMaterial() if new_material_massless.is_initialized(): starting_thermal_resistance = new_material_massless.get().thermalResistance() - final_thermal_resistance = new_material_massless.get().setThermalResistance(starting_thermal_resistance) + final_thermal_resistance = new_material_massless.get().setThermalResistance( + starting_thermal_resistance + ) new_material_airgap = new_material.to_AirGap() if new_material_airgap.is_initialized(): starting_thermal_resistance = new_material_airgap.get().thermalResistance() - final_thermal_resistance = new_material_airgap.get().setThermalResistance(starting_thermal_resistance) - + final_thermal_resistance = new_material_airgap.get().setThermalResistance( + starting_thermal_resistance + ) # loop through construction sets used in the model default_construction_sets = model.getDefaultConstructionSets() @@ -194,7 +206,9 @@ def run(self, new_default_construction_set.setName(f"{default_construction_set.nameString()} adj roof insulation") # create new surface set and link to construction set - new_default_surface_const_set = default_surface_const_set.get().clone(model).to_DefaultSurfaceConstructions().get() + new_default_surface_const_set = ( + default_surface_const_set.get().clone(model).to_DefaultSurfaceConstructions().get() + ) new_default_surface_const_set.setName(f"{default_surface_const_set.get().nameString()} adj roof insulation") new_default_construction_set.setDefaultExteriorSurfaceConstructions(new_default_surface_const_set) @@ -209,9 +223,12 @@ def run(self, new_default_surface_const_set.setRoofCeilingConstruction(final_construction) found_const_flag = True - if not found_const_flag: # this should never happen but is just an extra test in case something goes wrong with the measure code - runner.registerWarning(f"Measure couldn't find the construction named '{target_const}' in the exterior surface hash.") - + if ( + not found_const_flag + ): # this should never happen but is just an extra test in case something goes wrong with the measure code + runner.registerWarning( + f"Measure couldn't find the construction named '{target_const}' in the exterior surface hash." + ) # swap all uses of the old construction set for the new construction_set_sources = default_construction_set.sources() @@ -251,17 +268,18 @@ def run(self, final_construction = new # report strings for final condition - final_string = [] # not all exterior roof constructions, but only new ones made. If roof didn't have insulation and was not altered we don't want to show it + final_string = ( + [] + ) # not all exterior roof constructions, but only new ones made. 
If roof didn't have insulation and was not altered we don't want to show it affected_area_si = 0 for final_construction in final_constructions_array: - # unit conversion of roof insulation from SI units (M^2*K/W) to IP units (ft^2*h*R/Btu) - final_conductance_ip = openstudio.convert(1.0 / final_construction.thermalConductance().get(), - "m^2*K/W", "ft^2*h*R/Btu").get() + final_conductance_ip = openstudio.convert( + 1.0 / final_construction.thermalConductance().get(), "m^2*K/W", "ft^2*h*R/Btu" + ).get() final_string.append(f"{final_construction.nameString()} (R-{final_conductance_ip:.1f})") affected_area_si = affected_area_si + final_construction.getNetArea() - # add not applicable test if there were exterior roof constructions but none of them were altered (already enough insulation or doesn't look like insulated wall) if affected_area_si == 0: runner.registerAsNotApplicable("No roofs were altered.") @@ -275,7 +293,8 @@ def run(self, runner.registerFinalCondition( f"The existing insulation for roofs was increased by {r_value}%. " f"This was applied to {affected_area_ip:,.0f} (ft^2) across " - f"{len(final_string)} roof constructions: {', '.join(sorted(final_string))}.") + f"{len(final_string)} roof constructions: {', '.join(sorted(final_string))}." + ) return True diff --git a/resources/Examples/compact_osw/measures/IncreaseRoofRValuePython/tests/test_measure.py b/resources/Examples/compact_osw/measures/IncreaseRoofRValuePython/tests/test_measure.py index 73430deeca9..6f852ccee33 100644 --- a/resources/Examples/compact_osw/measures/IncreaseRoofRValuePython/tests/test_measure.py +++ b/resources/Examples/compact_osw/measures/IncreaseRoofRValuePython/tests/test_measure.py @@ -1,11 +1,12 @@ -import pytest -import openstudio import pathlib + +import openstudio +import pytest + from measure import IncreaseInsulationRValueForRoofsByPercentagePython class TestIncreaseInsulationRValueForRoofsByPercentagePython: - def test_number_of_arguments_and_argument_names(self): """ Test that the arguments are what we expect @@ -19,7 +20,7 @@ def test_number_of_arguments_and_argument_names(self): # get arguments and test that they are what we are expecting arguments = measure.arguments(model) assert arguments.size() == 1 - assert arguments[0].name() == 'r_value' + assert arguments[0].name() == "r_value" def test_optional_model_for_arguments(self): """ @@ -31,7 +32,7 @@ def test_optional_model_for_arguments(self): # Ruby allows **not** passing model to the method, so test that arguments = measure.arguments() assert arguments.size() == 1 - assert arguments[0].name() == 'r_value' + assert arguments[0].name() == "r_value" def test_good_argument_values(self): """ @@ -43,8 +44,7 @@ def test_good_argument_values(self): measure = IncreaseInsulationRValueForRoofsByPercentagePython() # create runner with empty OSW - # osw = openstudio.WorkflowJSON() # TODO: FIXME - osw = openstudio.openstudioutilitiesfiletypes.WorkflowJSON() + osw = openstudio.WorkflowJSON() runner = openstudio.measure.OSRunner(osw) # load the test model @@ -65,14 +65,14 @@ def test_good_argument_values(self): # If the argument has a default that you want to use, # you don't need it in the dict args_dict = {} - args_dict['r_value'] = 35.0 + args_dict["r_value"] = 35.0 # using defaults values from measure.py for other arguments # populate argument with specified hash value if specified for arg in arguments: temp_arg_var = arg.clone() if arg.name() in args_dict: - assert(temp_arg_var.setValue(args_dict[arg.name()])) + assert 
temp_arg_var.setValue(args_dict[arg.name()]) argument_map[arg.name()] = temp_arg_var print("run measure:") @@ -85,19 +85,21 @@ def test_good_argument_values(self): print(f"results: {result}") # assert that it ran correctly - assert result.value().valueName() == 'Success' + assert result.value().valueName() == "Success" assert len(result.info()) == 1 assert len(result.warnings()) == 0 assert result.finalCondition().get().logMessage() == ( "The existing insulation for roofs was increased by 35.0%. This was applied to 4,306 (ft^2) " - "across 1 roof constructions: Exterior Roof adj roof insulation (R-4.6).") + "across 1 roof constructions: Exterior Roof adj roof insulation (R-4.6)." + ) assert result.info()[0].logMessage() == ( "For construction'Exterior Roof adj roof insulation', " - "material 'F16 Acoustic tile_R-value 35.0% increase' was altered.") + "material 'F16 Acoustic tile_R-value 35.0% increase' was altered." + ) # save the model to test output directory output_file_path = openstudio.toPath( - str(pathlib.Path(__file__).parent.absolute() - / "output" / "test_output.osm")) + str(pathlib.Path(__file__).parent.absolute() / "output" / "test_output.osm") + ) model.save(output_file_path, True) diff --git a/resources/Examples/compact_osw/measures/PythonEnergyplusMeasureDXF/measure.py b/resources/Examples/compact_osw/measures/PythonEnergyplusMeasureDXF/measure.py index f64952b9c4d..b33266a989b 100644 --- a/resources/Examples/compact_osw/measures/PythonEnergyplusMeasureDXF/measure.py +++ b/resources/Examples/compact_osw/measures/PythonEnergyplusMeasureDXF/measure.py @@ -41,7 +41,7 @@ def arguments(self, workspace: openstudio.Workspace): """ args = openstudio.measure.OSArgumentVector() - dxf_type = openstudio.measure.OSArgument.makeChoiceArgument('dxf_type', ["DXF", "DXF:WireFrame"], True) + dxf_type = openstudio.measure.OSArgument.makeChoiceArgument("dxf_type", ["DXF", "DXF:WireFrame"], True) dxf_type.setDisplayName("DXF Type") dxf_type.setDescription("DXF Type for the eplusout.dxf") @@ -61,7 +61,7 @@ def run( if not (runner.validateUserArguments(self.arguments(workspace), user_arguments)): return False - dxf_type = runner.getStringArgumentValue('dxf_type', user_arguments) + dxf_type = runner.getStringArgumentValue("dxf_type", user_arguments) idfObject: openstudio.IdfObject = openstudio.IdfObject(openstudio.IddObjectType("Output:Surfaces:Drawing")) result = idfObject.setString(0, dxf_type) @@ -71,7 +71,9 @@ def run( if dxf_type == "DXF": result = idfObject.setString(1, "Triangulate3DFace") if not result: - runner.registerError("Something went wrong when trying to set the Report Specifications 1 to Triangulate3DFace") + runner.registerError( + "Something went wrong when trying to set the Report Specifications 1 to Triangulate3DFace" + ) return False wsObject_: openstudio.OptionalWorkspaceObject = workspace.addObject(idfObject) if not wsObject_.is_initialized(): diff --git a/resources/model/PythonPluginThermochromicWindow.py b/resources/model/PythonPluginThermochromicWindow.py index ddcd00b1230..891415c564b 100644 --- a/resources/model/PythonPluginThermochromicWindow.py +++ b/resources/model/PythonPluginThermochromicWindow.py @@ -61,7 +61,6 @@ class MySupportClassThatDoesntDeriveEnergyPlusPlugin: class ZN_1_wall_south_Window_1_Control(EnergyPlusPlugin): - def __init__(self): # init parent class super().__init__() @@ -88,21 +87,17 @@ def __init__(self): self.TCwindow_85_handle = None def on_begin_timestep_before_predictor(self, state) -> int: - # api is ready to execute if 
self.api.exchange.api_data_fully_ready(state): - # get variable handles if needed if self.need_to_get_handles: self.Win1_Tout_handle = self.api.exchange.get_variable_handle( - state, - "Surface Outside Face Temperature", - "Perimeter_ZN_1_wall_south_Window_1") - - self.Win1_Construct_handle = self.api.exchange.get_actuator_handle(state, - "Surface", - "Construction State", - "Perimeter_ZN_1_wall_south_Window_1") + state, "Surface Outside Face Temperature", "Perimeter_ZN_1_wall_south_Window_1" + ) + + self.Win1_Construct_handle = self.api.exchange.get_actuator_handle( + state, "Surface", "Construction State", "Perimeter_ZN_1_wall_south_Window_1" + ) self.TCwindow_25_handle = self.api.exchange.get_construction_handle(state, "TCwindow_25") diff --git a/src/cli/main.cpp b/src/cli/main.cpp index 9c1cfee5d55..1d4a6d89839 100644 --- a/src/cli/main.cpp +++ b/src/cli/main.cpp @@ -100,19 +100,6 @@ int main(int argc, char* argv[]) { auto* const experimentalApp = app.add_subcommand("labs"); - auto* const verboseOpt = experimentalApp->add_flag_function( - "--verbose", - [](auto count) { - if (count == 1) { - fmt::print("Setting Log Level to Debug ({})\n", LogLevel::Debug); - openstudio::Logger::instance().standardOutLogger().setLogLevel(LogLevel::Debug); - } else if (count == 2) { - fmt::print("Setting Log Level to Trace ({})\n", LogLevel::Trace); - openstudio::Logger::instance().standardOutLogger().setLogLevel(LogLevel::Trace); - } - }, - "Print the full log to STDOUT - sets verbosity to Debug if given once and Trace if given twice."); - // specify string->value mappings const std::map logLevelMap{ {"Trace", LogLevel::Trace}, {"Debug", LogLevel::Debug}, {"Info", LogLevel::Info}, @@ -127,8 +114,7 @@ int main(int argc, char* argv[]) { fmt::print("Setting Log Level to {} ({})\n", logLevelStrs[static_cast(level) - static_cast(LogLevel::Trace)], level); openstudio::Logger::instance().standardOutLogger().setLogLevel(level); }, - "LogLevel settings: One of {Trace, Debug, Info, Warn, Error, Fatal} [Default: Warn] Excludes: --verbose") - ->excludes(verboseOpt) + "LogLevel settings: One of {Trace, Debug, Info, Warn, Error, Fatal} [Default: Warn]") ->option_text("LEVEL") ->transform(CLI::CheckedTransformer(logLevelMap, CLI::ignore_case)); diff --git a/src/cli/test/conftest.py b/src/cli/test/conftest.py index 9ec8426cea3..9962ee4d403 100644 --- a/src/cli/test/conftest.py +++ b/src/cli/test/conftest.py @@ -1,6 +1,8 @@ -import pytest from pathlib import Path +import pytest + + def validate_file(arg): if (filepath := Path(arg)).is_file(): return filepath @@ -9,9 +11,8 @@ def validate_file(arg): def pytest_addoption(parser): - parser.addoption( - "--os-cli-path", type=validate_file, help="Path to the OS CLI" #, required=True - ) + parser.addoption("--os-cli-path", type=validate_file, help="Path to the OS CLI") # , required=True + @pytest.fixture def osclipath(request): diff --git a/src/cli/test/logger_test.py b/src/cli/test/logger_test.py index e3bf307f519..4c8ddbc5604 100644 --- a/src/cli/test/logger_test.py +++ b/src/cli/test/logger_test.py @@ -1,12 +1,13 @@ import logging -import openstudio import sys +import openstudio + # Root logger logger = logging.getLogger() logger.setLevel(logging.WARNING) -formatter = logging.Formatter('LOGGER - %(message)s') +formatter = logging.Formatter("LOGGER - %(message)s") handler = logging.StreamHandler(sys.stdout) handler.setLevel(logging.WARNING) handler.setFormatter(formatter) diff --git a/src/cli/test/run_test_logger.py b/src/cli/test/run_test_logger.py index 
141b02259dc..21d302cdd94 100644 --- a/src/cli/test/run_test_logger.py +++ b/src/cli/test/run_test_logger.py @@ -1,6 +1,6 @@ import argparse -from pathlib import Path import subprocess +from pathlib import Path def validate_file(arg): @@ -9,34 +9,30 @@ def validate_file(arg): else: raise FileNotFoundError(arg) -if __name__ == "__main__": +if __name__ == "__main__": parser = argparse.ArgumentParser(description="Run a logger test.") - parser.add_argument( - "os_cli_path", type=validate_file, help="Path to the OS CLI" - ) - parser.add_argument('--labs', action='store_true') - parser.add_argument( - "logger_file", type=validate_file, help="Path to the logger test file to run" - ) + parser.add_argument("os_cli_path", type=validate_file, help="Path to the OS CLI") + parser.add_argument("--labs", action="store_true") + parser.add_argument("logger_file", type=validate_file, help="Path to the logger test file to run") args = parser.parse_args() print(args) command = [str(args.os_cli_path)] if args.labs: command.append("labs") - if (ext := args.logger_file.suffix) == '.py': + if (ext := args.logger_file.suffix) == ".py": if not args.labs: raise ValueError("When supplying a .py file, you must pass --labs") command.append("execute_python_script") - elif ext == '.rb': + elif ext == ".rb": command.append("execute_ruby_script") else: raise ValueError(f"logger_file should have a .rb or .py extension, not {ext}") command.append(str(args.logger_file)) print(f"Running: {' '.join(command)}") - r = subprocess.check_output(command, encoding='utf-8') + r = subprocess.check_output(command, encoding="utf-8") lines = r.splitlines() # Pop the labs box @@ -45,7 +41,7 @@ def validate_file(arg): if "The `labs` command is experimental - Do not use in production" in line: i_warn = i break - lines = lines[:(i_warn - 1)] + lines[(i_warn + 2):] + lines = lines[: (i_warn - 1)] + lines[(i_warn + 2) :] for i, line in enumerate(lines): print(i, line) @@ -55,4 +51,4 @@ def validate_file(arg): # Ruby when called this way has the openstudio logger messages first instead of last, so just sort lines.sort() - assert lines == ['LOGGER - STDOUT Error', 'LOGGER - STDOUT Warn', '[test] <1> Error'] + assert lines == ["LOGGER - STDOUT Error", "LOGGER - STDOUT Warn", "[test] <1> Error"] diff --git a/src/model/Resources/sandia/prepare_sandia_json.py b/src/model/Resources/sandia/prepare_sandia_json.py index 9fbb5c5ab46..da01d09f3fb 100644 --- a/src/model/Resources/sandia/prepare_sandia_json.py +++ b/src/model/Resources/sandia/prepare_sandia_json.py @@ -1,23 +1,30 @@ import json + import pandas as pd # need to do some funky business since the second header has a truncated number # of commas -df = pd.read_csv('https://raw.githubusercontent.com/NREL/SAM/develop/deploy/libraries/Sandia%20Modules.csv', index_col=0, header=None).T.fillna('').set_index(['Name','Units', '[0]']).T -df = df.apply(pd.to_numeric, errors='ignore') -df = df[df.select_dtypes('number').notnull().all(axis=1)] +df = ( + pd.read_csv( + "https://raw.githubusercontent.com/NREL/SAM/develop/deploy/libraries/Sandia%20Modules.csv", + index_col=0, + header=None, + ) + .T.fillna("") + .set_index(["Name", "Units", "[0]"]) + .T +) +df = df.apply(pd.to_numeric, errors="ignore") +df = df[df.select_dtypes("number").notnull().all(axis=1)] df_cols = pd.DataFrame(df.columns.tolist(), columns=df.columns.names) -col_description = df_cols.set_index('[0]').to_dict(orient='index') +col_description = df_cols.set_index("[0]").to_dict(orient="index") -df.index = df.index.str.replace(r'[ ', '[', 
regex=False) +df.index = df.index.str.replace(r"[ ", "[", regex=False) df.columns = df.columns.droplevel([0, 1]) -data = df.to_dict(orient='index') +data = df.to_dict(orient="index") -json_data = { - 'column_description': col_description, - 'data': data -} +json_data = {"column_description": col_description, "data": data} -with open('Sandia_Modules.json', 'w') as f: +with open("Sandia_Modules.json", "w") as f: json.dump(json_data, f, indent=2) diff --git a/src/utilities/Untitled.ipynb b/src/utilities/Untitled.ipynb deleted file mode 100644 index 846c4cd9bbc..00000000000 --- a/src/utilities/Untitled.ipynb +++ /dev/null @@ -1,108 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 46, - "metadata": {}, - "outputs": [], - "source": [ - "files = !grep -Rl OPENSTUDIO_ENUM" - ] - }, - { - "cell_type": "code", - "execution_count": 47, - "metadata": {}, - "outputs": [], - "source": [ - "files = [f for f in files if f.endswith('.hpp') or f.endswith('.cpp')]" - ] - }, - { - "cell_type": "code", - "execution_count": 48, - "metadata": {}, - "outputs": [], - "source": [ - "done = !grep -Rl clang-format" - ] - }, - { - "cell_type": "code", - "execution_count": 49, - "metadata": {}, - "outputs": [], - "source": [ - "done = [f for f in done if f.endswith('.hpp') or f.endswith('.cpp')]" - ] - }, - { - "cell_type": "code", - "execution_count": 50, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'core/Enum.hpp',\n", - " 'core/EnumBase.hpp',\n", - " 'core/test/EnumHelpers_GTest.cpp',\n", - " 'core/test/Enum_GTest.cpp',\n", - " 'idd/IddEnums.hpp',\n", - " 'idd/IddObject.hpp',\n", - " 'idd/IddObject_Impl.hpp',\n", - " 'sql/SqlFile_Impl.cpp'}" - ] - }, - "execution_count": 50, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "set(files) - set(done)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.0" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": false - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/src/utilities/bcl/templates/EnergyPlusMeasure/tests/test_energyplus_measure.py b/src/utilities/bcl/templates/EnergyPlusMeasure/tests/test_energyplus_measure.py index e47e8c43fdc..5963bd9401f 100644 --- a/src/utilities/bcl/templates/EnergyPlusMeasure/tests/test_energyplus_measure.py +++ b/src/utilities/bcl/templates/EnergyPlusMeasure/tests/test_energyplus_measure.py @@ -32,8 +32,7 @@ def test_bad_argument_values(self): measure = EnergyPlusMeasureName() # create runner with empty OSW - # osw = openstudio.WorkflowJSON() # TODO: FIXME - osw = openstudio.openstudioutilitiesfiletypes.WorkflowJSON() + osw = openstudio.WorkflowJSON() runner = openstudio.measure.OSRunner(osw) # make an empty workspace @@ -65,8 +64,7 @@ def test_good_argument_values(self): measure = EnergyPlusMeasureName() # create runner with empty OSW - # osw = openstudio.WorkflowJSON() # TODO: FIXME - 
osw = openstudio.openstudioutilitiesfiletypes.WorkflowJSON() + osw = openstudio.WorkflowJSON() runner = openstudio.measure.OSRunner(osw) # make an empty workspace diff --git a/src/utilities/bcl/templates/ModelMeasure/measure.py b/src/utilities/bcl/templates/ModelMeasure/measure.py index 083b0009a9b..53c39d02603 100644 --- a/src/utilities/bcl/templates/ModelMeasure/measure.py +++ b/src/utilities/bcl/templates/ModelMeasure/measure.py @@ -4,9 +4,10 @@ # http://nrel.github.io/OpenStudio-user-documentation/reference/measure_writing_guide/ """ -import openstudio import typing +import openstudio + class ModelMeasureName(openstudio.measure.ModelMeasure): """A ModelMeasure.""" diff --git a/src/utilities/bcl/templates/ModelMeasure/tests/test_model_measure.py b/src/utilities/bcl/templates/ModelMeasure/tests/test_model_measure.py index 972ff5d6c05..1b4e1fff8bd 100644 --- a/src/utilities/bcl/templates/ModelMeasure/tests/test_model_measure.py +++ b/src/utilities/bcl/templates/ModelMeasure/tests/test_model_measure.py @@ -1,8 +1,10 @@ """insert your copyright here.""" -import pytest -import openstudio from pathlib import Path + +import openstudio +import pytest + from measure import ModelMeasureName @@ -28,8 +30,7 @@ def test_bad_argument_values(self): measure = ModelMeasureName() # create runner with empty OSW - # osw = openstudio.WorkflowJSON() # TODO: FIXME - osw = openstudio.openstudioutilitiesfiletypes.WorkflowJSON() + osw = openstudio.WorkflowJSON() runner = openstudio.measure.OSRunner(osw) # Make an empty model @@ -72,8 +73,7 @@ def test_good_argument_values(self): measure = ModelMeasureName() # create runner with empty OSW - # osw = openstudio.WorkflowJSON() # TODO: FIXME - osw = openstudio.openstudioutilitiesfiletypes.WorkflowJSON() + osw = openstudio.WorkflowJSON() runner = openstudio.measure.OSRunner(osw) # load the test model diff --git a/src/utilities/bcl/templates/ReportingMeasure/measure.py b/src/utilities/bcl/templates/ReportingMeasure/measure.py index 50230cacfce..e01d324d325 100644 --- a/src/utilities/bcl/templates/ReportingMeasure/measure.py +++ b/src/utilities/bcl/templates/ReportingMeasure/measure.py @@ -4,9 +4,10 @@ # http://nrel.github.io/OpenStudio-user-documentation/reference/measure_writing_guide/ """ -import openstudio from pathlib import Path +import openstudio + class ReportingMeasureName(openstudio.measure.ReportingMeasure): """An ReportingMeasure.""" @@ -50,7 +51,7 @@ def arguments(self, model: openstudio.model.Model): report_drybulb_temp = openstudio.measure.OSArgument.makeBoolArgument("report_drybulb_temp", True) report_drybulb_temp.setDisplayName("Add output variables for Drybulb Temperature") - report_drybulb_temp.setDescription('Will add drybulb temp and report min/mix value in html.') + report_drybulb_temp.setDescription("Will add drybulb temp and report min/max value in html.") report_drybulb_temp.setValue(True) args.append(report_drybulb_temp) @@ -65,9 +66,7 @@ def outputs(self): return outs def energyPlusOutputRequests( - self, - runner: openstudio.measure.OSRunner, - user_arguments: openstudio.measure.OSArgumentMap + self, runner: openstudio.measure.OSRunner, user_arguments: openstudio.measure.OSArgumentMap ): """Returns a vector of IdfObject's to request EnergyPlus objects needed by the run method.""" super().energyPlusOutputRequests(runner, user_arguments) # Do **NOT** remove this line @@ -78,7 +77,7 @@ def energyPlusOutputRequests( # get the last model and sql file model = runner.lastOpenStudioModel() if not model.is_initialized(): - runner.registerError('Cannot 
find last model.') + runner.registerError("Cannot find last model.") return False model = model.get() @@ -87,9 +86,10 @@ def energyPlusOutputRequests( if not runner.validateUserArguments(self.arguments(model), user_arguments): return False - if runner.getBoolArgumentValue('report_drybulb_temp', user_arguments): + if runner.getBoolArgumentValue("report_drybulb_temp", user_arguments): request = openstudio.IdfObject.load( - 'Output:Variable, , Site Outdoor Air Drybulb Temperature, Hourly;').get() + "Output:Variable, , Site Outdoor Air Drybulb Temperature, Hourly;" + ).get() result.append(request) return result @@ -105,7 +105,7 @@ def run( # get the last model and sql file model = runner.lastOpenStudioModel() if not model.is_initialized(): - runner.registerError('Cannot find last model.') + runner.registerError("Cannot find last model.") return False model = model.get() @@ -114,12 +114,12 @@ def run( return False # get measure arguments - report_drybulb_temp = runner.getBoolArgumentValue('report_drybulb_temp', user_arguments) + report_drybulb_temp = runner.getBoolArgumentValue("report_drybulb_temp", user_arguments) # load sql file sql_file = runner.lastEnergyPlusSqlFile() if not sql_file.is_initialized(): - runner.registerError('Cannot find last sql file.') + runner.registerError("Cannot find last sql file.") return False sql_file = sql_file.get() @@ -226,23 +226,23 @@ def run( env_type = sql_file.environmentType(env_pd) if not env_type.is_initialized(): continue - if env_type.get() == openstudio.EnvironmentType('WeatherRunPeriod'): + if env_type.get() == openstudio.EnvironmentType("WeatherRunPeriod"): ann_env_pd = env_pd break if ann_env_pd is None: - runner.registerWarning('No annual environment period found.') + runner.registerWarning("No annual environment period found.") else: # get desired variable - key_value = 'Environment' - time_step = 'Hourly' # "Zone Timestep", "Hourly", "HVAC System Timestep" - variable_name = 'Site Outdoor Air Drybulb Temperature' + key_value = "Environment" + time_step = "Hourly" # "Zone Timestep", "Hourly", "HVAC System Timestep" + variable_name = "Site Outdoor Air Drybulb Temperature" # key value would go at the end if we used it. 
output_timeseries = sql_file.timeSeries(ann_env_pd, time_step, variable_name, key_value) if not output_timeseries.is_initialized(): - runner.registerWarning('Timeseries not found.') + runner.registerWarning("Timeseries not found.") else: - runner.registerInfo('Found timeseries.') + runner.registerInfo("Found timeseries.") output_timeseries = output_timeseries.get() values = output_timeseries.values() @@ -369,8 +369,8 @@ def format_dt(dt): # write html file: any file named 'report*.*' in the current working directory # will be copied to the ./reports/ folder as 'reports/_.html' - html_out_path = Path('./report.html').absolute() - with open(html_out_path, 'w') as f: + html_out_path = Path("./report.html").absolute() + with open(html_out_path, "w") as f: f.write(output) # Close the sql file diff --git a/src/utilities/bcl/templates/ReportingMeasure/tests/test_reporting_measure.py b/src/utilities/bcl/templates/ReportingMeasure/tests/test_reporting_measure.py index 8c88a9f9f53..e3a50e306c8 100644 --- a/src/utilities/bcl/templates/ReportingMeasure/tests/test_reporting_measure.py +++ b/src/utilities/bcl/templates/ReportingMeasure/tests/test_reporting_measure.py @@ -1,10 +1,11 @@ """Insert your copyright here.""" -from pathlib import Path import os +import subprocess +from pathlib import Path + import openstudio import pytest -import subprocess from measure import ReportingMeasureName @@ -33,9 +34,12 @@ def report_path(test_name) -> Path: return TestReportingMeasureName.run_dir(test_name) / "report.html" @staticmethod - def setup_test(test_name: str, idf_output_requests: openstudio.IdfObjectVector, model_in_path: Path = MODEL_IN_PATH_DEFAULT, epw_path: Path = - EPW_IN_PATH_DEFAULT): - + def setup_test( + test_name: str, + idf_output_requests: openstudio.IdfObjectVector, + model_in_path: Path = MODEL_IN_PATH_DEFAULT, + epw_path: Path = EPW_IN_PATH_DEFAULT, + ): run_dir = TestReportingMeasureName.run_dir(test_name) run_dir.mkdir(parents=True, exist_ok=True) @@ -62,23 +66,22 @@ def setup_test(test_name: str, idf_output_requests: openstudio.IdfObjectVector, model.addObjects(request_model.objects()) model.save(str(model_out_path), True) - if os.environ.get('OPENSTUDIO_TEST_NO_CACHE_SQLFILE'): + if os.environ.get("OPENSTUDIO_TEST_NO_CACHE_SQLFILE"): sql_file = TestReportingMeasureName.sql_path(test_name) if sql_file.exists(): sql_file.unlink() - osw_path = run_dir / 'in.osw' + osw_path = run_dir / "in.osw" - # workflow = openstudio.WorkflowJSON() # TODO: FIXME - workflow = openstudio.openstudioutilitiesfiletypes.WorkflowJSON() + workflow = openstudio.WorkflowJSON() workflow.setSeedFile(str(model_out_path)) workflow.setWeatherFile(str(epw_path)) workflow.saveAs(str(osw_path)) # TODO: use the system openstudio for now, replace with openstudio.getOpenStudioCLI eventually - cli_path = 'openstudio' - args = [cli_path, 'run', '-w', str(osw_path)] + cli_path = "openstudio" + args = [cli_path, "run", "-w", str(osw_path)] print(" ".join(args)) subprocess.check_call(args) @@ -97,14 +100,13 @@ def test_number_of_arguments_and_argument_names(self): def test_with_drybulb_temp(self): """Test running the measure with appropriate arguments, with db temp.""" - test_name = 'test_with_drybulb_temp' + test_name = "test_with_drybulb_temp" # create an instance of the measure measure = ReportingMeasureName() # create runner with empty OSW - # osw = openstudio.WorkflowJSON() # TODO: FIXME - osw = openstudio.openstudioutilitiesfiletypes.WorkflowJSON() + osw = openstudio.WorkflowJSON() runner = openstudio.measure.OSRunner(osw) # 
make an empty model @@ -149,7 +151,7 @@ def test_with_drybulb_temp(self): # temporarily change directory to the run directory and run the measure start_dir = Path.cwd() - #try: + # try: os.chdir(TestReportingMeasureName.run_dir(test_name)) # run the measure @@ -159,7 +161,7 @@ def test_with_drybulb_temp(self): assert result.value().valueName() == "Success" assert len(result.warnings()) == 0 os.chdir(start_dir) - #except: + # except: # os.chdir(start_dir) # make sure the report file exists @@ -167,14 +169,13 @@ def test_with_drybulb_temp(self): def test_without_drybulb_temp(self): """Test running the measure with appropriate arguments, without db temp.""" - test_name = 'test_without_drybulb_temp' + test_name = "test_without_drybulb_temp" # create an instance of the measure measure = ReportingMeasureName() # create runner with empty OSW - # osw = openstudio.WorkflowJSON() # TODO: FIXME - osw = openstudio.openstudioutilitiesfiletypes.WorkflowJSON() + osw = openstudio.WorkflowJSON() runner = openstudio.measure.OSRunner(osw) # make an empty model @@ -219,7 +220,7 @@ def test_without_drybulb_temp(self): # temporarily change directory to the run directory and run the measure start_dir = Path.cwd() - #try: + # try: os.chdir(TestReportingMeasureName.run_dir(test_name)) # run the measure @@ -229,7 +230,7 @@ def test_without_drybulb_temp(self): assert result.value().valueName() == "Success" assert len(result.warnings()) == 0 os.chdir(start_dir) - #except: + # except: # os.chdir(start_dir) # make sure the report file exists diff --git a/src/utilities/core/jsoncpp.i b/src/utilities/core/jsoncpp.i index 2f31c6b95a6..fb71b0bcca1 100644 --- a/src/utilities/core/jsoncpp.i +++ b/src/utilities/core/jsoncpp.i @@ -10,79 +10,108 @@ #endif #if defined SWIGPYTHON -%fragment("JsonToDict","header", fragment="SWIG_FromCharPtrAndSize") { - inline PyObject* SWIG_From_JsonValue(const Json::Value& value) { - - if (value.isBool()) { - return value.asBool() ? Py_True : Py_False; - } else if (value.isIntegral()) { - return PyLong_FromLongLong(value.asInt64()); - } else if (value.isNumeric()) { - return PyFloat_FromDouble(value.asDouble()); - } else if (value.isString()) { - // return PyUnicode_FromString(value.asCString()); - const auto str = value.asString(); - return SWIG_FromCharPtrAndSize(str.data(), str.size()); - } else if (value.isArray()) { - PyObject* result = PyList_New(value.size()); - Py_ssize_t idx = 0; - for( const auto& arrayElement : value) { - // TODO: this should do a recursive call to convert n (which is Json::Value) to a python type... - auto val = SWIG_From_JsonValue(arrayElement); - // PyList_Append(result, val); - PyList_SetItem(result, idx++, val); - } - return result; - - } else if (value.isObject()) { - PyObject* result = PyDict_New(); - for( const auto& id : value.getMemberNames()) { - // TODO: this should do a recursive call to convert *$1[id] (which is a Json::Value) to a python type... - auto val = SWIG_From_JsonValue(value[id]); - PyDict_SetItemString(result, id.c_str(), val); - Py_DECREF(val); - } - return result; +%fragment("JsonToDict", "header", fragment="SWIG_FromCharPtrAndSize") { + SWIGINTERN PyObject* SWIG_From_JsonValue(const Json::Value& value) { + // PyErr_WarnEx(PyExc_UserWarning, "Translating a Json::Value to a PyObject", 1); // Debugging + + if (value.isNull()) { + return Py_None; + } + + if (value.isBool()) { + return value.asBool() ? 
Py_True : Py_False; + } + + if (value.isIntegral()) { + return PyLong_FromLongLong(value.asInt64()); + } + + if (value.isNumeric()) { + return PyFloat_FromDouble(value.asDouble()); + } + + if (value.isString()) { + // return PyUnicode_FromString(value.asCString()); + const auto str = value.asString(); + return SWIG_FromCharPtrAndSize(str.data(), str.size()); + } + + if (value.isArray()) { + PyObject* result = PyList_New(value.size()); + Py_ssize_t idx = 0; + for( const auto& arrayElement : value) { + // recursive call + auto val = SWIG_From_JsonValue(arrayElement); + // PyList_Append(result, val); + PyList_SetItem(result, idx++, val); } + return result; + } - return PyDict_New(); + if (value.isObject()) { + PyObject* result = PyDict_New(); + for( const auto& id : value.getMemberNames()) { + // recursive call + auto val = SWIG_From_JsonValue(value[id]); + PyDict_SetItemString(result, id.c_str(), val); + Py_DECREF(val); + } + return result; + } + + return Py_None; } } %typemap(out, fragment="JsonToDict") Json::Value { $result = SWIG_From_JsonValue($1); } + + #endif #if defined SWIGRUBY %fragment("JsonToDict","header", fragment="SWIG_FromCharPtrAndSize") { - inline VALUE SWIG_From_JsonValue(const Json::Value& value) { - - if (value.isBool()) { - return value.asBool() ? Qtrue : Qfalse; - } else if (value.isIntegral()) { - return INT2NUM(value.asInt64()); - } else if (value.isNumeric()) { - return DOUBLE2NUM(value.asDouble()); - } else if (value.isString()) { - const auto str = value.asString(); - return SWIG_FromCharPtrAndSize(str.data(), str.size()); - } else if (value.isArray()) { - VALUE result = rb_ary_new2(value.size()); - for( const auto& arrayElement : value) { - rb_ary_push(result, SWIG_From_JsonValue(arrayElement)); - } - return result; - - } else if (value.isObject()) { - VALUE result = rb_hash_new(); - for( const auto& id : value.getMemberNames()) { - rb_hash_aset(result, ID2SYM(rb_intern(id.data())), SWIG_From_JsonValue(value[id])); - } - return result; + SWIGINTERN VALUE SWIG_From_JsonValue(const Json::Value& value) { + + if (value.isNull()) { + return Qnil; + } + + if (value.isBool()) { + return value.asBool() ? Qtrue : Qfalse; + } + + if (value.isIntegral()) { + return INT2NUM(value.asInt64()); + } + + if (value.isNumeric()) { + return DOUBLE2NUM(value.asDouble()); + } + + if (value.isString()) { + const auto str = value.asString(); + return SWIG_FromCharPtrAndSize(str.data(), str.size()); + } + + if (value.isArray()) { + VALUE result = rb_ary_new2(value.size()); + for( const auto& arrayElement : value) { + rb_ary_push(result, SWIG_From_JsonValue(arrayElement)); + } + return result; + } + + if (value.isObject()) { + VALUE result = rb_hash_new(); + for( const auto& id : value.getMemberNames()) { + rb_hash_aset(result, ID2SYM(rb_intern(id.data())), SWIG_From_JsonValue(value[id])); } + return result; + } - return rb_hash_new(); + return Qnil; } }
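
The two SWIG fragments above give Python and Ruby the same contract: a returned Json::Value is unwrapped into native containers rather than an opaque proxy object. Null maps to None/nil, booleans, integers, doubles and strings map to the matching scalar types, arrays convert element-wise to a list (Array in Ruby), and objects convert member-wise to a dict (a Hash with Symbol keys in Ruby), with recursive calls handling nesting. Below is a minimal sketch of the resulting Python-side behavior; get_json is a hypothetical stand-in for any wrapped function whose C++ signature returns Json::Value, not a real binding.

    # Hypothetical stand-in: with the typemap above, a real SWIG-wrapped
    # function returning Json::Value would hand back exactly this kind of
    # plain-Python structure.
    def get_json():
        return {"on": True, "counts": [1, 2], "label": "x", "missing": None}

    result = get_json()
    assert isinstance(result, dict)    # Json::objectValue  -> dict
    assert result["on"] is True        # booleanValue       -> bool
    assert result["counts"] == [1, 2]  # arrayValue         -> list (element-wise recursion)
    assert result["label"] == "x"      # stringValue        -> str
    assert result["missing"] is None   # nullValue          -> None

One caveat on the CPython side: Py_None, Py_True and Py_False are shared singletons, so returning them from an out typemap without a Py_INCREF can eventually underflow their reference counts once callers release the result; the Py_RETURN_NONE / Py_RETURN_TRUE / Py_RETURN_FALSE macros are the usual safe forms.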