diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..97610f3 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "client/ayon_usd/ayon_bin_client"] + path = client/ayon_usd/ayon_bin_client + url = https://github.com/ynput/ayon-bin-bridge-client.git diff --git a/Docs/Ayon_Docs/Admin_Docs.md b/Docs/Ayon_Docs/Admin_Docs.md new file mode 100644 index 0000000..72e39ee --- /dev/null +++ b/Docs/Ayon_Docs/Admin_Docs.md @@ -0,0 +1,97 @@ +## Introduction

> **_NOTE_**\
> This addon is currently in its Alpha stage, and you will need to set some
> LakeFS keys (LakeFS is the data server we use to distribute binary data). You
> can get those keys on our Discord server; just ask one of the Ynput staff for
> them. The settings are the following:
> `ayon+settings://ayon_usd/LakeFs_Settings/access_key_id` and
> `ayon+settings://ayon_usd/LakeFs_Settings/secret_access_key`

USD is a modern, open-source scene description and file format developed by
Pixar Animation Studios. It is an extensive and extendable C++ library used in
3D, 2D and games graphics to allow for efficient work with scene data.

Many might know it from SideFX Houdini Solaris or NVIDIA Omniverse, but it is
by now included in most other VFX software packages.

In AYON we use it in our new **Contribution workflow** as the data backend to
allow cross-platform and cross-application workflows. This allows for better
integrated teams and a more artist-centric workflow, as artists can open the
same scene in different applications and work with the tools that serve them
best.

The goal of this addon is to extend the Contribution workflow by automatically
distributing the USD and AYON libraries:

- USD library build for supported platforms.
- USD AR2 [Asset Resolver](https://github.com/ynput/ayon-usd-resolver) plugin
  for
  [**some**](https://github.com/ynput/ayon-usd-resolver?tab=readme-ov-file#tested-platforms)
  supported DCCs.
- USD tools (usdcat, usdedit, usdinfo, usdview, usdzip) coming with the USD
  build.
- Standalone tools for AYON, extending the capability and usability of the
  OpenUSD library for artists and studios.

## Configuration

There is a list of things you can configure in the server settings to achieve
the optimal setup for your studio. In most cases you will probably not need to
touch them, though.

#### LakeFs Config

**LakeFs Settings:** `ayon+settings://ayon_usd/LakeFs_Settings`\
LakeFS is the backend of our binary distribution system. The addon will use the
specified server to download the resolvers and the AYON USD libraries from
LakeFS.

**LakeFs Server Uri:**
`ayon+settings://ayon_usd/LakeFs_Settings/ayon_usd_lake_fs_server_uri`\
This is the URI under which the LakeFS server is hosted. The Ynput server can
be found at `https://lake.ayon.cloud`

**LakeFs Repository Uri:**
`ayon+settings://ayon_usd/LakeFs_Settings/ayon_usd_lake_fs_server_repo`\
This is a LakeFS-internal link that also specifies the branch you are
downloading from.\
This is useful if you want to pin your pipeline to a specific release.

**Asset Resolvers:** `ayon+settings://ayon_usd/LakeFs_Settings/asset_resolvers`\
Allows you to associate a specific application name with a specific resolver.\
We always set up all the resolvers we compile, but if you have special app
names in your Applications settings you might want to add an app alias.\
E.g. if you have hou19.5.xxx set up as a variant for Houdini, you can set it as
an alias for the Hou19.5 entry because they share the same resolver (see the
example below).
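A minimal sketch of what a single `asset_resolvers` entry with an alias could
look like. The field names (`name`, `app_alias_list`, `platform`,
`lake_fs_path`) mirror the keys the addon's client code reads; the concrete
values here are placeholders, not a real entry:

```json
{
  "name": "houdini/19-5",
  "app_alias_list": ["houdini/19-5-xxx"],
  "platform": "windows",
  "lake_fs_path": "<path-to-the-resolver-zip-inside-the-LakeFs-repository>"
}
```

The `lake_fs_path` is appended to the **LakeFs Repository Uri** when the
resolver is downloaded, and any application variant listed in `app_alias_list`
reuses the resolver of the matching `name` entry.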
#### Usd Resolver Config

`ayon+settings://ayon_usd/Ayon_UsdResolver_Settings`

**Log Level** `ayon+settings://ayon_usd/Ayon_UsdResolver_Settings/ayon_log_lvl`
Controls the log level of the AyonUsdResolver. It is advised to keep this at
Warn or Critical, as logging will impact performance.

**File Logger Enabled**
`ayon+settings://ayon_usd/Ayon_UsdResolver_Settings/ayon_file_logger_enabled`
AyonUsdResolver includes a file logger that can be enabled if needed.

**Logging Keys**
`ayon+settings://ayon_usd/Ayon_UsdResolver_Settings/ayon_logger_logging_keys`
The AyonUsdResolver logger has a few predefined logging keys that can be
enabled for debugging. It is advised to only do this with developer bundles, as
it can expose AYON server data. It will also generate quite a lot of output.

**File Logger Path**
`ayon+settings://ayon_usd/Ayon_UsdResolver_Settings/file_logger_file_path`
The AYON file logger needs an output path; this needs to be a relative or
absolute path to a folder.

#### UsdLib Config

`ayon+settings://ayon_usd/Usd_Settings`

**Tf_Debug** `ayon+settings://ayon_usd/Usd_Settings/usd_tf_debug`\
This allows you to set the `TF_DEBUG` environment variable to get extra debug
info from the USD library.\
[Usd Survival Guide (Luca Scheller)](https://lucascheller.github.io/VFX-UsdSurvivalGuide/core/profiling/debug.html)\
[OpenUsd Debug Wiki](https://openusd.org/release/api/group__group__tf___debugging_output.html)
diff --git a/Docs/Ayon_Docs/Dev_Docs.md b/Docs/Ayon_Docs/Dev_Docs.md new file mode 100644 index 0000000..20d3fcf --- /dev/null +++ b/Docs/Ayon_Docs/Dev_Docs.md @@ -0,0 +1,26 @@ +# Developer Docs

Most important locations:

1. config
   - provides the general addon config and a list of variables and functions to
     access settings across the addon.
   - also allows getting the LakeFS-related classes and functions in their
     global state.
2. hooks
   - AYON and Pyblish related addons and hooks.
3. standalone
   - AyonUsd standalone tools. They should be DCC agnostic.
4. utils
   - utility functions to make interaction with the addon simpler.

## Standalone

### Pinning Support

The AYON USD resolver has a feature we call pinning support. It allows storing
the current state of a USD stage in a file so the data can be loaded quickly
and without server interaction on a farm or any other distributed system that
might otherwise overwhelm or impact server performance.

The rest of this topic is documented in the pinning support branch.
diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md index 5b41b71..f451e6a 100644 --- a/README.md +++ b/README.md @@ -1,41 +1,30 @@ # AYON USD Addon -This is AYON Addon for support of [USD](https://github.com/PixarAnimationStudios/OpenUSD). 
+This AYON Addon acts as an extension for the **AYON Contribution workflow**,
+utilizing the
+[Open-USD Framework](https://github.com/PixarAnimationStudios/OpenUSD) to allow
+for a more automated workflow across applications, systems and teams.
-It helps to distribute USD binaries and related tools to artist workstations and
-to configure its environment.
+You can find the Admin and Developer docs under `Docs/Ayon_Docs/`.
-## Introduction
-
-USD is a modern, open-source, scene description and file format developed by
-Pixar Animation Studios. It is used for interchanging 3D graphics data between
-applications and for rendering.
-
-Goal of this addon is to help distribute USD binaries:
+## Getting Started
-- USD build for supported platforms.
-- USD AR2 plugin for __some__ supported DCCs.
-- USD Tools (usdcat, usdedit, usdinfo, usdview, usdzip) coming with USD build.
-- Standalone tools for AYON - USD interoperation.
+### Clone the Repo
-## Getting Started
+> **_IMPORTANT_**\
+> This repository uses Git submodules. Make sure to use the correct `git clone`\
+> commands accordingly.\
+> `git clone --recurse-submodules https://github.com/ynput/ayon-usd.git`\
+> `git submodule update --init --recursive`
### Installation
1. Clone the repository to your local machine.
-2. Run `.\tools\manage.ps1 create-env` on Windows or `./tools/manage.sh create-env` on Linux.
-3. Run `.\tools\manage.ps1 build` on Windows or `./tools/manage.sh build` on Linux.
-4. In AYON, go to `Studio Settings` -> `Bundles` -> `Install Addons...` and select the `./package/ayon_usd-x.x.x.zip` file.
+2. Run `.\tools\manage.ps1 create-env` on Windows or
+   `./tools/manage.sh create-env` on Linux.
+3. Run `.\tools\manage.ps1 build` on Windows or `./tools/manage.sh build` on
+   Linux.
+4. In AYON, go to `Studio Settings` -> `Bundles` -> `Install Addons...` and
+   select the `./package/ayon_usd-x.x.x.zip` file.
5. Upload the Addon and let the server restart after installation is done.
6. Use new addon in your bundles.
-
-### Configuration
-
-In addon settings, you can configure mapping between USD Resolver plugin and you DCCs.
-`App Name` is the name of the DCC application, like `maya/2025` corresponding
-to the Application addon settings. Then there is a platform settings, where you
-can specify the platform for which the USD Asset Resolver plugin is used.
-Lastly, there is a URL to the USD Asset Resolver plugin zip matching the platform and
-the DCC application.
-
-`ayon+settings://ayon_usd/asset_resolvers` is the key for the settings.
diff --git a/client/ayon_usd/__init__.py b/client/ayon_usd/__init__.py index 993f9f9..ea9f668 100644 --- a/client/ayon_usd/__init__.py +++ b/client/ayon_usd/__init__.py @@ -1,9 +1,11 @@ """USD Addon for AYON - client part."""
-import os
-import sys
-from .addon import USD_ADDON_DIR, USDAddon
-from .utils import extract_zip_file, get_download_dir, get_downloaded_usd_root
+from .addon import USDAddon
+from .utils import (
+    get_download_dir,
+    get_downloaded_usd_root,
+)
+from .ayon_bin_client.ayon_bin_distro.util.zip import extract_zip_file
 __all__ = (
     "USDAddon",
@@ -11,28 +13,3 @@
     "extract_zip_file",
     "get_download_dir",
 )
-
-
-def initialize_environment():
-    """Initialize environment for USD.
-
-    This should be called from Python console or any script running
-    within AYON Python interpreter to initialize USD environment.
-    It cannot be set automatically during AYON startup because it would then
-    pollute environment for other processes - some of them having USD
-    already embedded.
- - """ - sys.path.append( - os.path.join(get_downloaded_usd_root(), "lib", "python")) - - # Resolver settings - os.environ["PXR_PLUGINPATH_NAME"] = USD_ADDON_DIR - os.environ["USD_ASSET_RESOLVER"] = "" - os.environ["TF_DEBUG"] = "1" - os.environ["PYTHONPATH"] = os.path.join( - get_downloaded_usd_root(), "lib", "python") - os.environ["PATH"] = f"{os.getenv('PATH')}{os.path.pathsep}{os.path.join(get_downloaded_usd_root(), 'bin')}" - os.environ["AYONLOGGERLOGLVL"] = "WARN" - os.environ["AYONLOGGERSFILELOGGING"] = "1" - os.environ["AYONLOGGERSFILEPOS"] = ".log" diff --git a/client/ayon_usd/addon.py b/client/ayon_usd/addon.py index ebf9e1a..45323cf 100644 --- a/client/ayon_usd/addon.py +++ b/client/ayon_usd/addon.py @@ -1,14 +1,21 @@ """USD Addon for AYON.""" + +import json import os +from datetime import datetime, timezone + +from ayon_core.addon import AYONAddon, ITrayAddon + +from ayon_core import style -from ayon_core.modules import AYONAddon, ITrayModule -from .utils import is_usd_download_needed -from .version import __version__ +from . import config, utils -USD_ADDON_DIR = os.path.dirname(os.path.abspath(__file__)) +from .ayon_bin_client.ayon_bin_distro.gui import progress_ui +from .ayon_bin_client.ayon_bin_distro.work_handler import worker +from .ayon_bin_client.ayon_bin_distro.util import zip -class USDAddon(AYONAddon, ITrayModule): +class USDAddon(AYONAddon, ITrayAddon): """Addon to add USD Support to AYON. Addon can also skip distribution of binaries from server and can @@ -17,8 +24,8 @@ class USDAddon(AYONAddon, ITrayModule): Cares about supplying USD Framework. """ - name = "ayon_usd" - version = __version__ + name = config.ADDON_NAME + version = config.ADDON_VERSION _download_window = None def tray_init(self): @@ -27,6 +34,10 @@ def tray_init(self): def initialize(self, module_settings): """Initialize USD Addon.""" + if not module_settings["ayon_usd"]["allow_addon_start"]: + raise SystemError( + "The experimental AyonUsd addon is currently activated, but you haven't yet acknowledged the user agreement indicating your understanding that this feature is experimental. Please go to the Studio settings and check the agreement checkbox." + ) self.enabled = True self._download_window = None @@ -36,22 +47,78 @@ def tray_start(self): Download USD if needed. 
""" super(USDAddon, self).tray_start() - download_usd = is_usd_download_needed() - if not download_usd: + + if not os.path.exists(config.DOWNLOAD_DIR): + os.makedirs(config.DOWNLOAD_DIR, exist_ok=True) + if os.path.exists(str(config.DOWNLOAD_DIR) + ".zip"): + os.remove(str(config.DOWNLOAD_DIR) + ".zip") + if not os.path.exists(config.ADDON_DATA_JSON_PATH): + with open(config.ADDON_DATA_JSON_PATH, "w+") as data_json: + init_data = {} + init_data["ayon_usd_addon_first_init_utc"] = str( + datetime.now().astimezone(timezone.utc) + ) + json.dump( + init_data, + data_json, + ) + + if not utils.is_usd_lib_download_needed(): + print("usd is already downloaded") return - from .download_ui import show_download_window + lake_fs_usd_lib_path = f"{config.get_addon_settings_value(config.get_addon_settings(),config.ADDON_SETTINGS_LAKE_FS_REPO_URI)}{config.get_usd_lib_conf_from_lakefs()}" - download_window = show_download_window( - download_usd + usd_lib_lake_fs_time_cest = ( + config.get_global_lake_instance() + .get_element_info(lake_fs_usd_lib_path) + .get("Modified Time") + ) + if not usd_lib_lake_fs_time_cest: + raise ValueError(f"could not find UsdLib time stamp on LakeFs server") + + with open(config.ADDON_DATA_JSON_PATH, "r+") as data_json: + addon_data_json = json.load(data_json) + addon_data_json["usd_lib_lake_fs_time_cest"] = usd_lib_lake_fs_time_cest + + data_json.seek(0) + json.dump( + addon_data_json, + data_json, + ) + data_json.truncate() + + controller = worker.Controller() + + usd_download_work_item = controller.construct_work_item( + func=config.get_global_lake_instance().clone_element, + kwargs={ + "lake_fs_object_uir": lake_fs_usd_lib_path, + "dist_path": config.DOWNLOAD_DIR, + }, + progress_title="Download UsdLib", ) - download_window.finished.connect(self._on_download_finish) - download_window.start() - self._download_window = download_window - def _on_download_finish(self): - self._download_window.close() - self._download_window = None + controller.construct_work_item( + func=zip.extract_zip_file, + kwargs={ + "zip_file_path": config.USD_ZIP_PATH, + "dest_dir": config.USD_LIB_PATH, + }, + progress_title="Unzip UsdLib", + dependency_id=[usd_download_work_item.get_uuid()], + ) + + download_ui = progress_ui.ProgressDialog( + controller, + close_on_finish=True, + auto_close_timeout=1, + delet_progress_bar_on_finish=False, + title=f"{config.ADDON_NAME}-Addon [UsdLib Download]", + ) + download_ui.setStyleSheet(style.load_stylesheet()) + download_ui.start() + self._download_window = download_ui def tray_exit(self): """Exit tray module.""" @@ -63,6 +130,4 @@ def tray_menu(self, tray_menu): def get_launch_hook_paths(self): """Get paths to launch hooks.""" - return [ - os.path.join(USD_ADDON_DIR, "hooks") - ] + return [os.path.join(config.USD_ADDON_DIR, "hooks")] diff --git a/client/ayon_usd/ayon_bin_client b/client/ayon_usd/ayon_bin_client new file mode 160000 index 0000000..127f7d9 --- /dev/null +++ b/client/ayon_usd/ayon_bin_client @@ -0,0 +1 @@ +Subproject commit 127f7d9d5258cb4c99bb3a59c1729eeb34790669 diff --git a/client/ayon_usd/config.py b/client/ayon_usd/config.py new file mode 100644 index 0000000..b34dce2 --- /dev/null +++ b/client/ayon_usd/config.py @@ -0,0 +1,175 @@ +"""USD Addon utility functions.""" + +import functools +import os +import platform +import json +import hashlib +from pathlib import Path +import ayon_api +from ayon_usd import version +from ayon_usd.ayon_bin_client.ayon_bin_distro.lakectlpy import wrapper + +CURRENT_DIR: Path = 
Path(os.path.dirname(os.path.abspath(__file__))) +DOWNLOAD_DIR: Path = CURRENT_DIR / "downloads" +NOT_SET = type("NOT_SET", (), {"__bool__": lambda: False})() +ADDON_NAME: str = version.name +ADDON_VERSION: str = version.__version__ +AYON_BUNDLE_NAME = os.environ["AYON_BUNDLE_NAME"] +USD_ADDON_DIR = os.path.dirname(os.path.abspath(__file__)) + +ADDON_DATA_JSON_PATH = os.path.join(DOWNLOAD_DIR, "ayon_usd_addon_info.json") + + +# Addon Settings +# LakeFs +ADDON_SETTINGS_LAKE_FS_URI = ("lakefs_settings", "ayon_usd_lake_fs_server_uri") +ADDON_SETTINGS_LAKE_FS_REPO_URI = ("lakefs_settings", "ayon_usd_lake_fs_server_repo") +ADDON_SETTINGS_LAKE_FS_KEY_ID = ("lakefs_settings", "access_key_id") +ADDON_SETTINGS_LAKE_FS_KEY = ("lakefs_settings", "secret_access_key") +# Resolver def +ADDON_SETTINGS_ASSET_RESOLVERS = ("lakefs_settings", "asset_resolvers") +ADDON_SETTINGS_ASSET_RESOLVERS_OVERWRITES = ("lakefs_settings", "lake_fs_overrides") +# Usd settings +ADDON_SETTINGS_USD_TF_DEBUG = ("usd_settings", "usd_tf_debug") +# Resolver Settings +ADDON_SETTINGS_USD_RESOLVER_LOG_LVL = ("ayon_usd_resolver_settings", "ayon_log_lvl") + +ADDON_SETTINGS_USD_RESOLVER_LOG_FILLE_LOOGER_ENABLED = ( + "ayon_usd_resolver_settings", + "ayon_file_logger_enabled", +) + +ADDON_SETTINGS_USD_RESOLVER_LOG_FILLE_LOOGER_FILE_PATH = ( + "ayon_usd_resolver_settings", + "file_logger_file_path", +) + +ADDON_SETTINGS_USD_RESOLVER_LOG_LOGGIN_KEYS = ( + "ayon_usd_resolver_settings", + "ayon_logger_logging_keys", +) + + +def get_addon_settings_value(settings: dict, key_path: tuple): + try: + selected_element = settings + for key in key_path: + selected_element = selected_element[key] + + return selected_element + except (KeyError, TypeError) as e: + raise KeyError(f"Error accessing settings with key path {key_path}: {e}") + + +class SingletonFuncCache: + _instance = None + _cache = {} + + def __new__(cls, *args, **kwargs): + if cls._instance is None: + cls._instance = super().__new__(cls, *args, **kwargs) + return cls._instance + + @classmethod + def func_io_cache(cls, func): + @functools.wraps(func) + def cache_func(*args, **kwargs): + + cache_key = tuple((func.__name__, cls._hash_args_kwargs(args, kwargs))) + + if cache_key in cls._cache.keys(): + return cls._cache[cache_key] + result = func(*args, **kwargs) + cls._cache[cache_key] = result + + return result + + return cache_func + + @staticmethod + def _hash_args_kwargs(args, kwargs): + """Generate a hashable key from *args and **kwargs.""" + args_hash = SingletonFuncCache._make_hashable(args) + kwargs_hash = SingletonFuncCache._make_hashable(kwargs) + return args_hash + kwargs_hash + + @staticmethod + def _make_hashable(obj): + """Converts an object to a hashable representation.""" + + if isinstance(obj, (int, float, str, bool, type(None))): + return hashlib.sha256(str(obj).encode()).hexdigest() + + if isinstance(obj, dict) or hasattr(obj, "__dict__"): + return hashlib.sha256(json.dumps(obj, sort_keys=True).encode()).hexdigest() + + try: + return hashlib.sha256(json.dumps(obj).encode()).hexdigest() + except TypeError: + return hashlib.sha256(str(id(obj)).encode()).hexdigest() + + def debug(self): + return self._cache + + +def print_cache(): + print(SingletonFuncCache().debug()) + + +def get_addon_settings(): + + return ayon_api.get_addon_settings( + addon_name=ADDON_NAME, + addon_version=ADDON_VERSION, + variant=AYON_BUNDLE_NAME, + ) + + +@SingletonFuncCache.func_io_cache +def get_global_lake_instance(): + addon_settings = ( + get_addon_settings() + ) # the function is cached, but 
this reduces the call stack + return wrapper.LakeCtl( + server_url=str( + get_addon_settings_value(addon_settings, ADDON_SETTINGS_LAKE_FS_URI) + ), + access_key_id=str( + get_addon_settings_value(addon_settings, ADDON_SETTINGS_LAKE_FS_KEY_ID) + ), + secret_access_key=str( + get_addon_settings_value(addon_settings, ADDON_SETTINGS_LAKE_FS_KEY) + ), + ) + + +@SingletonFuncCache.func_io_cache +def _get_lake_fs_repo_items() -> list: + lake_repo_uri = str( + get_addon_settings_value(get_addon_settings(), ADDON_SETTINGS_LAKE_FS_REPO_URI) + ) + if not lake_repo_uri: + return [] + return get_global_lake_instance().list_repo_objects(lake_repo_uri) + + +@SingletonFuncCache.func_io_cache +def get_usd_lib_conf_from_lakefs() -> str: + usd_zip_lake_path = "" + for item in _get_lake_fs_repo_items(): + if "AyonUsdBin/usd" in item and platform.system().lower() in item: + usd_zip_lake_path = item + return usd_zip_lake_path + + +USD_ZIP_PATH = Path( + os.path.join( + DOWNLOAD_DIR, + os.path.basename( + f"{get_addon_settings_value(get_addon_settings(), ADDON_SETTINGS_LAKE_FS_REPO_URI)}{get_usd_lib_conf_from_lakefs()}" + ), + ) +) + +USD_LIB_PATH = Path(str(USD_ZIP_PATH).replace(USD_ZIP_PATH.suffix, "")) diff --git a/client/ayon_usd/download_ui.py b/client/ayon_usd/download_ui.py deleted file mode 100644 index cf05d6e..0000000 --- a/client/ayon_usd/download_ui.py +++ /dev/null @@ -1,267 +0,0 @@ -"""Download UI.""" -import threading -import uuid -from functools import partial - -from ayon_api import TransferProgress -from ayon_core import style -from qtpy import QtCore, QtWidgets - -from .utils import download_usd - - -class DownloadItem: - """Download item.""" - - def __init__(self, title, func): - """Download item. - - Args: - title (str): Title. - func (Callable): Function. - - """ - self._id = uuid.uuid4().hex - progress = TransferProgress() - self._func = partial(func, progress) - self.title = title - self.progress = progress - self._thread = None - - @property - def id(self): - """Id.""" - return self._id - - @property - def finished(self): - """Check if download is finished.""" - return True if self._thread is None else not self._thread.is_alive() - - def download(self): - """Download.""" - if self._thread is None: - self._thread = threading.Thread(target=self._func) - self._thread.start() - - def finish(self): - """Finish.""" - if self._thread is None: - return - self._thread.join() - self._thread = None - - -class DownloadController: - """Download controller.""" - - def __init__(self, usd): - """Download controller. - - Args: - usd (bool): Download usd. 
- - """ - self._items = [DownloadItem("usd", download_usd)] - - self._items_by_id = { - item.id: item - for item in self._items - } - self._download_started = False - self._download_finished = False - - def items(self): - """Items.""" - yield from self._items_by_id.items() - - @property - def download_items(self): - """Download items.""" - yield from self._items - - @property - def download_started(self): - """Check if download is started.""" - return self._download_started - - @property - def download_finished(self): - """Check if download is finished.""" - return self._download_finished - - @property - def is_downloading(self): - """Check if downloading is in progress.""" - if not self._download_started or self._download_finished: - return False - - return any(not item.finished for item in self.download_items) - - def start_download(self): - """Start download.""" - if self._download_started: - return - self._download_started = True - for item in self.download_items: - item.download() - - def finish_download(self): - """Finish download.""" - if self._download_finished: - return - for item in self.download_items: - item.finish() - self._download_finished = True - - -class DownloadItemWidget(QtWidgets.QWidget): - """Download item widget.""" - - def __init__(self, download_item, parent): - """Download item widget. - - Args: - download_item (DownloadItem): Download item. - parent (QWidget): Parent widget. - - """ - super(DownloadItemWidget, self).__init__(parent) - - title_label = QtWidgets.QLabel(download_item.title, self) - progress_label = QtWidgets.QLabel("0%", self) - - content_layout = QtWidgets.QHBoxLayout(self) - content_layout.addWidget(title_label, 1) - content_layout.addWidget(progress_label, 0) - - self._title_label = title_label - self._progress_label = progress_label - self._download_item = download_item - - def update_progress(self): - """Update progress.""" - if self._download_item.finished: - self._progress_label.setText("Finished") - return - - progress = self._download_item.progress - if not progress.started: - return - - # TODO replace with 'progress.is_running' once is fixed - progress_is_running = not ( - not progress.started - or progress.transfer_done - or progress.failed - ) - if progress_is_running: - transfer_progress = progress.transfer_progress - if transfer_progress is None: - transfer_progress = "Downloading..." - else: - transfer_progress = "{:.2f}%".format(transfer_progress) - self._progress_label.setText(transfer_progress) - return - self._progress_label.setText("Extracting...") - - -class DownloadWindow(QtWidgets.QWidget): - """Download window.""" - - finished = QtCore.Signal() - - def __init__(self, controller, parent=None): - """Download window. - - Args: - controller (DownloadController): Download controller. - parent (QWidget): Parent widget. 
- - """ - super(DownloadWindow, self).__init__(parent=parent) - - self.setWindowTitle("Downloading 3rd party dependencies") - - content_widget = QtWidgets.QWidget(self) - - content_layout = QtWidgets.QVBoxLayout(content_widget) - content_layout.setContentsMargins(0, 0, 0, 0) - - item_widgets = [] - for item in controller.download_items: - item_widget = DownloadItemWidget(item, content_widget) - item_widgets.append(item_widget) - content_layout.addWidget(item_widget, 0) - content_layout.addStretch(1) - - main_layout = QtWidgets.QVBoxLayout(self) - main_layout.addWidget(content_widget, 1) - - timer = QtCore.QTimer() - timer.setInterval(10) - timer.timeout.connect(self._on_timer) - - self._timer = timer - self._controller = controller - self._item_widgets = item_widgets - self._first_show = True - self._start_on_show = False - - def showEvent(self, event): - """Show event.""" - super(DownloadWindow, self).showEvent(event) - if self._first_show: - self._first_show = False - # Set stylesheet and resize - self.setStyleSheet(style.load_stylesheet()) - self.resize(360, 200) - - if self._start_on_show: - self.start() - - def _update_progress(self): - for widget in self._item_widgets: - widget.update_progress() - - def _on_timer(self): - if self._controller.download_finished: - self._timer.stop() - self.finished.emit() - return - - if not self._controller.download_started: - self._controller.start_download() - self._update_progress() - return - - if self._controller.is_downloading: - self._update_progress() - return - - self._controller.finish_download() - self._update_progress() - - def start(self): - """Start download.""" - if self._first_show: - self._start_on_show = True - return - if self._controller.download_started: - return - self._timer.start() - - -def show_download_window(usd, parent=None): - """Show download window. - - Args: - usd (bool): Download usd. - parent (QWidget): Parent widget. - - """ - controller = DownloadController(usd) - window = DownloadWindow(controller, parent=parent) - window.show() - window.start() - return window diff --git a/client/ayon_usd/hooks/__init__.py b/client/ayon_usd/hooks/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/client/ayon_usd/hooks/pre_resolver_init.py b/client/ayon_usd/hooks/pre_resolver_init.py index 7af4b13..d0bb3a0 100644 --- a/client/ayon_usd/hooks/pre_resolver_init.py +++ b/client/ayon_usd/hooks/pre_resolver_init.py @@ -1,40 +1,9 @@ """Pre-launch hook to initialize asset resolver for the application.""" -import concurrent.futures -import os -from platform import system -from pathlib import Path -import requests +import json +import os from ayon_applications import LaunchTypes, PreLaunchHook -from ayon_usd import get_download_dir, extract_zip_file - - -def download_zip(url, directory, filename=None): - """ - Download a zip file from a URL. - - Args: - url (str): The URL of the zip file. - directory (str): The directory to save the zip file in. - filename (str, optional): The name of the file - to save the zip file as. - - Returns: - str: The name of the file the zip file was saved as. 
- """ - with requests.get(url) as response: - # get filename from response headers - if "Content-Disposition" in response.headers: - filename = response.headers["Content-Disposition"].split( - "filename=")[-1].strip('"') - if not filename: - # if not in headers, try to use last part of the URI without - # query string - filename = url.split("/")[-1].split("?")[0] - with open(os.path.join(directory,filename), 'wb') as file: - file.write(response.content) - - return filename +from ayon_usd import config, utils class InitializeAssetResolver(PreLaunchHook): @@ -43,110 +12,78 @@ class InitializeAssetResolver(PreLaunchHook): Asset resolver is used to resolve assets in the application. """ - app_groups = { - "maya", - "nuke", - "nukestudio", - "houdini", - "blender", - "unreal" - } + app_groups = {"maya", "nuke", "nukestudio", "houdini", "blender", "unreal"} launch_types = {LaunchTypes.local} + def _setup_resolver(self, local_resolver, settings): + self.log.info(f"Initializing USD asset resolver for application: {self.app_name}") + env_var_dict = utils.get_resolver_setup_info( + local_resolver, settings, self.app_name, self.log + ) + + for key in env_var_dict: + value = env_var_dict[key] + self.launch_context.env[key] = value + def execute(self): """Pre-launch hook entry method.""" - resolver_settings = self.data["project_settings"]["ayon_usd"]["asset_resolvers"] # noqa: E501 - self.log.debug(self.app_group) - for resolver in resolver_settings: - if resolver["app_name"] != self.app_name: - continue + self.log.debug(self.app_group) + settings = self.data["project_settings"][config.ADDON_NAME] - if resolver["platform"].lower() != system().lower(): - continue + resolver_lake_fs_path = utils.get_resolver_to_download(settings, self.app_name) + if not resolver_lake_fs_path: + raise RuntimeError( + f"no Resolver could be found but AYON-Usd addon is activated {self.app_name}" + ) - if all( - self.app_name not in resolver["app_name"] - for resolver in resolver_settings - ): - self.log.info( - f"No USD asset resolver settings for {self.app_name}.") - return + with open(config.ADDON_DATA_JSON_PATH, "r") as data_json: + addon_data_json = json.load(data_json) try: - pxr_plugin_paths = self.launch_context.env[ - "PXR_PLUGINPATH_NAME"].split(os.pathsep) - except KeyError: - pxr_plugin_paths = [] + key = str(self.app_name).replace("/", "_") + local_resolver_data = addon_data_json[f"resolver_data_{key}"] - try: - ld_path = self.launch_context.env[ - "LD_LIBRARY_PATH"].split(os.pathsep) except KeyError: - ld_path = [] + local_resolver_data = None + + lake_fs_resolver_time_stamp = ( + config.get_global_lake_instance().get_element_info(resolver_lake_fs_path)[ + "Modified Time" + ] + ) + + if ( + local_resolver_data + and lake_fs_resolver_time_stamp == local_resolver_data[0] + and os.path.exists(local_resolver_data[1]) + ): - try: - python_path = self.launch_context.env[ - "PYTHONPATH"].split(os.pathsep) - except KeyError: - python_path = [] - - for resolver in resolver_settings: - if resolver["app_name"] != self.app_name: - continue - - if resolver["platform"].lower() != system().lower(): - continue - - self.log.info( - f"Initializing USD asset resolver for [ {self.app_name} ] .") - download_dir = get_download_dir() - with concurrent.futures.ThreadPoolExecutor() as executor: - future_file = executor.submit( - download_zip, - resolver["uri"], - download_dir, - None - ) - file = future_file.result() - - resolver_dir = download_dir / Path(Path(file).stem) - if resolver_dir.is_dir(): - self.log.info( - "Existing 
resolver found in " - f"['{resolver_dir.as_posix()}'].") - else: - self.log.info( - f"Extracting resolver to ['{resolver_dir.as_posix()}'].") - extract_zip_file( - (download_dir / Path(file)).as_posix(), - download_dir) - - pxr_plugin_paths.append( - ( - resolver_dir / "ayonUsdResolver" / - "resources" - ).as_posix() - ) - ld_path.append( - (resolver_dir / "ayonUsdResolver" / "lib").as_posix()) - python_path.append( - (resolver_dir / "ayonUsdResolver" / - "lib" / "python").as_posix()) - self.log.info(f"Asset resolver {self.app_name} initiated.") - - if pxr_plugin_paths: - self.launch_context.env["PXR_PLUGINPATH_NAME"] = os.pathsep.join( - pxr_plugin_paths - ) + self._setup_resolver(local_resolver_data[1], settings) + return - if python_path: - self.launch_context.env["PYTHONPATH"] = os.pathsep.join( - python_path - ) + local_resolver = utils.download_and_extract_resolver( + resolver_lake_fs_path, str(utils.get_download_dir()) + ) - if ld_path: - env_key = "PATH" if system().lower() == "windows" else "LD_LIBRARY_PATH" # noqa: E501 - if existing_path := self.launch_context.env.get(env_key): - ld_path.insert(0, existing_path) - self.launch_context.env[env_key] = os.pathsep.join(ld_path) + if not local_resolver: + return + + key = str(self.app_name).replace("/", "_") + resolver_time_stamp = ( + config.get_global_lake_instance() + .get_element_info(resolver_lake_fs_path) + .get("Modified Time") + ) + if not resolver_time_stamp: + raise ValueError( + f"could not find resolver time stamp on LakeFs server for {self.app_name}" + ) + addon_data_json[f"resolver_data_{key}"] = [ + resolver_time_stamp, + local_resolver, + ] + with open(config.ADDON_DATA_JSON_PATH, "w") as addon_json: + json.dump(addon_data_json, addon_json) + + self._setup_resolver(local_resolver, settings) diff --git a/client/ayon_usd/standalone/__init__.py b/client/ayon_usd/standalone/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/client/ayon_usd/utils.py b/client/ayon_usd/utils.py index c819bda..58713a1 100644 --- a/client/ayon_usd/utils.py +++ b/client/ayon_usd/utils.py @@ -1,47 +1,25 @@ """USD Addon utility functions.""" -import copy -import datetime -import hashlib + import json import os import platform -import subprocess -import zipfile -from pathlib import Path -from typing import Union +import pathlib +import sys import ayon_api -from ayon_core.lib.local_settings import get_ayon_appdirs -from ayon_usd import version - -CURRENT_DIR: Path = Path(os.path.dirname(os.path.abspath(__file__))) -DOWNLOAD_DIR: Path = CURRENT_DIR / "downloads" -NOT_SET = type("NOT_SET", (), {"__bool__": lambda: False})() -ADDON_NAME: str = version.name -ADDON_VERSION: str = version.__version__ - - -class _USDOptions: - download_needed = None - downloaded_root = NOT_SET - +from ayon_usd.ayon_bin_client.ayon_bin_distro.work_handler import worker +from ayon_usd.ayon_bin_client.ayon_bin_distro.util import zip +from ayon_usd import config -class _USDCache: - addon_settings = NOT_SET - -def get_addon_settings(): +def get_addon_settings() -> dict: """Get addon settings. Return: dict: Addon settings. """ - if _USDCache.addon_settings is NOT_SET: - _USDCache.addon_settings = ayon_api.get_addon_settings( - ADDON_NAME, ADDON_VERSION - ) - return copy.deepcopy(_USDCache.addon_settings) + return ayon_api.get_addon_settings(config.ADDON_NAME, config.ADDON_VERSION) def get_download_dir(create_if_missing=True): @@ -54,277 +32,198 @@ def get_download_dir(create_if_missing=True): str: Path to download dir. 
""" - if create_if_missing and not os.path.exists(DOWNLOAD_DIR): - os.makedirs(DOWNLOAD_DIR, exist_ok=True) - return DOWNLOAD_DIR - - -def _check_args_returncode(args): - try: - kwargs = {} - if platform.system().lower() == "windows": - kwargs["creationflags"] = ( - subprocess.CREATE_NEW_PROCESS_GROUP - | getattr(subprocess, "DETACHED_PROCESS", 0) - | getattr(subprocess, "CREATE_NO_WINDOW", 0) - ) - - if hasattr(subprocess, "DEVNULL"): - proc = subprocess.Popen( - args, - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - **kwargs - ) - proc.wait() - else: - with open(os.devnull, "w") as devnull: - proc = subprocess.Popen( - args, stdout=devnull, stderr=devnull, **kwargs - ) - proc.wait() - - except Exception: - return False - return proc.returncode == 0 - - -def _get_addon_endpoint(): - return f"addons/{ADDON_NAME}/{ADDON_VERSION}" - - -def _get_info_path(name): - return get_ayon_appdirs("addons", f"{ADDON_NAME}-{name}.json") - - -def _filter_file_info(name): - filepath = _get_info_path(name) - - if os.path.exists(filepath): - with open(filepath, "r") as stream: - return json.load(stream) - return [] - - -def _store_file_info(name, info): - """Store info to file.""" - filepath = _get_info_path(name) - root, filename = os.path.split(filepath) - if not os.path.exists(root): - os.makedirs(root, exist_ok=True) - with open(filepath, "w") as stream: - json.dump(info, stream) - - -def get_downloaded_usd_info(): - """Get USD info from file.""" - return _filter_file_info("usd") - - -def store_downloaded_usd_info(usd_info): - """Store USD info to file. + if create_if_missing and not os.path.exists(config.DOWNLOAD_DIR): + os.makedirs(config.DOWNLOAD_DIR, exist_ok=True) + return config.DOWNLOAD_DIR + + +@config.SingletonFuncCache.func_io_cache +def get_downloaded_usd_root() -> str: + """Get downloaded USDLib os local root path.""" + target_usd_lib = config.get_usd_lib_conf_from_lakefs() + usd_lib_local_path = os.path.join( + config.DOWNLOAD_DIR, + os.path.basename(target_usd_lib).replace( + f".{target_usd_lib.split('.')[-1]}", "" + ), + ) + return usd_lib_local_path - Args: - usd_info (list[dict[str, str]]): USD info to store. - """ - _store_file_info("usd", usd_info) +def is_usd_lib_download_needed() -> bool: + # TODO redocument + usd_lib_dir = os.path.abspath(get_downloaded_usd_root()) + if os.path.exists(usd_lib_dir): -def get_server_files_info(): - """Receive zip file info from server. + ctl = config.get_global_lake_instance() + lake_fs_usd_lib_path = f"{config.get_addon_settings_value(config.get_addon_settings(),config.ADDON_SETTINGS_LAKE_FS_REPO_URI)}{config.get_usd_lib_conf_from_lakefs()}" - Information must contain at least 'filename' and 'hash' with md5 zip - file hash. + with open(config.ADDON_DATA_JSON_PATH, "r") as data_json: + addon_data_json = json.load(data_json) + try: + usd_lib_lake_fs_time_stamp_local = addon_data_json[ + "usd_lib_lake_fs_time_cest" + ] + except KeyError: + return True - Returns: - list[dict[str, str]]: Information about files on server. + if ( + usd_lib_lake_fs_time_stamp_local + == ctl.get_element_info(lake_fs_usd_lib_path)["Modified Time"] + ): + return False - """ - response = ayon_api.get(f"{_get_addon_endpoint()}/files_info") - response.raise_for_status() - return response.data + return True -def _find_file_info(name, files_info): - """Find file info by name. 
+def download_and_extract_resolver(resolver_lake_fs_path: str, download_dir: str) -> str: + """downloads an individual object based on the lake_fs_path and extracts the zip into the specific download_dir - Args: - name (str): Name of file to find. - files_info (list[dict[str, str]]): List of file info dicts. + Args + resolver_lake_fs_path (): + download_dir (): Returns: - Union[dict[str, str], None]: File info data. """ - platform_name = platform.system().lower() - return next( - ( - file_info - for file_info in files_info - if ( - file_info["name"] == name - and file_info["platform"] == platform_name - ) - ), - None + controller = worker.Controller() + download_item = controller.construct_work_item( + func=config.get_global_lake_instance().clone_element, + args=[resolver_lake_fs_path, download_dir], ) + extract_zip_item = controller.construct_work_item( + func=zip.extract_zip_file, + args=[ + download_item.connect_func_return, + download_dir, + ], + dependency_id=[download_item.get_uuid()], + ) -def get_downloaded_usd_root() -> Union[str, None]: - """Get downloaded USD binary root path.""" - if _USDOptions.downloaded_root is not NOT_SET: - return _USDOptions.downloaded_root - - server_usd_info = _find_file_info( - "ayon_usd", get_server_files_info()) - if not server_usd_info: - return None - - root = None - for existing_info in get_downloaded_usd_info(): - if existing_info["checksum"] != server_usd_info["checksum"]: - continue - found_root = existing_info["root"] - if os.path.exists(found_root): - root = found_root - break - - _USDOptions.downloaded_root = root - return _USDOptions.downloaded_root - - -def is_usd_download_needed(addon_settings=None): - """Check if is download needed. - - Returns: - bool: Should be config downloaded. - - """ - if _USDOptions.download_needed is not None: - return _USDOptions.download_needed - - if addon_settings is None: - addon_settings = get_addon_settings() - download_needed = False - if addon_settings["use_downloaded"]: - # Check what is required by server - usd_root = get_downloaded_usd_root() - download_needed = not bool(usd_root) - - _USDOptions.download_needed = download_needed - return _USDOptions.download_needed - - -def validate_file_checksum(filename: str, checksum: str, hash_function: str): - """Generate checksum for file based on hash function (MD5 or SHA256). - - Args: - filename (str): Path to file that will have the checksum generated. - checksum (str): Checksum to compare with the generated checksum. - hash_function (str): Hash function name - supports MD5 or SHA256 + controller.start() - Returns: - bool: True if checksums match, False otherwise. + return str(extract_zip_item.func_return) - Raises: - Exception: Invalid hash function is entered. +@config.SingletonFuncCache.func_io_cache +def get_resolver_to_download(settings, app_name: str) -> str: """ - hash_function = hash_function.lower() - - with open(filename, "rb") as f: - data = f.read() # read file as bytes - if hash_function == "md5": - readable_hash = hashlib.md5(data).hexdigest() - elif hash_function == "sha256": - readable_hash = hashlib.sha256(data).hexdigest() - else: - raise ValueError( - f"{hash_function} is an invalid hash function." - f"Please Enter MD5 or SHA256") - - return readable_hash == checksum - - -def extract_zip_file(zip_file_path: str, dest_dir: str): - """Extract a zip file to a destination directory. + Gets LakeFs path that can be used with copy element to download + specific resolver, this will prioritize `lake_fs_overrides` over + asset_resolvers entries. 
- Args: - zip_file_path (str): The path to the zip file. - dest_dir (str): The directory where the zip file should be extracted. + Returns: str: LakeFs object path to be used with lake_fs_py wrapper """ - with zipfile.ZipFile(zip_file_path, "r") as zip_ref: - zip_ref.extractall(dest_dir) + resolver_overwrite_list = config.get_addon_settings_value( + settings, config.ADDON_SETTINGS_ASSET_RESOLVERS_OVERWRITES + ) + if resolver_overwrite_list: + resolver_overwrite = next( + ( + item + for item in resolver_overwrite_list + if item["app_name"] == app_name + and item["platform"] == sys.platform.lower() + ), + None, + ) + if resolver_overwrite: + return resolver_overwrite["lake_fs_path"] -def _download_file(file_info, dirpath, progress=None): - filename = file_info["filename"] - checksum = file_info["checksum"] - checksum_algorithm = file_info["checksum_algorithm"] + resolver_list = config.get_addon_settings_value( + settings, config.ADDON_SETTINGS_ASSET_RESOLVERS + ) + if not resolver_list: + return "" - zip_filepath = ayon_api.download_addon_private_file( - ADDON_NAME, - ADDON_VERSION, - filename, - dirpath, - progress=progress + resolver = next( + ( + item + for item in resolver_list + if (item["name"] == app_name or app_name in item["app_alias_list"]) + and item["platform"] == platform.system().lower() + ), + None, ) + if not resolver: + return "" - try: - if not validate_file_checksum( - zip_filepath, checksum, checksum_algorithm - ): - raise ValueError( - f"Downloaded file hash ({checksum_algorithm}) does not " - f"match expected hash for file '{filename}'." - ) - extract_zip_file(zip_filepath, dirpath) + lake_base_path = config.get_addon_settings_value( + settings, config.ADDON_SETTINGS_LAKE_FS_REPO_URI + ) + resolver_lake_path = lake_base_path + resolver["lake_fs_path"] + return resolver_lake_path - finally: - os.remove(zip_filepath) +@config.SingletonFuncCache.func_io_cache +def get_resolver_setup_info(resolver_dir, settings, app_name: str, logger=None) -> dict: + pxr_plugin_paths = [] + ld_path = [] + python_path = [] -def download_usd(progress=None): - """Download usd from server. + if val := os.getenv("PXR_PLUGINPATH_NAME"): + pxr_plugin_paths.extend(val.split(os.pathsep)) + if val := os.getenv("LD_LIBRARY_PATH"): + ld_path.extend(val.split(os.pathsep)) + if val := os.getenv("PYTHONPATH"): + python_path.extend(val.split(os.pathsep)) - Todo: - Add safeguard to avoid downloading of the file from multiple - processes at once. + resolver_plugin_info_path = os.path.join( + resolver_dir, "ayonUsdResolver", "resources", "plugInfo.json" + ) + resolver_ld_path = os.path.join(resolver_dir, "ayonUsdResolver", "lib") + resolver_python_path = os.path.join( + resolver_dir, "ayonUsdResolver", "lib", "python" + ) - Args: - progress (ayon_api.TransferProgress): Keep track about download. 
+ if ( + not os.path.exists(resolver_python_path) + or not os.path.exists(resolver_ld_path) + or not os.path.exists(resolver_python_path) + ): + raise RuntimeError( + f"Cant start Resolver missing path resolver_python_path: {resolver_python_path}, resolver_ld_path: {resolver_ld_path}, resolver_python_path: {resolver_python_path}" + ) + pxr_plugin_paths.append(pathlib.Path(resolver_plugin_info_path).as_posix()) + ld_path.append(pathlib.Path(resolver_ld_path).as_posix()) + python_path.append(pathlib.Path(resolver_python_path).as_posix()) + + if logger: + logger.info(f"Asset resolver {app_name} initiated.") + resolver_setup_info_dict = {} + resolver_setup_info_dict["PXR_PLUGINPATH_NAME"] = os.pathsep.join(pxr_plugin_paths) + resolver_setup_info_dict["PYTHONPATH"] = os.pathsep.join(python_path) + if platform.system().lower() == "windows": + resolver_setup_info_dict["PATH"] = os.pathsep.join(ld_path) + else: + resolver_setup_info_dict["LD_LIBRARY_PATH"] = os.pathsep.join(ld_path) + + resolver_setup_info_dict["TF_DEBUG"] = config.get_addon_settings_value( + settings, config.ADDON_SETTINGS_USD_TF_DEBUG + ) - """ - dir_path = os.path.join(get_download_dir(), "ayon_usd") + resolver_setup_info_dict["AYONLOGGERLOGLVL"] = config.get_addon_settings_value( + settings, config.ADDON_SETTINGS_USD_RESOLVER_LOG_LVL + ) - files_info = get_server_files_info() - file_info = _find_file_info("ayon_usd", files_info) - if file_info is None: - raise ValueError(f"Can't find USD binary zip for the platform '{platform.system()}'") + resolver_setup_info_dict["AYONLOGGERSFILELOGGING"] = ( + config.get_addon_settings_value( + settings, config.ADDON_SETTINGS_USD_RESOLVER_LOG_FILLE_LOOGER_ENABLED + ) + ) - _download_file(file_info, dir_path, progress=progress) + resolver_setup_info_dict["AYONLOGGERSFILEPOS"] = config.get_addon_settings_value( + settings, config.ADDON_SETTINGS_USD_RESOLVER_LOG_FILLE_LOOGER_FILE_PATH + ) - usd_info = get_downloaded_usd_info() - existing_item = next( - ( - item - for item in usd_info - if item["root"] == dir_path - ), - None + resolver_setup_info_dict["AYON_LOGGIN_LOGGIN_KEYS"] = ( + config.get_addon_settings_value( + settings, config.ADDON_SETTINGS_USD_RESOLVER_LOG_LOGGIN_KEYS + ) ) - if existing_item is None: - existing_item = {} - usd_info.append(existing_item) - existing_item.update({ - "root": dir_path, - "checksum": file_info["checksum"], - "checksum_algorithm": file_info["checksum_algorithm"], - "downloaded": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") - }) - store_downloaded_usd_info(usd_info) - - _USDOptions.download_needed = False - _USDOptions.downloaded_root = NOT_SET + + return resolver_setup_info_dict diff --git a/client/ayon_usd/version.py b/client/ayon_usd/version.py index 883bf2e..dfbb833 100644 --- a/client/ayon_usd/version.py +++ b/client/ayon_usd/version.py @@ -1,10 +1,4 @@ # -*- coding: utf-8 -*- -"""Package declaring ayon_usd addon version. - -Version is regenerated by `create_package.py` script or the -build system based on the content of package.py. - -Do not manually edit this file. -""" +"""Package declaring AYON addon 'ayon_usd' version.""" name = "ayon_usd" -__version__ = "1.0.3-dev.1" +__version__ = "1.0.4-dev.37" diff --git a/create_package.py b/create_package.py index e5128ce..da23c9c 100644 --- a/create_package.py +++ b/create_package.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python + """Prepares server package from addon repo to upload to server. Requires Python 3.9. (Or at least 3.8+). @@ -5,7 +7,7 @@ This script should be called from cloned addon repo. 
It will produce 'package' subdirectory which could be pasted into server -addon directory directly (e.g. into `ayon-backend/addons`). +addon directory directly (eg. into `ayon-backend/addons`). Format of package folder: ADDON_REPO/package/{addon name}/{addon version} @@ -19,104 +21,69 @@ client side code zipped in `private` subfolder. """ -import argparse -import collections -import contextlib -import hashlib -import json -import logging import os -import platform +import sys import re +import io import shutil -import sys -import urllib.request +import platform +import argparse +import logging +import collections import zipfile -from pathlib import Path -from typing import Optional +import subprocess +from typing import Optional, Iterable, Pattern, Union, List, Tuple + import package +FileMapping = Tuple[Union[str, io.BytesIO], str] ADDON_NAME: str = package.name ADDON_VERSION: str = package.version -ADDON_CLIENT_DIR: str = package.client_dir - - -CLIENT_VERSION_CONTENT = '''# -*- coding: utf-8 -*- -"""Package declaring ayon_usd addon version. - -Version is regenerated by `create_package.py` script or the -build system based on the content of package.py. - -Do not manually edit this file. -""" -name = "{}" -__version__ = "{}" +ADDON_CLIENT_DIR: Union[str, None] = getattr(package, "client_dir", None) + +CURRENT_ROOT: str = os.path.dirname(os.path.abspath(__file__)) +SERVER_ROOT: str = os.path.join(CURRENT_ROOT, "server") +FRONTEND_ROOT: str = os.path.join(CURRENT_ROOT, "frontend") +FRONTEND_DIST_ROOT: str = os.path.join(FRONTEND_ROOT, "dist") +DST_DIST_DIR: str = os.path.join("frontend", "dist") +PRIVATE_ROOT: str = os.path.join(CURRENT_ROOT, "private") +PUBLIC_ROOT: str = os.path.join(CURRENT_ROOT, "public") +CLIENT_ROOT: str = os.path.join(CURRENT_ROOT, "client") + +VERSION_PY_CONTENT = f'''# -*- coding: utf-8 -*- +"""Package declaring AYON addon '{ADDON_NAME}' version.""" +name = "{ADDON_NAME}" +__version__ = "{ADDON_VERSION}" ''' -# Set sources to download -AYON_SOURCE_URL = "https://distribute.openpype.io/thirdparty" -USD_SOURCES = { - "24.03": { - "windows": { - "url": f"{AYON_SOURCE_URL}/usd-24.03_win64_py39.zip", - "checksum": "7d7852b9c8e3501e5f64175decc08d70e3bf1c083faaaf2c1a8aa8f9af43ab30", - "checksum_algorithm": "sha256", - }, - "linux": { - "url": f"{AYON_SOURCE_URL}/usd-24.03_linux_py39.zip", - "checksum": "27010ad67d5acd25e3c95b1ace4ab30e047b5a9e48082db0545ae44ae7ec9b09", - "checksum_algorithm": "sha256", - } - } - } - # Patterns of directories to be skipped for server part of addon -IGNORE_DIR_PATTERNS = [ +IGNORE_DIR_PATTERNS: List[Pattern] = [ re.compile(pattern) for pattern in { # Skip directories starting with '.' r"^\.", # Skip any pycache folders - "^__pycache__$" + "^__pycache__$", + "downloads", + "test", } ] # Patterns of files to be skipped for server part of addon -IGNORE_FILE_PATTERNS = [ +IGNORE_FILE_PATTERNS: List[Pattern] = [ re.compile(pattern) for pattern in { # Skip files starting with '.' # NOTE this could be an issue in some cases r"^\.", # Skip '.pyc' files - r"\.pyc$" + r"\.pyc$", } ] -def calculate_file_checksum( - filepath, hash_algorithm, chunk_size=10000): - """Calculate file checksum. - - Args: - filepath (str): File path. - hash_algorithm (str): Hash algorithm. - chunk_size (int, optional): Chunk size for reading file. - - Returns: - str: Checksum of file. 
- - """ - func = getattr(hashlib, hash_algorithm) - hash_obj = func() - with open(filepath, "rb") as f: - for chunk in iter(lambda: f.read(chunk_size), b""): - hash_obj.update(chunk) - return hash_obj.hexdigest() - - class ZipFileLongPaths(zipfile.ZipFile): - r"""Allows longer paths in zip files. + """Allows longer paths in zip files. Regular DOS paths are limited to MAX_PATH (260) characters, including the string's terminating NUL character. @@ -134,12 +101,26 @@ def _extract_member(self, member, tpath, pwd): else: tpath = "\\\\?\\" + tpath - return super(ZipFileLongPaths, self)._extract_member( - member, tpath, pwd - ) + return super()._extract_member(member, tpath, pwd) + + +def _get_yarn_executable() -> Union[str, None]: + cmd = "which" + if platform.system().lower() == "windows": + cmd = "where" + + for line in subprocess.check_output([cmd, "yarn"], encoding="utf-8").splitlines(): + if not line or not os.path.exists(line): + continue + try: + subprocess.call([line, "--version"]) + return line + except OSError: + continue + return None -def safe_copy_file(src_path, dst_path): +def safe_copy_file(src_path: str, dst_path: str): """Copy file and make sure destination directory exists. Ignore if destination already contains directories from source. @@ -147,300 +128,289 @@ def safe_copy_file(src_path, dst_path): Args: src_path (str): File path that will be copied. dst_path (str): Path to destination file. - """ + if src_path == dst_path: return - dst_dir = os.path.dirname(dst_path) - with contextlib.suppress(Exception): - os.makedirs(dst_dir) + dst_dir: str = os.path.dirname(dst_path) + os.makedirs(dst_dir, exist_ok=True) + shutil.copy2(src_path, dst_path) -def _value_match_regexes(value, regexes): +def _value_match_regexes(value: str, regexes: Iterable[Pattern]) -> bool: return any(regex.search(value) for regex in regexes) def find_files_in_subdir( - src_path, - ignore_file_patterns=None, - ignore_dir_patterns=None -): - """Find files in subdirectories. + src_path: str, + ignore_file_patterns: Optional[List[Pattern]] = None, + ignore_dir_patterns: Optional[List[Pattern]] = None, +) -> List[Tuple[str, str]]: + """Find all files to copy in subdirectories of given path. + + All files that match any of the patterns in 'ignore_file_patterns' will + be skipped and any directories that match any of the patterns in + 'ignore_dir_patterns' will be skipped with all subfiles. Args: - src_path (str): Source directory path. - ignore_file_patterns (list, optional): List of regex patterns - to ignore files. - ignore_dir_patterns (list, optional): List of regex patterns - to ignore directories. + src_path (str): Path to directory to search in. + ignore_file_patterns (Optional[list[Pattern]]): List of regexes + to match files to ignore. + ignore_dir_patterns (Optional[list[Pattern]]): List of regexes + to match directories to ignore. Returns: - list: List of tuples with file path and relative path. - + list[tuple[str, str]]: List of tuples with path to file and parent + directories relative to 'src_path'. 
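+ Example (illustrative, POSIX-style separators): + find_files_in_subdir("server") could return + [("server/__init__.py", "__init__.py"), ("server/settings/main.py", "settings/main.py")]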
""" + if ignore_file_patterns is None: ignore_file_patterns = IGNORE_FILE_PATTERNS if ignore_dir_patterns is None: ignore_dir_patterns = IGNORE_DIR_PATTERNS - output = [] + output: List[Tuple[str, str]] = [] + if not os.path.exists(src_path): + return output - hierarchy_queue = collections.deque() + hierarchy_queue: collections.deque = collections.deque() hierarchy_queue.append((src_path, [])) while hierarchy_queue: - item = hierarchy_queue.popleft() + item: Tuple[str, str] = hierarchy_queue.popleft() dirpath, parents = item for name in os.listdir(dirpath): - path = os.path.join(dirpath, name) + path: str = os.path.join(dirpath, name) if os.path.isfile(path): if not _value_match_regexes(name, ignore_file_patterns): - items = list(parents) + items: List[str] = list(parents) items.append(name) output.append((path, os.path.sep.join(items))) continue if not _value_match_regexes(name, ignore_dir_patterns): - items = list(parents) + items: List[str] = list(parents) items.append(name) hierarchy_queue.append((path, items)) return output -def copy_server_content(addon_output_dir, current_dir, log): - """Copy server side folders to 'addon_package_dir'. +def update_client_version(logger): + """Update version in client code if version.py is present.""" + if not ADDON_CLIENT_DIR: + return - Args: - addon_output_dir (str): package dir in addon repo dir - current_dir (str): addon repo dir - log (logging.Logger) + version_path: str = os.path.join(CLIENT_ROOT, ADDON_CLIENT_DIR, "version.py") + if not os.path.exists(version_path): + logger.debug("Did not find version.py in client directory") + return - """ - log.info("Copying server content") + logger.info("Updating client version") + with open(version_path, "w") as stream: + stream.write(VERSION_PY_CONTENT) - filepaths_to_copy = [] - server_dirpath = os.path.join(current_dir, "server") - for item in find_files_in_subdir(server_dirpath): - src_path, dst_subpath = item - dst_path = os.path.join(addon_output_dir, "server", dst_subpath) - filepaths_to_copy.append((src_path, dst_path)) +def build_frontend(): + yarn_executable = _get_yarn_executable() + if yarn_executable is None: + raise RuntimeError("Yarn executable was not found.") - # Copy files - for src_path, dst_path in filepaths_to_copy: - safe_copy_file(src_path, dst_path) + subprocess.run([yarn_executable, "install"], cwd=FRONTEND_ROOT) + subprocess.run([yarn_executable, "build"], cwd=FRONTEND_ROOT) + if not os.path.exists(FRONTEND_DIST_ROOT): + raise RuntimeError("Frontend build failed. Did not find 'dist' folder.") -def _fill_client_version(current_dir): - version_file = os.path.join( - current_dir, "client", ADDON_CLIENT_DIR, "version.py" - ) - with open(version_file, "w") as stream: - stream.write( - CLIENT_VERSION_CONTENT.format( - ADDON_NAME, ADDON_VERSION)) +def get_client_files_mapping() -> List[Tuple[str, str]]: + """Mapping of source client code files to destination paths. + Example output: + [ + ( + "C:/addons/MyAddon/version.py", + "my_addon/version.py" + ), + ( + "C:/addons/MyAddon/client/my_addon/__init__.py", + "my_addon/__init__.py" + ) + ] -def zip_client_side(addon_package_dir, current_dir, log): - """Copy and zip `client` content into 'addon_package_dir'. + Returns: + list[tuple[str, str]]: List of path mappings to copy. The destination + path is relative to expected output directory. + """ - Args: - addon_package_dir (str): Output package directory path. - current_dir (str): Directory path of addon source. - log (logging.Logger): Logger object. 
+ # Add client code content to zip + client_code_dir: str = os.path.join(CLIENT_ROOT, ADDON_CLIENT_DIR) - """ - client_dir = os.path.join(current_dir, "client") - if not os.path.isdir(client_dir): - log.info("Client directory was not found. Skipping") - return + return [ + (path, os.path.join(ADDON_CLIENT_DIR, sub_path)) + for path, sub_path in find_files_in_subdir(client_code_dir) + ] + +def get_client_zip_content(log) -> io.BytesIO: log.info("Preparing client code zip") - private_dir = os.path.join(addon_package_dir, "private") + files_mapping: List[Tuple[str, str]] = get_client_files_mapping() + stream = io.BytesIO() + with ZipFileLongPaths(stream, "w", zipfile.ZIP_DEFLATED) as zipf: + for src_path, subpath in files_mapping: + zipf.write(src_path, subpath) + stream.seek(0) + return stream - if not os.path.exists(private_dir): - os.makedirs(private_dir) - zip_filepath = os.path.join(os.path.join(private_dir, "client.zip")) - with ZipFileLongPaths(zip_filepath, "w", zipfile.ZIP_DEFLATED) as zipf: - # Add client code content to zip - for path, sub_path in find_files_in_subdir(client_dir): - zipf.write(path, sub_path) +def get_base_files_mapping() -> List[FileMapping]: + filepaths_to_copy: List[FileMapping] = [ + (os.path.join(CURRENT_ROOT, "package.py"), "package.py") + ] + # Go through server, private and public directories and find all files + for dirpath in (SERVER_ROOT, PRIVATE_ROOT, PUBLIC_ROOT): + if not os.path.exists(dirpath): + continue + dirname = os.path.basename(dirpath) + for src_file, subpath in find_files_in_subdir(dirpath): + dst_subpath = os.path.join(dirname, subpath) + filepaths_to_copy.append((src_file, dst_subpath)) -def download_usd_zip(downloads_dir: Path, log: logging.Logger): - """Download USD zip files. + if os.path.exists(FRONTEND_DIST_ROOT): + for src_file, subpath in find_files_in_subdir(FRONTEND_DIST_ROOT): + dst_subpath = os.path.join(DST_DIST_DIR, subpath) + filepaths_to_copy.append((src_file, dst_subpath)) - Args: - downloads_dir (Path): Directory path to download zip files. - log (logging.Logger): Logger object. 
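+ # If the client ships a pyproject.toml, bundle it as 'private/pyproject.toml' so it ends up inside the addon package.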
+ pyproject_toml = os.path.join(CLIENT_ROOT, "pyproject.toml") + if os.path.exists(pyproject_toml): + filepaths_to_copy.append((pyproject_toml, "private/pyproject.toml")) + + return filepaths_to_copy - """ - zip_files_info = [] - for item_name, item_info in USD_SOURCES.items(): - for platform_name, platform_info in item_info.items(): - src_url = platform_info["url"] - filename = src_url.split("/")[-1] - zip_path = downloads_dir / filename - checksum = platform_info["checksum"] - checksum_algorithm = platform_info["checksum_algorithm"] - zip_files_info.append({ - "name": ADDON_NAME, - "filename": filename, - "checksum": checksum, - "checksum_algorithm": checksum_algorithm, - "platform": platform_name, - }) - if zip_path.exists(): - file_checksum = calculate_file_checksum( - zip_path, checksum_algorithm) - if checksum == file_checksum: - log.debug(f"USD zip from {src_url} already exists") - continue - os.remove(zip_path) - - log.debug(f"USD zip from {src_url} -> {zip_path}") - log.info("USD zip download - started") - - urllib.request.urlretrieve( - src_url, - zip_path) - log.info("USD zip download - finished") - - file_checksum = calculate_file_checksum( - zip_path, checksum_algorithm) - - if checksum != file_checksum: - raise ValueError( - f"USD zip checksum mismatch: {file_checksum} != {checksum}" - ) - - return zip_files_info - - -def create_server_package( - current_dir: str, - output_dir: str, - addon_output_dir: str, - addon_version: str, - log: logging.Logger -): - """Create server package zip file. - The zip file can be installed to a server using UI or rest api endpoints. +def copy_client_code(output_dir: str, log: logging.Logger): + """Copies server side folders to 'addon_package_dir' Args: - current_dir (str): Directory path of addon source. - output_dir (str): Directory path to output zip file. - addon_output_dir (str): Directory path to addon output directory. - addon_version (str): Version of addon. - log (logging.Logger): Logger object. + output_dir (str): Output directory path. + log (logging.Logger) """ - log.info("Creating server package") - output_path = os.path.join( - output_dir, f"{ADDON_NAME}-{addon_version}.zip" - ) - with ZipFileLongPaths(output_path, "w", zipfile.ZIP_DEFLATED) as zipf: - # Write a manifest to zip - zipf.write( - os.path.join(current_dir, "package.py"), "package.py" - ) - - # Move addon content to zip into 'addon' directory - addon_output_dir_offset = len(addon_output_dir) + 1 - for root, _, filenames in os.walk(addon_output_dir): - if not filenames: - continue + log.info(f"Copying client for {ADDON_NAME}-{ADDON_VERSION}") - dst_root = None - if root != addon_output_dir: - dst_root = root[addon_output_dir_offset:] - for filename in filenames: - src_path = os.path.join(root, filename) - dst_path = filename - if dst_root: - dst_path = os.path.join(dst_root, filename) - zipf.write(src_path, dst_path) + full_output_path = os.path.join(output_dir, f"{ADDON_NAME}_{ADDON_VERSION}") + if os.path.exists(full_output_path): + shutil.rmtree(full_output_path) + os.makedirs(full_output_path, exist_ok=True) - log.info(f"Output package can be found: {output_path}") + for src_path, dst_subpath in get_client_files_mapping(): + dst_path = os.path.join(full_output_path, dst_subpath) + safe_copy_file(src_path, dst_path) + log.info("Client copy finished") -def main( - output_dir: Optional[str] = None, - skip_zip: bool = False, - keep_sources: bool = False -): - """Create addon package. - Main function to execute package creation. 
+def copy_addon_package( + output_dir: str, files_mapping: List[FileMapping], log: logging.Logger +): + """Copy client code to output directory. Args: - output_dir (str, optional): Output directory path. - skip_zip (bool): Skip zipping server package. - keep_sources (bool): Keep sources when server package is created. + output_dir (str): Directory path to output client code. + files_mapping (List[FileMapping]): List of tuples with source file + and destination subpath. + log (logging.Logger): Logger object. """ - logging.basicConfig(level=logging.INFO) - log = logging.getLogger("create_package") - log.setLevel(logging.INFO) + log.info(f"Copying package for {ADDON_NAME}-{ADDON_VERSION}") - log.info("Start creating package") + # Add addon name and version to output directory + addon_output_dir: str = os.path.join(output_dir, ADDON_NAME, ADDON_VERSION) + if os.path.isdir(addon_output_dir): + log.info(f"Purging {addon_output_dir}") + shutil.rmtree(addon_output_dir) - current_dir = os.path.dirname(os.path.abspath(__file__)) - if not output_dir: - output_dir = os.path.join(current_dir, "package") + os.makedirs(addon_output_dir, exist_ok=True) - downloads_dir = Path(os.path.join(current_dir, "downloads")) - downloads_dir.mkdir(exist_ok=True) + # Copy server content + for src_file, dst_subpath in files_mapping: + dst_path: str = os.path.join(addon_output_dir, dst_subpath) + dst_dir: str = os.path.dirname(dst_path) + os.makedirs(dst_dir, exist_ok=True) + if isinstance(src_file, io.BytesIO): + with open(dst_path, "wb") as stream: + stream.write(src_file.getvalue()) + else: + safe_copy_file(src_file, dst_path) - files_info = download_usd_zip(downloads_dir, log) + log.info("Package copy finished") - new_created_version_dir = os.path.join( - output_dir, ADDON_NAME, ADDON_VERSION - ) - if os.path.isdir(new_created_version_dir): - log.info(f"Purging {new_created_version_dir}") - shutil.rmtree(output_dir) - _fill_client_version(current_dir) +def create_addon_package( + output_dir: str, files_mapping: List[FileMapping], log: logging.Logger +): + log.info(f"Creating package for {ADDON_NAME}-{ADDON_VERSION}") + + os.makedirs(output_dir, exist_ok=True) + output_path = os.path.join(output_dir, f"{ADDON_NAME}-{ADDON_VERSION}.zip") + + with ZipFileLongPaths(output_path, "w", zipfile.ZIP_DEFLATED) as zipf: + # Copy server content + for src_file, dst_subpath in files_mapping: + if isinstance(src_file, io.BytesIO): + zipf.writestr(dst_subpath, src_file.getvalue()) + else: + zipf.write(src_file, dst_subpath) + + log.info("Package created") - log.info(f"Preparing package for {ADDON_NAME}-{ADDON_VERSION}") - addon_output_root = os.path.join(output_dir, ADDON_NAME) - addon_output_dir = os.path.join(addon_output_root, ADDON_VERSION) - if not os.path.exists(addon_output_dir): - os.makedirs(addon_output_dir) +def main( + output_dir: Optional[str] = None, + skip_zip: Optional[bool] = False, + only_client: Optional[bool] = False, +): + log: logging.Logger = logging.getLogger("create_package") + log.info("Package creation started") - copy_server_content(addon_output_dir, current_dir, log) + if not output_dir: + output_dir = os.path.join(CURRENT_ROOT, "package") + + has_client_code = bool(ADDON_CLIENT_DIR) + if has_client_code: + client_dir: str = os.path.join(CLIENT_ROOT, ADDON_CLIENT_DIR) + if not os.path.exists(client_dir): + raise RuntimeError( + f"Client directory was not found '{client_dir}'." + " Please check 'client_dir' in 'package.py'." 
+ ) + update_client_version(log) + + if only_client: + if not has_client_code: + raise RuntimeError("Client code is not available. Skipping") + + copy_client_code(output_dir, log) + return - private_dir = Path(addon_output_dir) / "private" - if not private_dir.exists(): - private_dir.mkdir(parents=True) + log.info(f"Preparing package for {ADDON_NAME}-{ADDON_VERSION}") - for file_info in files_info: - filename = file_info["filename"] - src_path = downloads_dir / filename - dst_path = private_dir / filename - shutil.copy(src_path, dst_path) + if os.path.exists(FRONTEND_ROOT): + build_frontend() - zips_info_path = private_dir / "files_info.json" - with open(zips_info_path, "w") as stream: - json.dump(files_info, stream) + files_mapping: List[FileMapping] = [] + files_mapping.extend(get_base_files_mapping()) - zip_client_side(addon_output_dir, current_dir, log) + if has_client_code: + files_mapping.append((get_client_zip_content(log), "private/client.zip")) # Skip server zipping - if not skip_zip: - create_server_package( - current_dir, output_dir, addon_output_dir, ADDON_VERSION, log - ) - # Remove sources only if zip file is created - if not keep_sources: - log.info("Removing source files for server package") - shutil.rmtree(addon_output_root) + if skip_zip: + copy_addon_package(output_dir, files_mapping, log) + else: + create_addon_package(output_dir, files_mapping, log) + log.info("Package creation finished") @@ -451,27 +421,35 @@ def main( dest="skip_zip", action="store_true", help=( - "Skip zipping server package and create only" - " server folder structure." - ) - ) - parser.add_argument( - "--keep-sources", - dest="keep_sources", - action="store_true", - help=( - "Keep folder structure when server package is created." - ) + "Skip zipping server package and create only" " server folder structure." + ), ) parser.add_argument( - "-o", "--output", + "-o", + "--output", dest="output_dir", default=None, help=( "Directory path where package will be created" " (Will be purged if already exists!)" - ) + ), + ) + parser.add_argument( + "--only-client", + dest="only_client", + action="store_true", + help=( + "Extract only client code. This is useful for development." + " Requires '-o', '--output' argument to be filled." + ), + ) + parser.add_argument( + "--debug", dest="debug", action="store_true", help="Debug log messages." 
) args = parser.parse_args(sys.argv[1:]) - main(args.output_dir, args.skip_zip, args.keep_sources) + level = logging.INFO + if args.debug: + level = logging.DEBUG + logging.basicConfig(level=level) + main(args.output_dir, args.skip_zip, args.only_client) diff --git a/package.py b/package.py index 8ff72ce..7fcbf5d 100644 --- a/package.py +++ b/package.py @@ -1,7 +1,8 @@ """AYON USD Addon package file.""" + name = "ayon_usd" -title = "AYON USD support" -version = "1.0.3-dev.1" +title = "Usd Addon" +version = "1.0.4-dev.37" client_dir = "ayon_usd" services = {} diff --git a/pyproject.toml b/pyproject.toml index 5df4d81..cdaab9b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -102,4 +102,4 @@ line-ending = "auto" ignore-words-list = "ayon,ynput" skip = "./.*,./client/ayon_usd/downloads,*/vendor/*,./downloads,./package,./private" count = true -quiet-level = 3 \ No newline at end of file +quiet-level = 3 diff --git a/server/__init__.py b/server/__init__.py index 42d84de..0f58434 100644 --- a/server/__init__.py +++ b/server/__init__.py @@ -1,19 +1,16 @@ """USD Addon for AYON - server part.""" + import os -import json from pathlib import Path from fastapi import Depends # noqa: F401 from ayon_server.addons import BaseServerAddon -from ayon_server.api.dependencies import dep_current_user -from ayon_server.entities import UserEntity -from ayon_server.exceptions import NotFoundException + from .settings import USDSettings -PRIVATE_DIR = Path( - os.path.dirname(os.path.abspath(__file__))).parent / "private" +PRIVATE_DIR = Path(os.path.dirname(os.path.abspath(__file__))).parent / "private" class USDAddon(BaseServerAddon): @@ -23,22 +20,3 @@ class USDAddon(BaseServerAddon): def initialize(self): """Initialize USD Addon.""" - self.add_endpoint( - "files_info", - self._get_files_info, - method="GET", - name="files_info", - description="Get information about binary files on server.", - ) - - async def _get_files_info( - self, - user: UserEntity = Depends(dep_current_user) - ) -> list[dict[str, str]]: - info_filepath = (PRIVATE_DIR / "files_info.json").resolve().as_posix() - try: - with open(info_filepath, "r") as stream: - data = json.load(stream) - except FileNotFoundError as e: - raise NotFoundException("Files info not found") from e - return data diff --git a/server/settings/__init__.py b/server/settings/__init__.py index f925a93..1fcecfe 100644 --- a/server/settings/__init__.py +++ b/server/settings/__init__.py @@ -1,9 +1,8 @@ """Settings for the server part.""" + from .main import ( USDSettings, ) -__all__ = ( - "USDSettings", -) \ No newline at end of file +__all__ = ("USDSettings",) diff --git a/server/settings/main.py b/server/settings/main.py index 2fbad0c..af69881 100644 --- a/server/settings/main.py +++ b/server/settings/main.py @@ -1,6 +1,6 @@ """Main settings for USD on AYON server.""" -from ayon_server.settings import BaseSettingsModel, MultiplatformPathListModel -from pydantic import Field + +from ayon_server.settings import BaseSettingsModel, SettingsField def platform_enum(): @@ -12,80 +12,257 @@ def platform_enum(): ] +# FIX find a way to pull this info from AyonCppApi (Later AyonLogger) +def logger_logging_keys_enum(): + """Return enumerator for supported platforms.""" + return [ + {"label": "Off", "value": ""}, + {"label": "Api Debug", "value": "AyonApi/"}, + {"label": "Env Debug", "value": "AyonApiDebugEnvVars/"}, + {"label": "All", "value": "AyonApi/AyonApiDebugEnvVars/"}, + ] + + +# FIX find a way to pull this from AyonCppApi (later AyonLogger) +def log_lvl_enum(): + """Return 
enumerator for supported log lvls.""" + return [ + {"label": "Info", "value": "INFO"}, + {"label": "Error", "value": "ERROR"}, + {"label": "Warn", "value": "WARN"}, + {"label": "Critical", "value": "CRITICAL"}, + {"label": "Off", "value": "OFF"}, + ] + + +# FIX find a way to pull this from AyonCppApi (later AyonLogger) +def file_logger_enum(): + """Return enumerator for supported log lvls.""" + return [ + {"label": "Off", "value": "OFF"}, + {"label": "On", "value": "ON"}, + ] + + +class AppPlatformPathModel(BaseSettingsModel): + + _layout = "collapsed" + name: str = SettingsField( + title="App Name", description="Application name, e.g. maya/2025" + ) + + app_alias_list: list[str] = SettingsField( + title="Applicatoin Alias", + description="Allows an admin to define a list of App Names that use the same resolver as the parent application", + default_factory=list, + ) + + # TODO: we need to take into account here different linux flavors + platform: str = SettingsField( + title="Platform", + enum_resolver=platform_enum, + description="windows / linux / darwin", + ) + lake_fs_path: str = SettingsField( + title="LakeFs Object Path", + description="The LakeFs internal path to the resolver zip, e.g: `AyonUsdResolverBin/Hou/ayon-usd-resolver_hou19.5_linux_py37.zip`\n" + "This information can be found on LakeFs server Object Information.", + ) + + class AppPlatformURIModel(BaseSettingsModel): """Application platform URI model.""" _layout = "compact" - app_name: str = Field( - title="App Name", - description="Application name, e.g. maya/2025") + app_name: str = SettingsField( + title="App Name", description="Application name, e.g. maya/2025" + ) # TODO: we need to take into account here different linux flavors - platform: str = Field( + platform: str = SettingsField( title="Platform", enum_resolver=platform_enum, - description="windows / linux / darwin") - uri: str = Field( - title="URI", - description="Path to USD Asset Resolver plugin zip file") + description="windows / linux / darwin", + ) + uri: str = SettingsField( + title="LakeFs Object Uri", + description="Path to USD Asset Resolver plugin zip file on the LakeFs server, e.g: `lakefs://ayon-usd/V001/AyonUsdResolverBin/Hou/ayon-usd-resolver_hou19.5_linux_py37.zip`", + ) -class USDSettings(BaseSettingsModel): - """USD settings.""" +class LakeFsSettings(BaseSettingsModel): + """LakeFs Settings / Download Settings ?""" - use_downloaded: bool = Field( - default=True, - title="Download USD from server", - description="If disabled, one of custom options must be used", + _layout = "collapsed" + + ayon_usd_lake_fs_server_uri: str = SettingsField( + "https://lake.ayon.cloud", + title="LakeFs Server Uri", + description="The url to your LakeFs server.", + ) + ayon_usd_lake_fs_server_repo: str = SettingsField( + "lakefs://ayon-usd/main/", + title="LakeFs Repository Uri", + description="The url to your LakeFs Repository Path", ) - custom_roots: MultiplatformPathListModel = Field( - default_factory=MultiplatformPathListModel, - title="Custom USD root", - description="Root to directory where USD binaries can be found", + access_key_id: str = SettingsField( + "{Ayon_LakeFs_Key_Id}", + title="Access Key Id", + description="LakeFs Access Key Id", ) - asset_resolvers: list[AppPlatformURIModel] = Field( - title="Asset Resolvers", - description="USD Asset Resolver settings", + secret_access_key: str = SettingsField( + "{Ayon_LakeFs_Key}", + title="Access Key", + description="LakeFs Access Key", + ) + asset_resolvers: list[AppPlatformPathModel] = SettingsField( + 
title="Resolver Application LakeFs Paths", + description="Allows an admin to define a specific Resolver Zip for a specific Application", default=[ - AppPlatformURIModel( - app_name="maya/2025", + AppPlatformPathModel( + name="maya/2024", + platform="linux", + lake_fs_path="AyonUsdResolverBin/MayaLinux/Maya2024_2_Py310_Linux_Linux_x86_64.zip", + ), + AppPlatformPathModel( + name="maya/2024", platform="windows", - uri="https://distribute.openpype.io/resolvers/ayon-usd-resolver_maya2025_win64_py310.zip"), - AppPlatformURIModel( - app_name="maya/2025", + lake_fs_path="AyonUsdResolverBin/MayaWin/Maya2024_2_Py310_Win_Windows_AMD64.zip", + ), + AppPlatformPathModel( + name="maya/2025", platform="linux", - uri="https://distribute.openpype.io/resolvers/ayon-usd-resolver_maya2025_linux_py311.zip"), - AppPlatformURIModel( - app_name="maya/2024", + lake_fs_path="AyonUsdResolverBin/MayaLinux/Maya2025_Py311_Linux_Linux_x86_64.zip", + ), + AppPlatformPathModel( + name="maya/2025", platform="windows", - uri="https://distribute.openpype.io/resolvers/ayon-usd-resolver_maya2024.2_win64_py310.zip"), - AppPlatformURIModel( - app_name="maya/2024", + lake_fs_path="AyonUsdResolverBin/MayaWin/Maya2025_Py311_Win_Windows_AMD64.zip", + ), + AppPlatformPathModel( + name="houdini/19-5Py37", + platform="linux", + lake_fs_path="AyonUsdResolverBin/HouLinux/Houdini195_Py37_Linux_Linux_x86_64.zip", + ), + AppPlatformPathModel( + name="houdini/19-5", platform="linux", - uri="https://distribute.openpype.io/resolvers/ayon-usd-resolver_maya2024.2_linux_py310.zip"), - AppPlatformURIModel( - app_name="unreal/5-4", + lake_fs_path="AyonUsdResolverBin/HouLinux/Houdini195_Py39_Linux_Linux_x86_64.zip", + ), + AppPlatformPathModel( + name="houdini/19-5Py37", platform="windows", - uri="https://distribute.openpype.io/resolvers/ayon-usd-resolver_unreal5.4_win64_py311.zip"), - AppPlatformURIModel( - app_name="unreal/5-4", + lake_fs_path="AyonUsdResolverBin/HouWin/Houdini195_Py37_Win_Windows_AMD64.zip", + ), + AppPlatformPathModel( + name="houdini/19-5", + platform="windows", + lake_fs_path="AyonUsdResolverBin/HouWin/Houdini195_Py39_Win_Windows_AMD64.zip", + ), + AppPlatformPathModel( + name="houdini/20-0", + platform="linux", + lake_fs_path="AyonUsdResolverBin/HouLinux/Houdini20_Py310_Linux_Linux_x86_64.zip", + ), + AppPlatformPathModel( + name="houdini/20-0Py39", platform="linux", - uri="https://distribute.openpype.io/resolvers/ayon-usd-resolver_unreal5.4_linux_py311.zip"), - AppPlatformURIModel( - app_name="houdini/19-5", + lake_fs_path="AyonUsdResolverBin/HouLinux/Houdini20_Py39_Linux_Linux_x86_64.zip", + ), + AppPlatformPathModel( + name="houdini/20-0", platform="windows", - uri="https://distribute.openpype.io/resolvers/ayon-usd-resolver_hou19.5_win64_py39.zip"), - AppPlatformURIModel( - app_name="houdini/19-5", + lake_fs_path="AyonUsdResolverBin/HouWin/Houdini20_Py310_Win_Windows_AMD64.zip", + ), + AppPlatformPathModel( + name="houdini/20-0Py39", + platform="windows", + lake_fs_path="AyonUsdResolverBin/HouWin/Houdini20_Py39_Win_Windows_AMD64.zip", + ), + AppPlatformPathModel( + name="unreal/5-4", platform="linux", - uri="https://distribute.openpype.io/resolvers/ayon-usd-resolver_hou19.5_linux_py39.zip"), - AppPlatformURIModel( - app_name="houdini/20", + lake_fs_path="AyonUsdResolverBin/UnrealLinux/Unreal5_4_Py39_Linux_Linux_x86_64.zip", + ), + AppPlatformPathModel( + name="unreal/5-4", platform="windows", - uri="https://distribute.openpype.io/resolvers/ayon-usd-resolver_hou20_win64_py310.zip"), - AppPlatformURIModel( - 
app_name="houdini/20", + lake_fs_path="AyonUsdResolverBin/UnrealWin/Unreal5_4_Py39_Win_Windows_AMD64.zip", + ), + AppPlatformPathModel( + name="ayon_usd/23-5", platform="linux", - uri="https://distribute.openpype.io/resolvers/ayon-usd-resolver_hou20_linux_py310.zip"), - + lake_fs_path="AyonUsdResolverBin/AyonUsdLinux/AyonUsd23_5_Py39_Linux_Linux_x86_64.zip", + ), + AppPlatformPathModel( + name="ayon_usd/23-5", + platform="windows", + lake_fs_path="AyonUsdResolverBin/AyonUsdWin/AyonUsd23_5_Py39_Win_Windows_AMD64.zip", + ), ], ) + lake_fs_overrides: list[AppPlatformURIModel] = SettingsField( + title="Resolver Application overwrites", + description="Allows an admin to define a specific Resolver Zip for a specific Application", + default_factory=list, + ) + + +class AyonResolverSettings(BaseSettingsModel): + """LakeFs Settings / Download Settings ?""" + + _layout = "collapsed" + + ayon_log_lvl: str = SettingsField( + "WARN", + title="AyonResolver Log Lvl", + enum_resolver=log_lvl_enum, + description="Allows you to set the Verbosity of the AyonUsdResolver logger", + ) + ayon_file_logger_enabled: str = SettingsField( + "OFF", + title="AyonResolver File Logger Enabled ", + enum_resolver=file_logger_enum, + description="Allows you to enable or disalbe the AyonUsdResolver file logger, default is Off", + ) + ayon_logger_logging_keys: str = SettingsField( + "", + title="AyonCppApi Logging Keys", + enum_resolver=logger_logging_keys_enum, + description="List of extra logging options for the AyonCppApi", + ) + file_logger_file_path: str = SettingsField( + "", + title="AyonResolver File logger file path", + description="Allows you to set a custom location where the file logger will export to. This can be a relative or absolute path. This is only used if `ayon_file_logger_enabled` is enabled.", + ) + + +class UsdSettings(BaseSettingsModel): + """LakeFs Settings / Download Settings ?""" + + _layout = "collapsed" + usd_tf_debug: str = SettingsField( + "", + title="Tf Debug Variable for Debugging Usd", + description="", + ) + + +class USDSettings(BaseSettingsModel): + """USD settings.""" + + allow_addon_start: bool = SettingsField( + False, title="I Understand and Accept that this is an experimental feature" + ) + + lakefs_settings: LakeFsSettings = SettingsField( + default_factory=LakeFsSettings, title="LakeFs Config" + ) + + ayon_usd_resolver_settings: AyonResolverSettings = SettingsField( + default_factory=AyonResolverSettings, title="UsdResolver Config" + ) + + usd_settings: UsdSettings = SettingsField( + default_factory=UsdSettings, title="UsdLib Config" + ) diff --git a/tests/client/ayon_usd/test_utils.py b/tests/client/ayon_usd/test_utils.py index 4db8c45..f41e101 100644 --- a/tests/client/ayon_usd/test_utils.py +++ b/tests/client/ayon_usd/test_utils.py @@ -6,34 +6,34 @@ def test_validate_file_checksum(file_info, tmp_path): # Create a temporary file - file_path = tmp_path / file_info['filename'] + file_path = tmp_path / file_info["filename"] file_path.write_text("Hello, World!") # Update the checksum to match the file's content - file_info['checksum'] = hashlib.md5(file_path.read_bytes()).hexdigest() + file_info["checksum"] = hashlib.md5(file_path.read_bytes()).hexdigest() - assert utils.validate_file_checksum(str(file_path), file_info['checksum'], file_info['checksum_algorithm']) + assert utils.validate_file_checksum( + str(file_path), file_info["checksum"], file_info["checksum_algorithm"] + ) -def test_extract_zip_file(file_info, tmp_path): - # Create a temporary zip file - zip_path = tmp_path / 
file_info['filename'] - with zipfile.ZipFile(str(zip_path), 'w') as zipf: - zipf.writestr('test.txt', 'Hello, World!') - - # Extract the zip file - extract_dir = tmp_path / 'extracted' - utils.extract_zip_file(str(zip_path), str(extract_dir)) - - # Check that the extracted file exists - assert (extract_dir / 'test.txt').exists() +# def test_extract_zip_file(file_info, tmp_path): +# # Create a temporary zip file +# zip_path = tmp_path / file_info['filename'] +# with zipfile.ZipFile(str(zip_path), 'w') as zipf: +# zipf.writestr('test.txt', 'Hello, World!') +# +# # Extract the zip file +# extract_dir = tmp_path / 'extracted' +# utils.extract_zip_file(str(zip_path), str(extract_dir)) +# +# # Check that the extracted file exists +# assert (extract_dir / 'test.txt').exists() def test_file_info_endpoint( - printer_session, - installed_addon, - ayon_server_session, - ayon_connection_env): + printer_session, installed_addon, ayon_server_session, ayon_connection_env +): server_url, api_key = ayon_connection_env session = ayon_server_session diff --git a/tools/manage.sh b/tools/manage.sh index dd427fe..93a2157 100644 --- a/tools/manage.sh +++ b/tools/manage.sh @@ -179,7 +179,6 @@ run_codespell () { build () { echo -e "${BIGreen}>>>${RST} Building the addon ..." python ./create_package.py - } main () {