Skip to content

Commit

Permalink
CI: Better tracking of what is confirmed tested prior to release. WIP.
Browse files Browse the repository at this point in the history
  • Loading branch information
mario4tier committed Jan 13, 2025
1 parent 80d178e commit 48cb63d
Show file tree
Hide file tree
Showing 10 changed files with 611 additions and 117 deletions.
Binary file modified dist/ta-lib-0.6.4-src.tar.gz
Binary file not shown.
Binary file modified dist/ta-lib_0.6.4_amd64.deb
Binary file not shown.
Binary file modified dist/ta-lib_0.6.4_arm64.deb
Binary file not shown.
Binary file modified dist/ta-lib_0.6.4_i386.deb
Binary file not shown.
408 changes: 300 additions & 108 deletions scripts/package.py

Large diffs are not rendered by default.

18 changes: 14 additions & 4 deletions scripts/post-release-brew.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,8 @@
#!/usr/bin/env python3

# Updates the brew formula to match the latest release assets.
# (done with a PR to the homebrew-core repository)
# Normally, homebrew detects TA-Lib releases and creates a PR without intervention.
#
# Brew updates are CI automated, therefore this script should be
# called only by Github actions.
# This script is for when manual intervention is needed.
#
import argparse
from dataclasses import dataclass
Expand Down Expand Up @@ -132,6 +130,18 @@ def get_ta_lib_brew_info() -> BrewInfo:
parser.add_argument('--token', help='GitHub token for authentication', required=False)
args = parser.parse_args()

if os.getenv('GITHUB_ACTIONS') == 'true':
print("This script should not be called from a GitHub Action. It should be run manually/locally")
sys.exit(1)

print("Normally, homebrew autmatically detects TA-Lib releases and creates a PR without intervention.")
print("Please verify first if a PR is already pending https://github.com/Homebrew/brew/pulls")
print("This script is for manual intervention only if somehow the automatic PR was not created.\n")
confirm = input("Are you sure you want to run this script? (yes/NO): ")
if confirm.lower() != 'yes':
print("Operation cancelled by the user.")
sys.exit(0)

if not is_brew_installed():
print("Error: Brew needs to be installed and be on the PATH")
sys.exit(1)
Expand Down
2 changes: 1 addition & 1 deletion scripts/post-release-docs.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ def replace_version(file_path: str, version: str):
if args.token:
print("Using GitHub token for authentication")

# Will exit if run from a fork.
# if run from a fork, will exit with a message, but not an error.
root_dir = verify_git_repo_original()

try:
Expand Down
49 changes: 46 additions & 3 deletions scripts/pre-release-checks.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,11 @@
# - Detect VERSION inconsistencies among the files.
# - Detect if some release candidate assets are missing in dist/
# - Detect if a top entry for VERSION is missing in CHANGELOG.md
# - Verify the top entry in CHANGELOG.md has a valid YYYY-MM-DD date within
# one day of wallclock.
# - Verify that all dist/digests files match the current source digests. This
# is to ensure that various packagings and tests were all performed with the
# current source code.
#
# If no problems are found, the script will create a temp/DRAFT_RELEASE_NOTES.md
# which is intended for the CI when creating the initial draft release.
Expand All @@ -18,6 +23,7 @@
from utilities.files import path_join
from utilities.common import get_release_assets, verify_git_repo
from utilities.versions import check_sources_digest, check_versions
from datetime import datetime

if __name__ == "__main__":

Expand Down Expand Up @@ -47,9 +53,31 @@
print("Did you forget to wait for all release candidates assets be auto-generated in the dev branch?")
exit(1)

# Verify CHANGELOG.md exists and there is an entry for version.
# At the same time, accumulates all lines until the end of the top entry for the latest version.

# Verify that the directory dist/digests already exists and contains at least one file per package
# in dist/. The digest filename is <package_filename>.digest.
#
# Example: for dist/ta-lib-0.4.0.tar.gz, the source digest is dist/digests/ta-lib-0.4.0.tar.gz.digest
# Verify that the directory dist/digests already exists and contains one
# digest file per package in dist/. The digest filename is <package_filename>.digest.
#
# Example: for dist/ta-lib-0.4.0.tar.gz, the source digest is dist/digests/ta-lib-0.4.0.tar.gz.digest
dist_dir = path_join(root_dir, 'dist')
digests_dir = path_join(dist_dir, 'digests')
if not os.path.exists(digests_dir):
    print(f"Error: Missing {digests_dir} directory.")
    exit(1)
# Iterate all files in dist/ and check if a digest exists for each.
for asset in os.listdir(dist_dir):
    # Skip subdirectories (e.g. dist/digests itself).
    if os.path.isdir(path_join(dist_dir, asset)):
        continue
    digest_file = path_join(digests_dir, f"{asset}.digest")
    if not os.path.exists(digest_file):
        print(f"Error: Missing file [{asset}.digest]. Did you forget some re-build and/or tests steps?")
        exit(1)
    # Verify the digest file is for the current source code.
    # NOTE(review): PackageDigest.write() persists these files as JSON, but
    # here the raw file content is compared to sources_digest — confirm that
    # the producer of these files writes the bare digest string, or parse JSON.
    with open(digest_file, 'r') as f:
        digest = f.read().strip()
    if digest != sources_digest:
        print(f"Error: Digest mismatch for [{asset}.digest]. Did you forget some re-build and/or tests steps?")
        exit(1)

# Verify CHANGELOG.md exists and there is a top entry matching the VERSION file.
changelog_path = path_join(root_dir, 'CHANGELOG.md')
version_pattern = re.compile(r'##\s+\[\d+\.\d+\.\d+\].*')
top_version_found = False
Expand All @@ -66,6 +94,21 @@
print("Did you forget to update CHANGELOG.md?")
exit(1)
top_version_found = True
# Extract the YYYY-MM-DD part of the top entry, e.g. "## [0.6.4] 2025-01-13".
date = line.split(']')[1].strip()
# fullmatch anchors the pattern so trailing garbage is rejected too.
if not re.fullmatch(r'\d{4}-\d{2}-\d{2}', date):
    print(f"Error: Invalid date found in top entry of CHANGELOG.md: {date}")
    exit(1)
# Verify date is within one day of wallclock.
# This is to ensure the release notes are up-to-date.
# We allow a small margin of error (one calendar day either way) to
# account for timezone differences.
today = datetime.today().date()
entry_date = datetime.strptime(date, '%Y-%m-%d').date()
if abs((entry_date - today).days) > 1:
    print(f"Error: Invalid date found in top entry of CHANGELOG.md: {date}")
    print(f"Expected date to be within one day from: {today.strftime('%Y-%m-%d')}")
    exit(1)
print(f"Found valid CHANGELOG.md entry: [{version}] {date}")

else:
break
# Skip from writing the header lines "## Changelog" in the release notes.
Expand Down
12 changes: 11 additions & 1 deletion scripts/utilities/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -362,9 +362,19 @@ def verify_git_repo_original() -> str:
print("Error: Could not determine the remote URL of the repository.")
sys.exit(1)

print("This script performs no operation when run from a fork")
print("Warning: some processing were skipped because running from a fork")
sys.exit(0)

def get_git_user_name() -> str:
    """Return the configured git user.name, or "local" as a fallback.

    Falls back to "local" when:
      - git exits non-zero (e.g. user.name not configured),
      - the returned name is empty/whitespace,
      - the git executable is not installed or not on PATH
        (the original code let FileNotFoundError propagate here).
    """
    try:
        result = subprocess.run(['git', 'config', 'user.name'],
                                capture_output=True, text=True, check=True)
        user_name = result.stdout.strip()
        return user_name if user_name else "local"
    except (subprocess.CalledProcessError, OSError):
        # OSError covers FileNotFoundError when git itself is missing.
        return "local"

def are_generated_files_git_changed(root_dir: str) -> bool:
# Using git, verify if any of the generated files have changed.
#
Expand Down
239 changes: 239 additions & 0 deletions scripts/utilities/package_digest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,239 @@
from dataclasses import dataclass
import hashlib
import json
import os
import sys

from utilities.files import path_join

def _calculate_md5(filepath: str) -> str:
# Calculate md5 of a binary file (do not use for text file because
# of portability issue for line endings)
hash_md5 = hashlib.md5()
with open(filepath, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
return hash_md5.hexdigest()

def _digests_path(root_dir: str) -> str:
    # All per-asset digest files live under <root_dir>/dist/digests.
    digests_dir = path_join(root_dir, "dist", "digests")
    return digests_dir

def _asset_file_name_to_filepath(root_dir: str, asset_file_name: str) -> str:
    # Map an asset file name to its digest file path: dist/digests/<name>.digest
    digest_name = asset_file_name + ".digest"
    return path_join(_digests_path(root_dir), digest_name)

@dataclass
class PackageDigest:
    """Build/test status for one release asset.

    An asset is either a package in dist/ or a "github-*" pseudo-asset that
    tracks the state of a repo branch. The digest is persisted as JSON in
    dist/digests/<asset_file_name>.digest so that release checks can confirm
    every asset was built and tested against the current source code.

    All status fields are strings (not bools) so that they can also carry
    the "Disabled"/"Unknown" states and serialize verbatim to JSON.
    """
    root_dir: str = ""         # Mandatory at construction
    asset_file_name: str = ""  # Mandatory at construction
    sources_digest: str = ""   # Mandatory at construction
    builder_id: str = ""       # Mandatory at construction

    built_success: str = "False"  # "True" or "False"

    package_md5: str = "Disabled"      # "Disabled", "Unknown" or "hash_of_package"
    gen_code_pass: str = "Disabled"    # "Disabled", "Unknown", "True" or "False"
    ta_regtest_pass: str = "Disabled"  # "Disabled", "Unknown", "True" or "False"
    dist_test_pass: str = "Disabled"   # "Disabled", "Unknown", "True" or "False"

    @staticmethod
    def default(root_dir: str, asset_file_name: str, sources_digest: str, builder_id: str) -> 'PackageDigest':
        """Create a fresh digest with feature flags derived from the asset name."""
        pdigest = PackageDigest(
            root_dir=root_dir,
            asset_file_name=asset_file_name,
            sources_digest=sources_digest,
            builder_id=builder_id,
        )

        pdigest._update_features()

        return pdigest

    def _update_features(self):
        # Use the asset_file_name to figure out which features are enabled.
        #
        # When name is "github-*", then it is tracking the state of a repo
        # branch, else assume it is tracking a package in dist/.
        if self.asset_file_name.startswith("github-"):
            # gen_code is enabled only when doing repos branch processing.
            self._disable_package_md5()
            self._disable_dist_test()
            self._enable_gen_code()
            self._enable_ta_regtest()
        else:
            self._enable_package_md5()
            self._enable_dist_test()
            self._disable_gen_code()
            # ta_regtest must work for src.tar.gz (needed for homebrew maintainer).
            if self.asset_file_name.endswith("-src.tar.gz"):
                self._enable_ta_regtest()
            else:
                self._disable_ta_regtest()

    @staticmethod
    def from_json(root_dir: str, data: dict) -> 'PackageDigest':
        """Build a validated PackageDigest from a parsed JSON dict.

        Exits the process on missing mandatory fields or invalid values.
        Note: root_dir is purposely not saved in JSON.
        """
        # For non-mandatory fields, a missing value is initialized as if the
        # action failed or was not done yet ("False"/"Unknown"), instead of
        # "" which would wrongly trip the validation below.
        pdigest = PackageDigest(
            root_dir=root_dir,
            asset_file_name=data.get("asset_file_name", ""),
            sources_digest=data.get("sources_digest", ""),
            builder_id=data.get("builder_id", ""),
            built_success=data.get("built_success", "False"),
            package_md5=data.get("package_md5", "Unknown"),
            gen_code_pass=data.get("gen_code_pass", "Unknown"),
            ta_regtest_pass=data.get("ta_regtest_pass", "Unknown"),
            dist_test_pass=data.get("dist_test_pass", "Unknown"),
        )

        if pdigest.asset_file_name == "" or pdigest.builder_id == "" or pdigest.sources_digest == "":
            print(f"Error: Missing mandatory field(s) in JSON: {data}")
            sys.exit(1)

        # Verify supported values.
        if pdigest.built_success not in ["True", "False"]:
            print(f"Error: Invalid value for built_success: {pdigest.built_success}")
            sys.exit(1)

        for pass_field in ("gen_code_pass", "ta_regtest_pass", "dist_test_pass"):
            value = getattr(pdigest, pass_field)
            if value not in ["Disabled", "Unknown", "True", "False"]:
                print(f"Error: Invalid value for {pass_field}: {value}")
                sys.exit(1)

        # Refresh the enabling/disabling of features.
        pdigest._update_features()

        return pdigest

    def to_json(self):
        """Return the JSON-serializable dict (root_dir purposely excluded)."""
        return {
            "asset_file_name": self.asset_file_name,
            "sources_digest": self.sources_digest,
            "builder_id": self.builder_id,
            "built_success": self.built_success,
            "package_md5": self.package_md5,
            "gen_code_pass": self.gen_code_pass,
            "ta_regtest_pass": self.ta_regtest_pass,
            "dist_test_pass": self.dist_test_pass,
        }

    def calculate_md5(self) -> str:
        """Return the MD5 of the packaged asset, or "Disabled" when not applicable."""
        if self.package_md5 == "Disabled":
            return "Disabled"
        package_file_path = path_join(self.root_dir, "dist", self.asset_file_name)
        return _calculate_md5(package_file_path)

    def clear_tests(self):
        """Reset every enabled test result back to "Unknown"."""
        if self.gen_code_pass != "Disabled":
            self.gen_code_pass = "Unknown"

        if self.ta_regtest_pass != "Disabled":
            self.ta_regtest_pass = "Unknown"

        if self.dist_test_pass != "Disabled":
            self.dist_test_pass = "Unknown"

    def are_all_tests_passed(self) -> bool:
        """True when every enabled test is "True" ("Disabled" tests are ignored)."""
        if self.gen_code_pass != "Disabled" and self.gen_code_pass != "True":
            return False
        if self.ta_regtest_pass != "Disabled" and self.ta_regtest_pass != "True":
            return False
        if self.dist_test_pass != "Disabled" and self.dist_test_pass != "True":
            return False
        return True

    # Enable/disable helpers: enabling a "Disabled" feature moves it to
    # "Unknown" (i.e. not yet run); any already-recorded result is preserved.
    def _enable_package_md5(self):
        if self.package_md5 == "Disabled":
            self.package_md5 = "Unknown"

    def _disable_package_md5(self):
        self.package_md5 = "Disabled"

    def _enable_gen_code(self):
        if self.gen_code_pass == "Disabled":
            self.gen_code_pass = "Unknown"

    def _disable_gen_code(self):
        self.gen_code_pass = "Disabled"

    def _enable_ta_regtest(self):
        if self.ta_regtest_pass == "Disabled":
            self.ta_regtest_pass = "Unknown"

    def _disable_ta_regtest(self):
        self.ta_regtest_pass = "Disabled"

    def _enable_dist_test(self):
        if self.dist_test_pass == "Disabled":
            self.dist_test_pass = "Unknown"

    def _disable_dist_test(self):
        self.dist_test_pass = "Disabled"

    def write(self):
        """Persist to dist/digests/<asset_file_name>.digest (creates the dir).

        Raises ValueError if the file cannot be read back identically.
        """
        digests_dir = _digests_path(self.root_dir)
        os.makedirs(digests_dir, exist_ok=True)

        filepath = _asset_file_name_to_filepath(self.root_dir, self.asset_file_name)

        with open(filepath, 'w') as file:
            json.dump(self.to_json(), file, indent=4)

        # Test that the file can be read back correctly.
        try:
            read_back = PackageDigest.read(self.root_dir, self.asset_file_name)
            if read_back != self:
                raise ValueError(f"Error reading back {filepath}: {read_back} != {self}")
        except Exception as e:
            raise ValueError(f"Error writing {filepath}: {e}")

    @staticmethod
    def read_or_create(root_dir: str, asset_file_name: str, sources_digest: str, builder_id: str) -> 'PackageDigest':
        """Load the digest file if present and readable, else create a default one.

        Exits the process if a new default file cannot be written.
        """
        filepath = _asset_file_name_to_filepath(root_dir, asset_file_name)
        create = False

        if not os.path.exists(filepath):
            create = True
        else:
            try:
                with open(filepath, 'r') as file:
                    data = json.load(file)
                return PackageDigest.from_json(root_dir, data)
            except Exception:
                # Corrupted/unreadable digest file: fall through and regenerate.
                create = True

        if create:
            pd = PackageDigest.default(root_dir, asset_file_name, sources_digest, builder_id)
            try:
                pd.write()
                return pd
            except Exception as e:
                print(f"Error creating {filepath}: {e}")
                sys.exit(1)

    @staticmethod
    def read(root_dir: str, asset_file_name: str) -> 'PackageDigest':
        """Load an existing digest file. Throws an exception if reading fails."""
        filepath = _asset_file_name_to_filepath(root_dir, asset_file_name)

        if not os.path.exists(filepath):
            raise FileNotFoundError(f"File not found: {filepath}")

        try:
            with open(filepath, 'r') as file:
                data = json.load(file)
            return PackageDigest.from_json(root_dir, data)
        except Exception as e:
            raise ValueError(f"Error reading {filepath}: {e}")

0 comments on commit 48cb63d

Please sign in to comment.