Skip to content

Commit

Permalink
🐛 Make grabrepos sensible, various bugfixes (#18)
Browse files Browse the repository at this point in the history
  • Loading branch information
Thalhammer authored Jul 4, 2024
1 parent 35eeadd commit bfcdd2c
Show file tree
Hide file tree
Showing 9 changed files with 38 additions and 347 deletions.
4 changes: 2 additions & 2 deletions content/implementations.md
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ Please refer on websites directly to these labels to use always current version.

## SVG version

[![works with MQTT Homie](/img/works-with-homie.svg "[works with MQTT Homie")](https://homieiot.github.io/)
[![works with MQTT Homie](img/works-with-homie.svg "works with MQTT Homie")](https://homieiot.github.io/)

HTML snippet:

Expand All @@ -121,7 +121,7 @@ HTML snippet:

## PNG version

[![works with MQTT Homie](/img/works-with-homie.png "works with MQTT Homie")](https://homieiot.github.io/)
[![works with MQTT Homie](img/works-with-homie.png "works with MQTT Homie")](https://homieiot.github.io/)

HTML snippet:

Expand Down
105 changes: 17 additions & 88 deletions grabrepos.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,61 +29,14 @@ def readyaml():
print(exc)
exit(-1)

# Split a markdown content by its 2nd level headings and return the array
def split_by_headings(data):
    """Split markdown *data* into its level-2 ("## ") sections.

    Text before the first level-2 heading is discarded; each returned
    section starts with the "\\n## " marker that introduced it.
    """
    marker = "\n## "
    # Collect the start offset of every level-2 heading.
    starts = []
    pos = data.find(marker)
    while pos != -1:
        starts.append(pos)
        pos = data.find(marker, pos + 1)
    # Each section runs from its heading to the next one (or end of text).
    ends = starts[1:] + [len(data)]
    return [data[a:b] for a, b in zip(starts, ends)]

# Filter the given array with markdown 2nd level headings.
# Keep everything that is mentioned in "keepsections".
def filter_by_headings(sections, keepsections):
    """Return the sections whose level-2 heading matches *keepsections*.

    A section is kept when its heading starts with any entry of
    *keepsections* (prefix match); kept sections are whitespace-stripped.
    Returns an empty list when *keepsections* is empty.

    Fix: the original inner loop appended a section once per matching
    keep-entry, so overlapping prefixes (e.g. "Top" and "Topology")
    duplicated a section in the output.  Each section is now kept at
    most once.
    """
    if not keepsections:
        return []
    # str.startswith accepts a tuple of prefixes: one test per section.
    prefixes = tuple("\n## " + keep for keep in keepsections)
    return [section.strip() for section in sections if section.startswith(prefixes)]

def remaining_headings(sections, removesections):
    """Return the stripped sections whose heading is NOT in *removesections*.

    Exclusion uses a prefix match on the "## <name>" heading line.

    NOTE(review): an empty *removesections* yields [] (nothing is kept),
    mirroring filter_by_headings -- confirm callers rely on this before
    changing it.
    """
    if not removesections:
        return []
    prefixes = tuple("\n## " + name for name in removesections)
    return [section.strip() for section in sections if not section.startswith(prefixes)]

# Create a destination filename, given the repo name ("core","ota" etc),
# and the tag name ("master","v2.0").
def dest_filepath(reponame, refname):
    """Build the output filename stem for a repo/ref pair.

    e.g. ("Core", "v2.0") -> "spec-core-v2_0"
    """
    slug = reponame.replace(" ", "-").lower()
    ref = refname.replace(".", "_")
    return "-".join(("spec", slug, ref))

# Copy files to "docs/". Filter sections of file first. Add generated header to file.
def write_file(reponame, targetdir, srcdir, keepsections, filename, data, tagname, date, absurl):
sections = split_by_headings(data)
sections = filter_by_headings(sections, keepsections)

if len(sections)<=0:
return

def write_file(reponame, targetdir, srcdir, filename, data, tagname, date, absurl):
# Generate version select box html code
# Hide page in the left nav panel if not the latest
header = "---\n"
Expand All @@ -96,35 +49,13 @@ def write_file(reponame, targetdir, srcdir, keepsections, filename, data, tagnam
header += "---\n"

# New file content and filename
filecontent = header + "\n".join(sections)
filecontent = header + "\n" + data
filepath = os.path.join(targetdir,dest_filepath(reponame, tagname)+".md")

with open(filepath, "w") as text_file:
text_file.write(filecontent)
print("Wrote file: "+filepath)

def write_preface(tagname,reponame,repourl,localpath,keepsections):
    """Write the introductory ("preface") page for one convention repo.

    Reads ``convention.md`` from *localpath*, drops the sections listed in
    *keepsections* (those are published as the spec body elsewhere) and
    keeps the remainder, then writes it with a front-matter header to
    ``<targetdir>/preface/<stem>.md``.

    NOTE(review): ``targetdir`` is not a parameter here -- this reads a
    module-level ``targetdir``; confirm it is defined before this is called.
    """
    filepath = os.path.join(localpath,"convention.md")
    with open(filepath, 'r') as myfile:
        localdata = myfile.read() + "\n"
    # Keep only the sections NOT mentioned in keepsections (the preface).
    sections = split_by_headings(localdata)
    sections = remaining_headings(sections, keepsections)
    # Link back to the exact tag on the forge (".git" suffix stripped).
    absurl = repourl.replace(".git","")+"/tree/" + tagname
    # Front-matter header consumed by the static site generator.
    header = "---\n"
    header += "path: "+absurl+"\n"
    header += "source: convention.md\n"
    header += "convention: "+reponame+"\n"
    header += "preface: true\n"
    header += "---\n"
    filecontent = header + "\n".join(sections)
    # `filepath` is reused: it now points at the output file, not the input.
    filepath = os.path.join(targetdir,"preface",dest_filepath(reponame, tagname)+".md")
    prefacedir = os.path.join(targetdir,"preface")
    if not os.path.exists(prefacedir):
        os.makedirs(prefacedir)
    with open(filepath, "w") as text_file:
        text_file.write(filecontent)
    print("Wrote file: "+filepath)

def write_diff_file(targetdir,reponame,ref,nextref):
outputfilename = dest_filepath(reponame, nextref.name)+"-diff.html"
header = "---\n"
Expand All @@ -146,7 +77,7 @@ def write_diff_file(targetdir,reponame,ref,nextref):

# Clone a repository url (or update repo), checkout all tags.
# Call copy_files for each checked out working directory
def checkout_repo(targetdir, reponame, repourl, filepattern, checkoutdir, keepsections, update_repos):
def checkout_repo(targetdir, reponame, repourl, checkoutdir, update_repos):
localpath = os.path.join(checkoutdir,reponame)
if os.path.exists(localpath):
repo = Repo(localpath)
Expand All @@ -157,6 +88,9 @@ def checkout_repo(targetdir, reponame, repourl, filepattern, checkoutdir, keepse
print("Clone "+reponame+" to "+localpath)
repo = Repo.clone_from(repourl, localpath)

if not os.path.exists(targetdir):
os.makedirs(targetdir)

# Add "develop" and all tags
refs = []
refs.append(repo.heads.develop)
Expand All @@ -165,30 +99,25 @@ def checkout_repo(targetdir, reponame, repourl, filepattern, checkoutdir, keepse
# Get all preface sections from the latest develop version
repo.head.reference = repo.heads.develop
repo.head.reset(index=True, working_tree=True)
write_preface(repo.head.reference.name,reponame,repourl,localpath,keepsections)

g = Git(localpath)
# Combine all files of a repo and create one specification file out of it
for ref in refs:
repo.head.reference = ref
repo.head.reset(index=True, working_tree=True)
data = ""
mainfile = ""
for filename in fnmatch.filter(os.listdir(localpath), filepattern):
filepath = os.path.join(localpath,filename)
with open(filepath, 'r') as myfile:
localdata = myfile.read() + "\n"
# Check if the file has relevant sections
sections = split_by_headings(localdata)
sections = filter_by_headings(sections, keepsections)
if len(sections)<=0:
continue
data += localdata
mainfile = filename
mainfile = "README.md"
if os.path.exists(os.path.join(localpath,"convention.md")):
mainfile = "convention.md"
with open(os.path.join(localpath,mainfile), 'r') as myfile:
data = myfile.read() + "\n"
# Remove everything before the first minor heading to avoid headers and (on the website) broken images
data = data[data.find('##'):]
# Add an artificial level 1 heading to fix numbering of topics
data = "#\n\n" + data
tagname = ref.name
date = ref.commit.committed_datetime
absurl = repourl.replace(".git","")+"/tree/"+ref.name
write_file(reponame, targetdir, localpath, keepsections, mainfile, data, tagname, date, absurl)
write_file(reponame, targetdir, localpath, mainfile, data, tagname, date, absurl)

refs = []
refs.extend(repo.tags)
Expand Down Expand Up @@ -229,4 +158,4 @@ def recreate_dir(file_path, clean):
os.makedirs(targetdir)
for entry in controlfile['specifications']:
if not 'disabled' in entry or not entry['disabled']:
checkout_repo(targetdir, entry['name'], entry['repo'], entry['filepattern'],checkoutdir, entry['keepsections'], update_repos)
checkout_repo(targetdir, entry['name'], entry['repo'], checkoutdir, update_repos)
5 changes: 0 additions & 5 deletions multiversion.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,18 +7,13 @@ updaterepos: false
specifications:
- name: Core
repo: https://github.com/homieiot/convention.git
filepattern: '*.md'
keepsections: ['Topology','Convention','Arrays','Broadcast Channel']
- name: OTA
repo: https://github.com/homieiot/convention-ota.git
filepattern: 'convention.md'
disabled: true
- name: property_types
repo: https://github.com/homieiot/convention-types.git
filepattern: 'convention.md'
disabled: true
- name: statistics
repo: https://github.com/homieiot/convention-stats.git
filepattern: 'convention.md'
disabled: true

92 changes: 8 additions & 84 deletions themes/b4/assets/js/main.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,93 +4,17 @@
* In this file the document progress bar and ajax page loading are realized
*/

// Fetch `newUrl`, swap #mainContent and #mainmenu with the fetched page's
// versions, announce MainContentChanged, and scroll to the requested anchor
// (or back to the top). Any failure falls back to a hard /404.html redirect.
function loadUrl(newUrl) {
    fetch(newUrl)
        .then(function (response) {
            if (!response.ok)
                throw new Error('Network response was not ok.', response);
            return response.text();
        })
        .then(function (text) {
            return new DOMParser().parseFromString(text, "text/html");
        })
        .then(function (doc) {
            if (doc === null) return;

            const content = doc.getElementById("mainContent");
            const menu = doc.getElementById("mainmenu");
            if (content === null || menu === null) {
                console.log("elements missing!")
                return;
            }

            // Replace title and both dynamic regions with the fetched ones.
            document.title = doc.title;
            document.getElementById("mainContent").replaceWith(content);
            document.getElementById("mainmenu").replaceWith(menu);

            // Let listeners (e.g. TOC rebuild) react to the swap.
            document.dispatchEvent(new Event('MainContentChanged'));

            location.hash = newUrl.hash;

            const anchor = newUrl.hash
                ? document.getElementById(newUrl.hash.replace("#", ""))
                : null;
            if (anchor)
                anchor.scrollIntoView({ behavior: 'smooth' });
            else
                window.scroll({ top: 0, left: 0, behavior: 'smooth' });
        })
        .catch(function (error) {
            console.log('Fetch failed', error);
            window.location.href = "/404.html"
        });
}

// Decide how to navigate to `newUrl` from a (possibly intercepted) click:
// other domains keep default behaviour, same-page anchor changes update the
// hash, and other pages on this site load via ajax + pushState.
function changeToURL(newUrl, event) {
    // External links should instead open in a new tab
    // var domain = window.location.origin;
    if (newUrl.hostname !== window.location.hostname) {
        // Other domain -> default behaviour
    } else if (newUrl.pathname === window.location.pathname) {
        // Only the anchor (possibly) changed.
        // BUG FIX: the original `else` was dangling -- it bound to the inner
        // `if (event)` instead of the hash comparison, so clicking a link to
        // a *different* anchor on the same page did nothing at all.
        if (newUrl.hash === window.location.hash) {
            if (event) event.preventDefault(); // Same url -> do nothing
        } else {
            location.hash = newUrl.hash;
            //location.reload();
        }
    } else {
        // Same host, different page -> load it via ajax.
        if (event) event.preventDefault();
        loadUrl(newUrl);
        history.pushState({consider:true} /*stateObj*/, "" /*title*/, newUrl);
    }
}

// Page bootstrap: wires up the reading-progress bar, TOC regeneration,
// history/popstate handling and ajax link interception.
document.addEventListener("DOMContentLoaded", () => {
    // document read progress bar
    var progress = document.querySelector('.progress')
    // NOTE(review): this appears to be a diff artifact -- a second,
    // `if (progress)`-guarded copy of this scroll listener is registered
    // below. This unguarded one would throw when `.progress` is absent;
    // confirm which copy belongs in the final file.
    document.addEventListener('scroll', function() {
        const st = 'scrollTop';
        const sh = 'scrollHeight';
        // Scrolled fraction of the document, expressed as a percentage.
        var scroll = (document.documentElement[st]||document.body[st]) / ((document.documentElement[sh]||document.body[sh]) - document.documentElement.clientHeight) * 100;
        progress.style.setProperty('--scroll', scroll + '%');
    });

    // Rebuild the table of contents each time the main content is swapped.
    // NOTE(review): initTOC is defined elsewhere -- not visible in this view.
    document.addEventListener('MainContentChanged', function() {
        const varlist = initTOC({selector:'h2, h3, h4, h5, h6',overwrite:false,prefix:'toc' });
        document.querySelectorAll(".toc").forEach(e => e.appendChild(varlist.cloneNode(true)));
    });

    // Back/forward navigation: re-fetch only for states this code pushed.
    window.addEventListener("popstate", e => {
        if (e.state && e.state.consider)
            loadUrl(new URL(document.location));
    });
    // Guarded duplicate of the scroll listener above (see NOTE).
    if (progress) {
        document.addEventListener('scroll', function () {
            const st = 'scrollTop';
            const sh = 'scrollHeight';
            var scroll = (document.documentElement[st] || document.body[st]) / ((document.documentElement[sh] || document.body[sh]) - document.documentElement.clientHeight) * 100;
            progress.style.setProperty('--scroll', scroll + '%');
        });
    }

    // Tag the initial history entry so popstate can recognise it.
    history.replaceState({consider:true} /*stateObj*/, "" /*title*/, document.location);
    document.dispatchEvent(new Event('MainContentChanged'));
    window.loaded = true;

    // Ajax loading of pages. Intercept link clicks.
    document.body.addEventListener("click", event => {
        if (event.target.tagName !== "A" ||
            event.target.dataset["fullreload"]) return;
        if (history === null || event.target.href === "") return;

        changeToURL(new URL(event.target.href), event);
    })
});
Loading

0 comments on commit bfcdd2c

Please sign in to comment.