Commit 7056acc

fix: updated docstrings
Kohulan committed Jul 2, 2024
1 parent 9db9eef commit 7056acc
Showing 4 changed files with 16 additions and 11 deletions.
4 changes: 3 additions & 1 deletion STOUT/predictor_demo.py
@@ -17,7 +17,9 @@
 file_out.close()
 
 # STOUT - SMILES to IUPAC names example
-file_smiles = open("test_data/SMILES_test.txt", "r") # file is available in the Github repository
+file_smiles = open(
+    "test_data/SMILES_test.txt", "r"
+) # file is available in the Github repository
 file_out = open("IUPAC_predictions", "w")
 
 for i, line in enumerate(file_smiles):
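The hunk above only rewraps a long open() call; the surrounding demo loop reads SMILES strings from a file and writes predicted IUPAC names. For context, a minimal stand-alone sketch of that workflow, assuming the translate_forward helper described in the STOUT README (file paths are placeholders):

# Sketch of the SMILES-to-IUPAC demo loop; paths are placeholders.
from STOUT import translate_forward  # helper described in the STOUT README

with open("test_data/SMILES_test.txt", "r") as file_smiles, \
        open("IUPAC_predictions.txt", "w") as file_out:
    for i, line in enumerate(file_smiles):
        smiles = line.strip()
        if not smiles:
            continue
        iupac_name = translate_forward(smiles)  # SMILES string in, IUPAC name out
        file_out.write(f"{smiles}\t{iupac_name}\n")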
5 changes: 4 additions & 1 deletion STOUT/repack/transformer_model_4_repack.py
@@ -2,6 +2,7 @@
 import numpy as np
 from typing import Tuple, Optional
 
+
 def get_angles(pos: int, i: int, d_model: int) -> np.ndarray:
     """
     Computes the angles for the positional encoding.
@@ -44,7 +45,9 @@ def positional_encoding(position: int, d_model: int) -> tf.Tensor:
     return tf.cast(pos_encoding, dtype=tf.float32)
 
 
-def scaled_dot_product_attention(q: tf.Tensor, k: tf.Tensor, v: tf.Tensor, mask: Optional[tf.Tensor] = None) -> Tuple[tf.Tensor, tf.Tensor]:
+def scaled_dot_product_attention(
+    q: tf.Tensor, k: tf.Tensor, v: tf.Tensor, mask: Optional[tf.Tensor] = None
+) -> Tuple[tf.Tensor, tf.Tensor]:
     """
     Calculate the attention weights.
     q, k, v must have matching leading dimensions.
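The docstring touched above describes scaled dot-product attention. As a reference, a minimal self-contained sketch of that computation, using the standard softmax(Q K^T / sqrt(d_k)) V formulation; the repository's implementation may differ in details such as mask handling:

import tensorflow as tf
from typing import Optional, Tuple


def scaled_dot_product_attention_sketch(
    q: tf.Tensor, k: tf.Tensor, v: tf.Tensor, mask: Optional[tf.Tensor] = None
) -> Tuple[tf.Tensor, tf.Tensor]:
    # Attention(Q, K, V) = softmax(Q K^T / sqrt(d_k)) V
    matmul_qk = tf.matmul(q, k, transpose_b=True)  # (..., seq_len_q, seq_len_k)
    dk = tf.cast(tf.shape(k)[-1], tf.float32)
    scaled_logits = matmul_qk / tf.math.sqrt(dk)
    if mask is not None:
        scaled_logits += mask * -1e9  # masked positions get ~zero attention weight
    attention_weights = tf.nn.softmax(scaled_logits, axis=-1)
    output = tf.matmul(attention_weights, v)  # (..., seq_len_q, depth_v)
    return output, attention_weights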
4 changes: 2 additions & 2 deletions STOUT/stout.py
@@ -73,8 +73,8 @@ def load_reverse_translation_utils() -> tuple:
     """
     Loads necessary utilities for reverse translation from pickle files.
 
-    This function loads the input and target tokenizers as well as the
-    maximum length setting for input sequences. The tokenizers are loaded
+    This function loads the input and target tokenizers as well as the
+    maximum length setting for input sequences. The tokenizers are loaded
     from pickle files located in the 'assets' directory under the default path.
 
     Returns:
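The docstring above says the function unpickles two tokenizers and a maximum sequence length from the 'assets' directory. A minimal sketch of that pattern, with hypothetical asset file names (the repository's actual names and paths may differ):

import os
import pickle


def load_reverse_translation_utils_sketch(assets_dir: str = "assets") -> tuple:
    # File names below are hypothetical placeholders, not the repository's real assets.
    with open(os.path.join(assets_dir, "tokenizer_input.pkl"), "rb") as handle:
        inp_lang = pickle.load(handle)          # input-side tokenizer
    with open(os.path.join(assets_dir, "tokenizer_target.pkl"), "rb") as handle:
        targ_lang = pickle.load(handle)         # target-side tokenizer
    with open(os.path.join(assets_dir, "max_length_inp.pkl"), "rb") as handle:
        inp_max_length = pickle.load(handle)    # maximum input sequence length
    return inp_lang, targ_lang, inp_max_length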
14 changes: 7 additions & 7 deletions docs/conf.py
@@ -12,13 +12,13 @@
 # -- Project information -----------------------------------------------------
 # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
 
-project = 'STOUT - Smiles-TO-iUpac-Translator'
+project = "STOUT - Smiles-TO-iUpac-Translator"
 version = STOUT.__version__
 current_year = datetime.today().year
 copyright = "2021-{}, Kohulan Rajan at the Friedrich Schiller University Jena".format(
     current_year
 )
-author = 'Kohulan Rajan'
+author = "Kohulan Rajan"
 rst_prolog = """
 .. |current_year| replace:: {}
 """.format(
@@ -37,8 +37,8 @@
     "sphinx.ext.viewcode",
 ]
 
-templates_path = ['_templates']
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+templates_path = ["_templates"]
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
 source_suffix = ".rst"
 
 # The master toctree document.
@@ -55,7 +55,7 @@
 # -- Options for HTML output -------------------------------------------------
 # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
 
-html_theme = 'alabaster'
+html_theme = "alabaster"
 
 html_theme_options = {
     "light_css_variables": {
@@ -83,6 +83,6 @@
     "top_of_page_button": "edit",
 }
 
-html_static_path = ['_static']
+html_static_path = ["_static"]
 html_favicon = "_static/STOUT.svg"
-html_logo = "_static/STOUT.png"
+html_logo = "_static/STOUT.png"
