Skip to content

Commit

Permalink
Corrections on howtos
Browse files Browse the repository at this point in the history
  • Loading branch information
detlefarend committed Aug 12, 2024
1 parent 4f8b7a6 commit a55d328
Show file tree
Hide file tree
Showing 2 changed files with 44 additions and 39 deletions.
28 changes: 15 additions & 13 deletions test/howtos/oa/howto_oa_ca_004_run_kmeans_2d_dynamic_normalized.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,11 @@
## -- 2024-02-02 1.1.0 SY Parameters Optimization
## -- 2024-04-30 1.2.0 DA Alignment with MLPro 2
## -- 2024-05-27 1.2.1 SY Printing clusters' sizes
## -- 2024-08-12 1.3.0 DA Alignment with MLPro 2
## -------------------------------------------------------------------------------------------------

"""
Ver. 1.2.1 (2024-05-27)
Ver. 1.3.0 (2024-08-12)
This module demonstrates online cluster analysis of normalized dynamic 2D random point clouds using the wrapped
River implementation of stream algorithm KMeans. In this regard, the systematics of sub-framework
Expand All @@ -40,7 +41,7 @@

from mlpro.bf.streams.streams import *
from mlpro.bf.various import Log
from mlpro.bf.streams.tasks.windows import Window
from mlpro.bf.streams.tasks.windows import RingBuffer
from mlpro.oa.streams import *
from mlpro_int_river.wrappers.clusteranalyzers import WrRiverKMeans2MLPro

Expand Down Expand Up @@ -75,20 +76,21 @@ def _setup(self, p_mode, p_ada: bool, p_visualize: bool, p_logging):
# 1.2.2 Creation of tasks and add them to the workflow

# Window
task_window = Window(p_buffer_size=100,
p_delay=False,
p_enable_statistics=True,
p_name='#1: Sliding Window',
p_duplicate_data=True,
p_visualize=p_visualize,
p_logging=p_logging)
task_window = RingBuffer( p_buffer_size=100,
p_delay=False,
p_enable_statistics=True,
p_name='#1: Sliding Window',
p_duplicate_data=True,
p_visualize=p_visualize,
p_logging=p_logging )

workflow.add_task(p_task=task_window)

# Boundary detector
task_bd = BoundaryDetector(p_name='#2: Boundary Detector',
p_ada=True,
p_visualize=p_visualize,
p_logging=p_logging)
task_bd = BoundaryDetector( p_name='#2: Boundary Detector',
p_ada=True,
p_visualize=p_visualize,
p_logging=p_logging )
workflow.add_task(p_task=task_bd, p_pred_tasks=[task_window])

# MinMax-Normalizer
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,11 @@
## -- 2024-02-23 1.0.3 SY Parameters Optimization
## -- 2024-04-30 1.1.0 DA Alignment with MLPro 2
## -- 2024-05-27 1.1.1 SY Printing clusters' sizes
## -- 2024-08-12 1.2.0 DA Alignment with MLPro 2
## -------------------------------------------------------------------------------------------------

"""
Ver. 1.1.1 (2024-05-27)
Ver. 1.2.0 (2024-08-12)
This module demonstrates online cluster analysis of normalized dynamic 2D random point clouds using the wrapped
River implementation of stream algorithm STREAMKMeans. In this regard, the systematics of sub-framework
Expand All @@ -40,7 +41,7 @@

from mlpro.bf.streams.streams import *
from mlpro.bf.various import Log
from mlpro.bf.streams.tasks.windows import Window
from mlpro.bf.streams.tasks.windows import RingBuffer
from mlpro.oa.streams import *
from mlpro_int_river.wrappers.clusteranalyzers import WrRiverStreamKMeans2MLPro

Expand All @@ -65,30 +66,32 @@ def _setup(self, p_mode, p_ada: bool, p_visualize: bool, p_logging):
# 1.2 Set up a stream workflow based on a custom stream task

# 1.2.1 Creation of a workflow
workflow = OAWorkflow(p_name='Cluster Analysis using StreamKMeans@River',
p_range_max=OAWorkflow.C_RANGE_NONE,
p_ada=p_ada,
p_visualize=p_visualize,
p_logging=p_logging)
workflow = OAWorkflow( p_name='Cluster Analysis using StreamKMeans@River',
p_range_max=OAWorkflow.C_RANGE_NONE,
p_ada=p_ada,
p_visualize=p_visualize,
p_logging=p_logging )


# 1.2.2 Creation of tasks and add them to the workflow

# Window
task_window = Window(p_buffer_size=100,
p_delay=False,
p_enable_statistics=True,
p_name='#1: Sliding Window',
p_duplicate_data=True,
p_visualize=p_visualize,
p_logging=p_logging)
task_window = RingBuffer( p_buffer_size=100,
p_delay=False,
p_enable_statistics=True,
p_name='#1: Sliding Window',
p_duplicate_data=True,
p_visualize=p_visualize,
p_logging=p_logging )

workflow.add_task(p_task=task_window)

# Boundary detector
task_bd = BoundaryDetector(p_name='#2: Boundary Detector',
p_ada=True,
p_visualize=p_visualize,
p_logging=p_logging)
task_bd = BoundaryDetector( p_name='#2: Boundary Detector',
p_ada=True,
p_visualize=p_visualize,
p_logging=p_logging )

workflow.add_task(p_task=task_bd, p_pred_tasks=[task_window])

# MinMax-Normalizer
Expand All @@ -104,14 +107,14 @@ def _setup(self, p_mode, p_ada: bool, p_visualize: bool, p_logging):
workflow.add_task(p_task = task_norm_minmax, p_pred_tasks=[task_bd])

# Cluster Analyzer
task_clusterer = WrRiverStreamKMeans2MLPro(p_name='#4: StreamKMeans@River',
p_chunk_size=500,
p_n_clusters=5,
p_halflife=1.00,
p_sigma=0.5,
p_seed=54,
p_visualize=p_visualize,
p_logging=p_logging )
task_clusterer = WrRiverStreamKMeans2MLPro( p_name='#4: StreamKMeans@River',
p_chunk_size=500,
p_n_clusters=5,
p_halflife=1.00,
p_sigma=0.5,
p_seed=54,
p_visualize=p_visualize,
p_logging=p_logging )

workflow.add_task(p_task = task_clusterer, p_pred_tasks=[task_norm_minmax])

Expand Down

0 comments on commit a55d328

Please sign in to comment.