Skip to content

Commit

Permalink
Дополнение для сохранения в JSON (#177)
Browse files Browse the repository at this point in the history
* add characteristic in save progress

* add sol time&accuracy

* add Task, Parameters and creation_time for sd_item in save_progress

* change save&load

* Update method.py

* meaningless change

* add `__init__` in loadProgress
  • Loading branch information
YaniKolt authored Feb 3, 2024
1 parent 17dea87 commit 9a299fc
Show file tree
Hide file tree
Showing 5 changed files with 124 additions and 109 deletions.
3 changes: 3 additions & 0 deletions iOpt/method/method.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import math
import sys
from typing import Tuple
from time import time

import numpy as np

Expand Down Expand Up @@ -366,6 +367,8 @@ def finalize_iteration(self) -> None:
r"""
End the iteration, updates the iteration counter
"""
self.search_data.get_last_item().creation_time = time()
self.search_data.get_last_item().iterationNumber = self.iterations_count #будет ли работать в параллельном случае?
self.iterations_count += 1

def get_iterations_count(self) -> int:
Expand Down
18 changes: 16 additions & 2 deletions iOpt/method/process.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,15 +176,29 @@ def save_progress(self, file_name: str, mode = 'full') -> None:
:param file_name: file name.
"""
self.search_data.save_progress(file_name=file_name, mode=mode)
data = self.search_data.searchdata_to_json(mode=mode)
data['Parameters'] = []
data['Parameters'].append({
'eps': self.parameters.eps,
'r': self.parameters.r,
'iters_limit': self.parameters.iters_limit,
'start_point': self.parameters.start_point,
'number_of_parallel_points': self.parameters.number_of_parallel_points
})
with open(file_name, 'w') as f:
json.dump(data, f, indent='\t', separators=(',', ':'))
f.write('\n')

def load_progress(self, file_name: str, mode = 'full') -> None:
"""
Load the optimization process from a file
:param file_name: file name.
"""
self.search_data.load_progress(file_name=file_name, mode=mode)
with open(file_name) as json_file:
data = json.load(json_file)

self.search_data.json_to_searchdata(data=data, mode=mode)
self.method.iterations_count = self.search_data.get_count() - 2

for ditem in self.search_data:
Expand Down
178 changes: 88 additions & 90 deletions iOpt/method/search_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ def __init__(self, y: Point, x: np.double,
self.localR: np.double = -1.0
self.iterationNumber: int = -1
self.blocked: bool = False
self.creation_time = 0

def get_x(self) -> np.double:
"""
Expand Down Expand Up @@ -356,18 +357,15 @@ def get_last_items(self, N: int = 1) -> list[SearchDataItem]:
except Exception:
print("GetLastItems: List is empty")

def save_progress(self, file_name: str, mode = 'full'):
def searchdata_to_json(self, mode ='full') -> json: #что именно возвращает? По идее просто словарь с кучей вложений, но насколько понятно?
"""
Save the optimization process to a file
:param file_name: file name.
"""
data = {}
data['SearchDataItem'] = []
iternum = -2
num_iteration_best = -1
for dataItem in self._allTrials:

fvs = []
for fv in dataItem.function_values:
fvs.append({
Expand All @@ -388,13 +386,12 @@ def save_progress(self, file_name: str, mode = 'full'):
'localR': dataItem.localR,
'index': dataItem.get_index(),
'discrete_value_index': dataItem.get_discrete_value_index(),
'__z': dataItem.get_z()
'__z': dataItem.get_z(),
'creation_time': dataItem.creation_time,
'iterationNumber': dataItem.iterationNumber
})

if dataItem==self.solution.best_trials[0]:
num_iteration_best = iternum
iternum +=1

num_iterations_best = [] #в случае с mco - несколько лучших
data['best_trials'] = []
for dataItem in self.solution.best_trials: # сохранение всех лучших (если несколько, например, в mco)
for fv in dataItem.function_values:
Expand All @@ -416,9 +413,12 @@ def save_progress(self, file_name: str, mode = 'full'):
'index': dataItem.get_index(),
'discrete_value_index': dataItem.get_discrete_value_index(),
'__z': dataItem.get_z(),
#'iterationNumber': dataItem.iterationNumber # он больше нигде не используется. ПОЧЕМУ?!
'creation_time': dataItem.creation_time,
'iterationNumber': dataItem.iterationNumber
})

num_iterations_best.append(dataItem.iterationNumber)

if mode == 'full':
data['solution'] = []
data['solution'].append({
Expand All @@ -427,106 +427,104 @@ def save_progress(self, file_name: str, mode = 'full'):
'number_of_local_trials': self.solution.number_of_local_trials,
'solving_time': self.solution.solving_time,
'solution_accuracy': self.solution.solution_accuracy,
'num_iteration_best_trial': num_iteration_best
'num_iteration_best_trial': list(num_iterations_best)
})

data['float_variables'] = []
float_variables = []
for i in range(self.solution.problem.number_of_float_variables):
bounds = [self.solution.problem.lower_bound_of_float_variables[i],
self.solution.problem.upper_bound_of_float_variables[i]]
data['float_variables'].append({
float_variables.append({
str(self.solution.problem.float_variable_names[i]): (list(bounds)),
})

data['discrete_variables'] = []
discrete_variables = []
for i in range(self.solution.problem.number_of_discrete_variables):
data['discrete_variables'].append({
discrete_variables.append({
str(self.solution.problem.discrete_variable_names[i]):
(list(self.solution.problem.discrete_variable_values[i])),
})

with open(file_name, 'w') as f:
json.dump(data, f, indent='\t', separators=(',', ':'))
data['Task'] = []
data['Task'].append({
'float_variables': list(float_variables),
'discrete_variables': list(discrete_variables),
'name': self.solution.problem.name
})

return data

def load_progress(self, file_name: str, mode = 'full'):
def json_to_searchdata(self, data, mode ='full'):
"""
Load the optimization process from a file
:param file_name: file name.
"""

with open(file_name) as json_file:
data = json.load(json_file)

function_values = []
for trial in data['best_trials']:
for fv in trial['function_values']:
function_values.append(FunctionValue(
(FunctionType.OBJECTIV if fv['type'] == 1 else FunctionType.CONSTRAINT),
str(fv['functionID'])))
function_values[-1].value = np.double(fv['value'])

data_item = SearchDataItem(Point(trial['float_variables'], trial['discrete_variables']), trial['x'],
function_values,
trial['discrete_value_index'])
data_item.delta = trial['delta']
data_item.globalR = trial['globalR']
data_item.localR = trial['localR']
data_item.set_z(trial['__z'])
data_item.set_index(trial['index'])

self.solution.best_trials[0] = data_item
if mode == 'only search_data':
self.solution.solution_accuracy = min(data_item.delta, self.solution.solution_accuracy)

first_data_item = []
for trial in data['SearchDataItem'][:2]:
function_values = []
for fv in trial['function_values']:
function_values.append(FunctionValue(
(FunctionType.OBJECTIV if fv['type'] == 1 else FunctionType.CONSTRAINT),
str(fv['functionID'])))
function_values[-1].value = np.double(fv['value'])

first_data_item.append(
SearchDataItem(Point(trial['float_variables'], trial['discrete_variables']), trial['x'], function_values,
trial['discrete_value_index']))
first_data_item[-1].delta = trial['delta']
first_data_item[-1].globalR = trial['globalR']
first_data_item[-1].localR = trial['localR']
first_data_item[-1].set_index(trial['index'])

self.insert_first_data_item(first_data_item[0], first_data_item[1])

for trial in data['SearchDataItem'][2:]:
function_values = []
for p in data['best_trials']:

for fv in p['function_values']:
function_values.append(FunctionValue(
(FunctionType.OBJECTIV if fv['type'] == 1 else FunctionType.CONSTRAINT),
str(fv['functionID'])))
function_values[-1].value = np.double(fv['value'])

data_item = SearchDataItem(Point(p['float_variables'], p['discrete_variables']), p['x'],
function_values,
p['discrete_value_index'])
data_item.delta = p['delta'] # [-1] - обращение к последнему элементу
data_item.globalR = p['globalR']
data_item.localR = p['localR']
data_item.set_z(p['__z'])
data_item.set_index(p['index'])

self.solution.best_trials[0] = data_item
if mode == 'only search_data':
self.solution.solution_accuracy = min(data_item.delta, self.solution.solution_accuracy)

first_data_item = []

for p in data['SearchDataItem'][:2]:
function_values = []

for fv in p['function_values']:
function_values.append(FunctionValue(
(FunctionType.OBJECTIV if fv['type'] == 1 else FunctionType.CONSTRAINT),
str(fv['functionID'])))
function_values[-1].value = np.double(fv['value'])

first_data_item.append(
SearchDataItem(Point(p['float_variables'], p['discrete_variables']), p['x'], function_values,
p['discrete_value_index']))
first_data_item[-1].delta = p['delta']
first_data_item[-1].globalR = p['globalR']
first_data_item[-1].localR = p['localR']
first_data_item[-1].set_index(p['index'])

self.insert_first_data_item(first_data_item[0], first_data_item[1])

for p in data['SearchDataItem'][2:]:
function_values = []

for fv in p['function_values']:
function_values.append(FunctionValue(
(FunctionType.OBJECTIV if fv['type'] == 1 else FunctionType.CONSTRAINT),
str(fv['functionID'])))
function_values[-1].value = np.double(fv['value'])

data_item = SearchDataItem(Point(p['float_variables'], p['discrete_variables']),
p['x'], function_values, p['discrete_value_index'])
data_item.delta = p['delta']
data_item.globalR = p['globalR']
data_item.localR = p['localR']
data_item.set_z(p['__z'])
data_item.set_index(p['index'])

self.insert_data_item(data_item)

if mode == 'full': # а есть ли в этом смысл, если они все обнуляются? Аналогично с параметрами самой проблемы, их нет смысла выгружать
for p in data['solution']:
self.solution.number_of_global_trials = p['number_of_global_trials']
self.solution.number_of_local_trials = p['number_of_local_trials']
self.solution.solving_time = p['solving_time']
self.solution.solution_accuracy = p['solution_accuracy']
#сюда еще точность
for fv in trial['function_values']:
function_values.append(FunctionValue(
(FunctionType.OBJECTIV if fv['type'] == 1 else FunctionType.CONSTRAINT),
str(fv['functionID'])))
function_values[-1].value = np.double(fv['value'])

data_item = SearchDataItem(Point(trial['float_variables'], trial['discrete_variables']),
trial['x'], function_values, trial['discrete_value_index'])
data_item.delta = trial['delta']
data_item.globalR = trial['globalR']
data_item.localR = trial['localR']
data_item.set_z(trial['__z'])
data_item.creation_time = trial['creation_time']
data_item.set_index(trial['index'])

self.insert_data_item(data_item)

if mode == 'full':
for trial in data['solution']:
self.solution.number_of_global_trials = trial['number_of_global_trials']
self.solution.number_of_local_trials = trial['number_of_local_trials']
self.solution.solving_time = trial['solving_time']
self.solution.solution_accuracy = trial['solution_accuracy']

def __iter__(self):
# вернуть самую левую точку из дерева (ниже код проверить!)
Expand Down
Empty file.
34 changes: 17 additions & 17 deletions test/iOpt/method/loadProgress/test_load_progress.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,15 +39,16 @@ def test_Rastrigin2(self):
self.solver = Solver(self.problem, parameters=self.params)
self.sol100_ns = self.solver.solve()

with open('Rastrigin2_50_100.json') as json_file:
with open('Rastrigin2_50_100.json', 'r') as json_file:
data1 = json.load(json_file)

with open('Rastrigin2_100.json') as json_file:

with open('Rastrigin2_100.json', 'r') as json_file:
data2 = json.load(json_file)

self.assertEqual(self.sol50_100.best_trials, self.sol100_ns.best_trials)
self.assertEqual(data1['best_trials'], data2['best_trials'])
self.assertEqual(data1['SearchDataItem'], data2['SearchDataItem'])
self.assertEqual({key:value for (key,value) in data1['best_trials'][0].items() if key!= 'creation_time'}, {key:value for (key,value) in data2['best_trials'][0].items() if key!= 'creation_time'})
self.assertEqual({key:value for (key,value) in data1['SearchDataItem'][0].items() if key!= 'creation_time'}, {key:value for (key,value) in data2['SearchDataItem'][0].items() if key!= 'creation_time'})
self.assertEqual(data1['solution'][0]['number_of_global_trials'],
data2['solution'][0]['number_of_global_trials'])
self.assertEqual(data1['solution'][0]['number_of_local_trials'],
Expand Down Expand Up @@ -89,8 +90,10 @@ def test_RastriginInt(self):
data2 = json.load(json_file)

self.assertEqual(self.sol50_100.best_trials, self.sol100_ns.best_trials)
self.assertEqual(data1['best_trials'], data2['best_trials'])
self.assertEqual(data1['SearchDataItem'], data2['SearchDataItem'])
self.assertEqual({key: value for (key, value) in data1['best_trials'][0].items() if key != 'creation_time'},
{key: value for (key, value) in data2['best_trials'][0].items() if key != 'creation_time'})
self.assertEqual({key: value for (key, value) in data1['SearchDataItem'][0].items() if key != 'creation_time'},
{key: value for (key, value) in data2['SearchDataItem'][0].items() if key != 'creation_time'})

pathlist = ['RastriginInt_50.json', 'RastriginInt_50_100.json', 'RastriginInt_100.json']
for path in pathlist:
Expand Down Expand Up @@ -128,8 +131,10 @@ def test_GKLS(self):
data2 = json.load(json_file)

self.assertEqual(self.sol50_100.best_trials, self.sol100_ns.best_trials)
self.assertEqual(data1['best_trials'], data2['best_trials'])
self.assertEqual(data1['SearchDataItem'], data2['SearchDataItem'])
self.assertEqual({key: value for (key, value) in data1['best_trials'][0].items() if key != 'creation_time'},
{key: value for (key, value) in data2['best_trials'][0].items() if key != 'creation_time'})
self.assertEqual({key: value for (key, value) in data1['SearchDataItem'][0].items() if key != 'creation_time'},
{key: value for (key, value) in data2['SearchDataItem'][0].items() if key != 'creation_time'})

pathlist = ['GKLS_50.json', 'GKLS_50_100.json', 'GKLS_100.json']
for path in pathlist:
Expand Down Expand Up @@ -166,21 +171,16 @@ def test_Stronginc2(self):
data2 = json.load(json_file)

self.assertEqual(self.sol50_100.best_trials, self.sol100_ns.best_trials)
self.assertEqual(data1['best_trials'], data2['best_trials'])
self.assertEqual(data1['SearchDataItem'], data2['SearchDataItem'])
self.assertEqual({key: value for (key, value) in data1['best_trials'][0].items() if key != 'creation_time'},
{key: value for (key, value) in data2['best_trials'][0].items() if key != 'creation_time'})
self.assertEqual({key: value for (key, value) in data1['SearchDataItem'][0].items() if key != 'creation_time'},
{key: value for (key, value) in data2['SearchDataItem'][0].items() if key != 'creation_time'})

pathlist = ['Stronginc2_50.json', 'Stronginc2_50_100.json', 'Stronginc2_100.json']
for path in pathlist:
if os.path.isfile(path):
os.remove(path)






# Executing the tests in the above test case class


if __name__ == "__main__":
unittest.main()

0 comments on commit 9a299fc

Please sign in to comment.