diff --git a/.gitignore b/.gitignore
index 5075eef..f46e7dd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,97 +1,106 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

+# latex
+*.aux
+*.bbl
+*.blg
+*.log
+*.tcp
+
+# solver_comparison
+examples/solver_comparison/gfx/*

*.vpp.lck
.pytest_cache/
*.vpp.bak_*
python_tests_xml

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
.idea/

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log

# Sphinx documentation
docs/_build/
doc/

# PyBuilder
target/

#Ipython Notebook
.ipynb_checkpoints

#Pycharm files
*.iml

# merging stuff
*.orig
*~

# Paths in repository
mcml.py

# images etc
*.tif
*.nrrd
*.caffemodel

# C++ stuff
build*
*.user

hyppopy/tests/test_snipped_000.py
hyppopy/tests/test_snipped_001.py
hyppopy/tests/test_snipped_002.py
hyppopy/tests/test_snipped_003.py
hyppopy/tests/test_snipped_004.py
hyppopy/tests/test_snipped_005.py
hyppopy/tests/test_snipped_006.py
diff --git a/README.md b/README.md
index a5f1b8d..f067979 100644
--- a/README.md
+++ b/README.md
@@ -1,379 +1,379 @@
![docs_title_logo](./resources/docs_title_logo.png)
# A Hyper-Parameter Optimization Toolbox
## What is Hyppopy?

Hyppopy is a Python toolbox for blackbox optimization. Its purpose is to offer a unified and easy-to-use interface to a collection of solver libraries. Currently provided solvers are:

* [Hyperopt](http://hyperopt.github.io/hyperopt/)
* [Optunity](https://optunity.readthedocs.io/en/latest/user/index.html)
* [Optuna](https://optuna.org/)
* Quasi-Randomsearch Solver
* Randomsearch Solver
* Gridsearch Solver

## Installation

1. clone the [Hyppopy](http://github.com) project from GitHub
2. (create a virtual environment), open a console (with your activated virtual env) and go to the hyppopy root folder
3. ```$ pip install -r requirements.txt```
4. ```$ python setup.py install``` (for normal usage) or ```$ python setup.py develop``` (if you want to join the hyppopy development *hooray*)

## How to use Hyppopy?

#### The Hyperparameter Space

Hyppopy defines a common hyperparameter space description, whatever solver is used. A hyperparameter description includes the following fields:

* domain: the domain defines how the solver samples the parameter space, options are:
    * uniform: samples the data range [a,b] evenly, where b>a
    * normal: samples the data range [a,b] using a normal distribution with mu=a+(b-a)/2, sigma=(b-a)/6, where b>a
    * loguniform: samples the data range [a,b] logarithmically using e^x by sampling the exponent range x=[log(a), log(b)] uniformly, where a>0 and b>a
    * categorical: is used to define a data list
* data: in case of the categorical domain, data is a list; all other domains expect a range [a, b]
* type: the parameter data type as string 'int', 'float' or 'str'

An exception must be kept in mind when using the GridsearchSolver. The gridsearch additionally needs a number of samples per domain, which must be set using the field: frequency.

#### The HyppopyProject class

The HyppopyProject class takes care of all settings necessary for the solver and your workflow. To set up a HyppopyProject instance we can use a nested dictionary or the class's member functions, respectively.

```python
# Import the HyppopyProject class
from hyppopy.HyppopyProject import HyppopyProject

# Create a nested dict with a section hyperparameter. We define a two-dimensional
# hyperparameter space with a numerical dimension named myNumber of type float and
# a uniform sampling. The second dimension is a categorical parameter of type string.
config = {
    "hyperparameter": {
        "myNumber": {
            "domain": "uniform",
            "data": [0, 100],
            "type": float
        },
        "myOption": {
            "domain": "categorical",
            "data": ["a", "b", "c"],
            "type": str
        }
    }}

# Create a HyppopyProject instance and pass the config dict to
# the constructor. Alternatively, one can use the set_config method.
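# (A minimal sketch of the set_config alternative; this assumes set_config
# accepts the same nested dict as the constructor:
#     project = HyppopyProject()
#     project.set_config(config)
# )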
project = HyppopyProject(config=config)

# We can also add hyperparameters using the add_hyperparameter method
project = HyppopyProject()
project.add_hyperparameter(name="myNumber", domain="uniform", data=[0, 100], dtype=float)
project.add_hyperparameter(name="myOption", domain="categorical", data=["a", "b", "c"], dtype=str)
```

Additional settings for the solver or custom parameters can be set either as additional entries in the config dict, or via the methods set_settings or add_setting:

```python
from hyppopy.HyppopyProject import HyppopyProject

config = {
    "hyperparameter": {
        "myNumber": {
            "domain": "uniform",
            "data": [0, 100],
            "type": float
        },
        "myOption": {
            "domain": "categorical",
            "data": ["a", "b", "c"],
            "type": str
        }
    },
    "max_iterations": 500,
    "anything_you_want": 42
}
project = HyppopyProject(config=config)
print("max_iterations:", project.max_iterations)
print("anything_you_want:", project.anything_you_want)

# alternatively
project = HyppopyProject()
project.set_settings(max_iterations=500, anything_you_want=42)
print("anything_you_want:", project.anything_you_want)

# alternatively
project = HyppopyProject()
project.add_setting(name="max_iterations", value=500)
project.add_setting(name="anything_you_want", value=42)
print("anything_you_want:", project.anything_you_want)
```

#### The HyppopySolver classes

Each solver is a child of the HyppopySolver class. This is only interesting if you're planning to write a new solver; we will discuss this in the section Solver Development. All solvers we can use to optimize our blackbox function are part of the module 'hyppopy.solvers'. Below is a list of all available solvers along with their access key in square brackets.

* HyperoptSolver [hyperopt]
    _Bayesian optimization using a Tree-Parzen Estimator; supports uniform, normal, loguniform and categorical parameters_
* OptunitySolver [optunity]
    _Particle Swarm Optimizer; supports uniform and categorical parameters_
* OptunaSolver [optuna]
    _Bayesian optimization; supports uniform and categorical parameters_
* RandomsearchSolver [randomsearch]
    _Naive randomized parameter search; supports uniform, normal, loguniform and categorical parameters_
* QuasiRandomsearchSolver [quasirandomsearch]
    _Randomized grid ensuring random sample drawing and good space coverage; supports uniform, normal, loguniform and categorical parameters_
* GridsearchSolver [gridsearch]
    _Standard gridsearch; supports uniform, normal, loguniform and categorical parameters_

There are two options to get a solver: we can import it directly from the hyppopy.solvers package, or we can use the SolverPool class. We look into both options by optimizing a simple function, starting with the direct import case.
```python
# Import the HyppopyProject class
from hyppopy.HyppopyProject import HyppopyProject

# Import the HyperoptSolver class, in this case we use Hyperopt
from hyppopy.solvers.HyperoptSolver import HyperoptSolver

# Our function to optimize
def my_loss_func(x, y):
    return x**2+y**2

# Creating a HyppopyProject instance
project = HyppopyProject()
project.add_hyperparameter(name="x", domain="uniform", data=[-10, 10], type=float)
project.add_hyperparameter(name="y", domain="uniform", data=[-10, 10], type=float)
project.add_setting(name="max_iterations", value=300)

# create a solver instance
solver = HyperoptSolver(project)
# pass the loss function to the solver
solver.blackbox = my_loss_func
# run the solver
solver.run()

df, best = solver.get_results()

print("\n")
print("*"*100)
print("Best Parameter Set:\n{}".format(best))
print("*"*100)
```

The SolverPool is a class keeping track of all solver classes. We have several options to ask the SolverPool for the desired solver. We can add a setting called solver to our config or to the project instance, respectively, or we can use the solver access key (see the solver listing above) to ask for the solver directly.

```python
# import the SolverPool class
from hyppopy.SolverPool import SolverPool

# Import the HyppopyProject class
from hyppopy.HyppopyProject import HyppopyProject

# Our function to optimize
def my_loss_func(x, y):
    return x**2+y**2

# Creating a HyppopyProject instance
project = HyppopyProject()
project.add_hyperparameter(name="x", domain="uniform", data=[-10, 10], type=float)
project.add_hyperparameter(name="y", domain="uniform", data=[-10, 10], type=float)
project.set_settings(max_iterations=300, solver="hyperopt")

# create a solver instance. The SolverPool class is a singleton
# and can be used without instantiating. It looks in the project
# instance for the solver setting and returns the correct solver.
solver = SolverPool.get(project=project)
# Another option without the usage of the solver field would be:
# solver = SolverPool.get(solver_name='hyperopt', project=project)

# pass the loss function to the solver
solver.blackbox = my_loss_func
# run the solver
solver.run()

df, best = solver.get_results()

print("\n")
print("*"*100)
print("Best Parameter Set:\n{}".format(best))
print("*"*100)
```

#### The BlackboxFunction class

To extend the possibilities beyond using parameter-only loss functions as in the examples above, we can use the BlackboxFunction class. This class is a wrapper around the actual loss function, providing a more advanced access interface to data handling and a callback_function for accessing the solver's iteration loop.
```python
# import the HyppopyProject class keeping track of inputs
from hyppopy.HyppopyProject import HyppopyProject

# import the SolverPool singleton class
from hyppopy.SolverPool import SolverPool

# import the BlackboxFunction class wrapping your problem for Hyppopy
from hyppopy.BlackboxFunction import BlackboxFunction

# Create the HyppopyProject class instance
project = HyppopyProject()
project.add_hyperparameter(name="C", domain="uniform", data=[0.0001, 20], type=float)
project.add_hyperparameter(name="gamma", domain="uniform", data=[0.0001, 20], type=float)
project.add_hyperparameter(name="kernel", domain="categorical", data=["linear", "sigmoid", "poly", "rbf"], type=str)
project.add_setting(name="max_iterations", value=500)
project.add_setting(name="solver", value="optunity")

# The BlackboxFunction signature is as follows:
# BlackboxFunction(blackbox_func=None,
#                  dataloader_func=None,
#                  preprocess_func=None,
#                  callback_func=None,
#                  data=None,
#                  **kwargs)
#
# - blackbox_func: a function pointer to the user's loss function
# - dataloader_func: a function pointer for handling data loading. The function is called once before
#                    optimizing. What it returns is passed as the first argument (data) to your loss
#                    function.
# - preprocess_func: a function pointer for data preprocessing. The function is called once before
#                    optimizing and gets via kwargs['data'] the raw data object set directly or returned
#                    from dataloader_func. What this function returns is then what is passed as the first
#                    argument to your loss function.
# - callback_func: a function pointer called after each iteration. The input kwargs is a dictionary
#                  keeping the parameters used in this iteration, the 'iteration' index, the 'loss'
#                  and the 'status'. The function in this example prints its input in real time,
#                  but it can also be used for real-time visualization.
# - data: if not provided via dataloader_func, one can set a raw data object directly
# - kwargs: dict whose content is passed to all functions above.

from sklearn.svm import SVC
from sklearn.datasets import load_iris
from sklearn.model_selection import cross_val_score

def my_dataloader_function(**kwargs):
    print("Dataloading...")
    # kwargs['params'] allows accessing additional parameters passed,
    # see below my_preproc_param, my_dataloader_input.
    print("my loading argument: {}".format(kwargs['params']['my_dataloader_input']))
    iris_data = load_iris()
    return [iris_data.data, iris_data.target]

def my_preprocess_function(**kwargs):
    print("Preprocessing...")
    # kwargs['data'] allows accessing the input data
    print("data:", kwargs['data'][0].shape, kwargs['data'][1].shape)
    # kwargs['params'] allows accessing additional parameters passed,
    # see below my_preproc_param, my_dataloader_input.
    print("kwargs['params']['my_preproc_param']={}".format(kwargs['params']['my_preproc_param']), "\n")
    # if the preprocessing function returns something,
    # the input data will be replaced with the data returned by this function.
    x = kwargs['data'][0]
    y = kwargs['data'][1]
    for i in range(x.shape[0]):
        x[i, :] += kwargs['params']['my_preproc_param']
    return [x, y]

def my_callback_function(**kwargs):
    print("\r{}".format(kwargs), end="")

def my_loss_function(data, params):
    clf = SVC(**params)
    return -cross_val_score(estimator=clf, X=data[0], y=data[1], cv=3).mean()

# We now create the BlackboxFunction object and pass all function pointers defined above,
# as well as two dummy parameters (my_preproc_param, my_dataloader_input) for demonstration purposes.
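# Note: the extra keyword arguments passed below (my_preproc_param and
# my_dataloader_input) are delivered to the dataloader, preprocess and callback
# functions via kwargs['params'], which is how the functions above read them.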
blackbox = BlackboxFunction(blackbox_func=my_loss_function,
                            dataloader_func=my_dataloader_function,
                            preprocess_func=my_preprocess_function,
                            callback_func=my_callback_function,
                            my_preproc_param=1,
                            my_dataloader_input='could/be/a/path')

# Get the solver
solver = SolverPool.get(project=project)
# Give the solver your blackbox
solver.blackbox = blackbox
# Run the solver
solver.run()
# Get your results
df, best = solver.get_results()

print("\n")
print("*"*100)
print("Best Parameter Set:\n{}".format(best))
print("*"*100)
```

#### The Parameter Space Domains

Each hyperparameter needs a range and a domain specifier. The range, specified via 'data', consists of the left and right bound of an interval (the exception is the domain 'categorical', where 'data' is the actual list of data elements), and the domain specifier defines the way this interval is sampled. Currently supported domains are:

* uniform (samples the interval [a,b] evenly)
* normal* (a Gaussian sampling of the interval [a,b] such that mu=a+(b-a)/2 and sigma=(b-a)/6)
* loguniform* (a logarithmic sampling of the interval [a,b], such that the exponent range x=[log(a),log(b)] is sampled evenly)
* categorical (in this case data is not interpreted as an interval but as an actual list of objects)

*Not all domains are supported by all solvers. This might be fixed in the future, but until then, the solver throws an error telling you that the domain is unknown.

-When using the GridsearchSolver we need to specifiy an interval and a number of samples using a frequency specifier. The max_iterations parameter is obsolet in this case, because each axis specifies an individual number of samples via frequency.
+When using the GridsearchSolver we need to specify an interval and a number of samples using a frequency specifier. The max_iterations parameter is obsolete in this case, because each axis specifies an individual number of samples via frequency. This applies only to numerical space domains; categorical space domains need a frequency value of 1.

```python
# import the GridsearchSolver class
from hyppopy.solvers.GridsearchSolver import GridsearchSolver

# Import the HyppopyProject class
from hyppopy.HyppopyProject import HyppopyProject

# Our function to optimize
def my_loss_func(x, y):
    return x**2+y**2

# Creating a HyppopyProject instance
project = HyppopyProject()
project.add_hyperparameter(name="x", domain="uniform", data=[-1.1, 1], frequency=10, type=float)
project.add_hyperparameter(name="y", domain="uniform", data=[-1.1, 1], frequency=12, type=float)

solver = GridsearchSolver(project=project)

# pass the loss function to the solver
solver.blackbox = my_loss_func
# run the solver
solver.run()

df, best = solver.get_results()

print("\n")
print("*"*100)
print("Best Parameter Set:\n{}".format(best))
print("*"*100)
```

#### Using a Visdom Server to Visualize the Optimization Process

We can simply create a real-time visualization using a Visdom server. If installed, start your Visdom server via the console command:
```
>visdom
```

Go to your browser and open the site: http://localhost:8097

To enable the visualization, call the function 'start_viewer' before running the solver:

```
#enable visualization
solver.start_viewer()
# Run the solver
solver.run()
```

You can also change the port and the server name via start_viewer(port=8097, server="http://localhost").

-## Acknowledgements:
+## Acknowledgements:

_This work is supported by the [Helmholtz Association Initiative and Networking](https://www.helmholtz.de/en/about_us/the_association/initiating_and_networking/) Fund under project number ZT-I-0003._
diff --git a/examples/solver_comparison.py b/examples/solver_comparison.py
index e9eaf5b..670f873 100644
--- a/examples/solver_comparison.py
+++ b/examples/solver_comparison.py
@@ -1,311 +1,325 @@
# DKFZ
#
#
# Copyright (c) German Cancer Research Center,
# Division of Medical Image Computing.
# All rights reserved.
#
# This software is distributed WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE.
#
# See LICENSE

import os
import sys
import time
import pickle
import numpy as np
from math import pi
import matplotlib.pyplot as plt

from hyppopy.SolverPool import SolverPool
from hyppopy.HyppopyProject import HyppopyProject
from hyppopy.VirtualFunction import VirtualFunction
from hyppopy.BlackboxFunction import BlackboxFunction

-OUTPUTDIR = "C:\\Users\\s635r\\Desktop\\solver_comparison"
-SOLVER = ["hyperopt", "optunity", "randomsearch", "optuna", "quasirandomsearch"]
-ITERATIONS = [50, 100, 250, 500]
-STATREPEATS = 50
-OVERWRITE = False
+#OUTPUTDIR = "C:\\Users\\s635r\\Desktop\\solver_comparison"
+OUTPUTDIR = "D:\\Projects\\Python\\hyppopy\\examples\\solver_comparison\\gfx"
+
+SOLVER = []
+#SOLVER.append("hyperopt")
+#SOLVER.append("optunity")
+#SOLVER.append("randomsearch")
+#SOLVER.append("optuna")
+SOLVER.append("quasirandomsearch")
+
+ITERATIONS = []
+ITERATIONS.append(50)
+ITERATIONS.append(100)
+ITERATIONS.append(250)
+ITERATIONS.append(500)
+STATREPEATS = 1
+
+OVERWRITE = False


def compute_deviation(solver_name, vfunc_id, iterations, N, fname):
    project = HyppopyProject()
    project.add_hyperparameter(name="axis_00", domain="uniform", data=[0, 1], type=float)
    project.add_hyperparameter(name="axis_01", domain="uniform", data=[0, 1], type=float)
    project.add_hyperparameter(name="axis_02", domain="uniform", data=[0, 1], type=float)
    project.add_hyperparameter(name="axis_03", domain="uniform", data=[0, 1], type=float)
    project.add_hyperparameter(name="axis_04", domain="uniform", data=[0, 1], type=float)

    vfunc = VirtualFunction()
    vfunc.load_default(vfunc_id)
    minima = vfunc.minima()

    def my_loss_function(data, params):
        return vfunc(**params)

    blackbox = BlackboxFunction(data=[], blackbox_func=my_loss_function)

    results = {}
    results["gt"] = []
    for mini in minima:
        results["gt"].append(np.median(mini[0]))

    for iter in iterations:
        results[iter] = {"minima": {},
                         "distance": {},
                         "duration": None,
                         "set_difference": None,
                         "loss": None,
                         "loss_history": {}}
        for i in range(vfunc.dims()):
            results[iter]["minima"]["axis_0{}".format(i)] = []
            results[iter]["distance"]["axis_0{}".format(i)] = []

        project.add_setting("max_iterations", iter)
        project.add_setting("solver", solver_name)

        solver = SolverPool.get(project=project)
        solver.blackbox = blackbox

        axis_minima = []
        best_losses = []
        best_sets_diff = []
        for i in range(vfunc.dims()):
            axis_minima.append([])

        loss_history = []
        durations = []
        for n in range(N):
            print("\rSolver={} iteration={} round={}".format(solver, iter, n), end="")
            start = time.time()
            solver.run(print_stats=False)
            end = time.time()
            durations.append(end-start)
            df, best = solver.get_results()

            loss_history.append(np.flip(np.sort(df['losses'].values)))
            best_row = df['losses'].idxmin()
            best_losses.append(df['losses'][best_row])
            best_sets_diff.append(abs(df['axis_00'][best_row] - best['axis_00'])+
                                  abs(df['axis_01'][best_row] - best['axis_01'])+
                                  abs(df['axis_02'][best_row] - best['axis_02'])+
                                  abs(df['axis_03'][best_row] - best['axis_03'])+
                                  abs(df['axis_04'][best_row] - best['axis_04']))
            for i in range(vfunc.dims()):
                tmp = df['axis_0{}'.format(i)][best_row]
                axis_minima[i].append(tmp)

        results[iter]["loss_history"] = loss_history
        for i in range(vfunc.dims()):
            results[iter]["minima"]["axis_0{}".format(i)] = [np.mean(axis_minima[i]), np.std(axis_minima[i])]
            dist = np.sqrt((axis_minima[i]-results["gt"][i])**2)
            results[iter]["distance"]["axis_0{}".format(i)] = [np.mean(dist), np.std(dist)]
        results[iter]["loss"] = [np.mean(best_losses), np.std(best_losses)]
        results[iter]["set_difference"] = sum(best_sets_diff)
        results[iter]["duration"] = np.mean(durations)

    file = open(fname, 'wb')
    pickle.dump(results, file)
    file.close()


def make_radarplot(results, title, fname=None):
    gt = results.pop("gt")
    categories = list(results[list(results.keys())[0]]["minima"].keys())
    N = len(categories)

    angles = [n / float(N) * 2 * pi for n in range(N)]
    angles += angles[:1]

    ax = plt.subplot(1, 1, 1, polar=True)
    ax.set_theta_offset(pi / 2)
    ax.set_theta_direction(-1)

    plt.xticks(angles[:-1], categories, color='grey', size=8)
    ax.set_rlabel_position(0)
    plt.yticks([0.2, 0.4, 0.6, 0.8, 1.0], ["0.2", "0.4", "0.6", "0.8", "1.0"], color="grey", size=7)
    plt.ylim(0, 1)

    gt += gt[:1]
    ax.fill(angles, gt, color=(0.2, 0.8, 0.2), alpha=0.2)

    colors = []
    cm = plt.get_cmap('Set1')
    if len(results) > 2:
        indices = list(range(0, len(results) + 1))
        indices.pop(2)
    else:
        indices = list(range(0, len(results)))
    for i in range(len(results)):
        colors.append(cm(indices[i]))

    for iter, data in results.items():
        values = []
        for i in range(len(categories)):
            values.append(data["minima"]["axis_0{}".format(i)][0])
        values += values[:1]
        color = colors.pop(0)
        ax.plot(angles, values, color=color, linewidth=2, linestyle='solid', label="iterations {}".format(iter))

    plt.title(title, size=11, color=(0.1, 0.1, 0.1), y=1.1)
    plt.legend(bbox_to_anchor=(0.08, 1.12))
    if fname is None:
        plt.show()
    else:
        plt.savefig(fname + ".png")
        #plt.savefig(fname + ".svg")
    plt.clf()


def make_errrorbars_plot(results, fname=None):
    n_groups = len(results)

    for iter in ITERATIONS:
        means = []
        stds = []
        names = []
        colors = []
        axis = []
        fig = plt.figure(figsize=(10, 8))
        for solver_name, numbers in results.items():
            names.append(solver_name)
            means.append([])
            stds.append([])

            for axis_name, data in numbers[iter]["distance"].items():
                means[-1].append(data[0])
                stds[-1].append(data[1])
                if len(axis) < 5:
                    axis.append(axis_name)

        for c in range(len(names)):
            colors.append(plt.cm.Set2(c/len(names)))

        index = np.arange(len(axis))
        bar_width = 0.14
        opacity = 0.8
        error_config = {'ecolor': '0.3'}

        for k, name in enumerate(names):
            plt.bar(index + k*bar_width, means[k], bar_width,
                    alpha=opacity,
                    color=colors[k],
                    yerr=stds[k],
                    error_kw=error_config,
                    label=name)

        plt.xlabel('Axis')
        plt.ylabel('Mean [+/- std]')
        plt.title('Deviation per Axis and Solver for {} Iterations'.format(iter))
        plt.xticks(index + 2*bar_width, axis)
        plt.legend()
        if fname is None:
            plt.show()
        else:
            plt.savefig(fname + "_{}.png".format(iter))
            #plt.savefig(fname + "_{}.svg".format(iter))
        plt.clf()


def plot_loss_histories(results, fname=None):
    colors = []
    for c in range(len(SOLVER)):
        colors.append(plt.cm.Set2(c / len(SOLVER)))

    for iter in ITERATIONS:
        fig = plt.figure(figsize=(10, 8))
        added_solver = []
        for n, solver_name in enumerate(results.keys()):
            for history in results[solver_name][iter]["loss_history"]:
                if solver_name not in added_solver:
                    plt.plot(history, color=colors[n], label=solver_name, alpha=0.5)
                    added_solver.append(solver_name)
                else:
                    plt.plot(history, color=colors[n], alpha=0.5)
        plt.legend()
        plt.ylabel('Loss')
        plt.xlabel('Iteration')

        if fname is None:
            plt.show()
        else:
            plt.savefig(fname + "_{}.png".format(iter))
        plt.clf()


def print_durations(results, fname=None):
    colors = []
    for c in range(len(SOLVER)):
        colors.append(plt.cm.Set2(c / len(SOLVER)))

    f = open(fname, "w")
    lines = ["\t".join(SOLVER)+"\n"]
    for iter in ITERATIONS:
        txt = str(iter) + "\t"
        for solver_name in SOLVER:
            duration = results[solver_name][iter]["duration"]
            txt += str(duration) + "\t"
        txt += "\n"
        lines.append(txt)
    f.writelines(lines)
    f.close()


+id2dirmapping = {"5D": "data_I", "5D2": "data_II", "5D3": "data_III"}

if __name__ == "__main__":
    vfunc_ID = "5D"
    if len(sys.argv) == 2:
        vfunc_ID = sys.argv[1]
    print("Start Evaluation on {}".format(vfunc_ID))

-    OUTPUTDIR = os.path.join(OUTPUTDIR, vfunc_ID)
+    OUTPUTDIR = os.path.join(OUTPUTDIR, id2dirmapping[vfunc_ID])
    if not os.path.isdir(OUTPUTDIR):
        os.makedirs(OUTPUTDIR)

    ##################################################
    ############### create datasets ##################
    fnames = []
    for solver_name in SOLVER:
        fname = os.path.join(OUTPUTDIR, solver_name)
        fnames.append(fname)
        if OVERWRITE or not os.path.isfile(fname):
            compute_deviation(solver_name, vfunc_ID, ITERATIONS, N=STATREPEATS, fname=fname)
    ##################################################
    ##################################################

    ##################################################
    ############## create radarplots #################
    all_results = {}
    for solver_name, fname in zip(SOLVER, fnames):
        file = open(fname, 'rb')
        results = pickle.load(file)
        file.close()
        make_radarplot(results, solver_name, fname + "_deviation")
        all_results[solver_name] = results

    fname = os.path.join(OUTPUTDIR, "errorbars")
    make_errrorbars_plot(all_results, fname)

    fname = os.path.join(OUTPUTDIR, "losshistory")
    plot_loss_histories(all_results, fname)

    fname = os.path.join(OUTPUTDIR, "durations.txt")
    print_durations(all_results, fname)

    for solver_name, iterations in all_results.items():
        for iter, numbers in iterations.items():
            if numbers["set_difference"] != 0:
                print("solver {} has a different parameter set match in iteration {}".format(solver_name, iter))
    ##################################################
    ##################################################
diff --git a/examples/solver_comparison/HyppopyReport.pdf b/examples/solver_comparison/HyppopyReport.pdf
new file mode 100644
index 0000000..6aafecb
Binary files /dev/null and b/examples/solver_comparison/HyppopyReport.pdf differ
diff --git a/examples/solver_comparison/HyppopyReport.tex b/examples/solver_comparison/HyppopyReport.tex
new file mode 100644
index 0000000..bbd194d
--- /dev/null
+++ b/examples/solver_comparison/HyppopyReport.tex
@@ -0,0 +1,31 @@
+\documentclass[12pt]{article}
+
+\title{Hyppopy Solver Comparison Report}
+\date{\today}
+
+\usepackage{geometry}
+\geometry{
+    a4paper,
+    total={170mm,257mm},
+    left=20mm,
+    top=20mm,
+}
+
+\usepackage{subcaption}
+\usepackage{graphicx}
+\graphicspath{{./gfx/}}
+\captionsetup[subfigure]{labelformat=empty}
+
+\begin{document}
+\maketitle
+\newpage
+
+\section{Benchmarking}
+
+\input{ReportPage_I.tex}
+\newpage
+\input{ReportPage_II.tex}
+\newpage
+\input{ReportPage_III.tex}
+
+\end{document}
\ No newline at end of file
diff --git a/examples/solver_comparison/ReportPage_I.tex b/examples/solver_comparison/ReportPage_I.tex
new file mode 100644
index 0000000..ce7a3f6
--- /dev/null
+++ b/examples/solver_comparison/ReportPage_I.tex
@@ -0,0 +1,138 @@
+\subsection{Virtual Function I}
+The figures below depict the axis plots of the virtual hyperparameter space function I to be optimized with the available hyppopy solvers.
+\begin{figure}[h]
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_I/axis_00}
+        \caption{axis 00}
+        \label{fig:axis00_I}
+    \end{subfigure}
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_I/axis_01}
+        \caption{axis 01}
+        \label{fig:axis01_I}
+    \end{subfigure}
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_I/axis_02}
+        \caption{axis 02}
+        \label{fig:axis02_I}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_I/axis_03}
+        \caption{axis 03}
+        \label{fig:axis03_I}
+    \end{subfigure}
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_I/axis_04}
+        \caption{axis 04}
+        \label{fig:axis04_I}
+    \end{subfigure}
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_I/dummy}
+        \caption{}
+        \label{fig:dummy1_I}
+    \end{subfigure}
+\end{figure}
+
+
+\newpage
+
+
+\subsubsection{Minimum Finding Abilities}
+
+The pictures below depict the accuracies reached on the individual axes. The light green region is the ground truth and the red, blue, violet and orange lines are the results after 50, 100, 250, and 500 iterations. Each line is the mean result over 50 individual optimizations on the target function.
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_I/hyperopt_deviation}
+        \label{fig:hyperopt_deviation_I}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_I/optunity_deviation}
+        \label{fig:optunity_deviation_I}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_I/optuna_deviation}
+        \label{fig:optuna_deviation_I}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_I/randomsearch_deviation}
+        \label{fig:randomsearch_deviation_I}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_I/quasirandomsearch_deviation}
+        \label{fig:quasirandomsearch_deviation_I}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_I/dummy}
+        \label{fig:dummy2_I}
+    \end{subfigure}
+\end{figure}
+
+
+\newpage
+
+
+\subsubsection{Relative Distances to the Axis Optima}
+
+The pictures in this section depict the mean distance and the standard deviation per axis for each solver.
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_I/errorbars_50}
+        \label{fig:errorbars_50_I}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_I/errorbars_100}
+        \label{fig:errorbars_100_I}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_I/errorbars_250}
+        \label{fig:errorbars_250_I}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_I/errorbars_500}
+        \label{fig:errorbars_500_I}
+    \end{subfigure}
+\end{figure}
+
+
+\newpage
+
+
+\subsubsection{Convergence Behaviour}
+
+The pictures in this section depict the loss-over-iteration plots for each of the 50 runs of each solver. For better visualization the loss values are sorted, so the mapping between iteration and loss values might not be correct. The purpose of these plots is to show the overall loss curve for each solver and its variation over different runs.
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_I/losshistory_50}
+        \label{fig:losshistory_50_I}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_I/losshistory_100}
+        \label{fig:losshistory_100_I}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_I/losshistory_250}
+        \label{fig:losshistory_250_I}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_I/losshistory_500}
+        \label{fig:losshistory_500_I}
+    \end{subfigure}
+\end{figure}
\ No newline at end of file
diff --git a/examples/solver_comparison/ReportPage_II.tex b/examples/solver_comparison/ReportPage_II.tex
new file mode 100644
index 0000000..2d3198b
--- /dev/null
+++ b/examples/solver_comparison/ReportPage_II.tex
@@ -0,0 +1,138 @@
+\subsection{Virtual Function II}
+The figures below depict the axis plots of the virtual hyperparameter space function II to be optimized with the available hyppopy solvers.
+\begin{figure}[h]
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_II/axis_00}
+        \caption{axis 00}
+        \label{fig:axis00_II}
+    \end{subfigure}
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_II/axis_01}
+        \caption{axis 01}
+        \label{fig:axis01_II}
+    \end{subfigure}
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_II/axis_02}
+        \caption{axis 02}
+        \label{fig:axis02_II}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_II/axis_03}
+        \caption{axis 03}
+        \label{fig:axis03_II}
+    \end{subfigure}
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_II/axis_04}
+        \caption{axis 04}
+        \label{fig:axis04_II}
+    \end{subfigure}
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_II/dummy}
+        \caption{}
+        \label{fig:dummy1_II}
+    \end{subfigure}
+\end{figure}
+
+
+\newpage
+
+
+\subsubsection{Minimum Finding Abilities}
+
+The pictures below depict the accuracies reached on the individual axes. The light green region is the ground truth and the red, blue, violet and orange lines are the results after 50, 100, 250, and 500 iterations. Each line is the mean result over 50 individual optimizations on the target function.
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_II/hyperopt_deviation}
+        \label{fig:hyperopt_deviation_II}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_II/optunity_deviation}
+        \label{fig:optunity_deviation_II}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_II/optuna_deviation}
+        \label{fig:optuna_deviation_II}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_II/randomsearch_deviation}
+        \label{fig:randomsearch_deviation_II}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_II/quasirandomsearch_deviation}
+        \label{fig:quasirandomsearch_deviation_II}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_II/dummy}
+        \label{fig:dummy2_II}
+    \end{subfigure}
+\end{figure}
+
+
+\newpage
+
+
+\subsubsection{Relative Distances to the Axis Optima}
+
+The pictures in this section depict the mean distance and the standard deviation per axis for each solver.
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_II/errorbars_50}
+        \label{fig:errorbars_50_II}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_II/errorbars_100}
+        \label{fig:errorbars_100_II}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_II/errorbars_250}
+        \label{fig:errorbars_250_II}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_II/errorbars_500}
+        \label{fig:errorbars_500_II}
+    \end{subfigure}
+\end{figure}
+
+
+\newpage
+
+
+\subsubsection{Convergence Behaviour}
+
+The pictures in this section depict the loss-over-iteration plots for each of the 50 runs of each solver. For better visualization the loss values are sorted, so the mapping between iteration and loss values might not be correct. The purpose of these plots is to show the overall loss curve for each solver and its variation over different runs.
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_II/losshistory_50}
+        \label{fig:losshistory_50_II}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_II/losshistory_100}
+        \label{fig:losshistory_100_II}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_II/losshistory_250}
+        \label{fig:losshistory_250_II}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_II/losshistory_500}
+        \label{fig:losshistory_500_II}
+    \end{subfigure}
+\end{figure}
\ No newline at end of file
diff --git a/examples/solver_comparison/ReportPage_III.tex b/examples/solver_comparison/ReportPage_III.tex
new file mode 100644
index 0000000..06b1e29
--- /dev/null
+++ b/examples/solver_comparison/ReportPage_III.tex
@@ -0,0 +1,138 @@
+\subsection{Virtual Function III}
+The figures below depict the axis plots of the virtual hyperparameter space function III to be optimized with the available hyppopy solvers.
+\begin{figure}[h]
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_III/axis_00}
+        \caption{axis 00}
+        \label{fig:axis00_III}
+    \end{subfigure}
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_III/axis_01}
+        \caption{axis 01}
+        \label{fig:axis01_III}
+    \end{subfigure}
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_III/axis_02}
+        \caption{axis 02}
+        \label{fig:axis02_III}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_III/axis_03}
+        \caption{axis 03}
+        \label{fig:axis03_III}
+    \end{subfigure}
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_III/axis_04}
+        \caption{axis 04}
+        \label{fig:axis04_III}
+    \end{subfigure}
+    \begin{subfigure}{0.32\textwidth}
+        \includegraphics[width=\linewidth]{gt_III/dummy}
+        \caption{}
+        \label{fig:dummy1_III}
+    \end{subfigure}
+\end{figure}
+
+
+\newpage
+
+
+\subsubsection{Minimum Finding Abilities}
+
+The pictures below depict the accuracies reached on the individual axes. The light green region is the ground truth and the red, blue, violet and orange lines are the results after 50, 100, 250, and 500 iterations. Each line is the mean result over 50 individual optimizations on the target function.
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_III/hyperopt_deviation}
+        \label{fig:hyperopt_deviation_III}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_III/optunity_deviation}
+        \label{fig:optunity_deviation_III}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_III/optuna_deviation}
+        \label{fig:optuna_deviation_III}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_III/randomsearch_deviation}
+        \label{fig:randomsearch_deviation_III}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_III/quasirandomsearch_deviation}
+        \label{fig:quasirandomsearch_deviation_III}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_III/dummy}
+        \label{fig:dummy2_III}
+    \end{subfigure}
+\end{figure}
+
+
+\newpage
+
+
+\subsubsection{Relative Distances to the Axis Optima}
+
+The pictures in this section depict the mean distance and the standard deviation per axis for each solver.
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_III/errorbars_50}
+        \label{fig:errorbars_50_III}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_III/errorbars_100}
+        \label{fig:errorbars_100_III}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_III/errorbars_250}
+        \label{fig:errorbars_250_III}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_III/errorbars_500}
+        \label{fig:errorbars_500_III}
+    \end{subfigure}
+\end{figure}
+
+
+\newpage
+
+
+\subsubsection{Convergence Behaviour}
+
+The pictures in this section depict the loss-over-iteration plots for each of the 50 runs of each solver. For better visualization the loss values are sorted, so the mapping between iteration and loss values might not be correct. The purpose of these plots is to show the overall loss curve for each solver and its variation over different runs.
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_III/losshistory_50}
+        \label{fig:losshistory_50_III}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_III/losshistory_100}
+        \label{fig:losshistory_100_III}
+    \end{subfigure}
+\end{figure}
+
+\begin{figure}[h]
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_III/losshistory_250}
+        \label{fig:losshistory_250_III}
+    \end{subfigure}
+    \begin{subfigure}{0.5\textwidth}
+        \includegraphics[width=0.9\linewidth]{data_III/losshistory_500}
+        \label{fig:losshistory_500_III}
+    \end{subfigure}
+\end{figure}
\ No newline at end of file
diff --git a/hyppopy/__init__.py b/hyppopy/__init__.py
index 5956145..bea4fb7 100644
--- a/hyppopy/__init__.py
+++ b/hyppopy/__init__.py
@@ -1,14 +1,14 @@
# DKFZ
#
#
# Copyright (c) German Cancer Research Center,
# Division of Medical Image Computing.
# All rights reserved.
#
# This software is distributed WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE.
#
# See LICENSE

-__version__ = '0.4.1.2'
+__version__ = '0.5.0.0'