# Source code for recbole.trainer.hyper_tuning

# -*- coding: utf-8 -*-
# @Time   : 2020/7/19 19:06
# @Author : Shanlei Mu
# @Email  :
# @File   :


import numpy as np
from functools import partial

from recbole.utils.utils import dict2str

def _recursiveFindNodes(root, node_type='switch'):
    """Recursively collect hyperopt pyll ``Apply`` nodes named ``node_type``.

    Walks lists/tuples, dicts and pyll ``Apply`` expression graphs, gathering
    every node whose ``.name`` matches ``node_type`` (``'switch'`` is the pyll
    node hyperopt emits for ``hp.choice``).

    Args:
        root: a pyll expression, or a list/tuple/dict of such expressions.
        node_type (str): the pyll node name to search for.

    Returns:
        list: the matching ``Apply`` nodes.
    """
    from hyperopt.pyll.base import Apply
    nodes = []
    if isinstance(root, (list, tuple)):
        for node in root:
            nodes.extend(_recursiveFindNodes(node, node_type))
    elif isinstance(root, dict):
        for node in root.values():
            nodes.extend(_recursiveFindNodes(node, node_type))
    elif isinstance(root, (Apply)):
        if root.name == node_type:
            nodes.append(root)
        # Also inspect the direct positional and named arguments of this node.
        for node in root.pos_args:
            if node.name == node_type:
                nodes.append(node)
        for _, node in root.named_args:
            if node.name == node_type:
                nodes.append(node)
    return nodes

def _parameters(space):
    """Analyze a hyperopt search-space instance to find its choice parameters.

    Args:
        space: a hyperopt search space (dict of pyll expressions, or a
            list/tuple of them).

    Returns:
        dict: maps each parameter name to an ``np.array`` of the indices of
        its possible choices (``0 .. n_choices - 1``).
    """
    # Analyze the domain instance to find parameters
    parameters = {}
    if isinstance(space, dict):
        space = list(space.values())
    for node in _recursiveFindNodes(space, 'switch'):

        # Find the name of this parameter: the first positional argument of a
        # 'switch' node is the 'hyperopt_param' node carrying the label.
        paramNode = node.pos_args[0]
        assert paramNode.name == 'hyperopt_param'
        paramName = paramNode.pos_args[0].obj

        # Find all possible choices for this parameter
        values = [literal.obj for literal in node.pos_args[1:]]
        parameters[paramName] = np.array(range(len(values)))
    return parameters

def _spacesize(space):
    """Compute the number of possible parameter combinations in ``space``.

    Args:
        space: a hyperopt search space (see :func:`_parameters`).

    Returns:
        int: the product of the choice counts of every parameter.
    """
    # Compute the number of possible combinations
    params = _parameters(space)
    return np.prod([len(values) for values in params.values()])

class ExhaustiveSearchError(Exception):
    r"""Raised when a search space cannot be explored by exhaustive search."""
    pass
def _validate_space_exhaustive_search(space):
    """Check that ``space`` only uses stochastic symbols supported by exhaustive search.

    Args:
        space: a hyperopt search space.

    Raises:
        ExhaustiveSearchError: if the space contains an implicit stochastic
            node (e.g. ``uniform``) that cannot be enumerated exhaustively.
    """
    from hyperopt.pyll.base import dfs, as_apply
    from hyperopt.pyll.stochastic import implicit_stochastic_symbols
    supported_stochastic_symbols = ['randint', 'quniform', 'qloguniform', 'qnormal', 'qlognormal', 'categorical']
    for node in dfs(as_apply(space)):
        if node.name in implicit_stochastic_symbols:
            if node.name not in supported_stochastic_symbols:
                raise ExhaustiveSearchError('Exhaustive search is only possible with the following stochastic symbols: '
                                            '' + ', '.join(supported_stochastic_symbols))
class HyperTuning(object):
    r"""HyperTuning Class is used to manage the parameter tuning process of recommender system models.
    Given objective funciton, parameters range and optimization algorithm, using HyperTuning can find
    the best result among these parameters.

    Note:
        HyperTuning is based on the hyperopt (https://github.com/hyperopt/hyperopt)

        Thanks to sbrodeur for the exhaustive search code.
        https://github.com/hyperopt/hyperopt/issues/200
    """

    def __init__(self, objective_function, space=None, params_file=None, fixed_config_file_list=None,
                 algo='exhaustive', max_evals=100):
        """
        Args:
            objective_function (callable): called as
                ``objective_function(config_dict, fixed_config_file_list)``;
                must return a dict with keys ``best_valid_score``,
                ``best_valid_result``, ``valid_score_bigger`` and ``test_result``.
            space (dict, optional): a hyperopt search space.
            params_file (str, optional): path of a parameter file, used to
                build the space when ``space`` is not given.
            fixed_config_file_list (list, optional): config files passed
                through to ``objective_function``.
            algo (str or callable): ``'exhaustive'`` for grid search, or a
                hyperopt suggest function.
            max_evals (int): maximum number of evaluations (overridden by the
                space size for exhaustive search).
        """
        self.best_score = None
        self.best_params = None
        self.best_test_result = None
        self.params2result = {}

        self.objective_function = objective_function
        self.max_evals = max_evals
        self.fixed_config_file_list = fixed_config_file_list
        if space:
            self.space = space
        elif params_file:
            self.space = self._build_space_from_file(params_file)
        else:
            raise ValueError('at least one of `space` and `params_file` is provided')
        if isinstance(algo, str):
            if algo == 'exhaustive':
                self.algo = partial(exhaustive_search, nbMaxSucessiveFailures=1000)
                # Exhaustive search must visit every combination exactly once.
                self.max_evals = _spacesize(self.space)
            else:
                raise ValueError('Illegal algo [{}]'.format(algo))
        else:
            self.algo = algo

    @staticmethod
    def _build_space_from_file(file):
        r"""Build a hyperopt search space from a parameter file.

        Each non-empty line has the form ``<name> <type> <value>`` where
        ``type`` is one of ``choice``, ``uniform``, ``quniform``, ``loguniform``.

        Args:
            file (str): path of the parameter file.

        Returns:
            dict: parameter name -> hyperopt distribution.

        Raises:
            ValueError: if a line declares an unknown parameter type.
        """
        from hyperopt import hp
        space = {}
        with open(file, 'r') as fp:
            for line in fp:
                para_list = line.strip().split(' ')
                if len(para_list) < 3:
                    continue
                para_name, para_type, para_value = para_list[0], para_list[1], "".join(para_list[2:])
                if para_type == 'choice':
                    # NOTE(review): `eval` on file content — the params file is
                    # assumed to be trusted, author-controlled input.
                    para_value = eval(para_value)
                    space[para_name] = hp.choice(para_name, para_value)
                elif para_type == 'uniform':
                    low, high = para_value.strip().split(',')
                    space[para_name] = hp.uniform(para_name, float(low), float(high))
                elif para_type == 'quniform':
                    low, high, q = para_value.strip().split(',')
                    space[para_name] = hp.quniform(para_name, float(low), float(high), float(q))
                elif para_type == 'loguniform':
                    low, high = para_value.strip().split(',')
                    space[para_name] = hp.loguniform(para_name, float(low), float(high))
                else:
                    raise ValueError('Illegal param type [{}]'.format(para_type))
        return space

    @staticmethod
    def params2str(params):
        r"""convert dict to str

        Args:
            params (dict): parameters dict

        Returns:
            str: parameters string, e.g. ``'lr:0.01, n:5'``
        """
        params_str = ''
        for param_name in params:
            params_str += param_name + ':' + str(params[param_name]) + ', '
        return params_str[:-2]

    @staticmethod
    def _print_result(result_dict: dict):
        # Print the current best valid/test results of one trial.
        print('current best valid score: %.4f' % result_dict['best_valid_score'])
        print('current best valid result:')
        print(result_dict['best_valid_result'])
        print('current test result:')
        print(result_dict['test_result'])
        print()

    def export_result(self, output_file=None):
        r"""Write the searched parameters and corresponding results to the file

        Args:
            output_file (str): the output file
        """
        with open(output_file, 'w') as fp:
            for params in self.params2result:
                fp.write(params + '\n')
                fp.write('Valid result:\n' + dict2str(self.params2result[params]['best_valid_result']) + '\n')
                fp.write('Test result:\n' + dict2str(self.params2result[params]['test_result']) + '\n\n')

    def trial(self, params):
        r"""Given a set of parameters, return results and optimization status

        Args:
            params (dict): the parameter dictionary
        """
        import hyperopt
        config_dict = params.copy()
        params_str = self.params2str(params)
        print('running parameters:', config_dict)
        result_dict = self.objective_function(config_dict, self.fixed_config_file_list)
        self.params2result[params_str] = result_dict
        score, bigger = result_dict['best_valid_score'], result_dict['valid_score_bigger']
        if not self.best_score:
            # First (or zero-score) trial always becomes the current best.
            self.best_score = score
            self.best_params = params
            self._print_result(result_dict)
        else:
            if bigger:
                if score > self.best_score:
                    self.best_score = score
                    self.best_params = params
                    self._print_result(result_dict)
            else:
                if score < self.best_score:
                    self.best_score = score
                    self.best_params = params
                    self._print_result(result_dict)

        # hyperopt minimizes the loss, so negate metrics where bigger is better.
        if bigger:
            score = - score
        return {'loss': score, 'status': hyperopt.STATUS_OK}

    def run(self):
        r"""begin to search the best parameters"""
        from hyperopt import fmin
        fmin(self.trial, self.space, algo=self.algo, max_evals=self.max_evals)