#优化 #超参数 #机器学习 #KDE #随机

hyperopt

基于 Parzen 估计器的树形超参数优化

16 个版本

0.0.17 2024 年 4 月 16 日
0.0.16 2024 年 4 月 16 日

#134 机器学习

Download history 701/week @ 2024-04-16 1/week @ 2024-05-21 3/week @ 2024-06-11 75/week @ 2024-07-02 49/week @ 2024-07-23 36/week @ 2024-07-30

每月 85 次下载

自定义许可

50KB
1K SLoC

hyperopt

Rust 的基于 Parzen 估计器的树形超参数优化

Documentation Check status Code coverage Maintenance

示例

连续

use std::f64::consts::{FRAC_PI_2, PI};

use approx::assert_abs_diff_eq;
use fastrand::Rng;
use ordered_float::NotNan;

use hyperopt::Optimizer;
use hyperopt::kernel::continuous::Epanechnikov;
use hyperopt::kernel::universal::Uniform;

fn main() {
    // Search over one full period of the cosine, centered on π.
    let lower = NotNan::new(FRAC_PI_2).unwrap();
    let upper = NotNan::new(PI + FRAC_PI_2).unwrap();

    // Start from a uniform prior over the whole range — our initial
    // guess is no better than random.
    let mut optimizer = Optimizer::new(
        lower..=upper,                       // parameter search limits
        Uniform::with_bounds(lower..=upper), // initial guess
        Rng::with_seed(42),
    );

    // Minimize cos(x) over 50 trials; the optimum is the point `(π, -1)`.
    for _ in 0..50 {
        // Draw a candidate using the Epanechnikov kernel, with `NotNan<f64>`
        // serving as both the parameter and the density type:
        let candidate = optimizer.new_trial::<Epanechnikov<NotNan<f64>>>();

        // Report the objective value back to the optimizer:
        optimizer.feed_back(candidate, NotNan::new(candidate.cos()).unwrap());
    }

    // After 50 trials the best one should sit near the minimum.
    let best = optimizer.best_trial().unwrap();
    assert_abs_diff_eq!(best.parameter.into_inner(), PI, epsilon = 0.05);
    assert_abs_diff_eq!(best.metric.into_inner(), -1.0, epsilon = 0.01);
}

离散

use fastrand::Rng;
use ordered_float::OrderedFloat;

use hyperopt::Optimizer;
use hyperopt::kernel::discrete::Binomial;
use hyperopt::kernel::universal::Uniform;

fn main() {
    // Integer search space in [-100, 100], with a uniform prior as the
    // initial guess.
    let mut optimizer = Optimizer::new(
        -100..=100,
        Uniform::with_bounds(-100..=100),
        Rng::with_seed(42),
    );

    // Minimize the parabola x² − 4x over 30 trials; the minimum is `(2, -4)`.
    // https://www.wolframalpha.com/input?i=x%5E2+-+4x
    for _ in 0..30 {
        // Draw a candidate with the binomial kernel: `i32` as the parameter
        // type and `OrderedFloat<f64>` as the density type.
        let candidate = optimizer.new_trial::<Binomial<i32, OrderedFloat<f64>>>();

        // Report the objective value back to the optimizer:
        optimizer.feed_back(candidate, candidate * candidate - 4 * candidate);
    }

    let best = optimizer.best_trial().unwrap();
    assert_eq!(best.parameter, 2);
    assert_eq!(best.metric, -4);
}

功能

  • ordered-float：支持 OrderedFloat 和 NotNan 类型

依赖项

~200KB