-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathparams.py
More file actions
118 lines (109 loc) · 4.07 KB
/
params.py
File metadata and controls
118 lines (109 loc) · 4.07 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
#!/usr/bin/env python2.7
import numpy
import random
import copy
class Param(object):
    """A continuous (or interval-discretised) hyper-parameter search dimension.

    ``min_val``/``max_val`` bound the search range.  When ``scale == 'log'``
    they are interpreted as *exponents* of ``logbase``, and sampled or
    transformed values are exponentiated accordingly.  When
    ``distrib == 'normal'``, ``min_val`` is (ab)used as the mean and
    ``max_val`` as the standard deviation (see ``get_param_range``).
    A non-None ``interval`` snaps values down onto an integer grid of that
    spacing.
    """

    def __init__(self, name, min_val, max_val, init_val=None, distrib='uniform',
                 scale='log', logbase=numpy.e, interval=None):
        self.name = name
        self.init_val = init_val
        self.min_val = min_val
        self.max_val = max_val
        self.scale = scale          # 'log' or anything else (treated as linear)
        self.logbase = logbase      # base used when scale == 'log'
        self.param_type = 'continuous'
        self.distrib = distrib      # 'uniform' or 'normal'
        self.interval = interval    # grid spacing for integer-snapped values

    def __repr__(self):
        return "%s(%f,%f,%s)" % (self.name, self.min_val, self.max_val, self.scale)

    def get_param_range(self, num_vals, stochastic=False):
        """Return ``num_vals`` candidate values for this parameter.

        With ``stochastic=True`` values are random draws; otherwise they are
        evenly spaced (log-spaced when ``scale == 'log'``).
        """
        if stochastic:
            if self.distrib == 'normal':
                # Design quirk kept for compatibility: min_val is the mean
                # and max_val the standard deviation in this branch.
                val = numpy.random.normal(self.min_val, self.max_val, num_vals)
            else:
                val = numpy.random.rand(num_vals) * (self.max_val - self.min_val) + self.min_val
            if self.scale == "log":
                # Draws happen in exponent space; exponentiate to real values.
                val = numpy.array([self.logbase ** v for v in val])
        else:
            if self.scale == "log":
                val = numpy.logspace(self.min_val, self.max_val, num_vals, base=self.logbase)
            else:
                val = numpy.linspace(self.min_val, self.max_val, num_vals)
        if self.interval:
            # Snap down onto the interval grid and cast to int.
            return (numpy.floor(val / self.interval) * self.interval).astype(int)
        return val

    def get_transformed_param(self, x):
        """Map a raw search-space coordinate ``x`` to the actual parameter value."""
        if self.distrib == 'normal':
            # BUG FIX: 'print' statement was Python-2-only syntax; the call
            # form below is valid in both Python 2 and 3.
            print('not implemented')
            return None
        val = x
        if self.scale == "log":
            val = self.logbase ** x
        if self.interval:
            val = (numpy.floor(val / self.interval) * self.interval).astype(int)
        return val

    def get_min(self):
        return self.min_val

    def get_max(self):
        return self.max_val

    def get_type(self):
        """Return 'integer' when an interval grid is set, else 'continuous'."""
        if self.interval:
            return 'integer'
        return 'continuous'
class IntParam(Param):
    """Integer-valued hyper-parameter over the inclusive range [min_val, max_val]."""

    def __init__(self, name, min_val, max_val, init_val=None):
        super(IntParam, self).__init__(name, min_val, max_val, init_val=init_val)
        self.param_type = "integer"

    def get_param_range(self, num_vals, stochastic=False):
        """Return up to ``num_vals`` integer candidates.

        If num_vals exceeds the width of the range, values are constrained to
        the range; stochastic draws that collide keep only the unique entries.
        """
        if stochastic:
            draws = numpy.random.rand(num_vals) * (1 + self.max_val - self.min_val) + self.min_val
            # BUG FIX: int() on a numpy array raises TypeError for
            # num_vals > 1; cast elementwise with astype instead.
            return numpy.unique(draws.astype(int))
        # BUG FIX: use floor division so the step stays an int under
        # Python 3 (plain '/' was already floor division for ints in py2).
        return range(self.min_val, self.max_val + 1,
                     max(1, (self.max_val - self.min_val) // num_vals))
class CategoricalParam(object):
    """Hyper-parameter taking one value from a fixed, unordered set."""

    def __init__(self, name, val_list, default):
        self.name = name
        self.val_list = val_list
        self.default = default
        self.init_val = default             # the default doubles as the initial value
        self.num_vals = len(self.val_list)
        self.param_type = 'categorical'

    def get_param_range(self, num_vals, stochastic=False):
        """Return up to ``num_vals`` candidate values.

        Stochastic mode draws random indices and keeps the unique picks;
        otherwise the full list is returned when it fits, or a shuffled
        subset that always starts with the default value.
        """
        if stochastic:
            picked = numpy.unique(numpy.random.randint(len(self.val_list), size=num_vals))
            return [self.val_list[idx] for idx in picked]
        if num_vals >= self.num_vals:
            return self.val_list
        # Random subset of the requested size, guaranteed to contain the default.
        others = list(self.val_list)
        others.remove(self.default)
        random.shuffle(others)
        return [self.default] + others[:num_vals - 1]
class ConditionalParam(object):
    """Wrap a param that is only active when another hyper-parameter
    (``cond_param``) takes a specific value (``cond_val``)."""

    def __init__(self, cond_param, cond_val, param):
        self.name = param.name              # mirror the wrapped param's name
        self.cond_param = cond_param
        self.cond_val = cond_val
        self.param = param
        self.param_type = 'conditional'

    def check_condition(self, hps):
        """Return the wrapped param if ``hps`` activates it, else None.

        The key must be present AND equal to cond_val; a missing key never
        activates the param, even when cond_val is None.
        """
        active = self.cond_param in hps and hps[self.cond_param] == self.cond_val
        return self.param if active else None
# Adaptive heuristic zooms into a local portion of the search space.
# Not recommended for actual use as there are no theoretical guarantees.
def zoom_space(params, center, pct=0.40):
    """Return a deep copy of ``params`` with each [min_val, max_val] window
    shrunk to a fraction ``pct`` of its original width, centred (as nearly
    as the lower bound allows) on the values in ``center``.

    params : dict of name -> Param
    center : dict of name -> best value found so far (real value, not exponent)
    pct    : fraction of each original range kept in the new window

    NOTE(review): the new upper bound is not clamped to the original
    max_val, so the window can extend past the original range when the
    centre sits near the top — preserved from the original behaviour.
    """
    new_params = copy.deepcopy(params)
    for name in params.keys():
        p = params[name]
        width = p.max_val - p.min_val   # renamed from 'range' (shadowed the builtin)
        best_val = center[name]
        if p.scale == 'log':
            # BUG FIX: min_val/max_val are exponents of p.logbase, so the
            # centre must be mapped with that same base; the original always
            # used the natural log, which is wrong for e.g. logbase=10.
            best_val = numpy.log(best_val) / numpy.log(p.logbase)
        new_min = max(p.min_val, best_val - pct / 2 * width)
        new_params[name].min_val = new_min
        new_params[name].max_val = new_min + pct * width
    return new_params