Get learning rate of keras model
I can't seem to get the value of the learning rate. What I get is shown below.
I have trained the model for 200 epochs and would like to view/change the learning rate. Is this not the correct way?
>>> print(ig_cnn_model.optimizer.lr)
<tf.Variable 'lr_6:0' shape=() dtype=float32_ref>
Use eval() from keras.backend:
import keras.backend as K
from keras.models import Sequential
from keras.layers import Dense
model = Sequential()
model.add(Dense(1, input_shape=(1,)))
model.add(Dense(1))
model.compile(loss='mse', optimizer='adam')
print(K.eval(model.optimizer.lr))
Output:
0.001
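If you also want to change the learning rate without recompiling, keras.backend has a matching set_value() helper. A minimal sketch, reusing the model compiled above (the new value 0.0005 is just an example):

import keras.backend as K

# Overwrite the optimizer's learning-rate variable in place, then read it back.
K.set_value(model.optimizer.lr, 0.0005)
print(K.eval(model.optimizer.lr))  # 0.0005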
The best way to get all the information related to the optimizer is with .get_config().
Example:
model.compile(optimizer=optimizerF,
              loss=lossF,
              metrics=['accuracy'])

model.optimizer.get_config()
>>> {'name': 'Adam', 'learning_rate': 0.001, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-07, 'amsgrad': False}
It returns a dict with all the information.
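If you only need the learning rate out of that dict, you can index it directly. A minimal sketch (tf.keras names the key 'learning_rate', while older standalone Keras used 'lr', hence the fallback):

config = model.optimizer.get_config()
# Prefer the tf.keras key name, fall back to the legacy one.
lr = config.get('learning_rate', config.get('lr'))
print(lr)  # e.g. 0.001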
You can change your learning rate with:
from keras.optimizers import Adam
model.compile(optimizer=Adam(lr=0.001),
              loss='categorical_crossentropy',
              metrics=['accuracy'])
With TensorFlow >= 2.0:
In [1]: import tensorflow as tf
In [2]: opt = tf.keras.optimizers.Adam()
In [3]: opt.lr.numpy()
Out[3]: 0.001
lr is just a tf.Variable, so its value can be changed via the assign() method:
In [4]: opt.lr.assign(0.1)
Out[4]: <tf.Variable 'UnreadVariable' shape=() dtype=float32, numpy=0.1>
In [5]: opt.lr.numpy()
Out[5]: 0.1
The same goes for the rest of the hyperparameters:
In [6]: opt.decay.numpy()
Out[6]: 0.0
In [7]: opt.beta_1.numpy()
Out[7]: 0.9
In [8]: opt.beta_2.numpy()
Out[8]: 0.999
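The same assign() approach works on the optimizer attached to a compiled model, which is handy for adjusting the rate between fit() calls. A minimal sketch (the tiny model is only for illustration):

import tensorflow as tf

model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(1,))])
model.compile(optimizer=tf.keras.optimizers.Adam(), loss='mse')

print(model.optimizer.lr.numpy())                           # 0.001, Adam's default
model.optimizer.lr.assign(model.optimizer.lr.numpy() / 2)   # halve it in place
print(model.optimizer.lr.numpy())                           # 0.0005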
Some optimizers do not include their name in their config.
Here is a complete example of how to get the config and how to reconstruct (i.e. clone) an optimizer from it (the learning rate is included as well).
import keras.optimizers as opt


def get_opt_config(optimizer):
    """
    Extract optimizer configs from an instance of
    keras Optimizer.
    :param optimizer: instance of keras Optimizer.
    :return: dict of optimizer configs.
    """
    if not isinstance(optimizer, opt.Optimizer):
        raise TypeError('optimizer should be instance of '
                        'keras.optimizers.Optimizer. '
                        'Got {}.'.format(type(optimizer)))
    opt_config = optimizer.get_config()
    if 'name' not in opt_config.keys():
        # Fall back to the class name when the config has no 'name' key.
        _name = str(optimizer.__class__).split('.')[-1] \
            .replace('\'', '').replace('>', '')
        opt_config.update({'name': _name})
    return opt_config


def clone_opt(opt_config):
    """
    Clone keras optimizer from its configurations.
    :param opt_config: dict, keras optimizer configs.
    :return: instance of keras optimizer.
    """
    if not isinstance(opt_config, dict):
        raise TypeError('opt_config must be a dict. '
                        'Got {}'.format(type(opt_config)))
    if 'name' not in opt_config.keys():
        raise ValueError('could not find the name of optimizer in opt_config')
    name = opt_config.get('name')
    params = {k: opt_config[k] for k in opt_config.keys() if k != 'name'}
    if name.upper() == 'ADAM':
        return opt.Adam(**params)
    if name.upper() == 'NADAM':
        return opt.Nadam(**params)
    if name.upper() == 'ADAMAX':
        return opt.Adamax(**params)
    if name.upper() == 'ADADELTA':
        return opt.Adadelta(**params)
    if name.upper() == 'ADAGRAD':
        return opt.Adagrad(**params)
    if name.upper() == 'RMSPROP':
        return opt.RMSprop(**params)  # pass the params here as well
    if name.upper() == 'SGD':
        return opt.SGD(**params)
    raise ValueError('Unknown optimizer name. Available are: '
                     '(\'adam\', \'sgd\', \'rmsprop\', \'adagrad\', '
                     '\'adadelta\', \'adamax\', \'nadam\'). '
                     'Got {}.'.format(name))
Test
if __name__ == '__main__':
    rmsprop = opt.RMSprop()
    configs = get_opt_config(rmsprop)
    print(configs)
    cloned_rmsprop = clone_opt(configs)
    print(cloned_rmsprop)
    print(cloned_rmsprop.get_config())
Output
{'lr': 0.0010000000474974513, 'rho': 0.8999999761581421, 'decay': 0.0, 'epsilon': 1e-07, 'name': 'RMSprop'}
<keras.optimizers.RMSprop object at 0x7f96370a9358>
{'lr': 0.0010000000474974513, 'rho': 0.8999999761581421, 'decay': 0.0, 'epsilon': 1e-07}
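As an aside, tf.keras ships serialize/deserialize helpers that do roughly the same round trip without a hand-written dispatch table. A sketch assuming TensorFlow 2.x (config key names may differ from the standalone Keras output above):

import tensorflow as tf

rmsprop = tf.keras.optimizers.RMSprop()
cfg = tf.keras.optimizers.serialize(rmsprop)    # {'class_name': 'RMSprop', 'config': {...}}
cloned = tf.keras.optimizers.deserialize(cfg)   # fresh RMSprop with the same hyperparameters
print(cloned.get_config()['learning_rate'])     # 0.001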
Another way:
- create an optimizer instance
- get the learning rate from the instance
- use the optimizer in the model
opt = keras.optimizers.SGD()
print('learning rate={}'.format(opt.lr.numpy()))
model.compile(optimizer = opt, ...)