Get learning rate of keras model

I can't seem to get the value of the learning rate. What I get is shown below.

I have trained the model for 200 epochs and want to view/change the learning rate. Is this not the correct way to do it?

>>> print(ig_cnn_model.optimizer.lr)

<tf.Variable 'lr_6:0' shape=() dtype=float32_ref>


Use eval() from keras.backend:

import keras.backend as K
from keras.models import Sequential
from keras.layers import Dense

model = Sequential()
model.add(Dense(1, input_shape=(1,)))
model.add(Dense(1))
model.compile(loss='mse', optimizer='adam')

print(K.eval(model.optimizer.lr))

Output:

0.001
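Since the question also asks how to change the learning rate, the same backend variable can be written with K.set_value(). A minimal sketch, assuming the same TF1-style Keras backend where model.optimizer.lr is a backend variable; the 0.0001 value is just an example:

import keras.backend as K

# Assign a new value to the optimizer's learning-rate variable in place
K.set_value(model.optimizer.lr, 0.0001)
print(K.eval(model.optimizer.lr))  # 0.0001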

The best way to get all of the information related to the optimizer is with .get_config().

Example:

model.compile(optimizer=optimizerF,
              loss=lossF,
              metrics=['accuracy'])

model.optimizer.get_config()

>>> {'name': 'Adam', 'learning_rate': 0.001, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-07, 'amsgrad': False}

It returns a dict with all of the information.
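If only the learning rate is needed, it can be read straight out of that dict. A small sketch, assuming the 'learning_rate' key shown in the output above:

config = model.optimizer.get_config()
print(config['learning_rate'])  # 0.001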


You can change your learning rate with:

from keras.optimizers import Adam

model.compile(optimizer=Adam(lr=0.001),
              loss='categorical_crossentropy',
              metrics=['accuracy'])
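Note that in current tf.keras versions the constructor argument is spelled learning_rate (lr still works but is a deprecated alias). A small sketch, assuming tf.keras 2.x and the model compiled above:

from tensorflow.keras.optimizers import Adam

# learning_rate is the current keyword; lr is a deprecated alias
model.compile(optimizer=Adam(learning_rate=0.001),
              loss='categorical_crossentropy',
              metrics=['accuracy'])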

With TensorFlow >= 2.0:

In [1]: import tensorflow as tf

In [2]: opt = tf.keras.optimizers.Adam()

In [3]: opt.lr.numpy()
Out[3]: 0.001

lr is just a tf.Variable, so its value can be changed via the assign() method:

In [4]: opt.lr.assign(0.1)
Out[4]: <tf.Variable 'UnreadVariable' shape=() dtype=float32, numpy=0.1>

In [5]: opt.lr.numpy()
Out[5]: 0.1

The same goes for the rest of the hyperparameters:

In [6]: opt.decay.numpy()
Out[6]: 0.0

In [7]: opt.beta_1.numpy()
Out[7]: 0.9

In [8]: opt.beta_2.numpy()
Out[8]: 0.999
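Since the question mentions a 200-epoch run, the learning rate can also be adjusted between epochs with a callback instead of manual assign() calls. A minimal sketch, assuming TensorFlow >= 2.0; the halve-every-10-epochs schedule and the commented fit() arguments are only illustrative:

import tensorflow as tf

def schedule(epoch, lr):
    # Halve the learning rate every 10 epochs (illustrative schedule)
    return lr * 0.5 if epoch > 0 and epoch % 10 == 0 else lr

lr_callback = tf.keras.callbacks.LearningRateScheduler(schedule)
# model.fit(x_train, y_train, epochs=200, callbacks=[lr_callback])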

Some optimizers do not include their name in the config.

Here is a complete example of how to get the configs and how to rebuild (i.e. clone) the optimizer from those configs (which also include the learning rate).

import keras.optimizers as opt


def get_opt_config(optimizer):
    """
    Extract optimizer configs from an instance of a keras Optimizer.
    :param optimizer: instance of keras Optimizer.
    :return: dict of optimizer configs.
    """
    if not isinstance(optimizer, opt.Optimizer):
        raise TypeError('optimizer should be instance of '
                        'keras.optimizers.Optimizer '
                        'Got {}.'.format(type(optimizer)))
    opt_config = optimizer.get_config()
    if 'name' not in opt_config.keys():
        # Derive the class name, e.g. "RMSprop", from the type of the instance
        _name = str(optimizer.__class__).split('.')[-1] \
            .replace('\'', '').replace('>', '')
        opt_config.update({'name': _name})
    return opt_config


def clone_opt(opt_config):
    """
    Clone a keras optimizer from its configurations.
    :param opt_config: dict, keras optimizer configs.
    :return: instance of keras optimizer.
    """
    if not isinstance(opt_config, dict):
        raise TypeError('opt_config must be a dict. '
                        'Got {}'.format(type(opt_config)))
    if 'name' not in opt_config.keys():
        raise ValueError('could not find the name of optimizer in opt_config')
    name = opt_config.get('name')
    params = {k: opt_config[k] for k in opt_config.keys() if k != 'name'}
    if name.upper() == 'ADAM':
        return opt.Adam(**params)
    if name.upper() == 'NADAM':
        return opt.Nadam(**params)
    if name.upper() == 'ADAMAX':
        return opt.Adamax(**params)
    if name.upper() == 'ADADELTA':
        return opt.Adadelta(**params)
    if name.upper() == 'ADAGRAD':
        return opt.Adagrad(**params)
    if name.upper() == 'RMSPROP':
        return opt.RMSprop(**params)
    if name.upper() == 'SGD':
        return opt.SGD(**params)
    raise ValueError('Unknown optimizer name. Available are: '
                     '(\'adam\', \'sgd\', \'rmsprop\', \'adagrad\', '
                     '\'adadelta\', \'adamax\', \'nadam\'). '
                     'Got {}.'.format(name))

Test

if __name__ == '__main__':
    rmsprop = opt.RMSprop()
    configs = get_opt_config(rmsprop)
    print(configs)
    cloned_rmsprop = clone_opt(configs)
    print(cloned_rmsprop)
    print(cloned_rmsprop.get_config())

Output

{'lr': 0.0010000000474974513, 'rho': 0.8999999761581421, 'decay': 0.0, 'epsilon': 1e-07, 'name': 'RMSprop'}
<keras.optimizers.RMSprop object at 0x7f96370a9358>
{'lr': 0.0010000000474974513, 'rho': 0.8999999761581421, 'decay': 0.0, 'epsilon': 1e-07}
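For comparison, Keras also ships serialize/deserialize helpers that keep the class name next to the config, which avoids the manual name handling above. A sketch, assuming a Keras version that exposes keras.optimizers.serialize and keras.optimizers.deserialize:

from keras import optimizers

rmsprop = optimizers.RMSprop()
config = optimizers.serialize(rmsprop)    # {'class_name': 'RMSprop', 'config': {...}}
clone = optimizers.deserialize(config)    # rebuilds the optimizer, learning rate included
print(clone.get_config())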

Another way (see the runnable sketch after this list):

  • Create an optimizer instance
  • opt = keras.optimizers.SGD()

  • Get the learning rate from the instance
  • print('learning rate={}'.format(opt.lr.numpy()))

  • Use the optimizer in the model
  • model.compile(optimizer = opt, ...)
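Put together, a minimal runnable sketch of those three steps, assuming TensorFlow 2.x and a trivial one-layer model used purely for illustration:

import tensorflow as tf
from tensorflow import keras

# Create the optimizer instance
opt = keras.optimizers.SGD()

# Get the learning rate from the instance
print('learning rate={}'.format(opt.lr.numpy()))

# Use the optimizer in the model
model = keras.Sequential([keras.layers.Dense(1, input_shape=(1,))])
model.compile(optimizer=opt, loss='mse')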

