# Source code for mxnet.lr_scheduler

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
class LRScheduler(object):
    """Base class of a learning rate scheduler.

    A scheduler returns a new learning rate based on the number of updates
    that have been performed.

    Parameters
    ----------
    base_lr : float, optional
        The initial learning rate.
    """
    def __init__(self, base_lr=0.01):
        self.base_lr = base_lr

    def __call__(self, num_update):
        """Return a new learning rate.

        The ``num_update`` is the upper bound of the number of updates applied
        to every weight.

        Assume the optimizer has updated the *i*-th weight *k_i* times, namely
        ``optimizer.update(i, weight_i)`` has been called *k_i* times. Then::

            num_update = max([k_i for all i])

        Parameters
        ----------
        num_update : int
            The maximal number of updates applied to a weight.
        """
        raise NotImplementedError("must override this")
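
# The sketch below illustrates how a concrete scheduler might subclass
# LRScheduler: override __call__ to map num_update to a learning rate.
# It implements a simple step-decay policy, similar in spirit to MXNet's
# FactorScheduler; the name StepDecayScheduler and its `step` and `factor`
# parameters are hypothetical, not part of this module.
class StepDecayScheduler(LRScheduler):
    """Scale the learning rate by `factor` once every `step` updates."""

    def __init__(self, step, factor=0.5, base_lr=0.01):
        super(StepDecayScheduler, self).__init__(base_lr)
        self.step = step
        self.factor = factor

    def __call__(self, num_update):
        # Apply one decay for every full `step` updates performed so far.
        return self.base_lr * (self.factor ** (num_update // self.step))

# Example: StepDecayScheduler(step=100, factor=0.5, base_lr=0.1)(250)
# returns 0.1 * 0.5 ** 2 == 0.025, since 250 updates span two full steps.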