decorate
- paddle.incubate.asp.decorate(optimizer)
Wrap the given optimizer as an OptimizerWithSparsityGuarantee. In dynamic graph mode, ASP creates mask variables for the supported parameters during decoration. In static graph mode, ASP creates mask variables and inserts the necessary masking ops when minimize() is called.
- Parameters
  optimizer (Optimizer) – An Optimizer used for training.
- Returns
  A wrapper for ASP that decorates the minimize function of the given optimizer.
- Return type
  OptimizerWithSparsityGuarantee
Examples
>>> # Example 1: Usage in dynamic graph mode
>>> import paddle

>>> class MyLayer(paddle.nn.Layer):
...     def __init__(self):
...         super().__init__()
...         self.conv1 = paddle.nn.Conv2D(
...             in_channels=3, out_channels=4, kernel_size=3, padding=2)
...         self.linear1 = paddle.nn.Linear(4624, 32)
...         self.linear2 = paddle.nn.Linear(32, 32)
...         self.linear3 = paddle.nn.Linear(32, 10)
...
...     def forward(self, img):
...         hidden = self.conv1(img)
...         hidden = paddle.flatten(hidden, start_axis=1)
...         hidden = self.linear1(hidden)
...         hidden = self.linear2(hidden)
...         prediction = self.linear3(hidden)
...         return prediction

>>> my_layer = MyLayer()
>>> optimizer = paddle.optimizer.SGD(
...     learning_rate=0.01, parameters=my_layer.parameters())

>>> # Calling paddle.incubate.asp.decorate() to wrap step() in optimizer, which
>>> # will apply necessary masking operations for the ASP workflow.
>>> # In dynamic graph mode, ASP creates the related mask variables during decoration.
>>> optimizer = paddle.incubate.asp.decorate(optimizer)
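A typical follow-up to Example 1 is sketched below: prune the supported weights and then run an ordinary training step, so the decorated step() re-applies the sparsity masks after each parameter update. The use of paddle.incubate.asp.prune_model() and its default 2:4 pattern here are assumptions for illustration, not part of decorate() itself.

>>> # Continuation sketch of Example 1 (prune_model usage is an assumption here):
>>> # prune supported weights, then run one training step; the decorated step()
>>> # re-applies the ASP masks after the parameter update.
>>> paddle.incubate.asp.prune_model(my_layer)
>>> img = paddle.rand([4, 3, 32, 32], dtype='float32')
>>> prediction = my_layer(img)
>>> loss = paddle.mean(prediction)
>>> loss.backward()
>>> optimizer.step()
>>> optimizer.clear_grad()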
>>> # Example 2: Usage in static graph mode
>>> import paddle
>>> paddle.enable_static()

>>> class MyLayer(paddle.nn.Layer):
...     def __init__(self):
...         super().__init__()
...         self.conv1 = paddle.nn.Conv2D(
...             in_channels=3, out_channels=4, kernel_size=3, padding=2)
...         self.linear1 = paddle.nn.Linear(4624, 100)
...
...     def forward(self, img):
...         hidden = self.conv1(img)
...         hidden = paddle.flatten(hidden, start_axis=1)
...         prediction = self.linear1(hidden)
...         return prediction

>>> main_program = paddle.static.Program()
>>> startup_program = paddle.static.Program()

>>> with paddle.static.program_guard(main_program, startup_program):
...     # Input is 32x32 so that the flattened conv output matches Linear(4624, 100).
...     input_data = paddle.static.data(name='data', shape=[None, 3, 32, 32])
...     label = paddle.static.data(name='label', shape=[None, 100])
...     my_layer = MyLayer()
...     prob = my_layer(input_data)
...     loss = paddle.mean(paddle.nn.functional.square_error_cost(prob, label))
...
...     optimizer = paddle.optimizer.SGD(learning_rate=0.1)
...     # Calling paddle.incubate.asp.decorate() to wrap minimize() in optimizer, which
...     # will insert necessary masking operations for the ASP workflow.
...     # In static graph mode, ASP creates the related mask variables during minimize().
...     optimizer = paddle.incubate.asp.decorate(optimizer)
...     optimizer.minimize(loss, startup_program)
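A possible continuation of Example 2 is sketched below: initialize the parameters, prune the supported weights in the built program, and then run the training program. Passing main_program to paddle.incubate.asp.prune_model() and the CPU place are assumptions for illustration.

>>> # Continuation sketch of Example 2 (prune_model usage is an assumption here).
>>> import numpy as np
>>> exe = paddle.static.Executor(paddle.CPUPlace())
>>> exe.run(startup_program)                       # initialize parameters
>>> paddle.incubate.asp.prune_model(main_program)  # prune supported weights in place
>>> exe.run(main_program,
...         feed={'data': np.random.rand(4, 3, 32, 32).astype('float32'),
...               'label': np.random.rand(4, 100).astype('float32')},
...         fetch_list=[loss])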