Skip to content

Commit

Permalink
[xdoctest] reformat example code with google style in No.297、298、302 (P…
Browse files Browse the repository at this point in the history
…addlePaddle#56861)

* 更改相关文件

* Update ir.py

* 恢复相关文件

* Update ir.py

* Delete python/paddle/incubate/optimizer/modelaverage.py

* Delete modelaverage.py

* 尝试恢复文件

* Revert "尝试恢复文件"

This reverts commit 8a263cf.

* Revert "恢复相关文件"

This reverts commit 24249b8.

* Revert "Revert "尝试恢复文件""

This reverts commit 1b833d6.

* Revert "Revert "Revert "尝试恢复文件"""

This reverts commit 64b3a81.

* Revert "Delete python/paddle/incubate/optimizer/modelaverage.py"

This reverts commit 6198629.

* Revert "更改相关文件"

This reverts commit a5ba675.

* Apply suggestions from code review

---------

Co-authored-by: Nyakku Shigure <[email protected]>
  • Loading branch information
2 people authored and BeingGod committed Sep 9, 2023
1 parent a431f76 commit d71d8f9
Show file tree
Hide file tree
Showing 3 changed files with 82 additions and 82 deletions.
72 changes: 36 additions & 36 deletions python/paddle/incubate/optimizer/functional/bfgs.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,46 +81,46 @@ def minimize_bfgs(
.. code-block:: python
:name: code-example1
# Example1: 1D Grid Parameters
import paddle
# Randomly simulate a batch of input data
inputs = paddle. normal(shape=(100, 1))
labels = inputs * 2.0
# define the loss function
def loss(w):
y = w * inputs
return paddle.nn.functional.square_error_cost(y, labels).mean()
# Initialize weight parameters
w = paddle.normal(shape=(1,))
# Call the bfgs method to solve the weight that makes the loss the smallest, and update the parameters
for epoch in range(0, 10):
# Call the bfgs method to optimize the loss, note that the third parameter returned represents the weight
w_update = paddle.incubate.optimizer.functional.minimize_bfgs(loss, w)[2]
# Use paddle.assign to update parameters in place
paddle. assign(w_update, w)
>>> # Example1: 1D Grid Parameters
>>> import paddle
>>> # Randomly simulate a batch of input data
>>> inputs = paddle.normal(shape=(100, 1))
>>> labels = inputs * 2.0
>>> # define the loss function
>>> def loss(w):
... y = w * inputs
... return paddle.nn.functional.square_error_cost(y, labels).mean()
>>> # Initialize weight parameters
>>> w = paddle.normal(shape=(1,))
>>> # Call the bfgs method to solve the weight that makes the loss the smallest, and update the parameters
>>> for epoch in range(0, 10):
... # Call the bfgs method to optimize the loss, note that the third parameter returned represents the weight
... w_update = paddle.incubate.optimizer.functional.minimize_bfgs(loss, w)[2]
... # Use paddle.assign to update parameters in place
... paddle.assign(w_update, w)
.. code-block:: python
:name: code-example2
# Example2: Multidimensional Grid Parameters
import paddle
def flatten(x):
return x. flatten()
def unflatten(x):
return x.reshape((2,2))
# Assume the network parameters are more than one dimension
def net(x):
assert len(x.shape) > 1
return x.square().mean()
# function to be optimized
def bfgs_f(flatten_x):
return net(unflatten(flatten_x))
x = paddle.rand([2,2])
for i in range(0, 10):
# Flatten x before using minimize_bfgs
x_update = paddle.incubate.optimizer.functional.minimize_bfgs(bfgs_f, flatten(x))[2]
# unflatten x_update, then update parameters
paddle. assign(unflatten(x_update), x)
>>> # Example2: Multidimensional Grid Parameters
>>> import paddle
>>> def flatten(x):
... return x.flatten()
>>> def unflatten(x):
... return x.reshape((2,2))
>>> # Assume the network parameters are more than one dimension
>>> def net(x):
... assert len(x.shape) > 1
... return x.square().mean()
>>> # function to be optimized
>>> def bfgs_f(flatten_x):
... return net(unflatten(flatten_x))
>>> x = paddle.rand([2,2])
>>> for i in range(0, 10):
... # Flatten x before using minimize_bfgs
... x_update = paddle.incubate.optimizer.functional.minimize_bfgs(bfgs_f, flatten(x))[2]
... # unflatten x_update, then update parameters
... paddle.assign(unflatten(x_update), x)
"""

if dtype not in ['float32', 'float64']:
Expand Down
72 changes: 36 additions & 36 deletions python/paddle/incubate/optimizer/functional/lbfgs.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,46 +82,46 @@ def minimize_lbfgs(
.. code-block:: python
:name: code-example1
# Example1: 1D Grid Parameters
import paddle
# Randomly simulate a batch of input data
inputs = paddle. normal(shape=(100, 1))
labels = inputs * 2.0
# define the loss function
def loss(w):
y = w * inputs
return paddle.nn.functional.square_error_cost(y, labels).mean()
# Initialize weight parameters
w = paddle.normal(shape=(1,))
# Call the bfgs method to solve the weight that makes the loss the smallest, and update the parameters
for epoch in range(0, 10):
# Call the bfgs method to optimize the loss, note that the third parameter returned represents the weight
w_update = paddle.incubate.optimizer.functional.minimize_bfgs(loss, w)[2]
# Use paddle.assign to update parameters in place
paddle. assign(w_update, w)
>>> # Example1: 1D Grid Parameters
>>> import paddle
>>> # Randomly simulate a batch of input data
>>> inputs = paddle.normal(shape=(100, 1))
>>> labels = inputs * 2.0
>>> # define the loss function
>>> def loss(w):
... y = w * inputs
... return paddle.nn.functional.square_error_cost(y, labels).mean()
>>> # Initialize weight parameters
>>> w = paddle.normal(shape=(1,))
>>> # Call the lbfgs method to solve the weight that makes the loss the smallest, and update the parameters
>>> for epoch in range(0, 10):
... # Call the lbfgs method to optimize the loss, note that the third parameter returned represents the weight
... w_update = paddle.incubate.optimizer.functional.minimize_lbfgs(loss, w)[2]
... # Use paddle.assign to update parameters in place
... paddle.assign(w_update, w)
.. code-block:: python
:name: code-example2
# Example2: Multidimensional Grid Parameters
import paddle
def flatten(x):
return x. flatten()
def unflatten(x):
return x.reshape((2,2))
# Assume the network parameters are more than one dimension
def net(x):
assert len(x.shape) > 1
return x.square().mean()
# function to be optimized
def bfgs_f(flatten_x):
return net(unflatten(flatten_x))
x = paddle.rand([2,2])
for i in range(0, 10):
# Flatten x before using minimize_bfgs
x_update = paddle.incubate.optimizer.functional.minimize_bfgs(bfgs_f, flatten(x))[2]
# unflatten x_update, then update parameters
paddle. assign(unflatten(x_update), x)
>>> # Example2: Multidimensional Grid Parameters
>>> import paddle
>>> def flatten(x):
... return x.flatten()
>>> def unflatten(x):
... return x.reshape((2,2))
>>> # Assume the network parameters are more than one dimension
>>> def net(x):
... assert len(x.shape) > 1
... return x.square().mean()
>>> # function to be optimized
>>> def bfgs_f(flatten_x):
... return net(unflatten(flatten_x))
>>> x = paddle.rand([2,2])
>>> for i in range(0, 10):
... # Flatten x before using minimize_lbfgs
... x_update = paddle.incubate.optimizer.functional.minimize_lbfgs(bfgs_f, flatten(x))[2]
... # unflatten x_update, then update parameters
... paddle.assign(unflatten(x_update), x)
"""
if dtype not in ['float32', 'float64']:
Expand Down
20 changes: 10 additions & 10 deletions python/paddle/incubate/passes/ir.py
Original file line number Diff line number Diff line change
Expand Up @@ -469,16 +469,16 @@ def RegisterPass(function=None, input_specs={}):
Examples:
.. code-block:: python
import paddle
from paddle.fluid.ir import RegisterPass
@RegisterPass
def multi_add_to_addn():
def pattern(x, y, z):
return paddle.add(paddle.add(x, y), z)
def replace(x, y, z):
return paddle.add_n([x, y, z])
return pattern, replace
>>> import paddle
>>> from paddle.fluid.ir import RegisterPass
>>> @RegisterPass
... def multi_add_to_addn():
... def pattern(x, y, z):
... return paddle.add(paddle.add(x, y), z)
... def replace(x, y, z):
... return paddle.add_n([x, y, z])
... return pattern, replace
"""

def _is_pass_pair(check_pair):
Expand Down

0 comments on commit d71d8f9

Please sign in to comment.