Compared with Paddle, PyTorch supports additional parameters; the specifics are as follows:
| reduction | reduction | Specifies the reduction applied to the output. |

### Conversion examples
#### size_average
size_average is True
```python
# PyTorch code
torch.nn.BCELoss(weight=w, size_average=True)

# Paddle code
paddle.nn.BCELoss(weight=w, reduction='mean')
```

size_average is False
```python
# PyTorch code
torch.nn.BCELoss(weight=w, size_average=False)

# Paddle code
paddle.nn.BCELoss(weight=w, reduction='sum')
```

#### reduce
reduce is True
```python
# PyTorch code
torch.nn.BCELoss(weight=w, reduce=True)

# Paddle code
paddle.nn.BCELoss(weight=w, reduction='mean')
```

reduce is False
```python
# PyTorch code
torch.nn.BCELoss(weight=w, reduce=False)

# Paddle code
paddle.nn.BCELoss(weight=w, reduction='none')
```

#### reduction
reduction is 'none'
```python
# PyTorch code
torch.nn.BCELoss(weight=w, reduction='none')

# Paddle code
paddle.nn.BCELoss(weight=w, reduction='none')
```
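
The three PyTorch arguments collapse into Paddle's single `reduction` argument by the same rule in every example above. The snippet below is a minimal sketch of that rule as a standalone function; `to_paddle_reduction` is a name chosen here for illustration and is not part of either library, and the defaults follow PyTorch's convention that both legacy flags behave as True when left unset.

```python
def to_paddle_reduction(size_average=None, reduce=None, reduction='mean'):
    """Sketch: fold PyTorch's legacy size_average/reduce flags into Paddle's reduction string."""
    if size_average is None and reduce is None:
        # No legacy flag given: pass the reduction string through unchanged.
        return reduction
    size_average = True if size_average is None else size_average
    reduce = True if reduce is None else reduce
    if not reduce:
        return 'none'  # reduce=False disables reduction entirely
    return 'mean' if size_average else 'sum'


# The cases documented above:
assert to_paddle_reduction(size_average=True) == 'mean'
assert to_paddle_reduction(size_average=False) == 'sum'
assert to_paddle_reduction(reduce=True) == 'mean'
assert to_paddle_reduction(reduce=False) == 'none'
assert to_paddle_reduction(reduction='none') == 'none'
```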

Compared with Paddle, PyTorch supports additional parameters; the specifics are as follows:
| pos_weight | pos_weight | Weight of positive examples. |

### Conversion examples
#### size_average
size_average is True
```python
# PyTorch code
torch.nn.BCEWithLogitsLoss(weight=w, size_average=True)

# Paddle code
paddle.nn.BCEWithLogitsLoss(weight=w, reduction='mean')
```

size_average is False
```python
# PyTorch code
torch.nn.BCEWithLogitsLoss(weight=w, size_average=False)

# Paddle code
paddle.nn.BCEWithLogitsLoss(weight=w, reduction='sum')
```

#### reduce
reduce is True
```python
# PyTorch code
torch.nn.BCEWithLogitsLoss(weight=w, reduce=True)

# Paddle code
paddle.nn.BCEWithLogitsLoss(weight=w, reduction='mean')
```

reduce is False
```python
# PyTorch code
torch.nn.BCEWithLogitsLoss(weight=w, reduce=False)

# Paddle code
paddle.nn.BCEWithLogitsLoss(weight=w, reduction='none')
```

#### reduction
reduction is 'none'
```python
# PyTorch code
torch.nn.BCEWithLogitsLoss(weight=w, reduction='none')

# Paddle code
paddle.nn.BCEWithLogitsLoss(weight=w, reduction='none')
```
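
To sanity-check what each converted `reduction` value does, a quick interactive run such as the one below can help. This is an illustrative sketch only: it assumes a working Paddle installation, and the logits and labels are made-up toy data.

```python
import paddle

logit = paddle.to_tensor([0.5, -1.0, 2.0], dtype='float32')
label = paddle.to_tensor([1.0, 0.0, 1.0], dtype='float32')

# 'none' keeps one loss value per element, 'mean' averages them, 'sum' adds them up.
for reduction in ('none', 'mean', 'sum'):
    loss_fn = paddle.nn.BCEWithLogitsLoss(reduction=reduction)
    print(reduction, loss_fn(logit, label).numpy())
```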

Compared with Paddle, PyTorch supports additional parameters; the specifics are as follows:
| reduction | reduction | Specifies the reduction applied to the output. |

### Conversion examples
#### size_average
size_average is True

```python
# PyTorch code
torch.nn.CosineEmbeddingLoss(size_average=True)

# Paddle code
paddle.nn.CosineEmbeddingLoss(reduction='mean')
```

size_average is False

```python
# PyTorch code
torch.nn.CosineEmbeddingLoss(size_average=False)

# Paddle code
paddle.nn.CosineEmbeddingLoss(reduction='sum')
```

#### reduce
reduce is True

```python
# PyTorch code
torch.nn.CosineEmbeddingLoss(reduce=True)

# Paddle code
paddle.nn.CosineEmbeddingLoss(reduction='mean')
```

reduce is False

```python
# PyTorch code
torch.nn.CosineEmbeddingLoss(reduce=False)

# Paddle code
paddle.nn.CosineEmbeddingLoss(reduction='none')
```

#### reduction
reduction is 'none'

```python
# PyTorch code
torch.nn.CosineEmbeddingLoss(reduction='none')

# Paddle code
paddle.nn.CosineEmbeddingLoss(reduction='none')
```

reduction is 'mean'

```python
# PyTorch code
torch.nn.CosineEmbeddingLoss(reduction='mean')

# Paddle code
paddle.nn.CosineEmbeddingLoss(reduction='mean')
```

reduction is 'sum'

```python
# PyTorch code
torch.nn.CosineEmbeddingLoss(reduction='sum')

# Paddle code
paddle.nn.CosineEmbeddingLoss(reduction='sum')
```
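
Unlike the BCE losses above, CosineEmbeddingLoss is called with two input tensors plus a per-pair label of 1 or -1; only the constructor arguments change during conversion, the call pattern stays the same. The snippet below is an illustrative sketch only, assuming a Paddle version that provides paddle.nn.CosineEmbeddingLoss and using toy data.

```python
import paddle

x1 = paddle.randn([4, 8])
x2 = paddle.randn([4, 8])
# 1 marks pairs that should be similar, -1 pairs that should be dissimilar.
pair_label = paddle.to_tensor([1, -1, 1, -1], dtype='int64')

# reduction='none' keeps one loss value per pair.
loss_fn = paddle.nn.CosineEmbeddingLoss(reduction='none')
print(loss_fn(x1, x2, pair_label).numpy())
```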

Compared with Paddle, PyTorch supports additional parameters; the specifics are as follows:
| - | axis | Dimension index along which softmax is computed. PyTorch has no such parameter; keep Paddle's default. |

### Conversion examples
#### size_average
size_average is True
```python
# PyTorch code
torch.nn.CrossEntropyLoss(weight=w, size_average=True)

# Paddle code
paddle.nn.CrossEntropyLoss(weight=w, reduction='mean')
```

size_average is False
```python
# PyTorch code
torch.nn.CrossEntropyLoss(weight=w, size_average=False)

# Paddle code
paddle.nn.CrossEntropyLoss(weight=w, reduction='sum')
```

#### reduce
reduce is True
```python
# PyTorch code
torch.nn.CrossEntropyLoss(weight=w, reduce=True)

# Paddle code
paddle.nn.CrossEntropyLoss(weight=w, reduction='mean')
```

reduce is False
```python
# PyTorch code
torch.nn.CrossEntropyLoss(weight=w, reduce=False)

# Paddle code
paddle.nn.CrossEntropyLoss(weight=w, reduction='none')
```

#### reduction
reduction is 'none'
```python
# PyTorch code
torch.nn.CrossEntropyLoss(weight=w, reduction='none')

# Paddle code
paddle.nn.CrossEntropyLoss(weight=w, reduction='none')
```
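
The extra axis argument noted in the table can normally be left at its default. For the common 2-D case of (batch, num_classes) logits, Paddle's default axis of -1 is the class dimension, which is the same dimension PyTorch reduces over. A minimal sketch, assuming a working Paddle installation and toy data:

```python
import paddle

logits = paddle.randn([3, 5])                         # (batch=3, num_classes=5)
labels = paddle.to_tensor([0, 2, 4], dtype='int64')   # one class index per sample

# With 2-D logits the default axis=-1 already points at the class dimension.
loss_fn = paddle.nn.CrossEntropyLoss(reduction='mean')
print(loss_fn(logits, labels).numpy())
```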

Compared with Paddle, PyTorch supports additional parameters; the specifics are as follows:

### Conversion examples
#### size_average
size_average is True

```python
# PyTorch code
torch.nn.HingeEmbeddingLoss(size_average=True)

# Paddle code
paddle.nn.HingeEmbeddingLoss(reduction='mean')
```

size_average is False

```python
# PyTorch code
torch.nn.HingeEmbeddingLoss(size_average=False)

# Paddle code
paddle.nn.HingeEmbeddingLoss(reduction='sum')
```

#### reduce
reduce is True

```python
# PyTorch code
torch.nn.HingeEmbeddingLoss(reduce=True)

# Paddle code
paddle.nn.HingeEmbeddingLoss(reduction='mean')
```

reduce is False

```python
# PyTorch code
torch.nn.HingeEmbeddingLoss(reduce=False)

# Paddle code
paddle.nn.HingeEmbeddingLoss(reduction='none')
```

#### reduction
reduction is 'none'

```python
# PyTorch code
torch.nn.HingeEmbeddingLoss(reduction='none')

# Paddle code
paddle.nn.HingeEmbeddingLoss(reduction='none')
```

reduction is 'mean'

```python
# PyTorch code
torch.nn.HingeEmbeddingLoss(reduction='mean')

# Paddle code
paddle.nn.HingeEmbeddingLoss(reduction='mean')
```

reduction is 'sum'

```python
# PyTorch code
torch.nn.HingeEmbeddingLoss(reduction='sum')

# Paddle code
paddle.nn.HingeEmbeddingLoss(reduction='sum')
```

Compared with Paddle, PyTorch supports additional parameters; the specifics are as follows:

### Conversion examples
#### size_average
size_average is True

```python
# PyTorch code
torch.nn.KLDivLoss(size_average=True)

# Paddle code
paddle.nn.KLDivLoss(reduction='mean')
```

size_average is False

```python
# PyTorch code
torch.nn.KLDivLoss(size_average=False)

# Paddle code
paddle.nn.KLDivLoss(reduction='sum')
```

#### reduce
reduce is True

```python
# PyTorch code
torch.nn.KLDivLoss(reduce=True)

# Paddle code
paddle.nn.KLDivLoss(reduction='mean')
```

reduce is False

```python
# PyTorch code
torch.nn.KLDivLoss(reduce=False)

# Paddle code
paddle.nn.KLDivLoss(reduction='none')
```

#### reduction
reduction is 'none'

```python
# PyTorch code
torch.nn.KLDivLoss(reduction='none')

# Paddle code
paddle.nn.KLDivLoss(reduction='none')
```

reduction is 'mean'

```python
# PyTorch code
torch.nn.KLDivLoss(reduction='mean')

# Paddle code
paddle.nn.KLDivLoss(reduction='mean')
```

reduction is 'sum'

```python
# PyTorch code
torch.nn.KLDivLoss(reduction='sum')

# Paddle code
paddle.nn.KLDivLoss(reduction='sum')
```
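
When porting call sites mechanically, it can be easier to rewrite a PyTorch-style keyword dict in one pass than to handle each case by hand. The sketch below is ours and purely illustrative (the function name convert_loss_kwargs is not from either library); it applies the same mapping rule used throughout this page and assumes, per PyTorch's deprecation note for these flags, that a legacy flag, when given, overrides an explicit reduction.

```python
def convert_loss_kwargs(torch_kwargs):
    """Sketch: turn PyTorch-style loss kwargs into Paddle-style kwargs."""
    kwargs = dict(torch_kwargs)
    size_average = kwargs.pop('size_average', None)
    reduce = kwargs.pop('reduce', None)
    reduction = kwargs.pop('reduction', 'mean')
    if size_average is not None or reduce is not None:
        # Legacy flags default to True when unset and determine the reduction.
        size_average = True if size_average is None else size_average
        reduce = True if reduce is None else reduce
        reduction = 'none' if not reduce else ('mean' if size_average else 'sum')
    kwargs['reduction'] = reduction
    return kwargs


print(convert_loss_kwargs({'reduce': False}))        # {'reduction': 'none'}
print(convert_loss_kwargs({'size_average': False}))  # {'reduction': 'sum'}
print(convert_loss_kwargs({'reduction': 'sum'}))     # {'reduction': 'sum'}
```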