Skip to content

Commit af33f43

Browse files
authored
* homework 3, part1 + sem 05
1 parent 5bd548d commit af33f43

File tree

4 files changed

+965
-0
lines changed

4 files changed

+965
-0
lines changed

homework03/homework03_part1.ipynb

Lines changed: 174 additions & 0 deletions
Large diffs are not rendered by default.

week05_dense_prediction/README.md

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
2+
## Practice
3+
4+
[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/yandexdataschool/Practical_DL/blob/spring2019/week05_dense_prediction/seminar_dense_prediction.ipynb)
5+
6+
Open seminar_dense_prediction.ipynb and follow the instructions there.
7+
8+
9+
Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
import torch
2+
import torch.nn as nn
3+
from torch.autograd import Variable
4+
5+
6+
def init_layer(layer, weight_init=None, bias_init=None):
    """Overwrite a layer's ``weight``/``bias`` tensors with given values.

    Each of ``weight_init`` / ``bias_init`` may be anything accepted by
    ``torch.FloatTensor`` (nested lists, numpy arrays, ...); passing
    ``None`` leaves that parameter at its default initialization.
    The mutated layer is returned so calls can be chained.
    """
    for attr_name, init_value in (("weight", weight_init), ("bias", bias_init)):
        if init_value is not None:
            # Replace the parameter's data in place, keeping the Parameter
            # object itself (and thus any optimizer references) intact.
            getattr(layer, attr_name).data = torch.FloatTensor(init_value)
    return layer
12+
13+
14+
def init_bn_statistics(layer, mean_init=None, var_init=None):
    """Set a batch-norm layer's running statistics to given values.

    ``mean_init`` / ``var_init`` are converted with ``torch.FloatTensor``
    and assigned to ``running_mean`` / ``running_var`` respectively;
    ``None`` leaves the corresponding buffer untouched.  Returns the
    (mutated) layer for chaining.
    """
    for buffer_name, init_value in (("running_mean", mean_init), ("running_var", var_init)):
        if init_value is not None:
            # Plain attribute assignment; nn.Module routes this into the
            # registered buffer of the same name.
            setattr(layer, buffer_name, torch.FloatTensor(init_value))
    return layer
20+
21+
22+
def Linear(in_features, out_features, bias=True, weight_init=None, bias_init=None):
    """Construct an ``nn.Linear`` with optional explicit parameter values.

    ``weight_init`` / ``bias_init`` are converted via ``torch.FloatTensor``
    and written into the layer's parameters; ``None`` keeps PyTorch's
    default random initialization.
    """
    module = nn.Linear(in_features, out_features, bias)
    if weight_init is not None:
        module.weight.data = torch.FloatTensor(weight_init)
    if bias_init is not None:
        module.bias.data = torch.FloatTensor(bias_init)
    return module
25+
26+
27+
def Conv2d(in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1,
           bias=True, weight_init=None, bias_init=None):
    """Construct an ``nn.Conv2d`` with optional explicit parameter values.

    All geometry arguments are forwarded to ``nn.Conv2d`` positionally.
    ``weight_init`` / ``bias_init`` are converted via ``torch.FloatTensor``
    and written into the layer's parameters; ``None`` keeps the defaults.
    """
    module = nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding,
                       dilation, groups, bias)
    if weight_init is not None:
        module.weight.data = torch.FloatTensor(weight_init)
    if bias_init is not None:
        module.bias.data = torch.FloatTensor(bias_init)
    return module
32+
33+
34+
def BatchNorm1d(num_features, eps=1e-05, momentum=0.1, affine=True, weight_init=None,
                bias_init=None, mean_init=None, var_init=None):
    """Construct an ``nn.BatchNorm1d`` with optional explicit parameters
    and running statistics.

    ``weight_init``/``bias_init`` set the affine parameters (requires
    ``affine=True``); ``mean_init``/``var_init`` set the running
    statistics buffers.  ``None`` keeps PyTorch's defaults for that slot.
    """
    module = nn.BatchNorm1d(num_features, eps, momentum, affine)
    if weight_init is not None:
        module.weight.data = torch.FloatTensor(weight_init)
    if bias_init is not None:
        module.bias.data = torch.FloatTensor(bias_init)
    if mean_init is not None:
        module.running_mean = torch.FloatTensor(mean_init)
    if var_init is not None:
        module.running_var = torch.FloatTensor(var_init)
    return module
39+
40+
41+
def BatchNorm2d(num_features, eps=1e-05, momentum=0.1, affine=True, weight_init=None,
                bias_init=None, mean_init=None, var_init=None):
    """Construct an ``nn.BatchNorm2d`` with optional explicit parameters
    and running statistics.

    ``weight_init``/``bias_init`` set the affine parameters (requires
    ``affine=True``); ``mean_init``/``var_init`` set the running
    statistics buffers.  ``None`` keeps PyTorch's defaults for that slot.
    """
    module = nn.BatchNorm2d(num_features, eps, momentum, affine)
    if weight_init is not None:
        module.weight.data = torch.FloatTensor(weight_init)
    if bias_init is not None:
        module.bias.data = torch.FloatTensor(bias_init)
    if mean_init is not None:
        module.running_mean = torch.FloatTensor(mean_init)
    if var_init is not None:
        module.running_var = torch.FloatTensor(var_init)
    return module
46+

week05_dense_prediction/seminar_dense_prediction.ipynb

Lines changed: 736 additions & 0 deletions
Large diffs are not rendered by default.

0 commit comments

Comments
 (0)