Kernel: Python 3 (ipykernel)
In [4]:
In [5]:
Out[5]:
01
In [2]:
Is it a bird?
In [3]:
Out[3]:
(1,
'https://www.superiorwallpapers.com/download/a-beautiful-red-bird-on-a-branch-bird-conservation-1600x1070.jpg')
In [4]:
In [5]:
Out[5]:
All models need numbers as their inputs. Images are already stored as numbers in a computer — here's a viewer so you can see what that looks like: PixSpy.
In [6]:
In [7]:
In [8]:
Out[8]:
In [9]:
Out[9]:
/home/jhoward/git/fastai/fastai/vision/learner.py:275: UserWarning: `cnn_learner` has been renamed to `vision_learner` -- please update your code
warn("`cnn_learner` has been renamed to `vision_learner` -- please update your code")
---------------------------------------------------------------------------
KeyboardInterrupt Traceback (most recent call last)
Input In [9], in <cell line: 2>()
1 learn = cnn_learner(dls, resnet18, metrics=error_rate)
----> 2 learn.fine_tune(3)
File ~/git/fastai/fastai/callback/schedule.py:166
, in fine_tune(self, epochs, base_lr, freeze_epochs, lr_mult, pct_start, div, **kwargs)
164 "Fine tune with `Learner.freeze` for `freeze_epochs`, then with `Learner.unfreeze` for `epochs`, using discriminative LR."
165 self.freeze()
--> 166 self.fit_one_cycle(freeze_epochs, slice(base_lr), pct_start=0.99, **kwargs)
167 base_lr /= 2
168 self.unfreeze()
File ~/git/fastai/fastai/callback/schedule.py:121
, in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
118 lr_max = np.array([h['lr'] for h in self.opt.hypers])
119 scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
120 'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 121 self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
File ~/git/fastai/fastai/learner.py:229
, in Learner.fit(self, n_epoch, lr, wd, cbs, reset_opt)
227 self.opt.set_hypers(lr=self.lr if lr is None else lr)
228 self.n_epoch = n_epoch
--> 229 self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
File ~/git/fastai/fastai/learner.py:169
, in Learner._with_events(self, f, event_type, ex, final)
168 def _with_events(self, f, event_type, ex, final=noop):
--> 169 try: self(f'before_{event_type}'); f()
170 except ex: self(f'after_cancel_{event_type}')
171 self(f'after_{event_type}'); final()
File ~/git/fastai/fastai/learner.py:220
, in Learner._do_fit(self)
218 for epoch in range(self.n_epoch):
219 self.epoch=epoch
--> 220 self._with_events(self._do_epoch, 'epoch', CancelEpochException)
File ~/git/fastai/fastai/learner.py:169
, in Learner._with_events(self, f, event_type, ex, final)
168 def _with_events(self, f, event_type, ex, final=noop):
--> 169 try: self(f'before_{event_type}'); f()
170 except ex: self(f'after_cancel_{event_type}')
171 self(f'after_{event_type}'); final()
File ~/git/fastai/fastai/learner.py:214
, in Learner._do_epoch(self)
213 def _do_epoch(self):
--> 214 self._do_epoch_train()
215 self._do_epoch_validate()
File ~/git/fastai/fastai/learner.py:206
, in Learner._do_epoch_train(self)
204 def _do_epoch_train(self):
205 self.dl = self.dls.train
--> 206 self._with_events(self.all_batches, 'train', CancelTrainException)
File ~/git/fastai/fastai/learner.py:169
, in Learner._with_events(self, f, event_type, ex, final)
168 def _with_events(self, f, event_type, ex, final=noop):
--> 169 try: self(f'before_{event_type}'); f()
170 except ex: self(f'after_cancel_{event_type}')
171 self(f'after_{event_type}'); final()
File ~/git/fastai/fastai/learner.py:175
, in Learner.all_batches(self)
173 def all_batches(self):
174 self.n_iter = len(self.dl)
--> 175 for o in enumerate(self.dl): self.one_batch(*o)
File ~/git/fastai/fastai/learner.py:202
, in Learner.one_batch(self, i, b)
200 b = self._set_device(b)
201 self._split(b)
--> 202 self._with_events(self._do_one_batch, 'batch', CancelBatchException)
File ~/git/fastai/fastai/learner.py:169
, in Learner._with_events(self, f, event_type, ex, final)
168 def _with_events(self, f, event_type, ex, final=noop):
--> 169 try: self(f'before_{event_type}'); f()
170 except ex: self(f'after_cancel_{event_type}')
171 self(f'after_{event_type}'); final()
File ~/git/fastai/fastai/learner.py:181
, in Learner._do_one_batch(self)
180 def _do_one_batch(self):
--> 181 self.pred = self.model(*self.xb)
182 self('after_pred')
183 if len(self.yb):
File ~/mambaforge/lib/python3.9/site-packages/torch/nn/modules/module.py:1110
, in Module._call_impl(self, *input, **kwargs)
1106 # If we don't have any hooks, we want to skip the rest of the logic in
1107 # this function, and just call forward.
1108 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
1109 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1110 return forward_call(*input, **kwargs)
1111 # Do not call functions when jit is used
1112 full_backward_hooks, non_full_backward_hooks = [], []
File ~/mambaforge/lib/python3.9/site-packages/torch/nn/modules/container.py:141
, in Sequential.forward(self, input)
139 def forward(self, input):
140 for module in self:
--> 141 input = module(input)
142 return input
File ~/mambaforge/lib/python3.9/site-packages/torch/nn/modules/module.py:1110
, in Module._call_impl(self, *input, **kwargs)
1106 # If we don't have any hooks, we want to skip the rest of the logic in
1107 # this function, and just call forward.
1108 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
1109 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1110 return forward_call(*input, **kwargs)
1111 # Do not call functions when jit is used
1112 full_backward_hooks, non_full_backward_hooks = [], []
File ~/mambaforge/lib/python3.9/site-packages/torch/nn/modules/container.py:141
, in Sequential.forward(self, input)
139 def forward(self, input):
140 for module in self:
--> 141 input = module(input)
142 return input
File ~/mambaforge/lib/python3.9/site-packages/torch/nn/modules/module.py:1110
, in Module._call_impl(self, *input, **kwargs)
1106 # If we don't have any hooks, we want to skip the rest of the logic in
1107 # this function, and just call forward.
1108 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
1109 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1110 return forward_call(*input, **kwargs)
1111 # Do not call functions when jit is used
1112 full_backward_hooks, non_full_backward_hooks = [], []
File ~/mambaforge/lib/python3.9/site-packages/torch/nn/modules/conv.py:447
, in Conv2d.forward(self, input)
446 def forward(self, input: Tensor) -> Tensor:
--> 447 return self._conv_forward(input, self.weight, self.bias)
File ~/mambaforge/lib/python3.9/site-packages/torch/nn/modules/conv.py:443
, in Conv2d._conv_forward(self, input, weight, bias)
439 if self.padding_mode != 'zeros':
440 return F.conv2d(F.pad(input, self._reversed_padding_repeated_twice, mode=self.padding_mode),
441 weight, bias, self.stride,
442 _pair(0), self.dilation, self.groups)
--> 443 return F.conv2d(input, weight, bias, self.stride,
444 self.padding, self.dilation, self.groups)
File ~/git/fastai/fastai/torch_core.py:360
, in TensorBase.__torch_function__(self, func, types, args, kwargs)
358 convert=False
359 if _torch_handled(args, self._opt, func): convert,types = type(self),(torch.Tensor,)
--> 360 res = super().__torch_function__(func, types, args=args, kwargs=kwargs)
361 if convert: res = convert(res)
362 if isinstance(res, TensorBase): res.set_meta(self, as_copy=True)
File ~/mambaforge/lib/python3.9/site-packages/torch/_tensor.py:1142
, in Tensor.__torch_function__(cls, func, types, args, kwargs)
1139 return NotImplemented
1141 with _C.DisableTorchFunction():
-> 1142 ret = func(*args, **kwargs)
1143 if func in get_default_nowrap_functions():
1144 return ret
KeyboardInterrupt:
In [ ]:
What happened?
A normal computer program:
In [ ]:
In [ ]:
In [ ]:
In [ ]:
In [ ]:
In [ ]:
Not just for image recognition
In [ ]:
In [ ]:
Tabular analysis — income prediction
In [ ]:
In [ ]:
In [ ]:
Collaborative filtering — recommendation system
In [ ]:
In [ ]:
In [ ]:
In [ ]:
How do we fit a function to data?
In [10]:
Out[10]:
In [11]:
In [12]:
Out[12]:
10.75
In [13]:
In [14]:
Out[14]:
10.75
In [15]:
Out[15]:
In [16]:
In [17]:
Out[17]:
In [18]:
Out[18]:
interactive(children=(FloatSlider(value=1.5, description='a', max=4.5, min=-1.5), FloatSlider(value=1.5, descr…
In [19]:
In [20]:
Out[20]:
interactive(children=(FloatSlider(value=1.5, description='a', max=4.5, min=-1.5), FloatSlider(value=1.5, descr…
In [21]:
In [22]:
Out[22]:
tensor(5.8336, dtype=torch.float64)
In [23]:
Out[23]:
tensor([1.5000, 1.5000, 1.5000], requires_grad=True)
In [24]:
Out[24]:
tensor(5.8336, dtype=torch.float64, grad_fn=<MeanBackward0>)
In [25]:
In [26]:
Out[26]:
tensor([-5.1419, 2.8472, -1.1009])
In [27]:
Out[27]:
loss=5.49
In [28]:
Out[28]:
step=0; loss=5.49
step=1; loss=4.90
step=2; loss=4.23
step=3; loss=3.67
step=4; loss=3.33
In [29]:
In [30]:
Out[30]:
In [31]:
Out[31]:
interactive(children=(FloatSlider(value=1.5, description='m', max=4.5, min=-1.5), FloatSlider(value=1.5, descr…
In [32]:
In [33]:
Out[33]:
interactive(children=(FloatSlider(value=-1.5, description='m1', max=1.5, min=-4.5), FloatSlider(value=-1.5, de…
How to draw an owl
Overfitting
In [34]:
In [36]:
Out[36]:
In [37]:
Out[37]:
In [38]:
Out[38]: