Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
fastai
GitHub Repository: fastai/course22
Path: blob/master/slides/birds.ipynb
807 views
Kernel: Python 3 (ipykernel)
# Quick f-string demo: zero-pad an integer to two digits.
i = 1
print(f'{i:02d}')
01
from fastbook import * plt.rc('figure', dpi=90) def plot_function(f, title=None, min=-2.1, max=2.1, color='r', ylim=None): x = torch.linspace(min,max, 100)[:,None] if ylim: plt.ylim(ylim) plt.plot(x, f(x), color) if title is not None: plt.title(title)

Is it a bird?

https://xkcd.com/1425/
urls = search_images_ddg('bird photos', max_images=1) len(urls),urls[0]
(1, 'https://www.superiorwallpapers.com/download/a-beautiful-red-bird-on-a-branch-bird-conservation-1600x1070.jpg')
dest = Path('bird.jpg') if not dest.exists(): download_url(urls[0], dest, show_progress=False)
im = Image.open(dest) im.to_thumb(256,256)
Image in a Jupyter notebook

All models need numbers as their inputs. Images are already stored as numbers in a computer -- here's a viewer so you can see what that looks like: PixSpy.

searches = 'forest','bird' path = Path('bird_or_not') if not path.exists(): for o in searches: dest = (path/o) dest.mkdir(exist_ok=True) results = search_images_ddg(f'{o} photo') download_images(dest, urls=results[:200]) resize_images(dest, max_size=400, dest=dest)
failed = verify_images(get_image_files(path)) failed.map(Path.unlink);
dls = DataBlock( blocks=(ImageBlock, CategoryBlock), get_items=get_image_files, splitter=RandomSplitter(valid_pct=0.2, seed=42), get_y=parent_label, item_tfms=[Resize(192, method='squish')] ).dataloaders(path) dls.show_batch(max_n=6)
Image in a Jupyter notebook
learn = cnn_learner(dls, resnet18, metrics=error_rate) learn.fine_tune(3)
/home/jhoward/git/fastai/fastai/vision/learner.py:275: UserWarning: `cnn_learner` has been renamed to `vision_learner` -- please update your code warn("`cnn_learner` has been renamed to `vision_learner` -- please update your code")
--------------------------------------------------------------------------- KeyboardInterrupt Traceback (most recent call last) Input In [9], in <cell line: 2>() 1 learn = cnn_learner(dls, resnet18, metrics=error_rate) ----> 2 learn.fine_tune(3)
File ~/git/fastai/fastai/callback/schedule.py:166, in fine_tune(self, epochs, base_lr, freeze_epochs, lr_mult, pct_start, div, **kwargs) 164 "Fine tune with `Learner.freeze` for `freeze_epochs`, then with `Learner.unfreeze` for `epochs`, using discriminative LR." 165 self.freeze() --> 166 self.fit_one_cycle(freeze_epochs, slice(base_lr), pct_start=0.99, **kwargs) 167 base_lr /= 2 168 self.unfreeze()
File ~/git/fastai/fastai/callback/schedule.py:121, in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt) 118 lr_max = np.array([h['lr'] for h in self.opt.hypers]) 119 scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final), 120 'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))} --> 121 self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
File ~/git/fastai/fastai/learner.py:229, in Learner.fit(self, n_epoch, lr, wd, cbs, reset_opt) 227 self.opt.set_hypers(lr=self.lr if lr is None else lr) 228 self.n_epoch = n_epoch --> 229 self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
File ~/git/fastai/fastai/learner.py:169, in Learner._with_events(self, f, event_type, ex, final) 168 def _with_events(self, f, event_type, ex, final=noop): --> 169 try: self(f'before_{event_type}'); f() 170 except ex: self(f'after_cancel_{event_type}') 171 self(f'after_{event_type}'); final()
File ~/git/fastai/fastai/learner.py:220, in Learner._do_fit(self) 218 for epoch in range(self.n_epoch): 219 self.epoch=epoch --> 220 self._with_events(self._do_epoch, 'epoch', CancelEpochException)
File ~/git/fastai/fastai/learner.py:169, in Learner._with_events(self, f, event_type, ex, final) 168 def _with_events(self, f, event_type, ex, final=noop): --> 169 try: self(f'before_{event_type}'); f() 170 except ex: self(f'after_cancel_{event_type}') 171 self(f'after_{event_type}'); final()
File ~/git/fastai/fastai/learner.py:214, in Learner._do_epoch(self) 213 def _do_epoch(self): --> 214 self._do_epoch_train() 215 self._do_epoch_validate()
File ~/git/fastai/fastai/learner.py:206, in Learner._do_epoch_train(self) 204 def _do_epoch_train(self): 205 self.dl = self.dls.train --> 206 self._with_events(self.all_batches, 'train', CancelTrainException)
File ~/git/fastai/fastai/learner.py:169, in Learner._with_events(self, f, event_type, ex, final) 168 def _with_events(self, f, event_type, ex, final=noop): --> 169 try: self(f'before_{event_type}'); f() 170 except ex: self(f'after_cancel_{event_type}') 171 self(f'after_{event_type}'); final()
File ~/git/fastai/fastai/learner.py:175, in Learner.all_batches(self) 173 def all_batches(self): 174 self.n_iter = len(self.dl) --> 175 for o in enumerate(self.dl): self.one_batch(*o)
File ~/git/fastai/fastai/learner.py:202, in Learner.one_batch(self, i, b) 200 b = self._set_device(b) 201 self._split(b) --> 202 self._with_events(self._do_one_batch, 'batch', CancelBatchException)
File ~/git/fastai/fastai/learner.py:169, in Learner._with_events(self, f, event_type, ex, final) 168 def _with_events(self, f, event_type, ex, final=noop): --> 169 try: self(f'before_{event_type}'); f() 170 except ex: self(f'after_cancel_{event_type}') 171 self(f'after_{event_type}'); final()
File ~/git/fastai/fastai/learner.py:181, in Learner._do_one_batch(self) 180 def _do_one_batch(self): --> 181 self.pred = self.model(*self.xb) 182 self('after_pred') 183 if len(self.yb):
File ~/mambaforge/lib/python3.9/site-packages/torch/nn/modules/module.py:1110, in Module._call_impl(self, *input, **kwargs) 1106 # If we don't have any hooks, we want to skip the rest of the logic in 1107 # this function, and just call forward. 1108 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks 1109 or _global_forward_hooks or _global_forward_pre_hooks): -> 1110 return forward_call(*input, **kwargs) 1111 # Do not call functions when jit is used 1112 full_backward_hooks, non_full_backward_hooks = [], []
File ~/mambaforge/lib/python3.9/site-packages/torch/nn/modules/container.py:141, in Sequential.forward(self, input) 139 def forward(self, input): 140 for module in self: --> 141 input = module(input) 142 return input
File ~/mambaforge/lib/python3.9/site-packages/torch/nn/modules/module.py:1110, in Module._call_impl(self, *input, **kwargs) 1106 # If we don't have any hooks, we want to skip the rest of the logic in 1107 # this function, and just call forward. 1108 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks 1109 or _global_forward_hooks or _global_forward_pre_hooks): -> 1110 return forward_call(*input, **kwargs) 1111 # Do not call functions when jit is used 1112 full_backward_hooks, non_full_backward_hooks = [], []
File ~/mambaforge/lib/python3.9/site-packages/torch/nn/modules/container.py:141, in Sequential.forward(self, input) 139 def forward(self, input): 140 for module in self: --> 141 input = module(input) 142 return input
File ~/mambaforge/lib/python3.9/site-packages/torch/nn/modules/module.py:1110, in Module._call_impl(self, *input, **kwargs) 1106 # If we don't have any hooks, we want to skip the rest of the logic in 1107 # this function, and just call forward. 1108 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks 1109 or _global_forward_hooks or _global_forward_pre_hooks): -> 1110 return forward_call(*input, **kwargs) 1111 # Do not call functions when jit is used 1112 full_backward_hooks, non_full_backward_hooks = [], []
File ~/mambaforge/lib/python3.9/site-packages/torch/nn/modules/conv.py:447, in Conv2d.forward(self, input) 446 def forward(self, input: Tensor) -> Tensor: --> 447 return self._conv_forward(input, self.weight, self.bias)
File ~/mambaforge/lib/python3.9/site-packages/torch/nn/modules/conv.py:443, in Conv2d._conv_forward(self, input, weight, bias) 439 if self.padding_mode != 'zeros': 440 return F.conv2d(F.pad(input, self._reversed_padding_repeated_twice, mode=self.padding_mode), 441 weight, bias, self.stride, 442 _pair(0), self.dilation, self.groups) --> 443 return F.conv2d(input, weight, bias, self.stride, 444 self.padding, self.dilation, self.groups)
File ~/git/fastai/fastai/torch_core.py:360, in TensorBase.__torch_function__(self, func, types, args, kwargs) 358 convert=False 359 if _torch_handled(args, self._opt, func): convert,types = type(self),(torch.Tensor,) --> 360 res = super().__torch_function__(func, types, args=args, kwargs=kwargs) 361 if convert: res = convert(res) 362 if isinstance(res, TensorBase): res.set_meta(self, as_copy=True)
File ~/mambaforge/lib/python3.9/site-packages/torch/_tensor.py:1142, in Tensor.__torch_function__(cls, func, types, args, kwargs) 1139 return NotImplemented 1141 with _C.DisableTorchFunction(): -> 1142 ret = func(*args, **kwargs) 1143 if func in get_default_nowrap_functions(): 1144 return ret
KeyboardInterrupt:
is_bird,_,probs = learn.predict(PILImage.create('bird.jpg')) print(f"This is a: {is_bird}.") print(f"Probability it's a bird: {probs[0]:.4f}")

What happened?

A normal computer program:

# Graphviz sketch of a classical program: inputs -> program -> results.
gv2('''program[shape=box3d width=2 height=1] inputs->program->results''')
# A model additionally takes weights (parameters) as an input.
gv2('''model[shape=box3d width=2 height=1] inputs->model->results; weights->model''')
# Training loop: a loss compares results to targets and updates the weights.
gv2('''ordering=in model[shape=box3d width=1.5 height=1] inputs->model->results; weights->model; results->loss loss->weights[constraint=false label=update]''')
# Once trained, the weights are fixed parts of the model...
gv2('''model[shape=box3d width=2 height=1] inputs->model->results''')
# ...so the trained model can be used like any ordinary program.
gv2('''program[shape=box3d width=2 height=1] inputs->program->results''')
# Silence warning output for the remainder of the notebook.
warnings.filterwarnings("ignore")

Not just for image recognition

# Semantic segmentation on the tiny CamVid sample. Label masks live in
# 'labels/' named '<stem>_P<ext>'; class codes come from codes.txt.
path = untar_data(URLs.CAMVID_TINY)
dls = SegmentationDataLoaders.from_label_func(
    path,
    bs=8,
    fnames=get_image_files(path/"images"),
    label_func=lambda o: path/'labels'/f'{o.stem}_P{o.suffix}',
    codes=np.loadtxt(path/'codes.txt', dtype=str),
)
learn = unet_learner(dls, resnet34)
learn.fine_tune(8)
learn.show_results(max_n=3, figsize=(7,8))

Tabular analysis - income prediction

# Tabular classification: predict the salary bracket from census data.
from fastai.tabular.all import *

path = untar_data(URLs.ADULT_SAMPLE)
dls = TabularDataLoaders.from_csv(
    path/'adult.csv',
    path=path,
    y_names="salary",
    cat_names=['workclass', 'education', 'marital-status', 'occupation',
               'relationship', 'race'],
    cont_names=['age', 'fnlwgt', 'education-num'],
    procs=[Categorify, FillMissing, Normalize],
)
dls.show_batch()
learn = tabular_learner(dls, metrics=accuracy)
learn.fit_one_cycle(2)

Collaborative filtering - recommendation system

# Collaborative filtering: predict movie ratings from (user, movie) pairs.
from fastai.collab import *

path = untar_data(URLs.ML_SAMPLE)
dls = CollabDataLoaders.from_csv(path/'ratings.csv')
dls.show_batch()
# y_range extends past the data's rating scale -- presumably so the sigmoid
# output can actually reach the endpoints; verify against fastai docs.
learn = collab_learner(dls, y_range=(0.5,5.5))
learn.fine_tune(10)
learn.show_results()

How do we fit a function to data?

def f(x): return 3*x**2 + 2*x + 1 plot_function(f, "$3x^2 + 2x + 1$")
Image in a Jupyter notebook
def quad(a, b, c, x):
    "Evaluate the quadratic a*x^2 + b*x + c at `x`."
    return a*x**2 + b*x + c

quad(3, 2, 1, 1.5)
10.75
from functools import partial


def mk_quad(a, b, c):
    "Fix coefficients a, b, c; return a one-argument quadratic of x."
    return partial(quad, a, b, c)

f = mk_quad(3, 2, 1)
f(1.5)
10.75
plot_function(f)
Image in a Jupyter notebook
from numpy.random import normal,seed,uniform np.random.seed(42) def noise(x, scale): return normal(scale=scale, size=x.shape) def add_noise(x, mult, add): return x * (1+noise(x,mult)) + noise(x,add)
x = torch.linspace(-2, 2, steps=20)[:,None] y = add_noise(f(x), 0.3, 1.5) plt.scatter(x,y);
Image in a Jupyter notebook
from ipywidgets import interact @interact(a=1.5, b=1.5, c=1.5) def plot_quad(a, b, c): plt.scatter(x,y) plot_function(mk_quad(a,b,c), ylim=(-3,12))
interactive(children=(FloatSlider(value=1.5, description='a', max=4.5, min=-1.5), FloatSlider(value=1.5, descr…
def mse(preds, acts): return ((preds-acts)**2).mean()
@interact(a=1.5, b=1.5, c=1.5) def plot_quad(a, b, c): f = mk_quad(a,b,c) plt.scatter(x,y) loss = mse(f(x), y) plot_function(f, ylim=(-3,12), title=f"MSE: {loss:.2f}")
interactive(children=(FloatSlider(value=1.5, description='a', max=4.5, min=-1.5), FloatSlider(value=1.5, descr…
def quad_mse(params):
    "MSE of the quadratic with coefficients `params` against the data (x, y)."
    candidate = mk_quad(*params)
    return mse(candidate(x), y)

quad_mse([1.5, 1.5, 1.5])
tensor(5.8336, dtype=torch.float64)
abc = torch.tensor([1.5,1.5,1.5]) abc.requires_grad_()
tensor([1.5000, 1.5000, 1.5000], requires_grad=True)
loss = quad_mse(abc) loss
tensor(5.8336, dtype=torch.float64, grad_fn=<MeanBackward0>)
# Backward pass: fills abc.grad with d(loss)/d(abc).
loss.backward()
# The gradient of the loss with respect to each coefficient.
abc.grad
tensor([-5.1419, 2.8472, -1.1009])
with torch.no_grad(): abc -= abc.grad*0.01 loss = quad_mse(abc) print(f'loss={loss:.2f}')
loss=5.49
for i in range(5): loss = quad_mse(abc) loss.backward() with torch.no_grad(): abc -= abc.grad*0.01 print(f'step={i}; loss={loss:.2f}')
step=0; loss=5.49 step=1; loss=4.90 step=2; loss=4.23 step=3; loss=3.67 step=4; loss=3.33
def rectified_linear(m,b,x): y = m*x+b return torch.clip(y, 0.)
plot_function(partial(rectified_linear, 1,1))
Image in a Jupyter notebook
@interact(m=1.5, b=1.5) def plot_relu(m, b): plot_function(partial(rectified_linear, m,b), ylim=(-1,4))
interactive(children=(FloatSlider(value=1.5, description='m', max=4.5, min=-1.5), FloatSlider(value=1.5, descr…
def double_relu(m1,b1,m2,b2,x): return rectified_linear(m1,b1,x) + rectified_linear(m2,b2,x)
@interact(m1=-1.5, b1=-1.5, m2=1.5, b2=1.5) def plot_double_relu(m1, b1, m2, b2): plot_function(partial(double_relu, m1,b1,m2,b2), ylim=(-1,6))
interactive(children=(FloatSlider(value=-1.5, description='m1', max=1.5, min=-4.5), FloatSlider(value=-1.5, de…

How to draw an owl (embedded image: image.png)

Overfitting

from sklearn.linear_model import LinearRegression from sklearn.preprocessing import PolynomialFeatures from sklearn.pipeline import make_pipeline def plot_poly(degree): model = make_pipeline(PolynomialFeatures(degree), LinearRegression()) model.fit(x, y) plt.scatter(x,y) plot_function(model.predict)
plot_poly(1)
Image in a Jupyter notebook
plot_poly(10)
Image in a Jupyter notebook
plot_poly(2) plot_function(f, color='b')
Image in a Jupyter notebook

fin -