In [1]:
import jax.numpy as jnp
import plotly.express as px
from plotly.subplots import make_subplots
import jax
import numpy as np
from datasets import mnist
import plotly.graph_objects as go
In [2]:
train_images, train_labels, test_images, test_labels = mnist()
train_images = train_images.astype(jnp.float32)
test_images = test_images.astype(jnp.float32)
train_labels = jnp.asarray(train_labels, dtype=jnp.int32)
test_labels = jnp.asarray(test_labels, dtype=jnp.int32)
In [3]:
# keep only a small subset of the training set; my laptop is not very powerful
train_images = train_images[:100]
train_images.shape
Out[3]:
(100, 784)
In [4]:
def visualize_images(images_tensor):
    img = images_tensor.reshape(-1, 28, 28)
    fig = px.imshow(img, binary_string=False, facet_col=0, facet_col_wrap=5)
    # blank out the default "facet_col=i" annotation above each image
    item_map = {str(i): "" for i in range(img.shape[0])}
    fig.for_each_annotation(lambda a: a.update(text=item_map[a.text.split("=")[1]]))
    fig.show()
In [5]:
visualize_images(train_images[0:10])
In [6]:
# scale factor for the random weight initialization
eta = 0.05
hidden_units = 1024
net_parameters = {
    'w0': np.random.randn(784, hidden_units) * eta,
    'w1': np.random.randn(hidden_units, hidden_units) * eta,
    'w2': np.random.randn(hidden_units, hidden_units) * eta,
    'w3': np.random.randn(hidden_units, hidden_units) * eta,
    'w4': np.random.randn(hidden_units, hidden_units) * eta,
    'w5': np.random.randn(hidden_units, hidden_units) * eta,
    'w6': np.random.randn(hidden_units, hidden_units) * eta,
    'w7': np.random.randn(hidden_units, 784) * eta,
}
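As an aside, the same initialization could be written with JAX's explicit PRNG so runs are reproducible. This is only a sketch with an arbitrary seed and a hypothetical name (net_parameters_alt); the cells below keep using the NumPy-initialized net_parameters.

key = jax.random.PRNGKey(0)   # arbitrary, illustrative seed
keys = jax.random.split(key, 8)
layer_sizes = [784] + [hidden_units] * 7 + [784]
net_parameters_alt = {
    f'w{i}': jax.random.normal(k, (m, n)) * eta
    for i, (k, m, n) in enumerate(zip(keys, layer_sizes[:-1], layer_sizes[1:]))
}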
In [7]:
def ReLU(x):
    return jnp.maximum(0, x)

def sigmoid(x):
    return 1 / (1 + jnp.exp(-x))

def forward(parameters, x):
    # eight fully connected layers: ReLU on the hidden layers,
    # sigmoid on the output so the pixels land in [0, 1]
    x = x @ parameters['w0']
    x = ReLU(x)
    x = x @ parameters['w1']
    x = ReLU(x)
    x = x @ parameters['w2']
    x = ReLU(x)
    x = x @ parameters['w3']
    x = ReLU(x)
    x = x @ parameters['w4']
    x = ReLU(x)
    x = x @ parameters['w5']
    x = ReLU(x)
    x = x @ parameters['w6']
    x = ReLU(x)
    x = x @ parameters['w7']
    return sigmoid(x)
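Since the hidden layers all share the same pattern, the forward pass could also be written as a loop over the sorted parameter keys. A sketch (forward_loop is a hypothetical name) that should behave the same as the explicit version above:

def forward_loop(parameters, x):
    keys = sorted(parameters)                  # 'w0' ... 'w7'
    for k in keys[:-1]:
        x = ReLU(x @ parameters[k])            # hidden layers
    return sigmoid(x @ parameters[keys[-1]])   # output layer in [0, 1]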
In [8]:
# the convex-combination constant: how much of the original image is kept at each step
alpha = 0.95
def noising_step(images):
    # mix the images with Gaussian noise
    noise = np.random.randn(*images.shape)
    return alpha * images + (1 - alpha) * noise
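Each application keeps a fraction alpha of its input, so after t steps roughly alpha**t of the original image survives; with the five steps used below that is about 0.95**5 ≈ 0.77. A small illustrative helper (noising_steps is not used elsewhere in the notebook) to apply the step repeatedly:

def noising_steps(images, t):
    # apply the convex-combination noising step t times
    for _ in range(t):
        images = noising_step(images)
    return images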
In [9]:
visualize_images(forward(net_parameters, noising_step(train_images[:10])))
In [10]:
def diffusion_loss(parameters, x, y):
    out = forward(parameters, x)
    # mean squared error loss
    return ((out - y) ** 2).mean()
diffusion_loss(net_parameters, noising_step(train_images[:100]), train_images[:100])
Out[10]:
Array(0.28075558, dtype=float32)
In [11]:
loss_grad_fn = jax.grad(diffusion_loss)
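The loss and its gradients could also be computed in one pass and compiled, which should make the training loop below noticeably faster; this is a sketch only, the rest of the notebook sticks with jax.grad:

loss_and_grad_fn = jax.jit(jax.value_and_grad(diffusion_loss))
# usage: loss_value, grad = loss_and_grad_fn(net_parameters, x, y)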
In [12]:
def generate_training_sample():
    y = np.array([train_images])
    x = np.array([noising_step(train_images)])
    for i in range(5):
        # progressively add noise; each target is the previous, less noisy, step
        y = np.append(y, [x[-1]], 0)
        x = np.append(x, [noising_step(x[-1])], 0)
    return x, y
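np.append copies the whole array on every call, so the sample could instead be built from Python lists and stacked once at the end. A sketch (generate_training_sample_stacked is a hypothetical name) that should produce arrays of the same shape, (6, 100, 784):

def generate_training_sample_stacked(steps=5):
    # collect each noise level in a list, then stack into one array
    xs, ys = [noising_step(train_images)], [train_images]
    for _ in range(steps):
        ys.append(xs[-1])
        xs.append(noising_step(xs[-1]))
    return np.stack(xs), np.stack(ys)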
In [13]:
x, y = generate_training_sample()
# the first image at each of the six noise levels, and its (less noisy) target
visualize_images(x[:10, :1, :])
visualize_images(y[:10, :1, :])
In [14]:
def train_loop(epochs=100, lr=0.01):
    for epoch in range(epochs):
        # get a training sample
        x, y = generate_training_sample()
        # calculate gradients
        grad = loss_grad_fn(net_parameters, x, y)
        # perform weight updates
        net_parameters['w0'] -= lr * grad['w0']
        net_parameters['w1'] -= lr * grad['w1']
        net_parameters['w2'] -= lr * grad['w2']
        net_parameters['w3'] -= lr * grad['w3']
        net_parameters['w4'] -= lr * grad['w4']
        net_parameters['w5'] -= lr * grad['w5']
        net_parameters['w6'] -= lr * grad['w6']
        net_parameters['w7'] -= lr * grad['w7']
        # print out the loss, and optionally show the model's output on a few images
        print(f"Epoch ({epoch + 1}) Training Loss {diffusion_loss(net_parameters, x, y)}")
        # if epoch % 25 == 0:
        #     visualize_images(forward(net_parameters, noising_step(train_images[:10])))
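The eight per-layer updates could also be written as one operation over the whole parameter tree with jax.tree_util.tree_map. A sketch (sgd_update is a hypothetical helper); unlike the in-place updates above it returns new arrays, so the result has to be written back:

def sgd_update(parameters, grads, lr):
    # one SGD step applied to every weight matrix in the dictionary
    return jax.tree_util.tree_map(lambda w, g: w - lr * g, parameters, grads)

# usage inside the loop: net_parameters.update(sgd_update(net_parameters, grad, lr))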
In [15]:
train_loop(epochs=100, lr=0.9)
Epoch (1) Training Loss 0.22670875489711761
Epoch (2) Training Loss 0.20996986329555511
Epoch (3) Training Loss 0.19593730568885803
...
Epoch (99) Training Loss 0.047317199409008026
Epoch (100) Training Loss 0.04704659804701805
In [16]:
train_loop(epochs=500, lr=0.15)
Epoch (1) Training Loss 0.046059686690568924
Epoch (2) Training Loss 0.045229721814394
Epoch (3) Training Loss 0.045414283871650696
...
Epoch (499) Training Loss 0.03527403622865677
Epoch (500) Training Loss 0.035124458372592926
In [17]:
x, _ = generate_training_sample()
for i in range(5):
    # show the clean image followed by the model's output at each of the six noise levels
    visualize_images(np.append([train_images[i]], forward(net_parameters, x[:, i, :]), 0))
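To put rough numbers on these reconstructions, the mean squared error against the clean images can be printed for each noise level; a small illustrative check, not something the notebook relies on:

for level in range(x.shape[0]):
    out = forward(net_parameters, x[level])
    print(f"noise level {level}: MSE vs clean images {float(((out - train_images) ** 2).mean()):.4f}")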
In [18]:
def generate_image():
    # start from a faint random vector and run the learned denoiser five times
    image = np.random.randn(784) * 0.01
    for i in range(5):
        image = forward(net_parameters, image)
    return image
visualize_images(generate_image())
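The same procedure works on a batch of random starting vectors, which should give several samples in one call; a sketch along the lines of generate_image above (generate_images is a hypothetical name):

def generate_images(n=10, steps=5):
    # run the learned denoiser repeatedly on a batch of random starting points
    images = np.random.randn(n, 784) * 0.01
    for _ in range(steps):
        images = forward(net_parameters, images)
    return images

# visualize_images(generate_images())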