[{"data":1,"prerenderedAt":1161},["ShallowReactive",2],{"i-kinnu:logo":3,"i-kinnu:origami-folding":8,"tile-technology-artificial-intelligence-neural-networks":12,"i-lucide:chevron-right":1156,"i-lucide:menu":1159},{"left":4,"top":4,"width":5,"height":5,"rotate":4,"vFlip":6,"hFlip":6,"body":7},0,27,false,"\u003Cg fill=\"none\">\u003Cpath d=\"M0.046875 1.05555C0.046875 1.03541 0.048197 1.01579 0.0507438 0.996728C0.0987149 0.438619 0.586845 0 1.18194 0H25.4398C26.451 0 26.9575 1.171 26.2424 1.85585L15.7301 11.9243L1.31574 0.903476C1.17475 0.79568 1.01137 0.761884 0.859586 0.784111L26.2936 25.1441C27.0086 25.829 26.5022 27 25.4909 27H1.18194C0.555061 27 0.046875 26.5133 0.046875 25.9129V1.05555Z\" fill=\"currentColor\"/>\u003C/g>",{"left":4,"top":4,"width":9,"height":10,"rotate":4,"vFlip":6,"hFlip":6,"body":11},1000,236,"\u003Cg fill=\"none\">\u003Cpath fill-rule=\"evenodd\" clip-rule=\"evenodd\"\n    d=\"M193.68 38.2238C195.994 38.2238 197.87 40.0989 197.87 42.412V231.812C197.87 234.125 195.994 236 193.68 236H4.19013C1.87603 236 2.02305e-07 234.125 0 231.812V42.412C-2.02305e-07 40.0989 1.87603 38.2238 4.19013 38.2238H193.68ZM111.76 89.0072C111.685 87.9474 110.572 87.2905 109.608 87.7376L96.8872 93.641C95.7786 94.1554 95.702 95.7016 96.7545 96.3225L101.579 99.167C94.7045 109.365 90.5733 122.892 90.5732 137.642C90.5733 154.323 95.8569 169.439 104.416 179.945C105.301 181.032 106.9 181.196 107.987 180.311C109.075 179.426 109.238 177.828 108.353 176.741C100.621 167.25 95.6522 153.305 95.6521 137.642C95.6522 123.661 99.6138 111.051 105.963 101.754L110.456 104.403C111.508 105.024 112.826 104.21 112.74 102.991L111.76 89.0072ZM9.63194 136.286C9.14864 136.286 8.75684 136.678 8.75684 137.161C8.7569 137.644 9.14868 138.035 9.63194 138.035H17.2161C17.6993 138.035 18.0912 137.644 18.0912 137.161C18.0912 136.678 17.6994 136.286 17.2161 136.286H9.63194ZM22.6813 136.286C22.198 136.286 21.8062 136.678 21.8062 137.161C21.8063 137.644 22.1981 138.035 22.6813 138.035H30.2655C30.7487 
138.035 31.1406 137.644 31.1406 137.161C31.1406 136.678 30.7488 136.286 30.2655 136.286H22.6813ZM35.7464 136.286C35.2631 136.286 34.8713 136.678 34.8713 137.161C34.8713 137.644 35.2631 138.035 35.7464 138.035H44.4973C44.9805 138.035 45.3724 137.644 45.3724 137.161C45.3724 136.678 44.9806 136.286 44.4973 136.286H35.7464ZM49.9977 136.286C49.5144 136.286 49.1226 136.678 49.1226 137.161C49.1226 137.644 49.5144 138.035 49.9977 138.035H57.5819C58.0651 138.035 58.4569 137.644 58.457 137.161C58.457 136.678 58.0651 136.286 57.5819 136.286H49.9977ZM63.0783 136.286C62.595 136.286 62.2032 136.678 62.2032 137.161C62.2033 137.644 62.5951 138.035 63.0783 138.035H70.6625C71.1457 138.035 71.5375 137.644 71.5376 137.161C71.5376 136.678 71.1457 136.286 70.6625 136.286H63.0783ZM76.1277 136.286C75.6444 136.286 75.2526 136.678 75.2526 137.161C75.2527 137.644 75.6445 138.035 76.1277 138.035H83.7119C84.1951 138.035 84.5869 137.644 84.587 137.161C84.587 136.678 84.1951 136.286 83.7119 136.286H76.1277ZM102.266 136.286C101.782 136.286 101.39 136.678 101.39 137.161C101.391 137.644 101.782 138.035 102.266 138.035H109.85C110.333 138.035 110.725 137.644 110.725 137.161C110.725 136.678 110.333 136.286 109.85 136.286H102.266ZM115.338 136.286C114.855 136.286 114.463 136.678 114.463 137.161C114.463 137.644 114.855 138.035 115.338 138.035H122.923C123.406 138.035 123.798 137.644 123.798 137.161C123.798 136.678 123.406 136.286 122.923 136.286H115.338ZM128.403 136.286C127.92 136.286 127.528 136.678 127.528 137.161C127.528 137.644 127.92 138.035 128.403 138.035H135.988C136.471 138.035 136.863 137.644 136.863 137.161C136.863 136.678 136.471 136.286 135.988 136.286H128.403ZM141.468 136.286C140.985 136.286 140.593 136.678 140.593 137.161C140.593 137.644 140.985 138.035 141.468 138.035H149.053C149.536 138.035 149.928 137.644 149.928 137.161C149.928 136.678 149.536 136.286 149.053 136.286H141.468ZM154.541 136.286C154.058 136.286 153.666 136.678 153.666 137.161C153.666 137.644 154.058 138.035 154.541 
138.035H162.125C162.609 138.035 163 137.644 163.001 137.161C163.001 136.678 162.609 136.286 162.125 136.286H154.541ZM167.614 136.286C167.131 136.286 166.739 136.678 166.739 137.161C166.739 137.644 167.131 138.035 167.614 138.035H175.198C175.681 138.035 176.073 137.644 176.073 137.161C176.073 136.678 175.681 136.286 175.198 136.286H167.614ZM180.671 136.286C180.188 136.286 179.796 136.678 179.796 137.161C179.796 137.644 180.188 138.035 180.671 138.035H188.255C188.739 138.035 189.13 137.644 189.131 137.161C189.131 136.678 188.739 136.286 188.255 136.286H180.671Z\"\n    fill=\"currentColor\" />\n  \u003Cpath fill-rule=\"evenodd\" clip-rule=\"evenodd\"\n    d=\"M444.85 38.2277C447.164 38.2277 449.04 40.1028 449.04 42.4159V132.928C449.04 135.241 447.164 137.116 444.85 137.116H255.36C253.046 137.116 251.17 135.241 251.17 132.928V42.4159C251.17 40.1028 253.046 38.2277 255.36 38.2277H444.85ZM361.96 125.388C361.618 125.046 361.064 125.046 360.722 125.388L354.534 131.572C354.192 131.914 354.192 132.468 354.534 132.81C354.876 133.151 355.43 133.151 355.772 132.81L361.96 126.624C362.301 126.283 362.301 125.73 361.96 125.388ZM371.047 116.311C370.705 115.969 370.15 115.969 369.809 116.311L364.446 121.671C364.104 122.012 364.104 122.567 364.446 122.908C364.788 123.249 365.342 123.25 365.684 122.908L371.047 117.548C371.388 117.207 371.388 116.652 371.047 116.311ZM380.124 107.246C379.782 106.904 379.227 106.904 378.885 107.246L373.523 112.606C373.181 112.948 373.181 113.502 373.523 113.844C373.864 114.185 374.419 114.185 374.761 113.844L380.124 108.483C380.465 108.142 380.465 107.587 380.124 107.246ZM385.736 65.8841C385.891 64.6727 384.622 63.7845 383.536 64.3434L371.069 70.7636C370.124 71.2504 369.96 72.5334 370.752 73.2424L381.2 82.5938C382.11 83.4081 383.561 82.8672 383.717 81.6557L384.393 76.3725C391.143 77.1933 398.567 80.7709 404.771 86.9711C411.124 93.3213 414.726 100.952 415.43 107.827C415.573 109.221 416.819 110.236 418.214 110.093C419.609 109.95 420.624 108.703 420.481 
107.309C419.644 99.1317 415.435 90.4514 408.362 83.3817C401.466 76.489 393.038 72.3185 385.038 71.338L385.736 65.8841ZM389.2 98.1733C388.859 97.8319 388.304 97.8318 387.962 98.1733L382.6 103.534C382.258 103.875 382.258 104.429 382.6 104.771C382.941 105.112 383.496 105.112 383.838 104.771L389.2 99.4108C389.542 99.0693 389.542 98.5149 389.2 98.1733ZM398.262 89.1047C397.92 88.7633 397.365 88.7632 397.024 89.1047L391.661 94.4649C391.319 94.8065 391.319 95.3608 391.661 95.7024C392.002 96.0436 392.557 96.0438 392.899 95.7024L398.262 90.3421C398.603 90.0007 398.603 89.4463 398.262 89.1047ZM416.431 70.9616C416.089 70.6202 415.534 70.6201 415.193 70.9616L409.83 76.3218C409.488 76.6634 409.488 77.2177 409.83 77.5592C410.172 77.9005 410.726 77.9007 411.068 77.5592L416.431 72.199C416.772 71.8575 416.772 71.3032 416.431 70.9616ZM425.508 61.891C425.166 61.5496 424.611 61.5495 424.27 61.891L418.907 67.2512C418.565 67.5928 418.565 68.1471 418.907 68.4887C419.249 68.8299 419.803 68.8301 420.145 68.4887L425.508 63.1284C425.849 62.787 425.849 62.2326 425.508 61.891ZM434.569 52.8146C434.227 52.4731 433.673 52.4731 433.331 52.8146L427.968 58.1748C427.626 58.5163 427.627 59.0706 427.968 59.4122C428.31 59.7534 428.864 59.7537 429.206 59.4122L434.569 54.052C434.91 53.7105 434.91 53.1562 434.569 52.8146ZM443.638 43.7479C443.296 43.4065 442.742 43.4064 442.4 43.7479L437.037 49.1081C436.695 49.4496 436.696 50.004 437.037 50.3455C437.379 50.6868 437.933 50.687 438.275 50.3455L443.638 44.9853C443.98 44.6438 443.979 44.0895 443.638 43.7479Z\"\n    fill=\"currentColor\" />\n  \u003Cpath fill-rule=\"evenodd\" clip-rule=\"evenodd\"\n    d=\"M684.066 38.2277C687.798 38.2281 689.667 42.7391 687.027 45.3773L596.473 135.889C595.687 136.675 594.621 137.116 593.51 137.116H506.335C504.021 137.116 502.145 135.241 502.145 132.928V42.4159C502.145 40.1028 504.021 38.2277 506.335 38.2277H684.066ZM514.603 124.566C514.261 124.224 513.707 124.224 513.365 124.566L507.178 130.751C506.836 131.093 506.836 131.646 
507.178 131.988C507.519 132.329 508.073 132.329 508.415 131.988L514.603 125.803C514.945 125.462 514.945 124.908 514.603 124.566ZM523.689 115.491C523.348 115.15 522.794 115.15 522.452 115.491L517.09 120.852C516.748 121.193 516.748 121.747 517.09 122.088C517.431 122.43 517.985 122.43 518.327 122.088L523.689 116.728C524.031 116.386 524.031 115.833 523.689 115.491ZM532.102 65.8295C530.707 65.6872 529.46 66.7017 529.318 68.0957C529.175 69.4896 530.189 70.7355 531.584 70.8787C538.463 71.5825 546.096 75.1826 552.45 81.5329C558.723 87.8037 562.312 95.3226 563.079 102.13L557.738 102.392C556.518 102.452 555.865 103.855 556.607 104.827L565.115 115.969C565.76 116.814 567.051 116.751 567.611 115.847L574.992 103.928C575.635 102.889 574.848 101.555 573.628 101.615L568.161 101.882C568.161 101.878 568.162 101.874 568.161 101.871C567.324 93.6931 563.114 85.0124 556.041 77.9425C548.968 70.873 540.283 66.6668 532.102 65.8295ZM532.766 106.421C532.425 106.079 531.871 106.079 531.529 106.421L526.166 111.781C525.825 112.123 525.825 112.676 526.166 113.018C526.508 113.359 527.062 113.359 527.403 113.018L532.766 107.657C533.108 107.316 533.108 106.762 532.766 106.421ZM541.843 97.3445C541.501 97.003 540.948 97.003 540.606 97.3445L535.243 102.705C534.901 103.046 534.902 103.6 535.243 103.941C535.585 104.283 536.139 104.283 536.48 103.941L541.843 98.5809C542.185 98.2393 542.185 97.686 541.843 97.3445ZM550.92 88.2778C550.578 87.9363 550.025 87.9363 549.683 88.2778L544.32 93.638C543.978 93.9796 543.978 94.5329 544.32 94.8745C544.662 95.2161 545.215 95.2161 545.557 94.8745L550.92 89.5142C551.262 89.1727 551.262 88.6193 550.92 88.2778ZM569.066 70.1405C568.724 69.799 568.17 69.7991 567.829 70.1405L562.466 75.5008C562.124 75.8423 562.124 76.3956 562.466 76.7372C562.808 77.0788 563.361 77.0788 563.703 76.7372L569.066 71.377C569.407 71.0354 569.407 70.4821 569.066 70.1405ZM578.143 61.0699C577.801 60.7284 577.247 60.7285 576.906 61.0699L571.543 66.4302C571.201 66.7717 571.201 67.3251 571.543 
67.6666C571.885 68.0082 572.438 68.0082 572.78 67.6666L578.143 62.3064C578.484 61.9648 578.484 61.4115 578.143 61.0699ZM587.219 51.9896C586.878 51.6481 586.324 51.6481 585.982 51.9896L580.62 57.3498C580.278 57.6914 580.278 58.2447 580.62 58.5863C580.961 58.9279 581.515 58.9279 581.857 58.5863L587.219 53.2261C587.561 52.8845 587.561 52.3312 587.219 51.9896ZM596.288 42.9249C595.947 42.5833 595.392 42.5833 595.05 42.9249L589.689 48.2851C589.347 48.6267 589.347 49.18 589.689 49.5216C590.03 49.863 590.584 49.8631 590.926 49.5216L596.288 44.1613C596.63 43.8198 596.63 43.2664 596.288 42.9249Z\"\n    fill=\"currentColor\" />\n  \u003Cpath fill-rule=\"evenodd\" clip-rule=\"evenodd\"\n    d=\"M850.814 38.2277C854.547 38.2281 856.416 42.739 853.777 45.3773L763.223 135.889C762.437 136.674 761.371 137.116 760.26 137.116H673.176C669.443 137.116 667.574 132.605 670.213 129.966L760.768 39.4544C761.554 38.6692 762.62 38.2277 763.731 38.2277H850.814ZM761.338 121.8C760.855 121.8 760.463 122.191 760.463 122.674V131.13H762.213V122.674C762.213 122.191 761.821 121.8 761.338 121.8ZM761.338 108.971C760.855 108.971 760.463 109.363 760.463 109.846V118.301H762.213V109.846C762.213 109.363 761.821 108.971 761.338 108.971ZM761.338 96.1402C760.855 96.1406 760.463 96.5321 760.463 97.0149V105.47H762.213V97.0149C762.213 96.532 761.821 96.1404 761.338 96.1402ZM782.263 71.887C781.043 71.951 780.395 73.3571 781.139 74.3257L784.474 78.6631C779.115 82.951 771.242 85.7443 762.35 85.7444C753.366 85.7442 745.421 82.8944 740.059 78.5305C738.972 77.6461 737.373 77.8099 736.488 78.8961C735.602 79.983 735.766 81.582 736.853 82.467C743.231 87.6574 752.348 90.8207 762.35 90.8209C772.209 90.8208 781.205 87.746 787.568 82.6884L790.833 86.9341C791.577 87.9025 793.103 87.6391 793.479 86.4767L797.791 73.138C798.118 72.127 797.33 71.1017 796.268 71.1566L782.263 71.887ZM761.338 70.4847C760.855 70.4851 760.463 70.8767 760.463 71.3594V79.8147H762.213V71.3594C762.213 70.8766 761.821 70.485 761.338 70.4847ZM761.338 
57.656C760.855 57.6564 760.463 58.048 760.463 58.5307V66.986H762.213V58.5307C762.213 58.0479 761.821 57.6563 761.338 57.656ZM761.338 44.8293C760.855 44.8297 760.463 45.2212 760.463 45.704V54.1592H762.213V45.704C762.213 45.2211 761.821 44.8295 761.338 44.8293Z\"\n    fill=\"currentColor\" />\n  \u003Cpath\n    d=\"M995.759 38.2277C999.53 38.228 1001.42 42.5171 998.752 45.0253L959.55 81.9005L905.796 41.5363C905.271 41.1418 904.662 41.0182 904.096 41.0994L997.485 130.319C1000.15 132.828 998.262 137.116 994.491 137.116H905.298C902.96 137.116 901.065 135.333 901.065 133.134V42.0941C901.065 42.0204 901.07 41.9483 901.079 41.8786C901.258 39.8345 903.079 38.2277 905.298 38.2277H995.759Z\"\n    fill=\"currentColor\" />\n  \u003Cpath\n    d=\"M505.873 0C506.657 4.57042e-05 507.307 0.195499 507.823 0.587023C508.338 0.969046 508.596 1.53802 508.596 2.29251C508.596 2.76034 508.467 3.19015 508.209 3.58162C507.951 3.96344 507.497 4.26401 506.848 4.48361V4.54114C507.65 4.67487 508.205 4.96191 508.51 5.4012C508.816 5.83087 508.969 6.31772 508.969 6.86193C508.969 7.74056 508.672 8.41851 508.08 8.89604C507.497 9.38304 506.733 9.62731 505.787 9.62738C504.861 9.62738 504.158 9.42172 503.68 9.0111C503.212 8.60054 502.935 8.08005 502.849 7.44993L503.881 7.10571L503.924 7.24028C504.035 7.54934 504.211 7.82925 504.454 8.07986C504.731 8.36635 505.166 8.50986 505.758 8.50989C506.465 8.50989 506.943 8.32772 507.191 7.9648C507.449 7.6019 507.579 7.20078 507.579 6.7615C507.579 6.2173 507.378 5.80683 506.977 5.52992C506.585 5.25295 505.93 5.10026 505.013 5.07161V4.15402C505.901 4.12537 506.489 3.92484 506.776 3.55237C507.062 3.18009 507.206 2.82242 507.206 2.47876C507.206 1.62801 506.752 1.17539 505.845 1.12237L505.658 1.11749C505.467 1.11752 505.242 1.14605 504.985 1.2033C504.736 1.25105 504.511 1.3274 504.31 1.43245L504.081 2.56457L503.05 2.44951L503.322 0.687461C503.666 0.49653 504.068 0.33454 504.526 0.200875C504.985 0.0671945 505.434 0 505.873 0Z\"\n    fill=\"currentColor\" />\n  
\u003Cpath\n    d=\"M905.727 2.30616L904.638 2.4066L904.466 1.26083H901.428V3.72497C901.533 3.71544 901.643 3.71034 901.757 3.71034H902.086C902.755 3.71034 903.386 3.78668 903.979 3.93949C904.58 4.09229 905.068 4.38363 905.44 4.8132C905.822 5.23335 906.014 5.84949 906.014 6.66106C906.014 7.64468 905.722 8.38068 905.14 8.86776C904.557 9.36434 903.783 9.6127 902.818 9.61275C901.91 9.61275 901.213 9.40711 900.725 8.99648C900.248 8.59544 899.96 8.08007 899.865 7.44993L900.911 7.10571C901.007 7.49723 901.203 7.8271 901.499 8.09449C901.795 8.37131 902.211 8.50985 902.746 8.50989C903.395 8.50989 903.869 8.33787 904.165 7.99405C904.461 7.65981 904.609 7.22507 904.609 6.69031C904.609 5.87861 904.337 5.3625 903.792 5.14279C903.248 4.91361 902.612 4.79958 901.886 4.79955C901.695 4.79955 901.489 4.80365 901.27 4.8132C901.059 4.82275 900.854 4.83701 900.653 4.85611L900.224 4.44071V0.143343H905.569L905.727 2.30616Z\"\n    fill=\"currentColor\" />\n  \u003Cpath fill-rule=\"evenodd\" clip-rule=\"evenodd\"\n    d=\"M765.49 6.04576H766.966L766.837 7.14862H765.49V9.48404H764.185V7.14862H759.857L759.713 6.04576L762.909 0.143343H765.49V6.04576ZM760.96 6.04576H764.185V1.26083H763.541L760.96 6.04576Z\"\n    fill=\"currentColor\" />\n  \u003Cpath d=\"M4.80573 6.47481H6.41154V7.60693H1.81068V6.47481H3.50235V1.27546H1.81068V0.143343H4.80573V6.47481Z\"\n    fill=\"currentColor\" />\n  \u003Cpath\n    d=\"M254.359 0C255.353 0 256.055 0.239186 256.466 0.716715C256.877 1.18447 257.083 1.68072 257.083 2.20573C257.083 2.85516 256.849 3.44346 256.38 3.96875C255.912 4.49397 255.348 4.96638 254.689 5.38657C254.039 5.79717 253.437 6.15968 252.883 6.47481H256.423L256.538 5.42948L257.599 5.51529L257.426 7.60693H251.407L251.292 6.58987C252.582 5.73032 253.638 4.98523 254.46 4.35489C255.281 3.71509 255.693 3.05632 255.693 2.37832C255.693 1.53787 255.166 1.11749 254.115 1.12237L254.115 1.11749C253.924 1.11754 253.695 1.14604 253.427 1.2033C253.16 1.25104 252.916 1.32238 252.697 1.41783L252.467 
2.47876L251.45 2.3637L251.707 0.60165C252.118 0.401088 252.563 0.253475 253.041 0.15797C253.519 0.0529708 253.958 1.99446e-05 254.359 0Z\"\n    fill=\"currentColor\" />\u003C/g>",{"tile":13,"orbsWithOnlyMarkdownPages":483},{"id":14,"data":15,"type":16,"maxContentLevel":19,"version":20,"orbs":21},"480d4d34-4f3e-4fa0-a251-f18a09851856",{"type":16,"title":17,"tagline":18},9,"Neural Networks","The great leap forward",3,1,[22,134,215,298,397],{"id":23,"data":24,"type":25,"version":20,"maxContentLevel":19,"summaryPage":27,"introPage":34,"pages":41},"c265f697-d4d5-4225-8e42-8527920369c1",{"type":25,"title":26},2,"What are neural networks?",{"id":28,"data":29,"type":19,"maxContentLevel":19,"version":20},"d9762c17-1cc0-4be4-971b-d5486f2613f3",{"type":19,"summary":30},[31,32,33],"Neural networks are webs of nodes connected with great complexity","Early neural networks were limited in scope, as they only had one layer of nodes","Nowadays, neural networks have multiple layers, which allow them to perform powerful processes",{"id":35,"data":36,"type":37,"maxContentLevel":19,"version":20},"e9a98102-1aa0-4ef2-9b7c-75e5d7a6bffd",{"type":37,"intro":38},10,[39,40],"What sparked the idea for neural networks back in the 1940s?","Why are modern neural networks more powerful than the models back then?",[42,69,112],{"id":43,"data":44,"type":20,"maxContentLevel":19,"version":20,"reviews":47},"4b191e9c-5ed3-411f-9965-19693ba985bc",{"type":20,"contentRole":25,"markdownContent":45,"audioMediaId":46},"Earlier, we touched upon **neural networks**, and mentioned how this key innovation was a major factor in the rise of modern AI. Now, it's time to look at this technology in more detail.\n\nIt's based on an idea that first cropped up in the 1940s – that's around the same time that Alan Turing was active. 
It was put forward by Warren McCulloch – a professor of psychiatry – and Walter Pitts – a student mathematician.\n\nTheir idea was this: neurons in the brain could basically be viewed as binary gates, just like the ones in a computer. By extension, if you built a man-made network of binary gates, connected together with great complexity, it would potentially be able to perform the same processes as a brain.","59ee65d6-4bd9-4f6c-a774-18853d3df3c2",[48],{"id":49,"data":50,"type":51,"version":20,"maxContentLevel":19},"60c54579-7ff0-4106-8349-2b965cc0f663",{"type":51,"reviewType":19,"spacingBehaviour":20,"collapsingSiblings":52,"multiChoiceQuestion":56,"multiChoiceCorrect":58,"multiChoiceIncorrect":60,"multiChoiceMultiSelect":6,"multiChoiceRevealAnswerOption":6,"matchPairsQuestion":64,"matchPairsPairs":66},11,[53,54,55],"ada192e0-586e-485b-85f7-f1aa3896e776","ebe7001b-03fa-4e1c-a146-185849d6a729","e97e3df6-3f86-4e78-b7e3-2c0cf09033b4",[57],"In the 1940s, who proposed the idea that neurons were similar to binary gates?",[59],"McCulloch & Pitts",[61,62,63],"Frank Rosenblatt","Alan Turing","Babbage & Lovelace",[65],"Match the pairs below:",[67],{"left":59,"right":68,"direction":19},"Proposed theory of neural networks",{"id":70,"data":71,"type":20,"maxContentLevel":19,"version":20,"reviews":74},"1ae0ba51-b4e8-4472-a0b5-7680988374a4",{"type":20,"contentRole":25,"markdownContent":72,"audioMediaId":73},"About a decade later, in 1957, an American psychologist called Frank Rosenblatt managed to put the ideas put forward by McCulloch and Pitts into practice.\n\nHe constructed a network of node-like neurons, which he referred to as the **Mark I Perceptron**. Incredibly, this network used photocells to 'look' at images, and recognize objects within them.\n\n![Graph](image://0a17b588-5b3c-447d-a796-9dd1d80845aa \"Frank Rosenblatt and the Mark I Perceptron. 
(Public domain), via Wikimedia Commons\")\n\nThe Mark I Perceptron was only one layer thick – imagine a 2D net of nodes, as opposed to the 3D web of a real human brain. This limited the number of connections between nodes, which in turn limited the model's potential for human-like cognitive processes.\n\n![Graph](image://0646ad9b-a104-48ed-843a-d42290992929 \"Simplified diagram of a single layer network.\")\n\nBut nowadays, thanks to hundreds of innovations, we've found ways to build multilayer networks. They're still a long way away from the complex connections of a human brain. But they have enough connections to perform some pretty powerful processes.","74df243e-15cc-4eef-a9f9-a23f97872bb0",[75,94,105],{"id":76,"data":77,"type":51,"version":20,"maxContentLevel":19},"dae72ea9-487b-4523-84d4-fe9d3e7511be",{"type":51,"reviewType":19,"spacingBehaviour":20,"collapsingSiblings":78,"multiChoiceQuestion":82,"multiChoiceCorrect":84,"multiChoiceIncorrect":86,"multiChoiceMultiSelect":6,"multiChoiceRevealAnswerOption":6,"matchPairsQuestion":90,"matchPairsPairs":91},[79,80,81],"42661e37-7495-436b-a519-6b976d7ba479","37029b16-f9d4-43c1-bac7-30c9382ce308","14a4785e-81f2-43b7-b271-ba31052cd5c3",[83],"Which of these is generally thought of as the world's first neural network?",[85],"Mark I Perceptron",[87,88,89],"Logic Theorist","Eliza","AlphaGo",[65],[92],{"left":85,"right":93,"direction":19},"World's first neural network",{"id":53,"data":95,"type":51,"version":20,"maxContentLevel":19},{"type":51,"reviewType":19,"spacingBehaviour":20,"collapsingSiblings":96,"multiChoiceQuestion":97,"multiChoiceCorrect":99,"multiChoiceIncorrect":100,"multiChoiceMultiSelect":6,"multiChoiceRevealAnswerOption":6,"matchPairsQuestion":101,"matchPairsPairs":102},[54,55,49],[98],"Who constructed the Mark I Perceptron?",[61],[59,62,63],[65],[103],{"left":61,"right":104,"direction":19},"Constructed the first neural 
network",{"id":106,"data":107,"type":51,"version":20,"maxContentLevel":19},"57b9357b-c1af-4a34-9fb3-46b46788e5d5",{"type":51,"reviewType":20,"spacingBehaviour":20,"activeRecallQuestion":108,"activeRecallAnswers":110},[109],"What feature prevented the Mark I Perceptron from performing complex processes?",[111],"It was only one layer thick",{"id":113,"data":114,"type":20,"maxContentLevel":19,"version":20,"reviews":117},"fb58016b-6e99-43dc-bb69-5df400856280",{"type":20,"contentRole":25,"markdownContent":115,"audioMediaId":116},"It's worth pointing out that a neural network isn't usually a physical object. These artificial neurons aren't physical nodes linked together in a physical web.\n\nInstead, it's a **computational model**: a set of digital nodes in a digital web. Just think of it like a piece of software. You can even download some neural networks, and install them on your personal computer.\n\nPhysical neural networks (PNNs) are occasionally used as well. But as you can probably imagine, they're much more fiddly to build than their digital counterparts, and harder to run at the equivalent level of complexity.","7043c54c-2551-45f1-8b71-61fb84772821",[118,127],{"id":119,"data":120,"type":51,"version":20,"maxContentLevel":19},"acfbc9a7-36a2-4433-a4c2-a79723550582",{"type":51,"reviewType":25,"spacingBehaviour":20,"binaryQuestion":121,"binaryCorrect":123,"binaryIncorrect":125},[122],"Which of these would best describe a typical neural network?",[124],"Computational model",[126],"Physical model",{"id":128,"data":129,"type":51,"version":20,"maxContentLevel":19},"781a3a13-4c35-4a33-8df0-97e73ee07a44",{"type":51,"reviewType":20,"spacingBehaviour":20,"activeRecallQuestion":130,"activeRecallAnswers":132},[131],"Why are physical neural networks (PNNs) less common than digital versions?",[133],"They are more difficult and fiddly to 
build",{"id":135,"data":136,"type":25,"version":20,"maxContentLevel":19,"summaryPage":138,"introPage":145,"pages":151},"67b859fc-bef1-44ca-a76b-99f3d52504c1",{"type":25,"title":137},"Layers",{"id":139,"data":140,"type":19,"maxContentLevel":19,"version":20},"60fad00b-7bc5-4f3d-93e8-93e55c99e95c",{"type":19,"summary":141},[142,143,144],"The input layer is where a neural network receives data","The hidden layers are where the network 'thinks', as data bounces through nodes","The output layer is where the AI produces a final output",{"id":146,"data":147,"type":37,"maxContentLevel":19,"version":20},"7e5ed9c6-d2fc-4749-a8d8-8160af69a03a",{"type":37,"intro":148},[149,150],"What are the three types of layer in a neural network?","How many layers would you expect to find in a typical neural network?",[152,157,170],{"id":153,"data":154,"type":20,"maxContentLevel":19,"version":20},"9058f82e-9a4a-4fc1-928d-3a57a2019b1a",{"type":20,"contentRole":25,"markdownContent":155,"audioMediaId":156},"The layers in a modern neural network are usually arranged like this. You have an **input layer**, one or more **hidden layers**, and an **output layer**.\n\n![Graph](image://3d64ee32-129a-4527-b836-00a9537e9384 \"Simplified diagram of a neural network.\")\n\nWhen you ask an AI to do something, you're interacting with the input layer. For example, you might show it a photo of an animal, and ask it \"is this a cat or a dog?\"\n\nThe input layer will send that data down into the hidden layers. As this data bounces through the web of nodes, the network is effectively 'thinking'. Assuming this model was designed to identify cats from dogs, it will try to work out what kind of animal is present in your photograph.\n\n![Graph](image://7d0a49ab-0dcb-47bd-8d67-61b17b15241f \"Cat or dog?\")\n\nEventually, the data hits the output layer. 
\"It's a cat,\" the AI announces.","ae4be60b-704e-4c27-b09b-2a83edfd3189",{"id":158,"data":159,"type":20,"maxContentLevel":19,"version":20,"reviews":162},"b5bdfc87-d49c-43d2-a26b-899eb8c2b4f4",{"type":20,"contentRole":25,"markdownContent":160,"audioMediaId":161},"Interestingly, while each hidden layer might have hundreds of nodes, an output layer could have as few as two or three.\n\nFor example, in that example model we talked about, which tells the difference between cats and dogs, there are only three possible outputs: \"it's a cat\", \"it's a dog\", or \"it's neither\". All that 'thinking' in the hidden layers is just filtering to one of those options.\n\nDepending on the nature of the input, the network will take a different path through the hidden layers. If you fed it a photo of a greyhound, for example, it would 'think' about that photo in a different way than it might think about a photo of a chihuahua.\n\nBut both paths would still lead to the same output node. The AI would announce: \"It's a dog\".\n\n![Graph](image://272f283f-fcc1-4073-af53-5b0ad58aecdf \"21.png\")","9843b798-c56a-4aff-a262-a6b7950ff53c",[163],{"id":164,"data":165,"type":51,"version":20,"maxContentLevel":19},"a6a1b1a5-07d5-47ab-b5fb-742fb929f1c3",{"type":51,"reviewType":20,"spacingBehaviour":20,"activeRecallQuestion":166,"activeRecallAnswers":168},[167],"Imagine a neural network which determines whether statements are true or false. How many nodes might it have in its output layer?",[169],"Two – one for true and one for false",{"id":171,"data":172,"type":20,"maxContentLevel":19,"version":20,"reviews":175},"b4073b2a-73a3-4b31-ad16-386ef06e5a4d",{"type":20,"contentRole":25,"markdownContent":173,"audioMediaId":174},"That cat/dog model is just a simple example. Another neural network might have hundreds of nodes in the output layer. It depends how many possible outputs the model needs to produce.\n\nIt's the same with the number of hidden layers. 
A simple neural network might only have one, but a more complex model might have hundreds. As a general rule, more hidden layers mean more possible paths through the web of nodes, and more powerful decision-making processes.\n\nThis principle is what brought us some of the world's most famous AI models, like AlphaGo and ChatGPT. Supposedly, the latest version of ChatGPT (GPT-4) uses a neural network with 120 hidden layers, and an enormous number of nodes.","89713acb-cd95-432b-bc33-0c81787dbac6",[176,187,206],{"id":177,"data":178,"type":51,"version":20,"maxContentLevel":19},"1506ad91-2c03-496e-be6b-28ca95cf2272",{"type":51,"reviewType":19,"spacingBehaviour":20,"multiChoiceQuestion":179,"multiChoiceCorrect":181,"multiChoiceIncorrect":183,"multiChoiceMultiSelect":6,"multiChoiceRevealAnswerOption":6},[180],"Supposedly, how many hidden layers are used by ChatGPT?",[182],"120",[184,185,186],"80","40","160",{"id":188,"data":189,"type":51,"version":20,"maxContentLevel":19},"dd6b4791-55a7-4e46-b7f8-89774b934fd0",{"type":51,"reviewType":190,"spacingBehaviour":20,"matchPairsQuestion":191,"matchPairsPairs":193,"matchPairsShowExamples":6},6,[192],"What are the three main layers in a neural network?",[194,197,200,203],{"left":195,"right":196,"direction":19},"Input Layer","Receives some data",{"left":198,"right":199,"direction":19},"Hidden Layer","'Thinks' about data",{"left":201,"right":202,"direction":19},"Output layer","Produces an output",{"left":204,"right":205,"direction":19},"Logic layer","Not a real type of layer",{"id":207,"data":208,"type":51,"version":20,"maxContentLevel":19},"ebee58e8-cb50-4f29-982e-677325bf28ec",{"type":51,"reviewType":25,"spacingBehaviour":20,"binaryQuestion":209,"binaryCorrect":211,"binaryIncorrect":213},[210],"Which of these is a general rule for neural networks?",[212],"More hidden layers means more powerful processes",[214],"More hidden layers means less powerful 
processes",{"id":216,"data":217,"type":25,"version":20,"maxContentLevel":19,"summaryPage":219,"introPage":226,"pages":232},"223addfd-ae2f-4a3f-88e3-b9d40c3a1978",{"type":25,"title":218},"Parameters",{"id":220,"data":221,"type":19,"maxContentLevel":19,"version":20},"f9158a92-84b9-45cd-a91a-2ff8dd79cde4",{"type":19,"summary":222},[223,224,225],"The path a neural network takes through its nodes determines the final output","Weights are attached to connections, and are used to help the model choose a path","Biases are attached to nodes, and give it an extra nudge in one direction or another",{"id":227,"data":228,"type":37,"maxContentLevel":19,"version":20},"f370c1d4-5a94-4aff-b7e3-86501a590e9c",{"type":37,"intro":229},[230,231],"How does an AI decide which path to take through a neural network?","What do scientists mean when they talk about weights and biases?",[233,258,273],{"id":234,"data":235,"type":20,"maxContentLevel":19,"version":20,"reviews":238},"e426e3f9-c80e-4ea6-9a27-80c0419ceafb",{"type":20,"contentRole":25,"markdownContent":236,"audioMediaId":237},"So, a neural network is a series of layers. These layers are made of interconnected nodes.\n\nAnd here's an important thing to add: every connection between two different nodes has a numerical parameter attached to it. This numerical parameter is what scientists call a **weight**.\n\nAs the AI works its way through the hidden layers, following connections from node to node, these different weights will help it decide which node to jump to next. 
It’s more likely to choose a connection with more weight – that’s how it’s programmed to behave.\n\n![Graph](image://2b6f0b55-25e3-4948-900d-e652eae33fbe \"Simple diagram of weights.\")","49100b62-6d3b-484f-bc7b-34d16a13360a",[239],{"id":240,"data":241,"type":51,"version":20,"maxContentLevel":19},"177b2812-9888-4dbd-aaa8-3b730288d77b",{"type":51,"reviewType":19,"spacingBehaviour":20,"collapsingSiblings":242,"multiChoiceQuestion":246,"multiChoiceCorrect":248,"multiChoiceIncorrect":250,"multiChoiceMultiSelect":6,"multiChoiceRevealAnswerOption":6,"matchPairsQuestion":254,"matchPairsPairs":255},[243,244,245],"a748a768-3049-4ef0-b5d3-3e7f6da73489","7d24a430-b17f-4bbd-acd5-9499fbb4731f","3bac6e62-3c67-4f61-93bb-6a8c1232651b",[247],"In a neural network, every connection between nodes has a parameter attached. What are these parameters called?",[249],"Weights",[251,252,253],"Biases","Tokens","Nudges",[65],[256],{"left":249,"right":257,"direction":19},"Parameters attached to connections",{"id":259,"data":260,"type":20,"maxContentLevel":19,"version":20,"reviews":263},"eb177b1b-7e21-4627-ab51-717b62b251ae",{"type":20,"contentRole":25,"markdownContent":261,"audioMediaId":262},"You can think of the connections in a neural network like a tangled forest. When the network has to 'think', it's like following a path through that forest.\n\nThis path has lots of different branches. Some of them are narrow and overgrown, while others are wide and open. If you were walking, you'd probably take the open branch, just as an AI is more likely to choose a connection with more weight.\n\nThis process is essentially how a neural network makes decisions. 
Whichever path it takes through the web of nodes will result in a different output.","095e21d0-fa39-4406-a1c8-0ac11c06cdfa",[264],{"id":265,"data":266,"type":51,"version":20,"maxContentLevel":19},"57320799-5a04-4137-afd1-dc2677160ad7",{"type":51,"reviewType":25,"spacingBehaviour":20,"binaryQuestion":267,"binaryCorrect":269,"binaryIncorrect":271},[268],"If a neural network had the choice of following one of two connections, which one would you expect it to take?",[270],"Connection with weight of 0.8",[272],"Connection with weight of 0.2",{"id":274,"data":275,"type":20,"maxContentLevel":19,"version":20,"reviews":278},"50651bcc-50dc-4b30-99c5-1d6f82ddaf98",{"type":20,"contentRole":25,"markdownContent":276,"audioMediaId":277},"Weights aren't the only type of parameter that you'll find in a neural network. The other main one is something called a **bias**.\n\nUnlike weights, which are attached to the connections between nodes, a bias is attached to the nodes themselves. They're basically there to give the network an extra little nudge in one direction or another.\n\n![Graph](image://eedcb2b2-45ba-4640-ba52-87394f1de5aa \"Simple diagram of weights and biases.\")\n\nSay you had two possible connections, each with a weight of 1. The network might struggle to decide which connection to follow. But the bias nudges it down the second connection. To continue with that forest analogy, it's like a little signpost: \"if in doubt, go here.\"\n\nBiases can also be negative. 
\"If in doubt, do *not* go here.\"","952e8c30-71c4-4973-9379-e7cc64f87e69",[279,290],{"id":243,"data":280,"type":51,"version":20,"maxContentLevel":19},{"type":51,"reviewType":19,"spacingBehaviour":20,"collapsingSiblings":281,"multiChoiceQuestion":282,"multiChoiceCorrect":284,"multiChoiceIncorrect":285,"multiChoiceMultiSelect":6,"multiChoiceRevealAnswerOption":6,"matchPairsQuestion":286,"matchPairsPairs":287},[240,244,245],[283],"In a neural network, what do we call the parameters which give decision-making an extra nudge?",[251],[249,252,253],[65],[288],{"left":251,"right":289,"direction":19},"Parameters attached to nodes",{"id":291,"data":292,"type":51,"version":20,"maxContentLevel":19},"f746d9ae-eb41-41e6-811e-7adc757970ff",{"type":51,"reviewType":20,"spacingBehaviour":20,"activeRecallQuestion":293,"activeRecallAnswers":295},[294],"In neural networks, what are weights and biases attached to?",[296,297],"Weights are attached to connections","Biases are attached to nodes",{"id":299,"data":300,"type":25,"version":20,"maxContentLevel":19,"summaryPage":302,"introPage":309,"pages":315},"0fed2916-3258-47e8-bf4e-d86f9f182599",{"type":25,"title":301},"Deep learning",{"id":303,"data":304,"type":19,"maxContentLevel":19,"version":20},"a3fd8b48-248b-4647-9393-56abb36eed73",{"type":19,"summary":305},[306,307,308],"Deep learning is the official name for networks that learn through many hidden layers","Neural networks use backpropagation to learn from mistakes and adjust their parameters","Deep learning models are powerful, and capable of extremely complex learning",{"id":310,"data":311,"type":37,"maxContentLevel":19,"version":20},"3fde3079-29a7-4db0-b3ef-494967f54a86",{"type":37,"intro":312},[313,314],"Why are neural networks so good at learning?","What do scientists mean when they talk about 
backpropagation?",[316,329,354,369],{"id":317,"data":318,"type":20,"maxContentLevel":19,"version":20,"reviews":321},"95fdb0a8-d51e-4b2d-9bd3-3ce4e554c136",{"type":20,"contentRole":25,"markdownContent":319,"audioMediaId":320},"Now, you might remember what we said at the start. Neural networks were the driving force behind the modern AI spring. But why are these models so important?\n\nAs it happens, these webs of nodes are *extremely* good at learning.\n\nThis learning is most effective when a neural network has lots of hidden layers. **Deep learning** is the official name for it. 'Deep' because of all those layers.\n\nRemember: when we say that a machine is 'learning', we really just mean that numerical parameters are changing. And that's exactly what happens with a neural network: the model is able to adjust its weights and biases.","41db8541-055b-417c-b146-1959bdf80462",[322],{"id":323,"data":324,"type":51,"version":20,"maxContentLevel":19},"428b1aeb-2c3e-4623-8bf2-551861cab785",{"type":51,"reviewType":20,"spacingBehaviour":20,"activeRecallQuestion":325,"activeRecallAnswers":327},[326],"What do we call the type of learning that happens in a neural network with lots of layers?",[328],"Deep Learning",{"id":330,"data":331,"type":20,"maxContentLevel":19,"version":20,"reviews":334},"85cf432d-8fb4-4f6c-b29a-33937f777429",{"type":20,"contentRole":25,"markdownContent":332,"audioMediaId":333},"Once a neural network has performed a task, it can check the loss function afterwards. For example, if it was solving a complex math problem, how close did it get to the right answer?\n\nAfter checking the loss function, the neural network uses a technique called **backpropagation**. This is a special algorithm which travels back up the path that the AI just took through all those layers of nodes.\n\nAlong the way, it adjusts the weights and biases according to the size of the loss function. 
“Actually, this was a bad path to take – let's lower the weight on this one, and this one, and bump up the bias right here.\"","4608a584-2ff8-4bf9-9ba3-a56d93c08597",[335],{"id":336,"data":337,"type":51,"version":20,"maxContentLevel":19},"40b646f9-87d1-42a7-affa-48dfb7102234",{"type":51,"reviewType":19,"spacingBehaviour":20,"collapsingSiblings":338,"multiChoiceQuestion":342,"multiChoiceCorrect":344,"multiChoiceIncorrect":346,"multiChoiceMultiSelect":6,"multiChoiceRevealAnswerOption":6,"matchPairsQuestion":350,"matchPairsPairs":351},[339,340,341],"ea05be6e-cdeb-44f6-9f31-8aec11f44810","2e5c3c78-0e9c-4d53-996b-4757282d0c67","4fb75b36-77c9-4875-890f-79bad100dc81",[343],"What type of algorithm do neural networks use to adjust their parameters?",[345],"Backpropagation",[347,348,349],"Forward propagation","Backgradiation","Forward gradiation",[65],[352],{"left":345,"right":353,"direction":19},"Algorithm which adjusts parameters",{"id":355,"data":356,"type":20,"maxContentLevel":19,"version":20,"reviews":359},"6c68832f-5c01-4cfa-a477-dd2bc02bbd35",{"type":20,"contentRole":25,"markdownContent":357,"audioMediaId":358},"With plenty of time, and thousands of iterations, a neural network can finetune its parameters to the point that it starts reliably following the most effective path.\n\nThat's not always the *same* path. Different inputs will require different paths. That's what the model is ultimately learning – for every single input it could possibly receive, it needs to know exactly which path to take in order to produce the best and most appropriate output.\n\nImagine, for example, that you wanted your AI to tell the difference between types of fish. You input thousands of photos of different fish, and it learns the best path for each of them. If it sees a fish with *these* markings, it should take *this* path. 
If it sees a fish with *these* fin-shapes, it should take *this* path.\n\nEventually, it will choose the right path for every input you throw at it.\n\n![Graph](image://1515bf34-c139-4ad0-adb6-4d2d8f17eac8 \"Can you name the fish?\")","7bd7fe07-fc1f-4d22-8d44-f53d53f95342",[360],{"id":361,"data":362,"type":51,"version":20,"maxContentLevel":19},"117269a9-0724-4393-a0cb-1237b53fd9a0",{"type":51,"reviewType":25,"spacingBehaviour":20,"binaryQuestion":363,"binaryCorrect":365,"binaryIncorrect":367},[364],"In which direction does backpropagation move through a neural network?",[366],"From output layer to input layer",[368],"From input layer to output layer",{"id":370,"data":371,"type":20,"maxContentLevel":19,"version":20,"reviews":374},"d2fdba64-d08c-4703-b4e9-1b0cd2387260",{"type":20,"contentRole":25,"markdownContent":372,"audioMediaId":373},"Just to be clear: neural networks aren't the only type of AI model which is capable of machine learning. But as things stand, they have a couple of advantages over a lot of other approaches.\n\nFirst of all, these networks are extremely versatile. You can train them to analyze data for you. You can also train them to play games, or control self-driving vehicles. You can train them to speak, or recognize images. The list goes on and on.\n\nThey're also extremely powerful, especially deep learning models. More layers mean more nodes, and more weights and biases. In other words, more detailed and complex ways for the AI to learn to behave.\n\nAccording to some numbers leaked in 2023, ChatGPT uses a neural network with more than a trillion different parameters. 
Just imagine how many paths you could take through such a complex neural network.","743b1634-3bb8-4d8b-a817-275320f7b99b",[375,382],{"id":376,"data":377,"type":51,"version":20,"maxContentLevel":19},"8f19c49e-1687-4c07-b6ff-f25cb1e31e2e",{"type":51,"reviewType":20,"spacingBehaviour":20,"activeRecallQuestion":378,"activeRecallAnswers":380},[379],"According to some numbers leaked in 2023, roughly how many parameters does ChatGPT's neural network have?",[381],"More than a trillion",{"id":383,"data":384,"type":51,"version":20,"maxContentLevel":19},"01dd16d5-43fe-487d-b8a1-b4c711a59fc6",{"type":51,"reviewType":385,"spacingBehaviour":20,"orderAxisType":190,"orderQuestion":386,"orderItems":388},8,[387],"Put these stages of deep learning in order:",[389,391,393,395],{"label":390,"sortOrder":4},"The neural network performs a task",{"label":392,"sortOrder":20},"The neural network checks the loss function",{"label":394,"sortOrder":25},"Backpropagation travels back through the network",{"label":396,"sortOrder":19},"The network's parameters are adjusted",{"id":398,"data":399,"type":25,"version":20,"maxContentLevel":19,"summaryPage":401,"introPage":409,"pages":415},"aa0cd939-6a46-4208-89f0-56282efae907",{"type":25,"title":400},"Types of neural network",{"id":402,"data":403,"type":19,"maxContentLevel":19,"version":20},"fb0a6beb-848f-45ed-92d0-14cbfa66e785",{"type":19,"summary":404},[405,406,407,408],"Physical neural networks use networks of physical nodes and connections","Recurrent neural networks use a loop-back function to remember parts of a sentence","Convolutional neural networks are amazing at analyzing images","Generative adversarial networks take a pair of networks then task them to compete",{"id":410,"data":411,"type":37,"maxContentLevel":19,"version":20},"adf918ec-a2e6-47a4-9310-f06a1d226581",{"type":37,"intro":412},[413,414],"What are some of the main types of neural networks?","When would each of these different types be 
used?",[416,431,446,451],{"id":417,"data":418,"type":20,"maxContentLevel":19,"version":20,"reviews":421},"f4d08f0d-9523-4b70-9cdc-88598f72caa4",{"type":20,"contentRole":25,"markdownContent":419,"audioMediaId":420},"It's worth pointing out that there are a few different types of neural network.\n\nEarlier we mentioned **physical neural networks (PNNs)**. These are pieces of hardware, which use networks of physical nodes and connections, rather than the digital versions which are much more commonly used.\n\nAnother example is a **recurrent neural network (RNN)**. This one is actually quite simple. In a classic multilayer neural network, data is passed from hidden layer to hidden layer in one direction. Let's say from layer 1, to layer 2, to layer 3.\n\nBut in a recurrent neural network, the data will also loop back to previous layers. Effectively, this gives the network a memory – each loop reminds the previous layers what kind of data has already come through.","9657d40f-4e47-4c11-8541-f5a5f6019010",[422],{"id":423,"data":424,"type":51,"version":20,"maxContentLevel":19},"323d1ac8-d4ea-4a07-a879-44f1ea6177de",{"type":51,"reviewType":25,"spacingBehaviour":20,"binaryQuestion":425,"binaryCorrect":427,"binaryIncorrect":429},[426],"What is a key characteristic of a recurrent neural network (RNN)?",[428],"Data loops back to previous layers",[430],"Data jumps ahead to future layers",{"id":432,"data":433,"type":20,"maxContentLevel":19,"version":20,"reviews":436},"b8dffd2e-d83d-4012-8d93-c91b841c523f",{"type":20,"contentRole":25,"markdownContent":434,"audioMediaId":435},"The loop-back function of an RNN is useful in loads of contexts.\n\nImagine, for example, that you want an AI to finish this sentence: \"The color of the sky is \\[something\\].\"\n\nIf it only remembers the final word (\"is\"), it might output something random like \"yellow\" or \"tasty\", which logically follows \"is\", but doesn't make sense in the context of the sentence as a whole.\n\nIf each word is 
looped back though, and 'remembered' by the network, it's more likely to give an answer that fits the context of the sentence as a whole: \"The color of the sky is blue.\"\n\n![Graph](image://ac4575bd-0ca3-4da8-a632-57a684a3de9a \"Blue sky. Image via Pexels\")","e1a905c8-bb7b-4320-84ad-2e5b354e7abf",[437],{"id":438,"data":439,"type":51,"version":20,"maxContentLevel":19},"ad5fabc9-0429-4879-9998-09022b4e03fd",{"type":51,"reviewType":25,"spacingBehaviour":20,"binaryQuestion":440,"binaryCorrect":442,"binaryIncorrect":444},[441],"What is the main benefit of the loop-back function used by an RNN?",[443],"It lets the network 'remember' what kind of data has already come through",[445],"It lets the network 'think' about data more quickly and efficiently",{"id":447,"data":448,"type":20,"maxContentLevel":19,"version":20},"7cde803a-78ca-491f-b129-054848e62ecc",{"type":20,"contentRole":25,"markdownContent":449,"audioMediaId":450},"Here's another example. This time, imagine some satellite images of a hurricane out at sea. You want your AI to calculate whether the hurricane will hit any landmasses.\n\nIf you gave the AI just one image of that hurricane, in its current position, it would be hard to predict the trajectory.\n\n![Graph](image://9742350c-46d6-48e7-92e3-44871a4cc0e1 \"One image of a hurricane. 
August 28 2005 NASA (Public domain), via Wikimedia Commons\")\n\nBut if you gave it ten images, showing the hurricane's progress from initial position to current position – and your AI could 'remember' these positions, in order – it would do a much better job predicting where the hurricane will go.\n\nAny time you're working with a sequence of data – be it words, or images, or something else – a recurrent approach is often more effective than a classic neural network.","af967161-0a71-4375-8bc2-3750aa533f0a",{"id":452,"data":453,"type":20,"maxContentLevel":19,"version":20,"reviews":456},"fa9c1697-3924-461c-8fb5-46863bc70c34",{"type":20,"contentRole":25,"markdownContent":454,"audioMediaId":455},"Throughout the rest of this pathway, we'll also encounter a few other types of neural network.\n\nAnother important one is a **convolutional neural network (CNN)**. We'll take a proper look at CNNs when we get to our tile on Computer Vision, but for now, the main thing you need to know is that they're amazing at analyzing images.\n\nOne more type is a **generative adversarial network (GAN)**. We'll be learning more about this one in our tile on generative AI – but it essentially works by taking a pair of neural networks, and tasking them to compete against one another.\n\nThere are plenty of other examples. 
Part of the reason why neural networks are so popular is the fact they can be used in so many different ways.","023960d9-e6c7-4ceb-b121-40e889e808b1",[457,466],{"id":458,"data":459,"type":51,"version":20,"maxContentLevel":19},"84b814bc-902e-4721-8a89-4a12ec0a801c",{"type":51,"reviewType":25,"spacingBehaviour":20,"binaryQuestion":460,"binaryCorrect":462,"binaryIncorrect":464},[461],"True or false: neural networks aren't particularly versatile.",[463],"FALSE",[465],"TRUE",{"id":467,"data":468,"type":51,"version":20,"maxContentLevel":19},"d10b62fd-ff80-4c07-90ca-16a621044f90",{"type":51,"reviewType":190,"spacingBehaviour":20,"matchPairsQuestion":469,"matchPairsPairs":470,"matchPairsShowExamples":6},[65],[471,474,477,480],{"left":472,"right":473,"direction":19},"Physical neural network (PNN)","Piece of physical hardware",{"left":475,"right":476,"direction":19},"Recurrent neural network (RNN)","Specializes in sequential data",{"left":478,"right":479,"direction":19},"Convolutional neural network (CNN)","Specializes in visual data",{"left":481,"right":482,"direction":19},"Generative adversarial network (GAN) ","Two networks in competition",[484,602,726,836,1002],{"id":23,"data":24,"type":25,"version":20,"maxContentLevel":19,"summaryPage":27,"introPage":34,"pages":485},[486,522,573],{"id":43,"data":44,"type":20,"maxContentLevel":19,"version":20,"reviews":47,"parsed":487},{"data":488,"body":491,"toc":520},{"title":489,"description":490},"","Earlier, we touched upon neural networks, and mentioned how this key innovation was a major factor in the rise of modern AI. 
Now, it's time to look at this technology in more detail.",{"type":492,"children":493},"root",[494,510,515],{"type":495,"tag":496,"props":497,"children":498},"element","p",{},[499,502,508],{"type":500,"value":501},"text","Earlier, we touched upon ",{"type":495,"tag":503,"props":504,"children":505},"strong",{},[506],{"type":500,"value":507},"neural networks",{"type":500,"value":509},", and mentioned how this key innovation was a major factor in the rise of modern AI. Now, it's time to look at this technology in more detail.",{"type":495,"tag":496,"props":511,"children":512},{},[513],{"type":500,"value":514},"It's based on an idea that first cropped up in the 1940s – that's around the same time that Alan Turing was active. It was put forward by Warren McCulloch – a professor of psychiatry – and Walter Pitts – a student mathematician.",{"type":495,"tag":496,"props":516,"children":517},{},[518],{"type":500,"value":519},"Their idea was this: neurons in the brain could basically be viewed as binary gates, just like the ones in a computer. 
By extension, if you built a man-made network of binary gates, connected together with great complexity, it would potentially be able to perform the same processes as a brain.",{"title":489,"searchDepth":25,"depth":25,"links":521},[],{"id":70,"data":71,"type":20,"maxContentLevel":19,"version":20,"reviews":74,"parsed":523},{"data":524,"body":526,"toc":571},{"title":489,"description":525},"About a decade later, in 1957, an American psychologist called Frank Rosenblatt managed to put the ideas put forward by McCulloch and Pitts into practice.",{"type":492,"children":527},[528,532,543,553,558,566],{"type":495,"tag":496,"props":529,"children":530},{},[531],{"type":500,"value":525},{"type":495,"tag":496,"props":533,"children":534},{},[535,537,541],{"type":500,"value":536},"He constructed a network of node-like neurons, which he referred to as the ",{"type":495,"tag":503,"props":538,"children":539},{},[540],{"type":500,"value":85},{"type":500,"value":542},". Incredibly, this network used photocells to 'look' at images, and recognize objects within them.",{"type":495,"tag":496,"props":544,"children":545},{},[546],{"type":495,"tag":547,"props":548,"children":552},"img",{"alt":549,"src":550,"title":551},"Graph","image://0a17b588-5b3c-447d-a796-9dd1d80845aa","Frank Rosenblatt and the Mark I Perceptron. (Public domain), via Wikimedia Commons",[],{"type":495,"tag":496,"props":554,"children":555},{},[556],{"type":500,"value":557},"The Mark I Perceptron was only one layer thick – imagine a 2D net of nodes, as opposed to the 3D web of a real human brain. 
This limited the number of connections between nodes, which in turn limited the model's potential for human-like cognitive processes.",{"type":495,"tag":496,"props":559,"children":560},{},[561],{"type":495,"tag":547,"props":562,"children":565},{"alt":549,"src":563,"title":564},"image://0646ad9b-a104-48ed-843a-d42290992929","Simplified diagram of a single layer network.",[],{"type":495,"tag":496,"props":567,"children":568},{},[569],{"type":500,"value":570},"But nowadays, thanks to hundreds of innovations, we've found ways to build multilayer networks. They're still a long way away from the complex connections of a human brain. But they have enough connections to perform some pretty powerful processes.",{"title":489,"searchDepth":25,"depth":25,"links":572},[],{"id":113,"data":114,"type":20,"maxContentLevel":19,"version":20,"reviews":117,"parsed":574},{"data":575,"body":577,"toc":600},{"title":489,"description":576},"It's worth pointing out that a neural network isn't usually a physical object. These artificial neurons aren't physical nodes linked together in a physical web.",{"type":492,"children":578},[579,583,595],{"type":495,"tag":496,"props":580,"children":581},{},[582],{"type":500,"value":576},{"type":495,"tag":496,"props":584,"children":585},{},[586,588,593],{"type":500,"value":587},"Instead, it's a ",{"type":495,"tag":503,"props":589,"children":590},{},[591],{"type":500,"value":592},"computational model",{"type":500,"value":594},": a set of digital nodes in a digital web. Just think of it like a piece of software. You can even download some neural networks, and install them on your personal computer.",{"type":495,"tag":496,"props":596,"children":597},{},[598],{"type":500,"value":599},"Physical neural networks (PNNs) are occasionally used as well. 
But as you can probably imagine, they're much more fiddly to build than their digital counterparts, and harder to run at the equivalent level of complexity.",{"title":489,"searchDepth":25,"depth":25,"links":601},[],{"id":135,"data":136,"type":25,"version":20,"maxContentLevel":19,"summaryPage":138,"introPage":145,"pages":603},[604,669,704],{"id":153,"data":154,"type":20,"maxContentLevel":19,"version":20,"parsed":605},{"data":606,"body":608,"toc":667},{"title":489,"description":607},"The layers in a modern neural network are usually arranged like this. You have an input layer, one or more hidden layers, and an output layer.",{"type":492,"children":609},[610,636,644,649,654,662],{"type":495,"tag":496,"props":611,"children":612},{},[613,615,620,622,627,629,634],{"type":500,"value":614},"The layers in a modern neural network are usually arranged like this. You have an ",{"type":495,"tag":503,"props":616,"children":617},{},[618],{"type":500,"value":619},"input layer",{"type":500,"value":621},", one or more ",{"type":495,"tag":503,"props":623,"children":624},{},[625],{"type":500,"value":626},"hidden layers",{"type":500,"value":628},", and an ",{"type":495,"tag":503,"props":630,"children":631},{},[632],{"type":500,"value":633},"output layer",{"type":500,"value":635},".",{"type":495,"tag":496,"props":637,"children":638},{},[639],{"type":495,"tag":547,"props":640,"children":643},{"alt":549,"src":641,"title":642},"image://3d64ee32-129a-4527-b836-00a9537e9384","Simplified diagram of a neural network.",[],{"type":495,"tag":496,"props":645,"children":646},{},[647],{"type":500,"value":648},"When you ask an AI to do something, you're interacting with the input layer. For example, you might show it a photo of an animal, and ask it \"is this a cat or a dog?\"",{"type":495,"tag":496,"props":650,"children":651},{},[652],{"type":500,"value":653},"The input layer will send that data down into the hidden layers. 
As this data bounces through the web of nodes, the network is effectively 'thinking'. Assuming this model was designed to identify cats from dogs, it will try to work out what kind of animal is present in your photograph.",{"type":495,"tag":496,"props":655,"children":656},{},[657],{"type":495,"tag":547,"props":658,"children":661},{"alt":549,"src":659,"title":660},"image://7d0a49ab-0dcb-47bd-8d67-61b17b15241f","Cat or dog?",[],{"type":495,"tag":496,"props":663,"children":664},{},[665],{"type":500,"value":666},"Eventually, the data hits the output layer. \"It's a cat,\" the AI announces.",{"title":489,"searchDepth":25,"depth":25,"links":668},[],{"id":158,"data":159,"type":20,"maxContentLevel":19,"version":20,"reviews":162,"parsed":670},{"data":671,"body":673,"toc":702},{"title":489,"description":672},"Interestingly, while each hidden layer might have hundreds of nodes, an output layer could have as few as two or three.",{"type":492,"children":674},[675,679,684,689,694],{"type":495,"tag":496,"props":676,"children":677},{},[678],{"type":500,"value":672},{"type":495,"tag":496,"props":680,"children":681},{},[682],{"type":500,"value":683},"For example, in that example model we talked about, which tells the difference between cats and dogs, there are only three possible outputs: \"it's a cat\", \"it's a dog\", or \"it's neither\". All that 'thinking' in the hidden layers is just filtering to one of those options.",{"type":495,"tag":496,"props":685,"children":686},{},[687],{"type":500,"value":688},"Depending on the nature of the input, the network will take a different path through the hidden layers. If you fed it a photo of a greyhound, for example, it would 'think' about that photo in a different way than it might think about a photo of a chihuahua.",{"type":495,"tag":496,"props":690,"children":691},{},[692],{"type":500,"value":693},"But both paths would still lead to the same output node. 
The AI would announce: \"It's a dog\".",{"type":495,"tag":496,"props":695,"children":696},{},[697],{"type":495,"tag":547,"props":698,"children":701},{"alt":549,"src":699,"title":700},"image://272f283f-fcc1-4073-af53-5b0ad58aecdf","21.png",[],{"title":489,"searchDepth":25,"depth":25,"links":703},[],{"id":171,"data":172,"type":20,"maxContentLevel":19,"version":20,"reviews":175,"parsed":705},{"data":706,"body":708,"toc":724},{"title":489,"description":707},"That cat/dog model is just a simple example. Another neural network might have hundreds of nodes in the output layer. It depends how many possible outputs the model needs to produce.",{"type":492,"children":709},[710,714,719],{"type":495,"tag":496,"props":711,"children":712},{},[713],{"type":500,"value":707},{"type":495,"tag":496,"props":715,"children":716},{},[717],{"type":500,"value":718},"It's the same with the number of hidden layers. A simple neural network might only have one, but a more complex model might have hundreds. As a general rule, more hidden layers mean more possible paths through the web of nodes, and more powerful decision-making processes.",{"type":495,"tag":496,"props":720,"children":721},{},[722],{"type":500,"value":723},"This principle is what brought us some of the world's most famous AI models, like AlphaGo and ChatGPT. Supposedly, the latest version of ChatGPT (GPT-4) uses a neural network with 120 hidden layers, and an enormous number of nodes.",{"title":489,"searchDepth":25,"depth":25,"links":725},[],{"id":216,"data":217,"type":25,"version":20,"maxContentLevel":19,"summaryPage":219,"introPage":226,"pages":727},[728,764,786],{"id":234,"data":235,"type":20,"maxContentLevel":19,"version":20,"reviews":238,"parsed":729},{"data":730,"body":732,"toc":762},{"title":489,"description":731},"So, a neural network is a series of layers. 
These layers are made of interconnected nodes.",{"type":492,"children":733},[734,738,749,754],{"type":495,"tag":496,"props":735,"children":736},{},[737],{"type":500,"value":731},{"type":495,"tag":496,"props":739,"children":740},{},[741,743,748],{"type":500,"value":742},"And here's an important thing to add: every connection between two different nodes has a numerical parameter attached to it. This numerical parameter is what scientists call a ",{"type":495,"tag":503,"props":744,"children":745},{},[746],{"type":500,"value":747},"weight",{"type":500,"value":635},{"type":495,"tag":496,"props":750,"children":751},{},[752],{"type":500,"value":753},"As the AI works its way through the hidden layers, following connections from node to node, these different weights will help it decide which node to jump to next. It’s more likely to choose a connection with more weight – that’s how it’s programmed to behave.",{"type":495,"tag":496,"props":755,"children":756},{},[757],{"type":495,"tag":547,"props":758,"children":761},{"alt":549,"src":759,"title":760},"image://2b6f0b55-25e3-4948-900d-e652eae33fbe","Simple diagram of weights.",[],{"title":489,"searchDepth":25,"depth":25,"links":763},[],{"id":259,"data":260,"type":20,"maxContentLevel":19,"version":20,"reviews":263,"parsed":765},{"data":766,"body":768,"toc":784},{"title":489,"description":767},"You can think of the connections in a neural network like a tangled forest. When the network has to 'think', it's like following a path through that forest.",{"type":492,"children":769},[770,774,779],{"type":495,"tag":496,"props":771,"children":772},{},[773],{"type":500,"value":767},{"type":495,"tag":496,"props":775,"children":776},{},[777],{"type":500,"value":778},"This path has lots of different branches. Some of them are narrow and overgrown, while others are wide and open. 
If you were walking, you'd probably take the open branch, just as an AI is more likely to choose a connection with more weight.",{"type":495,"tag":496,"props":780,"children":781},{},[782],{"type":500,"value":783},"This process is essentially how a neural network makes decisions. Whichever path it takes through the web of nodes will result in a different output.",{"title":489,"searchDepth":25,"depth":25,"links":785},[],{"id":274,"data":275,"type":20,"maxContentLevel":19,"version":20,"reviews":278,"parsed":787},{"data":788,"body":790,"toc":834},{"title":489,"description":789},"Weights aren't the only type of parameter that you'll find in a neural network. The other main one is something called a bias.",{"type":492,"children":791},[792,803,808,816,821],{"type":495,"tag":496,"props":793,"children":794},{},[795,797,802],{"type":500,"value":796},"Weights aren't the only type of parameter that you'll find in a neural network. The other main one is something called a ",{"type":495,"tag":503,"props":798,"children":799},{},[800],{"type":500,"value":801},"bias",{"type":500,"value":635},{"type":495,"tag":496,"props":804,"children":805},{},[806],{"type":500,"value":807},"Unlike weights, which are attached to the connections between nodes, a bias is attached to the nodes themselves. They're basically there to give the network an extra little nudge in one direction or another.",{"type":495,"tag":496,"props":809,"children":810},{},[811],{"type":495,"tag":547,"props":812,"children":815},{"alt":549,"src":813,"title":814},"image://eedcb2b2-45ba-4640-ba52-87394f1de5aa","Simple diagram of weights and biases.",[],{"type":495,"tag":496,"props":817,"children":818},{},[819],{"type":500,"value":820},"Say you had two possible connections, each with a weight of 1. The network might struggle to decide which connection to follow. But the bias nudges it down the second connection. 
To continue with that forest analogy, it's like a little signpost: \"if in doubt, go here.\"",{"type":495,"tag":496,"props":822,"children":823},{},[824,826,832],{"type":500,"value":825},"Biases can also be negative. \"If in doubt, do ",{"type":495,"tag":827,"props":828,"children":829},"em",{},[830],{"type":500,"value":831},"not",{"type":500,"value":833}," go here.\"",{"title":489,"searchDepth":25,"depth":25,"links":835},[],{"id":299,"data":300,"type":25,"version":20,"maxContentLevel":19,"summaryPage":302,"introPage":309,"pages":837},[838,878,907,975],{"id":317,"data":318,"type":20,"maxContentLevel":19,"version":20,"reviews":321,"parsed":839},{"data":840,"body":842,"toc":876},{"title":489,"description":841},"Now, you might remember what we said at the start. Neural networks were the driving force behind the modern AI spring. But why are these models so important?",{"type":492,"children":843},[844,848,860,871],{"type":495,"tag":496,"props":845,"children":846},{},[847],{"type":500,"value":841},{"type":495,"tag":496,"props":849,"children":850},{},[851,853,858],{"type":500,"value":852},"As it happens, these webs of nodes are ",{"type":495,"tag":827,"props":854,"children":855},{},[856],{"type":500,"value":857},"extremely",{"type":500,"value":859}," good at learning.",{"type":495,"tag":496,"props":861,"children":862},{},[863,865,869],{"type":500,"value":864},"This learning is most effective when a neural network has lots of hidden layers. ",{"type":495,"tag":503,"props":866,"children":867},{},[868],{"type":500,"value":301},{"type":500,"value":870}," is the official name for it. 'Deep' because of all those layers.",{"type":495,"tag":496,"props":872,"children":873},{},[874],{"type":500,"value":875},"Remember: when we say that a machine is 'learning', we really just mean that numerical parameters are changing. 
And that's exactly what happens with a neural network: the model is able to adjust its weights and biases.",{"title":489,"searchDepth":25,"depth":25,"links":877},[],{"id":330,"data":331,"type":20,"maxContentLevel":19,"version":20,"reviews":334,"parsed":879},{"data":880,"body":882,"toc":905},{"title":489,"description":881},"Once a neural network has performed a task, it can check the loss function afterwards. For example, if it was solving a complex math problem, how close did it get to the right answer?",{"type":492,"children":883},[884,888,900],{"type":495,"tag":496,"props":885,"children":886},{},[887],{"type":500,"value":881},{"type":495,"tag":496,"props":889,"children":890},{},[891,893,898],{"type":500,"value":892},"After checking the loss function, the neural network uses a technique called ",{"type":495,"tag":503,"props":894,"children":895},{},[896],{"type":500,"value":897},"backpropagation",{"type":500,"value":899},". This is a special algorithm which travels back up the path that the AI just took through all those layers of nodes.",{"type":495,"tag":496,"props":901,"children":902},{},[903],{"type":500,"value":904},"Along the way, it adjusts the weights and biases according to the size of the loss function. 
\"Actually, this was a bad path to take – let's lower the weight on this one, and this one, and bump up the bias right here.\"",{"title":489,"searchDepth":25,"depth":25,"links":906},[],{"id":355,"data":356,"type":20,"maxContentLevel":19,"version":20,"reviews":359,"parsed":908},{"data":909,"body":911,"toc":973},{"title":489,"description":910},"With plenty of time, and thousands of iterations, a neural network can fine-tune its parameters to the point that it starts reliably following the most effective path.",{"type":492,"children":912},[913,917,929,960,965],{"type":495,"tag":496,"props":914,"children":915},{},[916],{"type":500,"value":910},{"type":495,"tag":496,"props":918,"children":919},{},[920,922,927],{"type":500,"value":921},"That's not always the ",{"type":495,"tag":827,"props":923,"children":924},{},[925],{"type":500,"value":926},"same",{"type":500,"value":928}," path. Different inputs will require different paths. That's what the model is ultimately learning – for every single input it could possibly receive, it needs to know exactly which path to take in order to produce the best and most appropriate output.",{"type":495,"tag":496,"props":930,"children":931},{},[932,934,939,941,946,948,952,954,958],{"type":500,"value":933},"Imagine, for example, that you wanted your AI to tell the difference between types of fish. You input thousands of photos of different fish, and it learns the best path for each of them. If it sees a fish with ",{"type":495,"tag":827,"props":935,"children":936},{},[937],{"type":500,"value":938},"these",{"type":500,"value":940}," markings, it should take ",{"type":495,"tag":827,"props":942,"children":943},{},[944],{"type":500,"value":945},"this",{"type":500,"value":947}," path. 
If it sees a fish with ",{"type":495,"tag":827,"props":949,"children":950},{},[951],{"type":500,"value":938},{"type":500,"value":953}," fin-shapes, it should take ",{"type":495,"tag":827,"props":955,"children":956},{},[957],{"type":500,"value":945},{"type":500,"value":959}," path.",{"type":495,"tag":496,"props":961,"children":962},{},[963],{"type":500,"value":964},"Eventually, it will choose the right path for every input you throw at it.",{"type":495,"tag":496,"props":966,"children":967},{},[968],{"type":495,"tag":547,"props":969,"children":972},{"alt":549,"src":970,"title":971},"image://1515bf34-c139-4ad0-adb6-4d2d8f17eac8","Can you name the fish?",[],{"title":489,"searchDepth":25,"depth":25,"links":974},[],{"id":370,"data":371,"type":20,"maxContentLevel":19,"version":20,"reviews":374,"parsed":976},{"data":977,"body":979,"toc":1000},{"title":489,"description":978},"Just to be clear: neural networks aren't the only type of AI model which is capable of machine learning. But as things stand, they have a couple of advantages over a lot of other approaches.",{"type":492,"children":980},[981,985,990,995],{"type":495,"tag":496,"props":982,"children":983},{},[984],{"type":500,"value":978},{"type":495,"tag":496,"props":986,"children":987},{},[988],{"type":500,"value":989},"First of all, these networks are extremely versatile. You can train them to analyze data for you. You can also train them to play games, or control self-driving vehicles. You can train them to speak, or recognize images. The list goes on and on.",{"type":495,"tag":496,"props":991,"children":992},{},[993],{"type":500,"value":994},"They're also extremely powerful, especially deep learning models. More layers mean more nodes, and more weights and biases. 
In other words, more detailed and complex ways for the AI to learn to behave.",{"type":495,"tag":496,"props":996,"children":997},{},[998],{"type":500,"value":999},"According to some numbers leaked in 2023, ChatGPT uses a neural network with more than a trillion different parameters. Just imagine how many paths you could take through such a complex neural network.",{"title":489,"searchDepth":25,"depth":25,"links":1001},[],{"id":398,"data":399,"type":25,"version":20,"maxContentLevel":19,"summaryPage":401,"introPage":409,"pages":1003},[1004,1045,1080,1115],{"id":417,"data":418,"type":20,"maxContentLevel":19,"version":20,"reviews":421,"parsed":1005},{"data":1006,"body":1008,"toc":1043},{"title":489,"description":1007},"It's worth pointing out that there are a few different types of neural network.",{"type":492,"children":1009},[1010,1014,1026,1038],{"type":495,"tag":496,"props":1011,"children":1012},{},[1013],{"type":500,"value":1007},{"type":495,"tag":496,"props":1015,"children":1016},{},[1017,1019,1024],{"type":500,"value":1018},"Earlier we mentioned ",{"type":495,"tag":503,"props":1020,"children":1021},{},[1022],{"type":500,"value":1023},"physical neural networks (PNNs)",{"type":500,"value":1025},". These are pieces of hardware, which use networks of physical nodes and connections, rather than the digital versions which are much more commonly used.",{"type":495,"tag":496,"props":1027,"children":1028},{},[1029,1031,1036],{"type":500,"value":1030},"Another example is a ",{"type":495,"tag":503,"props":1032,"children":1033},{},[1034],{"type":500,"value":1035},"recurrent neural network (RNN)",{"type":500,"value":1037},". This one is actually quite simple. In a classic multilayer neural network, data is passed from hidden layer to hidden layer in one direction. 
Let's say from layer 1, to layer 2, to layer 3.",{"type":495,"tag":496,"props":1039,"children":1040},{},[1041],{"type":500,"value":1042},"But in a recurrent neural network, the data will also loop back to previous layers. Effectively, this gives the network a memory – each loop reminds the previous layers what kind of data has already come through.",{"title":489,"searchDepth":25,"depth":25,"links":1044},[],{"id":432,"data":433,"type":20,"maxContentLevel":19,"version":20,"reviews":436,"parsed":1046},{"data":1047,"body":1049,"toc":1078},{"title":489,"description":1048},"The loop-back function of an RNN is useful in loads of contexts.",{"type":492,"children":1050},[1051,1055,1060,1065,1070],{"type":495,"tag":496,"props":1052,"children":1053},{},[1054],{"type":500,"value":1048},{"type":495,"tag":496,"props":1056,"children":1057},{},[1058],{"type":500,"value":1059},"Imagine, for example, that you want an AI to finish this sentence: \"The color of the sky is [something].\"",{"type":495,"tag":496,"props":1061,"children":1062},{},[1063],{"type":500,"value":1064},"If it only remembers the final word (\"is\"), it might output something random like \"yellow\" or \"tasty\", which logically follows \"is\", but doesn't make sense in the context of the sentence as a whole.",{"type":495,"tag":496,"props":1066,"children":1067},{},[1068],{"type":500,"value":1069},"If each word is looped back though, and 'remembered' by the network, it's more likely to give an answer that fits the context of the sentence as a whole: \"The color of the sky is blue.\"",{"type":495,"tag":496,"props":1071,"children":1072},{},[1073],{"type":495,"tag":547,"props":1074,"children":1077},{"alt":549,"src":1075,"title":1076},"image://ac4575bd-0ca3-4da8-a632-57a684a3de9a","Blue sky. 
Image via Pexels",[],{"title":489,"searchDepth":25,"depth":25,"links":1079},[],{"id":447,"data":448,"type":20,"maxContentLevel":19,"version":20,"parsed":1081},{"data":1082,"body":1084,"toc":1113},{"title":489,"description":1083},"Here's another example. This time, imagine some satellite images of a hurricane out at sea. You want your AI to calculate whether the hurricane will hit any landmasses.",{"type":492,"children":1085},[1086,1090,1095,1103,1108],{"type":495,"tag":496,"props":1087,"children":1088},{},[1089],{"type":500,"value":1083},{"type":495,"tag":496,"props":1091,"children":1092},{},[1093],{"type":500,"value":1094},"If you gave the AI just one image of that hurricane, in its current position, it would be hard to predict the trajectory.",{"type":495,"tag":496,"props":1096,"children":1097},{},[1098],{"type":495,"tag":547,"props":1099,"children":1102},{"alt":549,"src":1100,"title":1101},"image://9742350c-46d6-48e7-92e3-44871a4cc0e1","One image of a hurricane. August 28 2005 NASA (Public domain), via Wikimedia Commons",[],{"type":495,"tag":496,"props":1104,"children":1105},{},[1106],{"type":500,"value":1107},"But if you gave it ten images, showing the hurricane's progress from initial position to current position – and your AI could 'remember' these positions, in order – it would do a much better job predicting where the hurricane will go.",{"type":495,"tag":496,"props":1109,"children":1110},{},[1111],{"type":500,"value":1112},"Any time you're working with a sequence of data – be it words, or images, or something else – a recurrent approach is often more effective than a classic neural network.",{"title":489,"searchDepth":25,"depth":25,"links":1114},[],{"id":452,"data":453,"type":20,"maxContentLevel":19,"version":20,"reviews":456,"parsed":1116},{"data":1117,"body":1119,"toc":1154},{"title":489,"description":1118},"Throughout the rest of this pathway, we'll also encounter a few other types of neural 
network.",{"type":492,"children":1120},[1121,1125,1137,1149],{"type":495,"tag":496,"props":1122,"children":1123},{},[1124],{"type":500,"value":1118},{"type":495,"tag":496,"props":1126,"children":1127},{},[1128,1130,1135],{"type":500,"value":1129},"Another important one is a ",{"type":495,"tag":503,"props":1131,"children":1132},{},[1133],{"type":500,"value":1134},"convolutional neural network (CNN)",{"type":500,"value":1136},". We'll take a proper look at CNNs when we get to our tile on Computer Vision, but for now, the main thing you need to know is that they're amazing at analyzing images.",{"type":495,"tag":496,"props":1138,"children":1139},{},[1140,1142,1147],{"type":500,"value":1141},"One more type is a ",{"type":495,"tag":503,"props":1143,"children":1144},{},[1145],{"type":500,"value":1146},"generative adversarial network (GAN)",{"type":500,"value":1148},". We'll be learning more about this one in our tile on generative AI – but it essentially works by taking a pair of neural networks, and tasking them to compete against one another.",{"type":495,"tag":496,"props":1150,"children":1151},{},[1152],{"type":500,"value":1153},"There are plenty of other examples. Part of the reason why neural networks are so popular is the fact they can be used in so many different ways.",{"title":489,"searchDepth":25,"depth":25,"links":1155},[],{"left":4,"top":4,"width":1157,"height":1157,"rotate":4,"vFlip":6,"hFlip":6,"body":1158},24,"\u003Cpath fill=\"none\" stroke=\"currentColor\" stroke-linecap=\"round\" stroke-linejoin=\"round\" stroke-width=\"2\" d=\"m9 18l6-6l-6-6\"/>",{"left":4,"top":4,"width":1157,"height":1157,"rotate":4,"vFlip":6,"hFlip":6,"body":1160},"\u003Cpath fill=\"none\" stroke=\"currentColor\" stroke-linecap=\"round\" stroke-linejoin=\"round\" stroke-width=\"2\" d=\"M4 5h16M4 12h16M4 19h16\"/>",1778179449613]