-
-
Notifications
You must be signed in to change notification settings - Fork 21
/
index.html
24 lines (19 loc) · 79.7 KB
/
index.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
<!DOCTYPE html><html><head><meta charSet="utf-8"/><meta http-equiv="x-ua-compatible" content="ie=edge"/><meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no"/><style data-href="/styles.f4dbc894744c3c869f21.css" id="gatsby-global-css">@import url(https://fonts.googleapis.com/css2?family=Roboto:ital,wght@0,100;0,300;0,400;0,500;0,700;0,900;1,100;1,300;1,400;1,500;1,700;1,900&display=swap);.post--2021--binary-floating-point--bit-button:hover{box-shadow:0 0 5px 1px rgba(0,0,0,.2);transition:box-shadow .2s ease-in-out}input:checked~.toggle__dot{transform:translateX(100%)}input:checked~.toggle__line{background-color:#48bb78}.post--2021--water-line--gyro-cube .gyro-cube-container{height:400px;display:flex;justify-content:center;align-items:center;perspective:800px;perspective-origin:50%}.post--2021--water-line--gyro-cube .gyro-cube{position:relative;width:200px;height:200px;transform-style:preserve-3d}.post--2021--water-line--gyro-cube .gyro-cube-side{position:absolute;width:100%;height:100%;opacity:.8;border:2px solid #fff;display:flex;justify-content:center;align-items:center;color:#fff;font-weight:700;font-size:100px}.post--2021--water-line--gyro-cube .gyro-cube-front{background-color:#d50000;transform:translateZ(100px)}.post--2021--water-line--gyro-cube .gyro-cube-back{background-color:#a0f;transform:translateZ(-100px)}.post--2021--water-line--gyro-cube .gyro-cube-left{background-color:#304ffe;transform:translateX(100px) rotateY(90deg)}.post--2021--water-line--gyro-cube .gyro-cube-right{background-color:#0091ea;transform:translateX(-100px) rotateY(90deg)}.post--2021--water-line--gyro-cube .gyro-cube-top{background-color:#00bfa5;transform:translateY(-100px) rotateX(90deg)}.post--2021--water-line--gyro-cube .gyro-cube-bottom{background-color:#64dd17;transform:translateY(100px) 
rotateX(90deg)}.custom-fade-in-opacity{opacity:1;-webkit-animation-name:customFadeInOpacity;animation-name:customFadeInOpacity;-webkit-animation-iteration-count:1;animation-iteration-count:1;-webkit-animation-timing-function:ease-out;animation-timing-function:ease-out;-webkit-animation-duration:.8s;animation-duration:.8s}@-webkit-keyframes customFadeInOpacity{0%{opacity:0}to{opacity:1}}@keyframes customFadeInOpacity{0%{opacity:0}to{opacity:1}}.prose blockquote p:first-of-type:before,.prose blockquote p:last-of-type:after,.prose code:after,.prose code:before{content:""!important}.prose blockquote{font-weight:400!important}.prose li code.language-text,.prose p code.language-text,.prose td code.language-text,.prose th code.language-text,.prose tr code.language-text{padding:2px 5px 1px;font-weight:400}.prose p>img{margin:auto}.prose h1>a.gatsby-remark-autolink-header-anchor,.prose h2>a.gatsby-remark-autolink-header-anchor,.prose h3>a.gatsby-remark-autolink-header-anchor,.prose h4>a.gatsby-remark-autolink-header-anchor,.prose h5>a.gatsby-remark-autolink-header-anchor{visibility:hidden;display:inline-block;margin-left:10px}.prose h1:hover>a.gatsby-remark-autolink-header-anchor,.prose h2:hover>a.gatsby-remark-autolink-header-anchor,.prose h3:hover>a.gatsby-remark-autolink-header-anchor,.prose h4:hover>a.gatsby-remark-autolink-header-anchor,.prose h5:hover>a.gatsby-remark-autolink-header-anchor{visibility:visible}
/* ! tailwindcss v2.1.2 | MIT License | https://tailwindcss.com */
/*! modern-normalize v1.0.0 | MIT License | https://github.com/sindresorhus/modern-normalize */:root{-moz-tab-size:4;-o-tab-size:4;tab-size:4}html{line-height:1.15;-webkit-text-size-adjust:100%}body{margin:0;font-family:system-ui,-apple-system,Segoe UI,Roboto,Helvetica,Arial,sans-serif,Apple Color Emoji,Segoe UI Emoji}hr{height:0;color:inherit}abbr[title]{-webkit-text-decoration:underline dotted;text-decoration:underline dotted}b,strong{font-weight:bolder}code,kbd,pre,samp{font-family:ui-monospace,SFMono-Regular,Consolas,Liberation Mono,Menlo,monospace;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit}button,input,optgroup,select,textarea{font-family:inherit;font-size:100%;line-height:1.15;margin:0}button,select{text-transform:none}[type=button],[type=reset],[type=submit],button{-webkit-appearance:button}legend{padding:0}progress{vertical-align:baseline}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}summary{display:list-item}blockquote,dd,dl,figure,h1,h2,h3,h4,h5,h6,hr,p,pre{margin:0}button{background-color:transparent;background-image:none}button:focus{outline:1px dotted;outline:5px auto -webkit-focus-ring-color}fieldset,ol,ul{margin:0;padding:0}ol,ul{list-style:none}html{font-family:Roboto,ui-sans-serif,system-ui,-apple-system,BlinkMacSystemFont,Segoe UI,Helvetica Neue,Arial,Noto Sans,sans-serif,Apple Color Emoji,Segoe UI Emoji,Segoe UI Symbol,Noto Color Emoji;line-height:1.5}body{font-family:inherit;line-height:inherit}*,:after,:before{box-sizing:border-box;border:0 solid 
#e5e7eb}hr{border-top-width:1px}img{border-style:solid}textarea{resize:vertical}input::-moz-placeholder,textarea::-moz-placeholder{opacity:1;color:#9ca3af}input:-ms-input-placeholder,textarea:-ms-input-placeholder{opacity:1;color:#9ca3af}input::placeholder,textarea::placeholder{opacity:1;color:#9ca3af}button{cursor:pointer}table{border-collapse:collapse}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;text-decoration:inherit}button,input,optgroup,select,textarea{padding:0;line-height:inherit;color:inherit}code,kbd,pre,samp{font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace}audio,canvas,embed,iframe,img,object,svg,video{display:block;vertical-align:middle}img,video{max-width:100%;height:auto}.container{width:100%}@media (min-width:640px){.container{max-width:640px}}@media (min-width:768px){.container{max-width:768px}}@media (min-width:1024px){.container{max-width:1024px}}@media (min-width:1280px){.container{max-width:1280px}}@media (min-width:1536px){.container{max-width:1536px}}.prose{color:#374151;max-width:65ch}.prose [class~=lead]{color:#4b5563;font-size:1.25em;line-height:1.6;margin-top:1.2em;margin-bottom:1.2em}.prose a{color:#dc2626;text-decoration:underline;font-weight:500}.prose a:hover{color:#ef4444}.prose strong{color:#111827;font-weight:600}.prose ol[type=A]{--list-counter-style:upper-alpha}.prose ol[type=a]{--list-counter-style:lower-alpha}.prose ol[type=I]{--list-counter-style:upper-roman}.prose ol[type=i]{--list-counter-style:lower-roman}.prose ol[type="1"]{--list-counter-style:decimal}.prose ol>li{position:relative;padding-left:1.75em}.prose ol>li:before{content:counter(list-item,var(--list-counter-style,decimal)) ".";position:absolute;font-weight:400;color:#6b7280;left:0}.prose ul>li{position:relative;padding-left:1.75em}.prose ul>li:before{content:"";position:absolute;background-color:#d1d5db;border-radius:50%;width:.375em;height:.375em;top:.6875em;left:.25em}.prose 
hr{border-color:#e5e7eb;border-top-width:1px;margin-top:3em;margin-bottom:3em}.prose blockquote{font-weight:500;font-style:italic;color:#111827;border-left-width:.25rem;border-left-color:#e5e7eb;quotes:"\201C""\201D""\2018""\2019";margin-top:1.6em;margin-bottom:1.6em;padding-left:1em}.prose blockquote p:first-of-type:before{content:open-quote}.prose blockquote p:last-of-type:after{content:close-quote}.prose h1{color:#111827;font-weight:800;font-size:2.25em;margin-top:0;margin-bottom:.8888889em;line-height:1.1111111}.prose h2{color:#111827;font-weight:700;font-size:1.5em;margin-top:2em;margin-bottom:1em;line-height:1.3333333}.prose h3{font-size:1.25em;margin-top:1.6em;margin-bottom:.6em;line-height:1.6}.prose h3,.prose h4{color:#111827;font-weight:600}.prose h4{margin-top:1.5em;margin-bottom:.5em;line-height:1.5}.prose figure figcaption{color:#6b7280;font-size:.875em;line-height:1.4285714;margin-top:.8571429em}.prose code{color:#111827;font-weight:600;font-size:.875em}.prose code:after,.prose code:before{content:"`"}.prose a code{color:#111827}.prose pre{color:#e5e7eb;background-color:#1f2937;overflow-x:auto;font-size:.875em;line-height:1.7142857;margin-top:1.7142857em;margin-bottom:1.7142857em;border-radius:.375rem;padding:.8571429em 1.1428571em}.prose pre code{background-color:transparent;border-width:0;border-radius:0;padding:0;font-weight:400;color:inherit;font-size:inherit;font-family:inherit;line-height:inherit}.prose pre code:after,.prose pre code:before{content:none}.prose table{width:100%;table-layout:auto;text-align:left;margin-top:2em;margin-bottom:2em;font-size:.875em;line-height:1.7142857}.prose thead{color:#111827;font-weight:600;border-bottom-width:1px;border-bottom-color:#d1d5db}.prose thead th{vertical-align:bottom;padding-right:.5714286em;padding-bottom:.5714286em;padding-left:.5714286em}.prose tbody tr{border-bottom-width:1px;border-bottom-color:#e5e7eb}.prose tbody tr:last-child{border-bottom-width:0}.prose tbody 
td{vertical-align:top;padding:.5714286em}.prose{font-size:1rem;line-height:1.75}.prose p{margin-top:1.25em;margin-bottom:1.25em}.prose figure,.prose img,.prose video{margin-top:2em;margin-bottom:2em}.prose figure>*{margin-top:0;margin-bottom:0}.prose h2 code{font-size:.875em}.prose h3 code{font-size:.9em}.prose ol,.prose ul{margin-top:1.25em;margin-bottom:1.25em}.prose li{margin-top:.5em;margin-bottom:.5em}.prose>ul>li p{margin-top:.75em;margin-bottom:.75em}.prose>ul>li>:first-child{margin-top:1.25em}.prose>ul>li>:last-child{margin-bottom:1.25em}.prose>ol>li>:first-child{margin-top:1.25em}.prose>ol>li>:last-child{margin-bottom:1.25em}.prose ol ol,.prose ol ul,.prose ul ol,.prose ul ul{margin-top:.75em;margin-bottom:.75em}.prose h2+*,.prose h3+*,.prose h4+*,.prose hr+*{margin-top:0}.prose thead th:first-child{padding-left:0}.prose thead th:last-child{padding-right:0}.prose tbody td:first-child{padding-left:0}.prose tbody td:last-child{padding-right:0}.prose>:first-child{margin-top:0}.prose>:last-child{margin-bottom:0}.prose-sm{font-size:.875rem;line-height:1.7142857}.prose-sm p{margin-top:1.1428571em;margin-bottom:1.1428571em}.prose-sm [class~=lead]{font-size:1.2857143em;line-height:1.5555556;margin-top:.8888889em;margin-bottom:.8888889em}.prose-sm blockquote{margin-top:1.3333333em;margin-bottom:1.3333333em;padding-left:1.1111111em}.prose-sm h1{font-size:2.1428571em;margin-top:0;margin-bottom:.8em;line-height:1.2}.prose-sm h2{font-size:1.4285714em;margin-top:1.6em;margin-bottom:.8em;line-height:1.4}.prose-sm h3{font-size:1.2857143em;margin-top:1.5555556em;margin-bottom:.4444444em;line-height:1.5555556}.prose-sm h4{margin-top:1.4285714em;margin-bottom:.5714286em;line-height:1.4285714}.prose-sm figure,.prose-sm img,.prose-sm video{margin-top:1.7142857em;margin-bottom:1.7142857em}.prose-sm figure>*{margin-top:0;margin-bottom:0}.prose-sm figure figcaption{font-size:.8571429em;line-height:1.3333333;margin-top:.6666667em}.prose-sm code{font-size:.8571429em}.prose-sm h2 
code{font-size:.9em}.prose-sm h3 code{font-size:.8888889em}.prose-sm pre{font-size:.8571429em;line-height:1.6666667;margin-top:1.6666667em;margin-bottom:1.6666667em;border-radius:.25rem;padding:.6666667em 1em}.prose-sm ol,.prose-sm ul{margin-top:1.1428571em;margin-bottom:1.1428571em}.prose-sm li{margin-top:.2857143em;margin-bottom:.2857143em}.prose-sm ol>li{padding-left:1.5714286em}.prose-sm ol>li:before{left:0}.prose-sm ul>li{padding-left:1.5714286em}.prose-sm ul>li:before{height:.3571429em;width:.3571429em;top:.67857em;left:.2142857em}.prose-sm>ul>li p{margin-top:.5714286em;margin-bottom:.5714286em}.prose-sm>ul>li>:first-child{margin-top:1.1428571em}.prose-sm>ul>li>:last-child{margin-bottom:1.1428571em}.prose-sm>ol>li>:first-child{margin-top:1.1428571em}.prose-sm>ol>li>:last-child{margin-bottom:1.1428571em}.prose-sm ol ol,.prose-sm ol ul,.prose-sm ul ol,.prose-sm ul ul{margin-top:.5714286em;margin-bottom:.5714286em}.prose-sm hr{margin-top:2.8571429em;margin-bottom:2.8571429em}.prose-sm h2+*,.prose-sm h3+*,.prose-sm h4+*,.prose-sm hr+*{margin-top:0}.prose-sm table{font-size:.8571429em;line-height:1.5}.prose-sm thead th{padding-right:1em;padding-bottom:.6666667em;padding-left:1em}.prose-sm thead th:first-child{padding-left:0}.prose-sm thead th:last-child{padding-right:0}.prose-sm tbody td{padding:.6666667em 1em}.prose-sm tbody td:first-child{padding-left:0}.prose-sm tbody td:last-child{padding-right:0}.prose-sm>:first-child{margin-top:0}.prose-sm>:last-child{margin-bottom:0}.prose-red a,.prose-red a 
code{color:#dc2626}.appearance-none{-webkit-appearance:none;-moz-appearance:none;appearance:none}.bg-black{--tw-bg-opacity:1;background-color:rgba(0,0,0,var(--tw-bg-opacity))}.bg-white{--tw-bg-opacity:1;background-color:rgba(255,255,255,var(--tw-bg-opacity))}.bg-gray-100{--tw-bg-opacity:1;background-color:rgba(243,244,246,var(--tw-bg-opacity))}.bg-gray-200{--tw-bg-opacity:1;background-color:rgba(229,231,235,var(--tw-bg-opacity))}.bg-gray-400{--tw-bg-opacity:1;background-color:rgba(156,163,175,var(--tw-bg-opacity))}.bg-red-100{--tw-bg-opacity:1;background-color:rgba(254,226,226,var(--tw-bg-opacity))}.bg-blue-100{--tw-bg-opacity:1;background-color:rgba(219,234,254,var(--tw-bg-opacity))}.hover\:bg-black:hover{--tw-bg-opacity:1;background-color:rgba(0,0,0,var(--tw-bg-opacity))}.hover\:bg-white:hover{--tw-bg-opacity:1;background-color:rgba(255,255,255,var(--tw-bg-opacity))}.hover\:bg-gray-800:hover{--tw-bg-opacity:1;background-color:rgba(31,41,55,var(--tw-bg-opacity))}.hover\:bg-red-500:hover{--tw-bg-opacity:1;background-color:rgba(239,68,68,var(--tw-bg-opacity))}.bg-cover{background-size:cover}.border-black{--tw-border-opacity:1;border-color:rgba(0,0,0,var(--tw-border-opacity))}.border-white{--tw-border-opacity:1;border-color:rgba(255,255,255,var(--tw-border-opacity))}.border-gray-300{--tw-border-opacity:1;border-color:rgba(209,213,219,var(--tw-border-opacity))}.border-gray-400{--tw-border-opacity:1;border-color:rgba(156,163,175,var(--tw-border-opacity))}.border-red-500{--tw-border-opacity:1;border-color:rgba(239,68,68,var(--tw-border-opacity))}.hover\:border-white:hover{--tw-border-opacity:1;border-color:rgba(255,255,255,var(--tw-border-opacity))}.hover\:border-gray-400:hover{--tw-border-opacity:1;border-color:rgba(156,163,175,var(--tw-border-opacity))}.rounded-sm{border-radius:.125rem}.rounded{border-radius:.25rem}.rounded-md{border-radius:.375rem}.rounded-full{border-radius:9999px}.border-solid{border-style:solid}.border-dashed{border-style:dashed}.border{border-widt
h:1px}.cursor-pointer{cursor:pointer}.cursor-not-allowed{cursor:not-allowed}.block{display:block}.inline-block{display:inline-block}.flex{display:flex}.table{display:table}.grid{display:grid}.hidden{display:none}.flex-row{flex-direction:row}.flex-col{flex-direction:column}.flex-wrap{flex-wrap:wrap}.items-start{align-items:flex-start}.items-center{align-items:center}.items-stretch{align-items:stretch}.self-start{align-self:flex-start}.self-stretch{align-self:stretch}.justify-start{justify-content:flex-start}.justify-end{justify-content:flex-end}.justify-center{justify-content:center}.justify-between{justify-content:space-between}.justify-self-end{justify-self:end}.flex-1{flex:1 1 0%}.font-light{font-weight:300}.font-normal{font-weight:400}.font-medium{font-weight:500}.font-semibold{font-weight:600}.font-bold{font-weight:700}.font-extrabold{font-weight:800}.h-0{height:0}.h-4{height:1rem}.h-5{height:1.25rem}.h-6{height:1.5rem}.h-48{height:12rem}.h-64{height:16rem}.h-96{height:24rem}.h-0\.5{height:.125rem}.h-full{height:100%}.text-xs{font-size:.75rem;line-height:1rem}.text-sm{font-size:.875rem;line-height:1.25rem}.text-xl{font-size:1.25rem;line-height:1.75rem}.text-2xl{font-size:1.5rem;line-height:2rem}.text-3xl{font-size:1.875rem;line-height:2.25rem}.m-auto{margin:auto}.mr-0{margin-right:0}.mb-0{margin-bottom:0}.mr-1{margin-right:.25rem}.mb-1{margin-bottom:.25rem}.ml-1{margin-left:.25rem}.mt-2{margin-top:.5rem}.mr-2{margin-right:.5rem}.mb-2{margin-bottom:.5rem}.ml-2{margin-left:.5rem}.mt-3{margin-top:.75rem}.mr-3{margin-right:.75rem}.mb-3{margin-bottom:.75rem}.ml-3{margin-left:.75rem}.mr-4{margin-right:1rem}.mb-4{margin-bottom:1rem}.ml-4{margin-left:1rem}.mr-5{margin-right:1.25rem}.ml-5{margin-left:1.25rem}.mt-6{margin-top:1.5rem}.mr-6{margin-right:1.5rem}.mb-6{margin-bottom:1.5rem}.mb-12{margin-bottom:3rem}.mt-16{margin-top:4rem}.max-w-md{max-width:28rem}.max-w-screen-xl{max-width:1280px}.overflow-hidden{overflow:hidden}.overflow-scroll{overflow:scroll}.p-6{padding:1.
5rem}.p-8{padding:2rem}.py-1{padding-top:.25rem;padding-bottom:.25rem}.px-1{padding-left:.25rem;padding-right:.25rem}.py-2{padding-top:.5rem;padding-bottom:.5rem}.px-2{padding-left:.5rem;padding-right:.5rem}.py-3{padding-top:.75rem;padding-bottom:.75rem}.px-3{padding-left:.75rem;padding-right:.75rem}.px-4{padding-left:1rem;padding-right:1rem}.py-6{padding-top:1.5rem;padding-bottom:1.5rem}.px-6{padding-left:1.5rem;padding-right:1.5rem}.py-12{padding-top:3rem;padding-bottom:3rem}.pb-6{padding-bottom:1.5rem}.static{position:static}.absolute{position:absolute}.relative{position:relative}.inset-y-0{top:0;bottom:0}.left-0{left:0}.resize{resize:both}*{--tw-shadow:0 0 transparent}.shadow-sm{--tw-shadow:0 1px 2px 0 rgba(0,0,0,0.05)}.shadow,.shadow-sm{box-shadow:var(--tw-ring-offset-shadow,0 0 transparent),var(--tw-ring-shadow,0 0 transparent),var(--tw-shadow)}.shadow{--tw-shadow:0 1px 3px 0 rgba(0,0,0,0.1),0 1px 2px 0 rgba(0,0,0,0.06)}.shadow-md{--tw-shadow:0 4px 6px -1px rgba(0,0,0,0.1),0 2px 4px -1px rgba(0,0,0,0.06)}.shadow-inner,.shadow-md{box-shadow:var(--tw-ring-offset-shadow,0 0 transparent),var(--tw-ring-shadow,0 0 transparent),var(--tw-shadow)}.shadow-inner{--tw-shadow:inset 0 2px 4px 0 rgba(0,0,0,0.06)}.shadow-card{--tw-shadow:0 2px 1px -1px rgba(0,0,0,0.2),0 1px 1px 0 rgba(0,0,0,0.14),0 1px 3px 0 rgba(0,0,0,0.12);box-shadow:var(--tw-ring-offset-shadow,0 0 transparent),var(--tw-ring-shadow,0 0 transparent),var(--tw-shadow)}*{--tw-ring-inset:var(--tw-empty,/*!*/ /*!*/);--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:rgba(59,130,246,0.5);--tw-ring-offset-shadow:0 0 transparent;--tw-ring-shadow:0 0 
transparent}.text-center{text-align:center}.text-black{--tw-text-opacity:1;color:rgba(0,0,0,var(--tw-text-opacity))}.text-white{--tw-text-opacity:1;color:rgba(255,255,255,var(--tw-text-opacity))}.text-gray-400{--tw-text-opacity:1;color:rgba(156,163,175,var(--tw-text-opacity))}.text-gray-500{--tw-text-opacity:1;color:rgba(107,114,128,var(--tw-text-opacity))}.text-gray-700{--tw-text-opacity:1;color:rgba(55,65,81,var(--tw-text-opacity))}.text-red-500{--tw-text-opacity:1;color:rgba(239,68,68,var(--tw-text-opacity))}.text-red-600{--tw-text-opacity:1;color:rgba(220,38,38,var(--tw-text-opacity))}.text-blue-600{--tw-text-opacity:1;color:rgba(37,99,235,var(--tw-text-opacity))}.hover\:text-black:hover{--tw-text-opacity:1;color:rgba(0,0,0,var(--tw-text-opacity))}.hover\:text-white:hover{--tw-text-opacity:1;color:rgba(255,255,255,var(--tw-text-opacity))}.hover\:text-gray-500:hover{--tw-text-opacity:1;color:rgba(107,114,128,var(--tw-text-opacity))}.hover\:text-red-600:hover{--tw-text-opacity:1;color:rgba(220,38,38,var(--tw-text-opacity))}.uppercase{text-transform:uppercase}.underline{text-decoration:underline}.tracking-wider{letter-spacing:.05em}.tracking-widest{letter-spacing:.1em}.whitespace-nowrap{white-space:nowrap}.w-1{width:.25rem}.w-2{width:.5rem}.w-4{width:1rem}.w-5{width:1.25rem}.w-6{width:1.5rem}.w-10{width:2.5rem}.w-14{width:3.5rem}.w-16{width:4rem}.w-36{width:9rem}.w-64{width:16rem}.w-full{width:100%}.gap-12{gap:3rem}.grid-cols-1{grid-template-columns:repeat(1,minmax(0,1fr))}.transform{--tw-translate-x:0;--tw-translate-y:0;--tw-rotate:0;--tw-skew-x:0;--tw-skew-y:0;--tw-scale-x:1;--tw-scale-y:1;transform:translateX(var(--tw-translate-x)) translateY(var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) 
scaleY(var(--tw-scale-y))}.hover\:scale-105:hover{--tw-scale-x:1.05;--tw-scale-y:1.05}.hover\:-translate-y-1:hover{--tw-translate-y:-0.25rem}.transition{transition-property:background-color,border-color,color,fill,stroke,opacity,box-shadow,transform,filter,-webkit-backdrop-filter;transition-property:background-color,border-color,color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter;transition-property:background-color,border-color,color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter,-webkit-backdrop-filter;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.ease-in-out{transition-timing-function:cubic-bezier(.4,0,.2,1)}.duration-200{transition-duration:.2s}.duration-500{transition-duration:.5s}@-webkit-keyframes spin{to{transform:rotate(1turn)}}@keyframes spin{to{transform:rotate(1turn)}}@-webkit-keyframes ping{75%,to{transform:scale(2);opacity:0}}@keyframes ping{75%,to{transform:scale(2);opacity:0}}@-webkit-keyframes pulse{50%{opacity:.5}}@keyframes pulse{50%{opacity:.5}}@-webkit-keyframes bounce{0%,to{transform:translateY(-25%);-webkit-animation-timing-function:cubic-bezier(.8,0,1,1);animation-timing-function:cubic-bezier(.8,0,1,1)}50%{transform:none;-webkit-animation-timing-function:cubic-bezier(0,0,.2,1);animation-timing-function:cubic-bezier(0,0,.2,1)}}@keyframes bounce{0%,to{transform:translateY(-25%);-webkit-animation-timing-function:cubic-bezier(.8,0,1,1);animation-timing-function:cubic-bezier(.8,0,1,1)}50%{transform:none;-webkit-animation-timing-function:cubic-bezier(0,0,.2,1);animation-timing-function:cubic-bezier(0,0,.2,1)}}.filter{--tw-blur:var(--tw-empty,/*!*/ /*!*/);--tw-brightness:var(--tw-empty,/*!*/ /*!*/);--tw-contrast:var(--tw-empty,/*!*/ /*!*/);--tw-grayscale:var(--tw-empty,/*!*/ /*!*/);--tw-hue-rotate:var(--tw-empty,/*!*/ /*!*/);--tw-invert:var(--tw-empty,/*!*/ /*!*/);--tw-saturate:var(--tw-empty,/*!*/ /*!*/);--tw-sepia:var(--tw-empty,/*!*/ /*!*/);--tw-drop-shadow:var(--tw-empty,/*!*/ 
/*!*/);filter:var(--tw-blur) var(--tw-brightness) var(--tw-contrast) var(--tw-grayscale) var(--tw-hue-rotate) var(--tw-invert) var(--tw-saturate) var(--tw-sepia) var(--tw-drop-shadow)}.grayscale{--tw-grayscale:grayscale(100%)}@media (min-width:640px){.sm\:prose{color:#374151;max-width:65ch}.sm\:prose [class~=lead]{color:#4b5563;font-size:1.25em;line-height:1.6;margin-top:1.2em;margin-bottom:1.2em}.sm\:prose a{color:#dc2626;text-decoration:underline;font-weight:500}.sm\:prose a:hover{color:#ef4444}.sm\:prose strong{color:#111827;font-weight:600}.sm\:prose ol[type=A]{--list-counter-style:upper-alpha}.sm\:prose ol[type=a]{--list-counter-style:lower-alpha}.sm\:prose ol[type=I]{--list-counter-style:upper-roman}.sm\:prose ol[type=i]{--list-counter-style:lower-roman}.sm\:prose ol[type="1"]{--list-counter-style:decimal}.sm\:prose ol>li{position:relative;padding-left:1.75em}.sm\:prose ol>li:before{content:counter(list-item,var(--list-counter-style,decimal)) ".";position:absolute;font-weight:400;color:#6b7280;left:0}.sm\:prose ul>li{position:relative;padding-left:1.75em}.sm\:prose ul>li:before{content:"";position:absolute;background-color:#d1d5db;border-radius:50%;width:.375em;height:.375em;top:.6875em;left:.25em}.sm\:prose hr{border-color:#e5e7eb;border-top-width:1px;margin-top:3em;margin-bottom:3em}.sm\:prose blockquote{font-weight:500;font-style:italic;color:#111827;border-left-width:.25rem;border-left-color:#e5e7eb;quotes:"\201C""\201D""\2018""\2019";margin-top:1.6em;margin-bottom:1.6em;padding-left:1em}.sm\:prose blockquote p:first-of-type:before{content:open-quote}.sm\:prose blockquote p:last-of-type:after{content:close-quote}.sm\:prose h1{color:#111827;font-weight:800;font-size:2.25em;margin-top:0;margin-bottom:.8888889em;line-height:1.1111111}.sm\:prose h2{color:#111827;font-weight:700;font-size:1.5em;margin-top:2em;margin-bottom:1em;line-height:1.3333333}.sm\:prose h3{font-size:1.25em;margin-top:1.6em;margin-bottom:.6em;line-height:1.6}.sm\:prose h3,.sm\:prose 
h4{color:#111827;font-weight:600}.sm\:prose h4{margin-top:1.5em;margin-bottom:.5em;line-height:1.5}.sm\:prose figure figcaption{color:#6b7280;font-size:.875em;line-height:1.4285714;margin-top:.8571429em}.sm\:prose code{color:#111827;font-weight:600;font-size:.875em}.sm\:prose code:after,.sm\:prose code:before{content:"`"}.sm\:prose a code{color:#111827}.sm\:prose pre{color:#e5e7eb;background-color:#1f2937;overflow-x:auto;font-size:.875em;line-height:1.7142857;margin-top:1.7142857em;margin-bottom:1.7142857em;border-radius:.375rem;padding:.8571429em 1.1428571em}.sm\:prose pre code{background-color:transparent;border-width:0;border-radius:0;padding:0;font-weight:400;color:inherit;font-size:inherit;font-family:inherit;line-height:inherit}.sm\:prose pre code:after,.sm\:prose pre code:before{content:none}.sm\:prose table{width:100%;table-layout:auto;text-align:left;margin-top:2em;margin-bottom:2em;font-size:.875em;line-height:1.7142857}.sm\:prose thead{color:#111827;font-weight:600;border-bottom-width:1px;border-bottom-color:#d1d5db}.sm\:prose thead th{vertical-align:bottom;padding-right:.5714286em;padding-bottom:.5714286em;padding-left:.5714286em}.sm\:prose tbody tr{border-bottom-width:1px;border-bottom-color:#e5e7eb}.sm\:prose tbody tr:last-child{border-bottom-width:0}.sm\:prose tbody td{vertical-align:top;padding:.5714286em}.sm\:prose{font-size:1rem;line-height:1.75}.sm\:prose p{margin-top:1.25em;margin-bottom:1.25em}.sm\:prose figure,.sm\:prose img,.sm\:prose video{margin-top:2em;margin-bottom:2em}.sm\:prose figure>*{margin-top:0;margin-bottom:0}.sm\:prose h2 code{font-size:.875em}.sm\:prose h3 code{font-size:.9em}.sm\:prose ol,.sm\:prose ul{margin-top:1.25em;margin-bottom:1.25em}.sm\:prose li{margin-top:.5em;margin-bottom:.5em}.sm\:prose>ul>li 
p{margin-top:.75em;margin-bottom:.75em}.sm\:prose>ul>li>:first-child{margin-top:1.25em}.sm\:prose>ul>li>:last-child{margin-bottom:1.25em}.sm\:prose>ol>li>:first-child{margin-top:1.25em}.sm\:prose>ol>li>:last-child{margin-bottom:1.25em}.sm\:prose ol ol,.sm\:prose ol ul,.sm\:prose ul ol,.sm\:prose ul ul{margin-top:.75em;margin-bottom:.75em}.sm\:prose h2+*,.sm\:prose h3+*,.sm\:prose h4+*,.sm\:prose hr+*{margin-top:0}.sm\:prose thead th:first-child{padding-left:0}.sm\:prose thead th:last-child{padding-right:0}.sm\:prose tbody td:first-child{padding-left:0}.sm\:prose tbody td:last-child{padding-right:0}.sm\:prose>:first-child{margin-top:0}.sm\:prose>:last-child{margin-bottom:0}.sm\:flex{display:flex}.sm\:flex-row{flex-direction:row}.sm\:items-start{align-items:flex-start}.sm\:items-center{align-items:center}.sm\:flex-1{flex:1 1 0%}.sm\:h-auto{height:auto}.sm\:mb-0{margin-bottom:0}.sm\:mr-6{margin-right:1.5rem}.sm\:px-12{padding-left:3rem;padding-right:3rem}.sm\:text-left{text-align:left}.sm\:w-2\/5{width:40%}.sm\:w-3\/5{width:60%}.sm\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}}@media (min-width:1024px){.lg\:w-1\/4{width:25%}.lg\:w-3\/4{width:75%}.lg\:grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}}code[class*=language-],pre[class*=language-]{color:#f8f8f2;background:none;text-shadow:0 1px rgba(0,0,0,.3);font-family:Consolas,Monaco,Andale Mono,Ubuntu Mono,monospace;font-size:1em;text-align:left;white-space:pre;word-spacing:normal;word-break:normal;word-wrap:normal;line-height:1.5;-moz-tab-size:4;-o-tab-size:4;tab-size:4;-webkit-hyphens:none;-ms-hyphens:none;hyphens:none}pre[class*=language-]{padding:1em;margin:.5em 
0;overflow:auto;border-radius:.3em}:not(pre)>code[class*=language-],pre[class*=language-]{background:#272822}:not(pre)>code[class*=language-]{padding:.1em;border-radius:.3em;white-space:normal}.token.cdata,.token.comment,.token.doctype,.token.prolog{color:#8292a2}.token.punctuation{color:#f8f8f2}.token.namespace{opacity:.7}.token.constant,.token.deleted,.token.property,.token.symbol,.token.tag{color:#f92672}.token.boolean,.token.number{color:#ae81ff}.token.attr-name,.token.builtin,.token.char,.token.inserted,.token.selector,.token.string{color:#a6e22e}.language-css .token.string,.style .token.string,.token.entity,.token.operator,.token.url,.token.variable{color:#f8f8f2}.token.atrule,.token.attr-value,.token.class-name,.token.function{color:#e6db74}.token.keyword{color:#66d9ef}.token.important,.token.regex{color:#fd971f}.token.bold,.token.important{font-weight:700}.token.italic{font-style:italic}.token.entity{cursor:help}</style><meta name="generator" content="Gatsby 3.4.0"/><title data-react-helmet="true">Interactive Machine Learning Experiments | Trekhleb</title><meta data-react-helmet="true" name="description" content="Recognize digits and sketches. Detect objects. Classify images. Write a Shakespeare poem. All with TensorFlow 2 models demo."/><meta data-react-helmet="true" name="image" content="https://trekhleb.dev/static/3c14e781aa2f97061b54d8177f6802b5/eb0de/01-cover.png"/><meta data-react-helmet="true" property="og:title" content="Interactive Machine Learning Experiments | Trekhleb"/><meta data-react-helmet="true" property="og:description" content="Recognize digits and sketches. Detect objects. Classify images. Write a Shakespeare poem. 
All with TensorFlow 2 models demo."/><meta data-react-helmet="true" property="og:url" content="https://trekhleb.dev/blog/2020/machine-learning-experiments/"/><meta data-react-helmet="true" property="og:image" content="https://trekhleb.dev/static/3c14e781aa2f97061b54d8177f6802b5/eb0de/01-cover.png"/><meta data-react-helmet="true" property="og:type" content="article"/><meta data-react-helmet="true" name="twitter:card" content="summary_large_image"/><meta data-react-helmet="true" name="twitter:creator" content="@Trekhleb"/><meta data-react-helmet="true" name="twitter:title" content="Interactive Machine Learning Experiments | Trekhleb"/><meta data-react-helmet="true" name="twitter:description" content="Recognize digits and sketches. Detect objects. Classify images. Write a Shakespeare poem. All with TensorFlow 2 models demo."/><meta data-react-helmet="true" name="twitter:image" content="https://trekhleb.dev/static/3c14e781aa2f97061b54d8177f6802b5/eb0de/01-cover.png"/><meta data-react-helmet="true" name="twitter:url" content="https://trekhleb.dev/blog/2020/machine-learning-experiments/"/><link rel="preconnect dns-prefetch" href="https://www.google-analytics.com"/><link rel="alternate" type="application/rss+xml" title="Trekhleb.dev RSS Feed" href="/rss.xml"/><link as="script" rel="preload" href="/webpack-runtime-6451c7fe678794e00b4b.js"/><link as="script" rel="preload" href="/framework-d63adeb7e1b44b7b8aa5.js"/><link as="script" rel="preload" href="/app-2e0826ec06cafce3bdee.js"/><link as="script" rel="preload" href="/commons-213c962999d4e181c8a0.js"/><link as="script" rel="preload" href="/component---src-templates-post-tsx-c4045391b1a7c095d609.js"/><link as="fetch" rel="preload" href="/page-data/blog/2020/machine-learning-experiments/page-data.json" crossorigin="anonymous"/><link as="fetch" rel="preload" href="/page-data/app-data.json" crossorigin="anonymous"/></head><body><div id="___gatsby"><div style="outline:none" tabindex="-1" id="gatsby-focus-wrapper"><main 
class="flex flex-col items-center"><div class="max-w-screen-xl self-stretch m-auto w-full"><header class="flex flex-row items-center px-6 sm:px-12 py-6"><div class="mr-6"><div><a class="transition duration-200 ease-in-out flex flex-row items-center hover:text-red-600 font-extrabold text-sm tracking-widest uppercase" href="/">Trekhleb</a></div></div><nav><ul class="flex flex-row"><li class="ml-5"><a class="transition duration-200 ease-in-out flex flex-row items-center hover:text-red-600 uppercase text-xs" href="/">About</a></li><li class="ml-5"><a class="transition duration-200 ease-in-out flex flex-row items-center hover:text-red-600 uppercase text-xs" href="/projects/">Projects</a></li><li class="ml-5"><a class="transition duration-200 ease-in-out flex flex-row items-center hover:text-red-600 uppercase text-xs" href="/blog/">Blog</a></li></ul></nav></header><article class="px-6 sm:px-12 py-6"><div class="flex flex-col items-center"><article class="w-full prose prose-sm sm:prose overflow-hidden prose-red" style="max-width:860px"><h1 class="text-3xl mb-6 uppercase font-extrabold ">Interactive Machine Learning Experiments</h1><div class="flex flex-row items-center "><div class="flex flex-row items-center mr-6"><svg stroke="currentColor" fill="none" stroke-width="2" viewBox="0 0 24 24" stroke-linecap="round" stroke-linejoin="round" class="mr-1" height="1em" width="1em" xmlns="http://www.w3.org/2000/svg"><rect x="3" y="4" width="18" height="18" rx="2" ry="2"></rect><line x1="16" y1="2" x2="16" y2="6"></line><line x1="8" y1="2" x2="8" y2="6"></line><line x1="3" y1="10" x2="21" y2="10"></line></svg>05 May, 2020</div><div class="flex flex-row items-center "><svg stroke="currentColor" fill="none" stroke-width="2" viewBox="0 0 24 24" stroke-linecap="round" stroke-linejoin="round" class="mr-1" height="1em" width="1em" xmlns="http://www.w3.org/2000/svg"><circle cx="12" cy="12" r="10"></circle><polyline points="12 6 12 12 16 14"></polyline></svg>6<!-- --> min to 
read</div></div><h2 id="tldr" style="position:relative">TL;DR<a href="#tldr" aria-label="tldr permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h2><p>Hey readers!</p><p>I've open-sourced new <a href="https://github.com/trekhleb/machine-learning-experiments">🤖 Interactive Machine Learning Experiments</a> project on GitHub. Each experiment consists of 🏋️ <em>Jupyter/Colab notebook</em> (to see how a model was trained) and 🎨 <em>demo page</em> (to see a model in action right in your browser).</p><p>Although the models may be a little dumb (remember, these are just experiments, not a production ready code), they will try to do their best to:</p><ul><li>🖌 Recognize digits or sketches you draw in your browser</li><li>📸 Detect and recognize the objects you'll show to your camera</li><li>🌅 Classify your uploaded image</li><li>📝 Write a Shakespeare poem with you</li><li>✊🖐✌️ Play with you in Rock-Paper-Scissors game</li><li>etc.</li></ul><p>I've trained the models on <em>Python</em> using <em>TensorFlow 2</em> with <em>Keras</em> support and then consumed them for a demo in a browser using <em>React</em> and <em>JavaScript</em> version of <em>Tensorflow</em>.</p><p><span class="gatsby-resp-image-wrapper" style="position:relative;display:block;margin-left:auto;margin-right:auto;max-width:1000px">
<span class="gatsby-resp-image-background-image" style="padding-bottom:50%;position:relative;bottom:0;left:0;background-image:url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABQAAAAKCAIAAAA7N+mxAAAACXBIWXMAAAsTAAALEwEAmpwYAAACJUlEQVQozyXL3W6aUAAA4OMP8ice6KooIpwKVEBORQSkuGRLSHczwcaurbONbU2rxjTpetWXWbJkV8uyV1iW7GY3fagl7Xf/gSAIfN8PXriuy3Ecy7Icx9EMw0PoHXR7HS30vMB1sd1FSG2r6p4qqa0aakngtfm+H0VRHMflcpllWZIkS6VSheOcfR13jEGv17dsx+xoOtJ11Go1EZL19h4IX7iue3Nzs1wuCYIQRVFRFFEUG43623Dw/nAYB+EAY9extbZiGGhPlVFbNVEVBGEYhGHf8zabzXq9zufzYRguFos0TbMsfbzf3q9X69Wds2/sa22lXlfkpqHIpq4ZqggCf+B7XjQcHk+yLB1DCJMkWa1Wo9Ho/Oz04uT4ZDKeZunVbDY/Pz05zuazme8eNGu7Uq0KejZqtCtJNvr188f3b18nk8l0Ov3y8PB5Pr++WoyPkneH/uXZ5PJsOuz3NE2L4xhjDADI5XJA1QWqDrLFh+fnf3///L69u0uSZLvdPj09rW5vp+nH64tPj9vrzfJil2NejyRJNE0DAMCbOsmLJGxQfI2SBBZjB2NsWdYwinDXZkrFJk9EVt2QeZ4mWnKrUqkIglCtViGEYEcg2TJFEwW6mId0EWNsmqZhGLZt22ZnF7KGUjMlRt4hIUkoisLzPACAYRiKokC1XBToAs8UODIHacLr903T1DTNsizbtlmaxE7X6SADNTmSEASBIAgAQOHFf9TSaU5dVh82AAAAAElFTkSuQmCC');background-size:cover;display:block"></span>
<img class="gatsby-resp-image-image" alt="Interactive Machine Learning Experiments" title="Interactive Machine Learning Experiments" src="/static/3e4b832a607b38fe6ebf64a74304925e/00d43/13.png" srcSet="/static/3e4b832a607b38fe6ebf64a74304925e/63868/13.png 250w,/static/3e4b832a607b38fe6ebf64a74304925e/0b533/13.png 500w,/static/3e4b832a607b38fe6ebf64a74304925e/00d43/13.png 1000w,/static/3e4b832a607b38fe6ebf64a74304925e/aa440/13.png 1500w,/static/3e4b832a607b38fe6ebf64a74304925e/d61c2/13.png 1800w" sizes="(max-width: 1000px) 100vw, 1000px" style="width:100%;height:100%;margin:0;vertical-align:middle;position:absolute;top:0;left:0" loading="lazy"/>
</span></p><h2 id="models-performance" style="position:relative">Models performance<a href="#models-performance" aria-label="models performance permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h2><p>⚠️ First, let's set our expectations.️ The repository contains machine learning <strong>experiments</strong> and <strong>not</strong> a production ready, reusable, optimised and fine-tuned code and models. This is rather a sandbox or a playground for learning and trying different machine learning approaches, algorithms and data-sets. Models might not perform well and there is a place for overfitting/underfitting.</p><p>Therefore, sometimes you might see things like this:</p><p><span class="gatsby-resp-image-wrapper" style="position:relative;display:block;margin-left:auto;margin-right:auto;max-width:1000px">
<span class="gatsby-resp-image-background-image" style="padding-bottom:42.4%;position:relative;bottom:0;left:0;background-image:url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABQAAAAICAYAAAD5nd/tAAAACXBIWXMAABYlAAAWJQFJUiTwAAABP0lEQVQoz1VRu46CQBTlA/kSezsLW0NH6x8QWxIh9jZ0Wu4mazZCQCJvdHh4Nue6Q+Qkh2Hm3nvuy2jbFk3TgCiKQqiUwjiO6LpO2Pf9xOfziaqqhPQhHo8H6roWm6EfdrsdNF6vl5BibdOgpkBZSWKKapzPZ+z3e6RpKnf6G/wQpmlitVohDEO5D8Mgwbc8w+V6xfflB1EUyTvBatbrNTzPA7ucBPmTZRls28bxeMRiscD9fhcHxXa/Qgy/Kca0xMAxqHcBTOa6LrbbLU6n09tfKRjMyGxJkiAIAiyXy1lGdcuhihqqbiRAd1SWpbR8OBymcU0zzPMcm80GlmUhjmMx6qWooUfXk91MkPB9H47jyIJmM+QCPsF2WLkIUuSfWpA2vWEN7W+wTG6ZG+Tq2S7vpBZQH2I8tZ0xpI5hy389+luYIqKWHQAAAABJRU5ErkJggg==');background-size:cover;display:block"></span>
<img class="gatsby-resp-image-image" alt="Dumb model" title="Dumb model" src="/static/07973cc81709f3ceb90984bf76ea68b7/00d43/0.png" srcSet="/static/07973cc81709f3ceb90984bf76ea68b7/63868/0.png 250w,/static/07973cc81709f3ceb90984bf76ea68b7/0b533/0.png 500w,/static/07973cc81709f3ceb90984bf76ea68b7/00d43/0.png 1000w,/static/07973cc81709f3ceb90984bf76ea68b7/e72de/0.png 1198w" sizes="(max-width: 1000px) 100vw, 1000px" style="width:100%;height:100%;margin:0;vertical-align:middle;position:absolute;top:0;left:0" loading="lazy"/>
</span></p><p>But be patient, sometimes the model might get smarter 🤓 and give you this:</p><p><span class="gatsby-resp-image-wrapper" style="position:relative;display:block;margin-left:auto;margin-right:auto;max-width:1000px">
<span class="gatsby-resp-image-background-image" style="padding-bottom:42%;position:relative;bottom:0;left:0;background-image:url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABQAAAAICAYAAAD5nd/tAAAACXBIWXMAABYlAAAWJQFJUiTwAAABO0lEQVQoz12Rz6qCYBDFfTIfp4doI71AaxctXRhtW0cLlxFIoCDUpQzsKhkqieWfczlzmdutgYHvG3+eOd+MURQF4jjG4XDA8XiUzPMcbdvi8Xi85fP5RFVVwp/PZ5xOJzkzkyRBXdcwCDEoQJjRdR36vhcB1nkehl7qrPH+GayRNXhhB8uyMB6P4bquAMMwiKuyLJFlGZLLBddrDjVwv9+Fnc/nwjD4zWDXpmlEPYoimKaJ5XIpAAU5kjTNkCTvgrfbDbvdDrPZDGEY/vEGheiGsd1usVqtMBqNsNlsfqEwRvf1jT4t0A3925PpbDqdYr/fvxxqxzRNZbDr9RqTyUSeJO4/FsNkcCmLxQKO48D3/ZdDBTzPEyHbtkVMAd0uU8/6oiAIJHXmshT9SSHdMueqAv+FP50ylKXDH1P+VvF5/ufWAAAAAElFTkSuQmCC');background-size:cover;display:block"></span>
<img class="gatsby-resp-image-image" alt="Smart model" title="Smart model" src="/static/f0c246e87d8461bedaac585f7531b9b0/00d43/1.png" srcSet="/static/f0c246e87d8461bedaac585f7531b9b0/63868/1.png 250w,/static/f0c246e87d8461bedaac585f7531b9b0/0b533/1.png 500w,/static/f0c246e87d8461bedaac585f7531b9b0/00d43/1.png 1000w,/static/f0c246e87d8461bedaac585f7531b9b0/84f4d/1.png 1208w" sizes="(max-width: 1000px) 100vw, 1000px" style="width:100%;height:100%;margin:0;vertical-align:middle;position:absolute;top:0;left:0" loading="lazy"/>
</span></p><h2 id="background" style="position:relative">Background<a href="#background" aria-label="background permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h2><p>I'm a <a href="https://www.linkedin.com/in/trekhleb/">software engineer</a> and for the last several years now I've been doing mostly frontend and backend programming. In my spare time, as a hobby, I decided to dig into machine learning topics to make it less <em>like magic</em> and <em>more like math</em> to myself.</p><ol><li><p>🗓 Since <strong>Python</strong> might be a good choice to start experimenting with Machine Learning I decided to learn its basic syntax first. As a result a <a href="https://github.com/trekhleb/learn-python">🐍 Playground and Cheatsheet for Learning Python</a> project came out. This was just to practice Python and at the same time to have a cheatsheet of basic syntax once I need it (for things like <code class="language-text">dict_via_comprehension = {x: x**2 for x in (2, 4, 6)}</code> etc.).</p></li><li><p>🗓 After learning a bit of Python I wanted to dig into the basic <strong>math</strong> behind Machine Learning. So after passing an awesome <a href="https://www.coursera.org/learn/machine-learning">Machine Learning course by Andrew Ng</a> on Coursera the <a href="https://github.com/trekhleb/homemade-machine-learning">🤖 Homemade Machine Learning</a> project came out. 
This time it was about creating a cheatsheet for basic machine learning math algorithms like linear regression, logistic regression, k-means, multilayer perceptron etc.</p></li><li><p>🗓 The next attempt to play around with basic Machine Learning math was <a href="https://github.com/trekhleb/nano-neuron">🤖 NanoNeuron</a>. It was about 7 simple JavaScript functions that were supposed to give you a feeling of how machines can actually "learn".</p></li><li><p>🗓 After finishing yet another awesome <a href="https://www.coursera.org/specializations/deep-learning">Deep Learning Specialization by Andrew Ng</a> on Coursera I decided to practice a bit more with <strong>multilayer perceptrons</strong>, <strong>convolutional</strong> and <strong>recurrent neural networks</strong> (CNNs and RNNs). This time instead of implementing everything from scratch I decided to start using some machine learning framework. I ended up using <a href="https://www.tensorflow.org/">TensorFlow 2</a> with <a href="https://www.tensorflow.org/guide/keras/overview">Keras</a>. I also didn't want to focus too much on math (letting the framework do it for me) and instead I wanted to come up with something more practical, applicable and something I could try to play with right in my browser. 
As a result new <a href="https://github.com/trekhleb/machine-learning-experiments">🤖 Interactive Machine Learning Experiments</a> came out that I want to describe a bit more here.</p></li></ol><h2 id="tech-stack" style="position:relative">Tech-stack<a href="#tech-stack" aria-label="tech stack permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h2><h3 id="models-training" style="position:relative">Models training<a href="#models-training" aria-label="models training permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h3><ul><li><p>🏋🏻 I used <a href="https://www.tensorflow.org/guide/keras/overview">Keras</a> inside <a href="https://www.tensorflow.org/">TensorFlow 2</a> for modelling and training. Since I had zero experience with machine learning frameworks, I needed to start with something. One of the selling points in favor of TensorFlow was that it has both Python and <a href="https://www.tensorflow.org/js">JavaScript flavor</a> of the library with similar API. 
So eventually I used Python version for training and JavaScript version for demos.</p></li><li><p>🏋🏻 I trained TensorFlow models on Python inside <a href="https://jupyter.org/">Jupyter</a> notebooks locally and sometimes used <a href="https://colab.research.google.com/">Colab</a> to make the training faster on GPU.</p></li><li><p>💻 Most of the models were trained on good old MacBook Pro's CPU (2.9 GHz Dual-Core Intel Core i5).</p></li><li><p>🔢 Of course there is no way you could run away from <a href="https://numpy.org/">NumPy</a> for matrix/tensors operations.</p></li></ul><h3 id="models-demo" style="position:relative">Models demo<a href="#models-demo" aria-label="models demo permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h3><ul><li><p>🏋🏻 I used <a href="https://www.tensorflow.org/js">TensorFlow.js</a> to do predictions with previously trained models.</p></li><li><p>♻️ To convert <em>Keras HDF5</em> models to <em>TensorFlow.js Layers</em> format I used <a href="https://github.com/tensorflow/tfjs/tree/master/tfjs-converter">TensorFlow.js converter</a>. This might be inefficient to transfer the whole model (megabytes of data) to the browser instead of making predictions through HTTP requests, but again, remember that these are just experiments and not production-ready code and architecture. 
I wanted to avoid having a dedicated back-end service to make architecture simpler.</p></li><li><p>👨🏻🎨 The <a href="http://trekhleb.github.io/machine-learning-experiments">Demo application</a> was created on <a href="https://reactjs.org/">React</a> using <a href="https://github.com/facebook/create-react-app">create-react-app</a> starter with a default <a href="https://flow.org/en/">Flow</a> flavour for type checking.</p></li><li><p>💅🏻 For styling, I used <a href="https://material-ui.com/">Material UI</a>. It was, as they say, "to kill two birds" at once and try out a new styling framework (sorry, <a href="https://getbootstrap.com/">Bootstrap</a> 🤷🏻).</p></li></ul><h2 id="experiments" style="position:relative">Experiments<a href="#experiments" aria-label="experiments permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h2><p>So, in short, you may access Demo page and Jupyter notebooks by these links:</p><ul><li>🎨 <a href="http://trekhleb.github.io/machine-learning-experiments">Launch ML experiments demo</a></li><li>🏋️ <a href="https://github.com/trekhleb/machine-learning-experiments">Check ML experiments Jupyter notebooks</a></li></ul><h3 id="experiments-with-multilayer-perceptron-mlp" style="position:relative">Experiments with Multilayer Perceptron (MLP)<a href="#experiments-with-multilayer-perceptron-mlp" aria-label="experiments with multilayer perceptron mlp permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" 
version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h3><blockquote><p>A <a href="https://en.wikipedia.org/wiki/Multilayer_perceptron">multilayer perceptron (MLP)</a> is a class of feedforward artificial neural network (ANN). Multilayer perceptrons are sometimes referred to as "vanilla" neural networks (composed of multiple layers of perceptrons), especially when they have a single hidden layer.</p></blockquote><h4 id="handwritten-digits-recognition" style="position:relative">Handwritten Digits Recognition<a href="#handwritten-digits-recognition" aria-label="handwritten digits recognition permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h4><p>You draw a digit, and the model tries to recognize it.</p><ul><li>🎨 <a href="https://trekhleb.dev/machine-learning-experiments/#/experiments/DigitsRecognitionMLP">Demo</a></li><li>🏋️ <a href="https://nbviewer.jupyter.org/github/trekhleb/machine-learning-experiments/blob/master/experiments/digits_recognition_mlp/digits_recognition_mlp.ipynb">Training in Jupyter</a></li><li>️🏋️ <a 
href="https://colab.research.google.com/github/trekhleb/machine-learning-experiments/blob/master/experiments/digits_recognition_mlp/digits_recognition_mlp.ipynb">Training in Colab</a></li></ul><p><img src="/posts-assets/ddeabb820e5eaf3a6737ea3798805b22/2.gif" alt="Handwritten Digits Recognition"/></p><h4 id="handwritten-sketch-recognition" style="position:relative">Handwritten Sketch Recognition<a href="#handwritten-sketch-recognition" aria-label="handwritten sketch recognition permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h4><p>You draw a sketch, and the model tries to recognize it.</p><ul><li>🎨 <a href="https://trekhleb.dev/machine-learning-experiments/#/experiments/SketchRecognitionMLP">Demo</a></li><li>🏋️ <a href="https://nbviewer.jupyter.org/github/trekhleb/machine-learning-experiments/blob/master/experiments/sketch_recognition_mlp/sketch_recognition_mlp.ipynb">Training in Jupyter</a></li><li>️🏋️ <a href="https://colab.research.google.com/github/trekhleb/machine-learning-experiments/blob/master/experiments/sketch_recognition_mlp/sketch_recognition_mlp.ipynb">Training in Colab</a></li></ul><p><img src="/posts-assets/a1af98f24f9687516f436c3997306916/3.gif" alt="Handwritten Sketch Recognition"/></p><h3 id="experiments-with-convolutional-neural-networks-cnn" style="position:relative">Experiments with Convolutional Neural Networks (CNN)<a href="#experiments-with-convolutional-neural-networks-cnn" aria-label="experiments with convolutional neural networks cnn permalink" 
class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h3><blockquote><p>A <a href="https://en.wikipedia.org/wiki/Convolutional_neural_network">convolutional neural network (CNN, or ConvNet)</a> is a class of deep neural networks, most commonly applied to analyzing visual imagery (photos, videos). They are used for detecting and classifying objects on photos and videos, style transfer, face recognition, pose estimation etc.</p></blockquote><h4 id="handwritten-digits-recognition-cnn" style="position:relative">Handwritten Digits Recognition (CNN)<a href="#handwritten-digits-recognition-cnn" aria-label="handwritten digits recognition cnn permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h4><p>You draw a digit, and the model tries to recognize it. 
This experiment is similar to the one from MLP section, but it uses CNN under the hood.</p><ul><li>🎨 <a href="https://trekhleb.dev/machine-learning-experiments/#/experiments/DigitsRecognitionCNN">Demo</a></li><li>🏋️ <a href="https://nbviewer.jupyter.org/github/trekhleb/machine-learning-experiments/blob/master/experiments/digits_recognition_cnn/digits_recognition_cnn.ipynb">Training in Jupyter</a></li><li>️🏋️ <a href="https://colab.research.google.com/github/trekhleb/machine-learning-experiments/blob/master/experiments/digits_recognition_cnn/digits_recognition_cnn.ipynb">Training in Colab</a></li></ul><p><img src="/posts-assets/ddeabb820e5eaf3a6737ea3798805b22/4.gif" alt="Handwritten Digits Recognition (CNN)"/></p><h4 id="handwritten-sketch-recognition-cnn" style="position:relative">Handwritten Sketch Recognition (CNN)<a href="#handwritten-sketch-recognition-cnn" aria-label="handwritten sketch recognition cnn permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h4><p>You draw a sketch, and the model tries to recognize it. 
This experiment is similar to the one from MLP section, but it uses CNN under the hood.</p><ul><li>🎨 <a href="https://trekhleb.dev/machine-learning-experiments/#/experiments/SketchRecognitionCNN">Demo</a></li><li>🏋️ <a href="https://nbviewer.jupyter.org/github/trekhleb/machine-learning-experiments/blob/master/experiments/sketch_recognition_cnn/sketch_recognition_cnn.ipynb">Training in Jupyter</a></li><li>️🏋️ <a href="https://colab.research.google.com/github/trekhleb/machine-learning-experiments/blob/master/experiments/sketch_recognition_cnn/sketch_recognition_cnn.ipynb">Training in Colab</a></li></ul><p><img src="/posts-assets/a1af98f24f9687516f436c3997306916/5.gif" alt="Handwritten Sketch Recognition (CNN)"/></p><h4 id="rock-paper-scissors-cnn" style="position:relative">Rock Paper Scissors (CNN)<a href="#rock-paper-scissors-cnn" aria-label="rock paper scissors cnn permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h4><p>You play a Rock-Paper-Scissors game with the model. 
This experiment uses CNN that is trained from scratch.</p><ul><li>🎨 <a href="https://trekhleb.dev/machine-learning-experiments/#/experiments/RockPaperScissorsCNN">Demo</a></li><li>🏋️ <a href="https://nbviewer.jupyter.org/github/trekhleb/machine-learning-experiments/blob/master/experiments/rock_paper_scissors_cnn/rock_paper_scissors_cnn.ipynb">Training in Jupyter</a></li><li>️🏋️ <a href="https://colab.research.google.com/github/trekhleb/machine-learning-experiments/blob/master/experiments/rock_paper_scissors_cnn/rock_paper_scissors_cnn.ipynb">Training in Colab</a></li></ul><p><img src="/posts-assets/ba1ce2c409ef1f859f2379aa913b884a/6.gif" alt="Rock Paper Scissors (CNN)"/></p><h4 id="rock-paper-scissors-mobilenetv2" style="position:relative">Rock Paper Scissors (MobilenetV2)<a href="#rock-paper-scissors-mobilenetv2" aria-label="rock paper scissors mobilenetv2 permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h4><p>You play a Rock-Paper-Scissors game with the model. 
This model uses transfer learning and is based on <a href="https://www.tensorflow.org/api_docs/python/tf/keras/applications/MobileNetV2">MobilenetV2</a>.</p><ul><li>🎨 <a href="https://trekhleb.dev/machine-learning-experiments/#/experiments/RockPaperScissorsMobilenetV2">Demo</a></li><li>🏋️ <a href="https://nbviewer.jupyter.org/github/trekhleb/machine-learning-experiments/blob/master/experiments/rock_paper_scissors_mobilenet_v2/rock_paper_scissors_mobilenet_v2.ipynb">Training in Jupyter</a></li><li>️🏋️ <a href="https://colab.research.google.com/github/trekhleb/machine-learning-experiments/blob/master/experiments/rock_paper_scissors_mobilenet_v2/rock_paper_scissors_mobilenet_v2.ipynb">Training in Colab</a></li></ul><p><img src="/posts-assets/62bd44fd2baf2044c85dcf300ca340e2/7.gif" alt="Rock Paper Scissors (MobilenetV2)"/></p><h4 id="objects-detection-mobilenetv2" style="position:relative">Objects Detection (MobileNetV2)<a href="#objects-detection-mobilenetv2" aria-label="objects detection mobilenetv2 permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h4><p>You show to the model your environment through your camera, and it will try to detect and recognize the objects. 
This model uses transfer learning and is based on <a href="https://www.tensorflow.org/api_docs/python/tf/keras/applications/MobileNetV2">MobilenetV2</a>.</p><ul><li>🎨 <a href="https://trekhleb.dev/machine-learning-experiments/#/experiments/ObjectsDetectionSSDLiteMobilenetV2">Demo</a></li><li>🏋️ <a href="https://nbviewer.jupyter.org/github/trekhleb/machine-learning-experiments/blob/master/experiments/objects_detection_ssdlite_mobilenet_v2/objects_detection_ssdlite_mobilenet_v2.ipynb">Training in Jupyter</a></li><li>️🏋️ <a href="https://colab.research.google.com/github/trekhleb/machine-learning-experiments/blob/master/experiments/objects_detection_ssdlite_mobilenet_v2/objects_detection_ssdlite_mobilenet_v2.ipynb">Training in Colab</a></li></ul><p><img src="/posts-assets/85eceefa39aff6781db023fe14c41d3e/8.gif" alt="Objects Detection (MobileNetV2)"/></p><h4 id="image-classification-mobilenetv2" style="position:relative">Image Classification (MobileNetV2)<a href="#image-classification-mobilenetv2" aria-label="image classification mobilenetv2 permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h4><p>You upload a picture, and the model tries to classify it depending on what it "sees" on the picture. 
This model uses transfer learning and is based on <a href="https://www.tensorflow.org/api_docs/python/tf/keras/applications/MobileNetV2">MobilenetV2</a>.</p><ul><li>🎨 <a href="https://trekhleb.dev/machine-learning-experiments/#/experiments/ImageClassificationMobilenetV2">Demo</a></li><li>🏋️ <a href="https://nbviewer.jupyter.org/github/trekhleb/machine-learning-experiments/blob/master/experiments/image_classification_mobilenet_v2/image_classification_mobilenet_v2.ipynb">Training in Jupyter</a></li><li>️🏋️ <a href="https://colab.research.google.com/github/trekhleb/machine-learning-experiments/blob/master/experiments/image_classification_mobilenet_v2/image_classification_mobilenet_v2.ipynb">Training in Colab</a></li></ul><p><img src="/posts-assets/687de96293f15861031ad7fc1b6da1ed/9.gif" alt="Image Classification (MobileNetV2)"/></p><h3 id="experiments-with-recurrent-neural-networks-rnn" style="position:relative">Experiments with Recurrent Neural Networks (RNN)<a href="#experiments-with-recurrent-neural-networks-rnn" aria-label="experiments with recurrent neural networks rnn permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h3><blockquote><p>A <a href="https://en.wikipedia.org/wiki/Recurrent_neural_network">recurrent neural network (RNN)</a> is a class of deep neural networks, most commonly applied to sequence-based data like speech, voice, text or music. 
They are used for machine translation, speech recognition, voice synthesis etc.</p></blockquote><h4 id="numbers-summation" style="position:relative">Numbers Summation<a href="#numbers-summation" aria-label="numbers summation permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h4><p>You type a summation expression (i.e. <code class="language-text">17+38</code>), and the model predicts the result (i.e. <code class="language-text">55</code>). The interesting part here is that the model treats the input as a <em>sequence</em>, meaning it learned that when you type a sequence <code class="language-text">1</code> → <code class="language-text">17</code> → <code class="language-text">17+</code> → <code class="language-text">17+3</code> → <code class="language-text">17+38</code> it "translates" it to another sequence <code class="language-text">55</code>. 
You may think about it as translating a Spanish <code class="language-text">Hola</code> sequence to English <code class="language-text">Hello</code>.</p><ul><li>🎨 <a href="https://trekhleb.dev/machine-learning-experiments/#/experiments/NumbersSummationRNN">Demo</a></li><li>🏋️ <a href="https://nbviewer.jupyter.org/github/trekhleb/machine-learning-experiments/blob/master/experiments/numbers_summation_rnn/numbers_summation_rnn.ipynb">Training in Jupyter</a></li><li>️🏋️ <a href="https://colab.research.google.com/github/trekhleb/machine-learning-experiments/blob/master/experiments/numbers_summation_rnn/numbers_summation_rnn.ipynb">Training in Colab</a></li></ul><p><img src="/posts-assets/e493ba110b3512b3c8367d0c1287905f/10.gif" alt="Numbers Summation"/></p><h4 id="shakespeare-text-generation" style="position:relative">Shakespeare Text Generation<a href="#shakespeare-text-generation" aria-label="shakespeare text generation permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h4><p>You start typing a poem like Shakespeare, and the model will continue it like Shakespeare. 
At least it will try to do so 😀.</p><ul><li>🎨 <a href="https://trekhleb.dev/machine-learning-experiments/#/experiments/TextGenerationShakespeareRNN">Demo</a></li><li>🏋️ <a href="https://nbviewer.jupyter.org/github/trekhleb/machine-learning-experiments/blob/master/experiments/text_generation_shakespeare_rnn/text_generation_shakespeare_rnn.ipynb">Training in Jupyter</a></li><li>️🏋️ <a href="https://colab.research.google.com/github/trekhleb/machine-learning-experiments/blob/master/experiments/text_generation_shakespeare_rnn/text_generation_shakespeare_rnn.ipynb">Training in Colab</a></li></ul><p><img src="/posts-assets/93ebaea4c757d34a8d9ba02e705fcaa9/11.gif" alt="Shakespeare Text Generation"/></p><h4 id="wikipedia-text-generation" style="position:relative">Wikipedia Text Generation<a href="#wikipedia-text-generation" aria-label="wikipedia text generation permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h4><p>You start typing a Wiki article, and the model tries to continue it.</p><ul><li>🎨 <a href="https://trekhleb.dev/machine-learning-experiments/#/experiments/TextGenerationWikipediaRNN">Demo</a></li><li>🏋️ <a href="https://nbviewer.jupyter.org/github/trekhleb/machine-learning-experiments/blob/master/experiments/text_generation_wikipedia_rnn/text_generation_wikipedia_rnn.ipynb">Training in Jupyter</a></li><li>️🏋️ <a 
href="https://colab.research.google.com/github/trekhleb/machine-learning-experiments/blob/master/experiments/text_generation_wikipedia_rnn/text_generation_wikipedia_rnn.ipynb">Training in Colab</a></li></ul><p><img src="/posts-assets/9efc3106b06044a6e4f6ff91e2d5cbbc/12.gif" alt="Wikipedia Text Generation"/></p><h2 id="future-plans" style="position:relative">Future plans<a href="#future-plans" aria-label="future plans permalink" class="gatsby-remark-autolink-header-anchor after"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a></h2><p>As I've mentioned above the main purpose of <a href="https://github.com/trekhleb/machine-learning-experiments">the repository</a> is to be more like a playground for learning rather than for production-ready models. Therefore, the main plan is to <strong>continue learning and experimenting</strong> with deep-learning challenges and approaches. The next interesting challenges to play with might be:</p><ul><li>Emotions detection</li><li>Style transfer</li><li>Language translation</li><li>Generating images (i.e. handwritten numbers)</li><li>etc.</li></ul><p>Another interesting opportunity would be to <strong>tune existing models to make them more performant</strong>. 
I believe it might give a better understanding of how to overcome overfitting and underfitting and what to do with the model if it just gets stuck at a <code class="language-text">60%</code> accuracy level for both training and validation sets and doesn't want to improve anymore 🤔.</p><p>Anyway, I hope you might find some useful insights into model training from <a href="https://github.com/trekhleb/machine-learning-experiments">the repository</a> or at least have some fun playing around with the demos!</p><p>Happy learning! 🤖</p></article></div><div class="flex flex-row justify-center items-center mt-16"><div class="max-w-md"><div class="bg-white rounded-md shadow-md p-8"><h1 class="text-grey-darkest uppercase font-bold text-xl mb-3">Subscribe to the Newsletter</h1><p class="text-sm mb-3">Get my latest posts and project updates by email</p><form action="https://dev.us1.list-manage.com/subscribe/post?u=7714f14ff32085c685da2cfaa&amp;id=53ffa81463" method="post" class="flex flex-col"><input type="text" placeholder="First Name" name="FNAME" class="border py-2 px-3 mb-3 rounded border-gray-300 border-solid appearance-none" required=""/><input type="email" placeholder="Email" name="EMAIL" class="border py-2 px-3 mb-3 rounded border-gray-300 border-solid appearance-none" required=""/><div class="hidden" aria-hidden="true"><input type="text" name="b_7714f14ff32085c685da2cfaa_53ffa81463" tabindex="-1"/></div><input type="submit" value="Subscribe" class="transition duration-200 ease-in-out bg-black text-white py-2 px-3 rounded shadow-sm cursor-pointer hover:bg-gray-800"/></form></div></div></div></article><footer class="px-6 sm:px-12 py-12"><div class="flex flex-col sm:flex-row items-center "><div style="flex:1" class="flex flex-row items-center mb-6 sm:mb-0"><a class="transition duration-200 ease-in-out flex flex-row items-center hover:text-red-600 text-xs mr-5" href="/subscribe"><svg stroke="currentColor" fill="currentColor" stroke-width="0" viewBox="0 0 1024 1024" 
height="20" width="20" xmlns="http://www.w3.org/2000/svg"><path d="M928 160H96c-17.7 0-32 14.3-32 32v640c0 17.7 14.3 32 32 32h832c17.7 0 32-14.3 32-32V192c0-17.7-14.3-32-32-32zm-40 110.8V792H136V270.8l-27.6-21.5 39.3-50.5 42.8 33.3h643.1l42.8-33.3 39.3 50.5-27.7 21.5zM833.6 232L512 482 190.4 232l-42.8-33.3-39.3 50.5 27.6 21.5 341.6 265.6a55.99 55.99 0 0 0 68.7 0L888 270.8l27.6-21.5-39.3-50.5-42.7 33.2z"></path></svg><span class="w-2"></span>Subscribe</a><a href="https://github.com/trekhleb/trekhleb.github.io/discussions" class="transition duration-200 ease-in-out flex flex-row items-center hover:text-red-600 text-xs mr-5"><svg stroke="currentColor" fill="none" stroke-width="2" viewBox="0 0 24 24" stroke-linecap="round" stroke-linejoin="round" height="20" width="20" xmlns="http://www.w3.org/2000/svg"><path d="M9 19c-5 1.5-5-2.5-7-3m14 6v-3.87a3.37 3.37 0 0 0-.94-2.61c3.14-.35 6.44-1.54 6.44-7A5.44 5.44 0 0 0 20 4.77 5.07 5.07 0 0 0 19.91 1S18.73.65 16 2.48a13.38 13.38 0 0 0-7 0C6.27.65 5.09 1 5.09 1A5.07 5.07 0 0 0 5 4.77a5.44 5.44 0 0 0-1.5 3.78c0 5.42 3.3 6.61 6.44 7A3.37 3.37 0 0 0 9 18.13V22"></path></svg><span class="w-2"></span>Feedback</a><a class="transition duration-200 ease-in-out flex flex-row items-center hover:text-red-600 text-xs" href="/rss.xml"><svg stroke="currentColor" fill="none" stroke-width="2" viewBox="0 0 24 24" stroke-linecap="round" stroke-linejoin="round" height="20" width="20" xmlns="http://www.w3.org/2000/svg"><path d="M4 11a9 9 0 0 1 9 9"></path><path d="M4 4a16 16 0 0 1 16 16"></path><circle cx="5" cy="19" r="1"></circle></svg><span class="w-2"></span>RSS</a></div><div style="flex:1" class="flex flex-row items-center justify-center"><ul class="flex flex-row flex-wrap "><li class="flex flex-row items-center last:mr-0 mr-2 ml-2"><a href="https://www.linkedin.com/in/trekhleb/" class="transition duration-200 ease-in-out flex flex-row items-center hover:text-red-600 " title="Oleksii Trekhleb on LinkedIn"><svg stroke="currentColor" 
fill="currentColor" stroke-width="0" viewBox="0 0 448 512" class="w-5 h-5" height="1em" width="1em" xmlns="http://www.w3.org/2000/svg"><path d="M416 32H31.9C14.3 32 0 46.5 0 64.3v383.4C0 465.5 14.3 480 31.9 480H416c17.6 0 32-14.5 32-32.3V64.3c0-17.8-14.4-32.3-32-32.3zM135.4 416H69V202.2h66.5V416zm-33.2-243c-21.3 0-38.5-17.3-38.5-38.5S80.9 96 102.2 96c21.2 0 38.5 17.3 38.5 38.5 0 21.3-17.2 38.5-38.5 38.5zm282.1 243h-66.4V312c0-24.8-.5-56.7-34.5-56.7-34.6 0-39.9 27-39.9 54.9V416h-66.4V202.2h63.7v29.2h.9c8.9-16.8 30.6-34.5 62.9-34.5 67.2 0 79.7 44.3 79.7 101.9V416z"></path></svg></a></li><li class="flex flex-row items-center last:mr-0 mr-2 ml-2"><a href="https://github.com/trekhleb" class="transition duration-200 ease-in-out flex flex-row items-center hover:text-red-600 " title="Oleksii Trekhleb on GitHub"><svg stroke="currentColor" fill="currentColor" stroke-width="0" viewBox="0 0 496 512" class="w-5 h-5" height="1em" width="1em" xmlns="http://www.w3.org/2000/svg"><path d="M165.9 397.4c0 2-2.3 3.6-5.2 3.6-3.3.3-5.6-1.3-5.6-3.6 0-2 2.3-3.6 5.2-3.6 3-.3 5.6 1.3 5.6 3.6zm-31.1-4.5c-.7 2 1.3 4.3 4.3 4.9 2.6 1 5.6 0 6.2-2s-1.3-4.3-4.3-5.2c-2.6-.7-5.5.3-6.2 2.3zm44.2-1.7c-2.9.7-4.9 2.6-4.6 4.9.3 2 2.9 3.3 5.9 2.6 2.9-.7 4.9-2.6 4.6-4.6-.3-1.9-3-3.2-5.9-2.9zM244.8 8C106.1 8 0 113.3 0 252c0 110.9 69.8 205.8 169.5 239.2 12.8 2.3 17.3-5.6 17.3-12.1 0-6.2-.3-40.4-.3-61.4 0 0-70 15-84.7-29.8 0 0-11.4-29.1-27.8-36.6 0 0-22.9-15.7 1.6-15.4 0 0 24.9 2 38.6 25.8 21.9 38.6 58.6 27.5 72.9 20.9 2.3-16 8.8-27.1 16-33.7-55.9-6.2-112.3-14.3-112.3-110.5 0-27.5 7.6-41.3 23.6-58.9-2.6-6.5-11.1-33.3 2.6-67.9 20.9-6.5 69 27 69 27 20-5.6 41.5-8.5 62.8-8.5s42.8 2.9 62.8 8.5c0 0 48.1-33.6 69-27 13.7 34.7 5.2 61.4 2.6 67.9 16 17.7 25.8 31.5 25.8 58.9 0 96.5-58.9 104.2-114.8 110.5 9.2 7.9 17 22.9 17 46.4 0 33.7-.3 75.4-.3 83.6 0 6.5 4.6 14.4 17.3 12.1C428.2 457.8 496 362.9 496 252 496 113.3 383.5 8 244.8 8zM97.2 352.9c-1.3 1-1 3.3.7 5.2 1.6 1.6 3.9 2.3 5.2 1 1.3-1 
1-3.3-.7-5.2-1.6-1.6-3.9-2.3-5.2-1zm-10.8-8.1c-.7 1.3.3 2.9 2.3 3.9 1.6 1 3.6.7 4.3-.7.7-1.3-.3-2.9-2.3-3.9-2-.6-3.6-.3-4.3.7zm32.4 35.6c-1.6 1.3-1 4.3 1.3 6.2 2.3 2.3 5.2 2.6 6.5 1 1.3-1.3.7-4.3-1.3-6.2-2.2-2.3-5.2-2.6-6.5-1zm-11.4-14.7c-1.6 1-1.6 3.6 0 5.9 1.6 2.3 4.3 3.3 5.6 2.3 1.6-1.3 1.6-3.9 0-6.2-1.4-2.3-4-3.3-5.6-2z"></path></svg></a></li><li class="flex flex-row items-center last:mr-0 mr-2 ml-2"><a href="https://twitter.com/Trekhleb" class="transition duration-200 ease-in-out flex flex-row items-center hover:text-red-600 " title="Oleksii Trekhleb on Twitter"><svg stroke="currentColor" fill="currentColor" stroke-width="0" viewBox="0 0 512 512" class="w-5 h-5" height="1em" width="1em" xmlns="http://www.w3.org/2000/svg"><path d="M459.37 151.716c.325 4.548.325 9.097.325 13.645 0 138.72-105.583 298.558-298.558 298.558-59.452 0-114.68-17.219-161.137-47.106 8.447.974 16.568 1.299 25.34 1.299 49.055 0 94.213-16.568 130.274-44.832-46.132-.975-84.792-31.188-98.112-72.772 6.498.974 12.995 1.624 19.818 1.624 9.421 0 18.843-1.3 27.614-3.573-48.081-9.747-84.143-51.98-84.143-102.985v-1.299c13.969 7.797 30.214 12.67 47.431 13.319-28.264-18.843-46.781-51.005-46.781-87.391 0-19.492 5.197-37.36 14.294-52.954 51.655 63.675 129.3 105.258 216.365 109.807-1.624-7.797-2.599-15.918-2.599-24.04 0-57.828 46.782-104.934 104.934-104.934 30.213 0 57.502 12.67 76.67 33.137 23.715-4.548 46.456-13.32 66.599-25.34-7.798 24.366-24.366 44.833-46.132 57.827 21.117-2.273 41.584-8.122 60.426-16.243-14.292 20.791-32.161 39.308-52.628 54.253z"></path></svg></a></li></ul></div><div style="flex:1" class="hidden sm:flex"> </div></div></footer></div></main></div><div id="gatsby-announcer" style="position:absolute;top:0;width:1px;height:1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);white-space:nowrap;border:0" aria-live="assertive" aria-atomic="true"></div></div><script async="" src="https://www.googletagmanager.com/gtag/js?id=G-YJ73BX984Z"></script><script>
if (true) {
  // Google Analytics (gtag.js) bootstrap.
  // Ensure the shared data layer exists before anything is queued onto it.
  window.dataLayer = window.dataLayer || [];

  // Standard gtag shim: every call is recorded as an `arguments` object on
  // the data layer, to be consumed by the asynchronously loaded gtag.js.
  function gtag() {
    if (window.dataLayer) {
      window.dataLayer.push(arguments);
    }
  }

  // Record the script-load timestamp, then configure the GA4 property.
  // Automatic page views are disabled (send_page_view: false); presumably
  // the SPA router reports page views on navigation instead.
  gtag('js', new Date());
  gtag('config', 'G-YJ73BX984Z', {"send_page_view": false});
}
</script><script id="gatsby-script-loader">/*<![CDATA[*/window.pagePath="/blog/2020/machine-learning-experiments/";/*]]>*/</script><script id="gatsby-chunk-mapping">/*<![CDATA[*/window.___chunkMapping={"polyfill":["/polyfill-b7afeec9af34d175ba4b.js"],"app":["/app-2e0826ec06cafce3bdee.js"],"component---src-pages-404-tsx":["/component---src-pages-404-tsx-63f1eb2c9d9eccd00add.js"],"component---src-pages-blog-tsx":["/component---src-pages-blog-tsx-09e5ed745cd76684f8ac.js"],"component---src-pages-index-tsx":["/component---src-pages-index-tsx-700f213869b8e0037911.js"],"component---src-pages-projects-tsx":["/component---src-pages-projects-tsx-781dc12fd2babbe9fac0.js"],"component---src-pages-subscribe-confirm-index-tsx":["/component---src-pages-subscribe-confirm-index-tsx-7d43f1b2228f03a924f8.js"],"component---src-pages-subscribe-index-tsx":["/component---src-pages-subscribe-index-tsx-c02ecb9201e3fc4b2ce6.js"],"component---src-pages-subscribe-thanks-index-tsx":["/component---src-pages-subscribe-thanks-index-tsx-cec855950644a6361532.js"],"component---src-templates-post-tsx":["/component---src-templates-post-tsx-c4045391b1a7c095d609.js"],"component---src-templates-project-tsx":["/component---src-templates-project-tsx-d7e84ca35145045f8249.js"]};/*]]>*/</script><script src="/polyfill-b7afeec9af34d175ba4b.js" nomodule=""></script><script src="/component---src-templates-post-tsx-c4045391b1a7c095d609.js" async=""></script><script src="/commons-213c962999d4e181c8a0.js" async=""></script><script src="/app-2e0826ec06cafce3bdee.js" async=""></script><script src="/framework-d63adeb7e1b44b7b8aa5.js" async=""></script><script src="/webpack-runtime-6451c7fe678794e00b4b.js" async=""></script></body></html>