Skip to content

Instantly share code, notes, and snippets.

View arif9799's full-sized avatar

Arif Waghbakriwala arif9799

View GitHub Profile
@arif9799
arif9799 / Figure11.py
Created August 9, 2023 21:33
Attention is not enough! Multihead Attention in Transformers
from manim import *
from manim.utils.unit import Percent, Pixels
from colour import Color
import gensim
from gensim.models import Word2Vec
import numpy as np
import torch as t
@arif9799
arif9799 / Figure10.py
Created August 8, 2023 02:19
Attention is not enough! Self-Attention via Scaled Dot Product Attention
from manim import *
from manim.utils.unit import Percent, Pixels
from colour import Color
import gensim
from gensim.models import Word2Vec
import numpy as np
import torch as t
@arif9799
arif9799 / Figure9.py
Created August 8, 2023 02:18
Attention is not enough! Scaled Dot Product Attention Animation
from manim import *
from manim.utils.unit import Percent, Pixels
from colour import Color
config.frame_width = 146
config.frame_height = 85.5
HIGH = True
if HIGH:
    print("Its high")
@arif9799
arif9799 / Figure8.py
Last active August 8, 2023 02:17
Attention is not enough! Recurrent Neural Nets with Luong's Dot Product Attention Worked-out Example
from manim import *
from manim.utils.unit import Percent, Pixels
from colour import Color
import gensim
from gensim.models import Word2Vec
import numpy as np
import torch as t
config.frame_width = 146
@arif9799
arif9799 / Figure7.py
Last active August 8, 2023 02:17
Attention is not enough! Recurrent Neural Nets Worked-out Example
from manim import *
from manim.utils.unit import Percent, Pixels
from colour import Color
import gensim
from gensim.models import Word2Vec
import numpy as np
import torch as t
config.frame_width = 146
@arif9799
arif9799 / Figure6.py
Last active August 10, 2023 16:23
Attention is not enough! Positional Encoding Formula
from manim import *
from manim.utils.unit import Percent, Pixels
from colour import Color
import gensim
from gensim.models import Word2Vec
import numpy as np
import torch as t
@arif9799
arif9799 / Figure5.py
Last active August 8, 2023 02:11
Attention is not enough! Word Embeddings plus Positional Encoding
from manim import *
from manim.utils.unit import Percent, Pixels
from colour import Color
import gensim
from gensim.models import Word2Vec
import numpy as np
import torch as t
@arif9799
arif9799 / Figure4.py
Last active August 8, 2023 02:10
Attention is not enough! RNNs to Transformers Transition Demo Animation
from manim import *
from manim.utils.unit import Percent, Pixels
from colour import Color
import gensim
from gensim.models import Word2Vec
import numpy as np
config.frame_width = 48
config.frame_height = 27
@arif9799
arif9799 / Figure3.py
Last active August 8, 2023 02:10
Attention is not enough! Mathematical Representation of Words -embedding vectors
from manim import *
from manim.utils.unit import Percent, Pixels
from colour import Color
import gensim
from gensim.models import Word2Vec
import numpy as np
import Util_Functions
from Util_Functions import *
@arif9799
arif9799 / Figure2.py
Last active August 8, 2023 02:09
Attention is not enough! Feed Forward Neural Nets & Recurrent Neural Nets Architecture Animation
from manim import *
from manim.utils.unit import Percent, Pixels
from colour import Color
config.frame_width = 24
config.frame_height = 13.5
HIGH = True
if HIGH:
    print("Its high")