Created
January 20, 2025 11:33
-
-
Save EteimZ/ba49a5ef06346f1f0fcafdfcfdb838b2 to your computer and use it in GitHub Desktop.
ReLU graph animation using manim
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
class ReLU(Scene):
    """Animate the Rectified Linear Unit (ReLU) activation function.

    Renders a title and the piecewise LaTeX definition of ReLU, then
    shrinks the formula out of the way and plots ReLU(x) on a set of axes.
    Run with manim, e.g. ``manim -pql this_file.py ReLU``.
    """

    def construct(self):
        # Axes: x spans [-10, 10]; y is clipped to [0, 6] since ReLU is
        # non-negative and we only plot up to x = 6 (see graph below).
        axes = Axes(
            x_range=[-10, 10, 1],
            y_range=[0, 6, 1],
            x_length=10,
            y_length=4,
            tips=False,
            axis_config={"include_numbers": True},
        )

        def relu(x):
            """Return max(0, x) — the ReLU activation."""
            # Idiomatic one-liner equivalent to: if x < 0: return 0; return x
            return max(0, x)

        # Piecewise LaTeX definition of ReLU.
        relu_formula = MathTex(
            r"\text{ReLU}(x) = \begin{cases} "
            r"0 & \text{if } x < 0, \\ "
            r"x & \text{if } x \geq 0."
            r"\end{cases}"
        )

        # Scene title.
        title = Title("Rectified Linear Unit (ReLU)", font_size=40, include_underline=False)

        # Plot only up to x = 6 so the curve stays inside the y range [0, 6];
        # plotting to x = 10 would run off the top of the axes.
        graph = axes.plot(relu, x_range=[-10, 6, 0.01], color=BLUE)

        # Animation sequence.
        self.play(Write(title))                                  # write title to scene
        self.play(Write(relu_formula), run_time=2)               # write ReLU formula to scene
        self.wait(1)                                             # brief pause
        self.play(relu_formula.animate.scale(0.5))               # shrink the formula
        self.play(relu_formula.animate.move_to([-2., 1., 0.]))   # move formula up and to the left, clear of the curve
        self.play(Create(axes))                                  # draw the axes
        self.play(Create(graph), run_time=3)                     # trace the ReLU curve
        self.wait(3)                                             # hold so the viewer can admire the graph
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment