changeset 3:0f2249f75a9e

Write short documentation
author Lewin Bormann <lbo@spheniscida.de>
date Thu, 23 Dec 2021 08:12:19 +0100
parents 3cd197157ea7
children f88dda95d735
files autodiff.py gad.py
diffstat 2 files changed, 36 insertions(+), 2 deletions(-) [+]
line wrap: on
line diff
--- a/autodiff.py	Thu Dec 23 07:56:07 2021 +0100
+++ b/autodiff.py	Thu Dec 23 08:12:19 2021 +0100
@@ -1,7 +1,14 @@
 """
 Copyright (c) 2021 Lewin Bormann
 
-Simple backpropagation, stateful, naive approach.
+Reverse-mode automatic differentiation algorithm.
+
+First, an expression tree is built. When evaluating a gradient, the initial
+seed gradient is propagated backwards through the expression tree. Finally,
+the accumulated chain-rule products are summed at each input variable node
+and read out by the "driver function" (jacobian()).
+
+Simple backpropagation; stateful, naive approach.
 """
 
 import numpy as np
@@ -89,6 +96,12 @@
 def cos(e):
     return UnaryExpression(np.cos, lambda x: -np.sin(x), e)
 
+def sqrt(e):
+    return UnaryExpression(np.sqrt, lambda x: 1/(2*np.sqrt(x)), e)
+
+def log(e):
+    return UnaryExpression(np.log, lambda x: 1/x, e)
+
 class Num(Expression):
     def __init__(self, name=None, value=None):
         self.name = name
@@ -106,6 +119,7 @@
         self.grad += grad
 
 def jacobian(f, at):
+    """Returns function value and Jacobian."""
     j = np.zeros((len(f), len(at)))
     val = np.zeros(len(f))
     for i, ff in enumerate(f):
@@ -157,6 +171,11 @@
         c = c + a*b
     return c, a, b, a*b
 
+@gradify
+def complex_calculation2(*x):
+    y = np.array([x[i]+x[i+1] for i in range(len(x)-1)])
+    z = np.array([sqrt(log(e)) for e in y])
+    return z
 
 before = time.time_ns()
 print(complex_calculation(1,4,5))
@@ -167,3 +186,8 @@
 print(complex_calculation(2,8,10))
 after = time.time_ns()
 print((after-before)/1e9)
+
+before = time.time_ns()
+print(complex_calculation2(*list(range(1, 100, 2)))[1].shape)
+after = time.time_ns()
+print((after-before)/1e9)
--- a/gad.py	Thu Dec 23 07:56:07 2021 +0100
+++ b/gad.py	Thu Dec 23 08:12:19 2021 +0100
@@ -3,6 +3,10 @@
 
 Simple automatic differentiation algorithm using "parallel forward mode".
 
+First, an expression tree is built (and cached, when using @gradify). When evaluating
+a gradient, the expression tree is recursively evaluated, propagating derivatives
+from the bottom up using Jacobian-gradient products.
+
 See the end of this file for examples.
 """
 
@@ -225,6 +229,12 @@
         c = c + a*b
     return c, a, b, a*b
 
+@gradify
+def complex_calculation2(*x):
+    y = np.array([x[i]+x[i+1]**2 for i in range(len(x)-1)])
+    z = np.array([sqrt(log(e)) for e in y])
+    return z
+
 # ...or automatically using @gradify
 # Equivalent to (without @gradify): print(ade.grad([complex_calculation(x,y,z)], [1,4,5]))
 before = time.time_ns()
@@ -233,6 +243,6 @@
 print((after-before)/1e9)
 
 before = time.time_ns()
-print(complex_calculation(2,8,10))
+print(complex_calculation2(*list(range(1, 10, 2)))[1].shape)
 after = time.time_ns()
 print((after-before)/1e9)