trying to calculate the loss gradient when varying parameters

This commit is contained in:
Helmut Merz 2025-05-26 16:11:54 +02:00
parent 19ca8adbfc
commit 8025938b81

View file

@ -5,7 +5,7 @@
(:local-nicknames (:shape :scopes/shape) (:local-nicknames (:shape :scopes/shape)
(:util :scopes/util)) (:util :scopes/util))
(:export #:rapply #:rreduce #:radd #:rmul #:rsub #:rdiv (:export #:rapply #:rreduce #:radd #:rmul #:rsub #:rdiv
#:default-deviation #:l2-loss #:trial #:default-deviation #:l2-loss #:trial #:gradient
#:line #:line
#:*obj* #:*trials* #:try)) #:*obj* #:*trials* #:try))
@ -66,6 +66,16 @@
(defmethod print-object ((tr trial) stream) (defmethod print-object ((tr trial) stream)
(shape:print-slots tr stream 'theta 'loss)) (shape:print-slots tr stream 'theta 'loss))
;;; Return a closure that estimates how the loss changes when the
;;; parameter vector THETA is perturbed by VARY.
;;; TARGET is a parameterized target function; DATASET is the data the
;;; L2 loss is evaluated against.  The returned closure maps THETA to
;;; loss(theta) - loss(vary(theta)).
;;; NOTE(review): this is a raw loss difference, not divided by the
;;; step size, and it perturbs every component at once -- presumably a
;;; first experiment toward a real gradient; confirm before relying on it.
(defun gradient (target dataset)
  (let ((loss-fn (funcall (l2-loss target) dataset)))
    (lambda (theta)
      (- (funcall loss-fn theta)
         (funcall loss-fn (vary theta))))))
;;; Perturb every component of the parameter list THETA by subtracting
;;; STEP (default 0.01, the original hard-coded value).  Returns a new
;;; list; THETA itself is not modified.
(defun vary (theta &optional (step 0.01))
  (mapcar (lambda (x) (- x step)) theta))
;;;; parameterized target functions ;;;; parameterized target functions
(defun line (x) (defun line (x)