REVISED: Wednesday, October 8, 2025
1. OCAML ACTIVATION FUNCTIONS & DERIVATIVES
(*
============================
ocaml C:\AI2025\lesson4.ml
Lesson 4: Activation Functions & Derivatives
============================
Objective:
1. Implement key activation functions for neural networks
2. Compute their derivatives
3. Prepare for gradient-based learning
*)
(* -----------------------------
1. ReLU (Rectified Linear Unit)
----------------------------- *)
(* [relu x] is the rectified linear unit: the identity on strictly
   positive inputs, and zero on everything else. *)
let relu x =
  match x > 0. with
  | true -> x
  | false -> 0.
(* [relu_derivative x] is the slope of ReLU at [x]: 1 on the strictly
   positive half-line, 0 elsewhere (the kink at 0 is given slope 0). *)
let relu_derivative x =
  match x > 0. with
  | true -> 1.
  | false -> 0.
(* -----------------------------
2. Sigmoid
----------------------------- *)
(* [sigmoid x] is the logistic function 1 / (1 + e^(-x)), which maps
   the reals onto the open interval (0, 1). *)
let sigmoid x =
  let z = exp (-. x) in
  1. /. (1. +. z)
(* [sigmoid_derivative x] is s * (1 - s) where s is the logistic value
   at [x].  The logistic formula is inlined so the definition stands on
   its own; it matches [sigmoid] above. *)
let sigmoid_derivative x =
  let s = 1. /. (1. +. exp (-. x)) in
  s *. (1. -. s)
(* -----------------------------
3. Tanh
----------------------------- *)
(* [tanh_activation x] is the hyperbolic tangent of [x], delegating to
   the standard library's [Stdlib.tanh]; its range is (-1, 1). *)
let tanh_activation x = Stdlib.tanh x
(* [tanh_derivative x] is 1 - tanh(x)^2, the slope of tanh at [x].
   The square is computed by multiplication rather than [( ** )]:
   squaring via plain [*.] avoids a [pow] library call and is the
   idiomatic OCaml form for small integer powers. *)
let tanh_derivative x =
  let t = tanh x in
  1. -. t *. t
(* -----------------------------
4. Vectorized application
----------------------------- *)
(* A vector is modelled as a plain OCaml list of floats. *)
type vector = float list

(* [vector_map f v] applies [f] to every element of [v], preserving
   order.  Written as [List.rev_map] followed by [List.rev] so the
   whole pipeline is tail-recursive on arbitrarily long vectors. *)
let vector_map f v = List.rev (List.rev_map f v)
(* -----------------------------
5. Example usage
----------------------------- *)
(* Sample inputs covering the negative, zero, and positive regions. *)
let inputs = [-2.; -1.; 0.; 1.; 2.]

(* ReLU activations and their slopes over the sample inputs. *)
let relu_outputs = List.map relu inputs
let relu_derivs = List.map relu_derivative inputs
(* Print the ReLU results as semicolon-separated lists. *)
let () =
  let render v = String.concat "; " (List.map string_of_float v) in
  Printf.printf "ReLU outputs: [%s]\n" (render relu_outputs);
  Printf.printf "ReLU derivatives: [%s]\n" (render relu_derivs)
(* Sigmoid activations and their slopes over the same sample inputs. *)
let sigmoid_outputs = List.map sigmoid inputs
let sigmoid_derivs = List.map sigmoid_derivative inputs
(* Print the sigmoid results as semicolon-separated lists. *)
let () =
  let render v = String.concat "; " (List.map string_of_float v) in
  Printf.printf "Sigmoid outputs: [%s]\n" (render sigmoid_outputs);
  Printf.printf "Sigmoid derivatives: [%s]\n" (render sigmoid_derivs)
(* Tanh activations and their slopes over the same sample inputs. *)
let tanh_outputs = List.map tanh_activation inputs
let tanh_derivs = List.map tanh_derivative inputs
(* Print the tanh results as semicolon-separated lists. *)
let () =
  let render v = String.concat "; " (List.map string_of_float v) in
  Printf.printf "Tanh outputs: [%s]\n" (render tanh_outputs);
  Printf.printf "Tanh derivatives: [%s]\n" (render tanh_derivs)
2. CONCLUSION
Windows PowerShell
Copyright (C) Microsoft Corporation. All rights reserved.
Install the latest PowerShell for new features and improvements! https://aka.ms/PSWindows
OCaml version: The OCaml toplevel, version 5.3.0
Coq-LSP version: 0.2.3
Loading personal and system profiles took 4456ms.
PS C:\Users\User> ocaml C:\AI2025\lesson4.ml
ReLU outputs: [0.; 0.; 0.; 1.; 2.]
ReLU derivatives: [0.; 0.; 0.; 1.; 1.]
Sigmoid outputs: [0.119202922022; 0.26894142137; 0.5; 0.73105857863; 0.880797077978]
Sigmoid derivatives: [0.104993585404; 0.196611933241; 0.25; 0.196611933241; 0.104993585404]
Tanh outputs: [-0.964027580076; -0.761594155956; 0.; 0.761594155956; 0.964027580076]
Tanh derivatives: [0.0706508248532; 0.419974341614; 1.; 0.419974341614; 0.0706508248532]
PS C:\Users\User>
3. REFERENCES
Bird, R. (2015). Thinking Functionally with Haskell. Cambridge, England: Cambridge University Press.
Davie, A. (1992). Introduction to Functional Programming Systems Using Haskell. Cambridge, England: Cambridge University Press.
Goerzen, J. & O'Sullivan, B. & Stewart, D. (2008). Real World Haskell. Sebastopol, CA: O'Reilly Media, Inc.
Hutton, G. (2007). Programming in Haskell. New York: Cambridge University Press.
Lipovača, M. (2011). Learn You a Haskell for Great Good!: A Beginner's Guide. San Francisco, CA: No Starch Press, Inc.
Thompson, S. (2011). Haskell: The Craft of Functional Programming (3rd ed.). Edinburgh Gate, Harlow, England: Pearson Education Limited.