REVISED: Wednesday, October 8, 2025
1. OCAML SINGLE-LAYER NEURAL NETWORK TRAINING (BACKPROPAGATION)
(*
============================
ocaml C:\AI2025\lesson6.ml
Lesson 6: Single-Layer Neural Network Training (Backpropagation)
============================
Objective:
1. Implement a simple single-layer network with training
2. Use mean squared error loss
3. Update weights and biases with gradient descent
*)
(* -----------------------------
1. Vector and Matrix Utilities
----------------------------- *)
type vector = float list
type matrix = float list list
let vector_add v1 v2 = List.map2 ( +. ) v1 v2
(* Adds two vectors element-wise *)
let vector_sub v1 v2 = List.map2 ( -. ) v1 v2
(* Subtracts two vectors element-wise *)
let vector_map f v = List.map f v
(* Apply a function to each element of a vector *)
let scalar_vector_mul s v = List.map (fun x -> s *. x) v
(* Multiply a vector by a scalar *)
let dot_product v1 v2 = List.fold_left (+.) 0.0 (List.map2 ( *. ) v1 v2)
(* Dot product of two vectors *)
let mat_vec_mul m v = List.map (fun row -> dot_product row v) m
(* Matrix-vector multiplication *)
let transpose m =
  let rec transpose_aux m acc =
    match m with
    | [] | [] :: _ -> List.rev acc
    | _ ->
      let heads = List.map List.hd m in
      let tails = List.map List.tl m in
      transpose_aux tails (heads :: acc)
  in transpose_aux m []
(* Transpose a matrix; the empty-matrix guard avoids the List.hd
   exception the original pattern raised on [] *)
let outer_product v1 v2 =
List.map (fun x -> List.map (fun y -> x *. y) v2) v1
(* Outer product of two vectors *)
let matrix_sub m1 m2 = List.map2 vector_sub m1 m2
(* Subtract two matrices element-wise *)
let matrix_add m1 m2 = List.map2 vector_add m1 m2
(* Add two matrices element-wise *)
let scalar_matrix_mul s m = List.map (scalar_vector_mul s) m
(* Multiply matrix by scalar *)
(* -----------------------------
2. Activation Functions
----------------------------- *)
let relu x = if x > 0. then x else 0.
let relu_derivative x = if x > 0. then 1. else 0.
let sigmoid x = 1. /. (1. +. exp (-.x))
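(* sigmoid is defined above but unused below. If you swap it in for relu
   in the forward pass, backprop needs the matching derivative. A minimal
   sketch (an addition for exposition, using s'(x) = s(x) * (1 - s(x))): *)
let sigmoid_derivative x =
  let s = sigmoid x in
  s *. (1. -. s)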
(* -----------------------------
3. Network Parameters
----------------------------- *)
let weights : matrix = [
  [0.5; -0.2];
  [0.3; 0.8];
  [-0.7; 0.1]
]
(* 3x2 weight matrix: 2 inputs feeding 3 output units *)
let biases : vector = [0.1; -0.1; 0.05]
(* One bias per output unit *)
let learning_rate = 0.01
(* -----------------------------
4. Forward Pass
----------------------------- *)
let forward_pass input weights biases =
  let z = vector_add (mat_vec_mul weights input) biases in
  let activated = vector_map relu z in
  (activated, z)
(* Returns both the activation and the pre-activation (z) *)
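(* Worked example (hand computation added for exposition): for input
   [1.0; 2.0] with the initial parameters above,
     z = [ 0.5*1.0 - 0.2*2.0 + 0.1 ;
           0.3*1.0 + 0.8*2.0 - 0.1 ;
          -0.7*1.0 + 0.1*2.0 + 0.05 ] = [0.2; 1.8; -0.45]
   and relu maps this to [0.2; 1.8; 0.0]. *)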
(* -----------------------------
5. Backpropagation for Single Layer
----------------------------- *)
let backprop input target weights biases =
  (* Forward pass *)
  let (output, z) = forward_pass input weights biases in
  (* Error: derivative of the squared-error loss with respect to the output *)
  let error = vector_sub output target in
  (* Delta: error gated by the ReLU derivative at the pre-activation *)
  let delta = List.map2 ( *. ) error (vector_map relu_derivative z) in
  (* Weight gradient: outer product of delta and input *)
  let weight_grad = outer_product delta input in
  (* Bias gradient is delta itself *)
  let bias_grad = delta in
  (* Gradient-descent step on weights and biases *)
  let weights' = matrix_sub weights (scalar_matrix_mul learning_rate weight_grad) in
  let biases' = vector_sub biases (scalar_vector_mul learning_rate bias_grad) in
  (weights', biases')
(* Returns updated weights and biases *)
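(* Why these gradients (derivation added for exposition): with the
   squared-error loss L = 1/2 * ||a - t||^2, where a = relu(z) and
   z = W x + b, the chain rule gives
     dL/dz = (a - t) .* relu'(z)        -- the delta above,
     dL/dW = delta (outer product) x    -- i.e. outer_product delta input,
     dL/db = delta.
   The 1/2 factor is a convention that cancels the 2 produced by
   differentiating the square; the code's update matches this form. *)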
(* -----------------------------
6. Training Loop
----------------------------- *)
let train inputs targets weights biases epochs =
  let rec loop w b epoch =
    if epoch = 0 then (w, b)
    else
      let (w_new, b_new) =
        List.fold_left2 (fun (w_acc, b_acc) input target ->
          backprop input target w_acc b_acc
        ) (w, b) inputs targets
      in
      loop w_new b_new (epoch - 1)
  in loop weights biases epochs
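(* The objectives mention mean squared error, but the loss itself is never
   computed: backprop only needs its gradient. A minimal sketch of an MSE
   you could print while training (mse is an illustrative addition, not
   part of the original lesson file): *)
let mse output target =
  let diff = vector_sub output target in
  List.fold_left (fun acc d -> acc +. d *. d) 0.0 diff
  /. float_of_int (List.length diff)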
(* -----------------------------
7. Example usage
----------------------------- *)
let inputs = [
  [1.0; 2.0];
  [0.5; -1.0];
  [0.0; 0.0]
]
let targets = [
  [1.0; 0.0; 1.0];
  [0.0; 1.0; 0.0];
  [0.0; 0.0; 0.0]
]
let epochs = 1000
let () =
  let (w_trained, b_trained) = train inputs targets weights biases epochs in
  List.iter2 (fun inp targ ->
    let (out, _) = forward_pass inp w_trained b_trained in
    Printf.printf "Input: [%s] -> Predicted: [%s], Target: [%s]\n"
      (String.concat "; " (List.map string_of_float inp))
      (String.concat "; " (List.map string_of_float out))
      (String.concat "; " (List.map string_of_float targ))
  ) inputs targets
2. CONCLUSION
Running the script from the Windows PowerShell prompt produces the following transcript:
OCaml version: The OCaml toplevel, version 5.3.0
PS C:\Users\User> ocaml C:\AI2025\lesson6.ml
Input: [1.; 2.] -> Predicted: [1.00000000001; 0.; 0.], Target: [1.; 0.; 1.]
Input: [0.5; -1.] -> Predicted: [8.26041977875e-011; 0.; 0.], Target: [0.; 1.; 0.]
Input: [0.; 0.] -> Predicted: [0.; 0.; 2.15856237053e-006], Target: [0.; 0.; 0.]
PS C:\Users\User>
Two predictions never reach their target of 1.0: the third output for the first input and the second output for the second. With these initial weights, the corresponding units end up with non-positive pre-activations on the inputs whose targets are 1.0, so relu outputs 0 and relu_derivative blocks the gradient. This is the well-known "dying ReLU" effect; a different initialization or a leaky ReLU activation is the usual remedy.