Module Net.Vector

Operations on vectors.

include sig ... end
module Var : sig ... end
val var : Algebra.Vector.t Stdlib.ref -> Algebra.Vector.t t
val dup : ?label:string -> int -> Algebra.Vector.t t -> Algebra.Vector.t * (Algebra.Vector.t -> unit)

Make a value usable exactly n times.

val drop : Algebra.Vector.t t -> unit

Should be called on a vector that is not going to be used.
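
A minimal sketch of the linear usage discipline above, assuming this module is opened and that the callback returned by dup is invoked once per declared use (an assumption, not stated by the signatures); all names are illustrative:

    let sketch (x0 : Algebra.Vector.t) =
      (* Turn a mutable parameter into a network value. *)
      let x = var (ref x0) in
      (* Declare that x will be consumed exactly twice. *)
      let v, feed = dup ~label:"x" 2 x in
      (* Use the raw vector v twice, reporting each use through feed
         (assumed semantics of the callback). *)
      feed v;
      feed v;
      (* A value that ends up unused must be dropped explicitly. *)
      drop (var (ref x0))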

val repeat : int -> Algebra.Vector.t t -> (Algebra.Vector.t * (Algebra.Vector.t -> unit)) array

Make n explicit copies of x.
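
A short sketch of repeat under the same assumption about the returned callbacks; the function name is illustrative:

    (* Obtain three usable copies of x together with their callbacks. *)
    let use_three_times (x : Algebra.Vector.t t) : unit =
      repeat 3 x |> Array.iter (fun (v, feed) -> feed v)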

val demux : Algebra.Vector.t array t -> (Algebra.Vector.t * (Algebra.Vector.t -> unit)) array
val drop_pair : (Algebra.Vector.t * Algebra.Vector.t) t -> unit

Drop a pair of vectors.

val to_scalar : Algebra.Vector.t t -> float t
val cadd : float -> Algebra.Vector.t t -> Algebra.Vector.t t

Add a constant.

val cmul : float -> Algebra.Vector.t t -> Algebra.Vector.t t

Multiply by a constant.
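
For example, the two constant operations can be chained (a sketch using only the signatures above):

    (* Scale every entry of x by 0.5, then add 1.0 to it. *)
    let scale_and_shift (x : Algebra.Vector.t t) : Algebra.Vector.t t =
      x |> cmul 0.5 |> cadd 1.0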

Add two vectors.

Hadamard product of two vectors.

val squared_norm : Algebra.Vector.t t -> float t

Squared norm.

val squared_distance_to : Algebra.Vector.t -> Algebra.Vector.t t -> float t

Squared distance to a fixed vector.
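
A sketch of how these two scalar values might be used as loss terms; the names are illustrative:

    (* Regularizer: squared norm of a value. *)
    let reg (x : Algebra.Vector.t t) : float t =
      squared_norm x

    (* Data term: squared distance to a fixed target vector. *)
    let loss (target : Algebra.Vector.t) (x : Algebra.Vector.t t) : float t =
      squared_distance_to target x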

Add a bias vector which can be optimized.

Affine layer.

Sigmoid layer.

Rectified linear unit.

Log.

val dot : Algebra.Vector.t t -> Algebra.Vector.t t -> float t

Dot product of two vectors.

val sum : Algebra.Vector.t t -> float t

Sum of the entries of a vector.

val cross_entropy : Algebra.Vector.t -> Algebra.Vector.t t -> float t

Cross-entropy with respect to a fixed vector.
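
Sketches of the three scalar-valued operations above, with illustrative names only:

    (* Dot product of two independent values. *)
    let similarity (x : Algebra.Vector.t t) (y : Algebra.Vector.t t) : float t =
      dot x y

    (* Sum of the entries of a value. *)
    let total (x : Algebra.Vector.t t) : float t =
      sum x

    (* Cross-entropy of a value against a fixed target vector. *)
    let ce (target : Algebra.Vector.t) (p : Algebra.Vector.t t) : float t =
      cross_entropy target p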

val activation : [< `None | `ReLU | `Sigmoid | `Tanh ] -> Algebra.Vector.t t -> Algebra.Vector.t t
val bias_fun : Algebra.Vector.t Stdlib.ref -> Algebra.Vector.t t -> Algebra.Vector.t t
val activation_fun : [< `None | `ReLU | `Sigmoid | `Tanh ] -> Algebra.Vector.t t -> Algebra.Vector.t t
val neural_network : ?activation:[< `None | `ReLU | `Sigmoid | `Tanh > `Sigmoid ] -> weights:Algebra.Linear.t Stdlib.ref -> ?bias:Algebra.Vector.t Stdlib.ref -> Algebra.Vector.t t -> Algebra.Vector.t t

Neural network layer.
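
A sketch of a dense layer followed by a loss, using only the signatures above; how the parameter references w and b are created and updated is outside this listing and assumed to be handled elsewhere:

    (* One layer with sigmoid activation, compared to a fixed target. *)
    let layer_loss (w : Algebra.Linear.t ref) (b : Algebra.Vector.t ref)
        (target : Algebra.Vector.t) (x : Algebra.Vector.t t) : float t =
      x
      |> neural_network ~activation:`Sigmoid ~weights:w ~bias:b
      |> squared_distance_to target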

module RNN : sig ... end

Recurrent neural network.

module Matrix : sig ... end