This version is still limited in the operations it can perform on tensors. The operations currently supported include:
add
```rust
mod rotta_rs;
use rotta_rs::*;

fn main() {
    let tensor_a = Tensor::new([
        [1.0, 2.0, 3.0],
        [1.0, 2.0, 3.0],
    ]);
    let tensor_b = Tensor::new([
        [1.0, 2.0, 3.0],
        [1.0, 2.0, 3.0],
    ]);

    let result = &tensor_a + &tensor_b;
    // or
    let result = add(&tensor_a, &tensor_b);
}
```
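Both operands here have the same 2×3 shape, and the operation is applied elementwise (as in most tensor libraries). The `+` operator and the `add` function are two spellings of the same operation; the sub, mul, and divided examples below follow the same pattern.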
sub
```rust
mod rotta_rs;
use rotta_rs::*;

fn main() {
    let tensor_a = Tensor::new([
        [1.0, 2.0, 3.0],
        [1.0, 2.0, 3.0],
    ]);
    let tensor_b = Tensor::new([
        [1.0, 2.0, 3.0],
        [1.0, 2.0, 3.0],
    ]);

    let result = &tensor_a - &tensor_b;
    // or
    let result = sub(&tensor_a, &tensor_b);
}
```
mul
```rust
mod rotta_rs;
use rotta_rs::*;

fn main() {
    let tensor_a = Tensor::new([
        [1.0, 2.0, 3.0],
        [1.0, 2.0, 3.0],
    ]);
    let tensor_b = Tensor::new([
        [1.0, 2.0, 3.0],
        [1.0, 2.0, 3.0],
    ]);

    let result = &tensor_a * &tensor_b;
    // or
    let result = mul(&tensor_a, &tensor_b);
}
```
divided
```rust
mod rotta_rs;
use rotta_rs::*;

fn main() {
    let tensor_a = Tensor::new([
        [1.0, 2.0, 3.0],
        [1.0, 2.0, 3.0],
    ]);
    let tensor_b = Tensor::new([
        [1.0, 2.0, 3.0],
        [1.0, 2.0, 3.0],
    ]);

    let result = &tensor_a / &tensor_b;
    // or
    let result = divided(&tensor_a, &tensor_b);
}
```
dot product
```rust
mod rotta_rs;
use rotta_rs::*;

fn main() {
    let tensor_a = Tensor::new([1.0, 2.0, 3.0]);
    let tensor_b = Tensor::new([1.0, 2.0, 3.0]);

    let result = dot(&tensor_a, &tensor_b);
}
```
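Both tensors are 1-D with three elements each, so the dot product of these values is 1·1 + 2·2 + 3·3 = 14.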
matmul
```rust
mod rotta_rs;
use rotta_rs::*;

fn main() {
    let tensor_a = Tensor::new([
        [1.0, 2.0, 3.0],
        [1.0, 2.0, 3.0],
    ]);
    let tensor_b = Tensor::new([
        [1.0, 2.0],
        [1.0, 2.0],
        [1.0, 2.0],
    ]);

    let result = matmul(&tensor_a, &tensor_b);
}
```
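Here `tensor_a` has shape 2×3 and `tensor_b` has shape 3×2, so the inner dimensions match and the matrix product has shape 2×2.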
other operations
exp
sum axis
powi
ln
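The snippets above do not show these last few operations. Below is a minimal sketch of how they might be called, assuming they follow the same free-function style as `add`, `mul`, and `matmul`; the exact names and signatures (`exp`, `powi`, `ln`, `sum_axis` and its axis argument) are assumptions, not confirmed rotta_rs API.

```rust
mod rotta_rs;
use rotta_rs::*;

fn main() {
    let tensor = Tensor::new([
        [1.0, 2.0, 3.0],
        [1.0, 2.0, 3.0],
    ]);

    // hypothetical calls, mirroring the free-function style used above
    let e = exp(&tensor);          // elementwise e^x
    let p = powi(&tensor, 2);      // elementwise integer power
    let l = ln(&tensor);           // elementwise natural logarithm
    let s = sum_axis(&tensor, 0);  // sum along axis 0
}
```

The remaining examples move from individual tensor operations to building and training a small model.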
```rust
mod rotta_rs;
use rotta_rs::*;

fn main() {
    let mut model = Module::init();
    let optimizer = Sgd::init(model.parameters(), 0.00001);
    let loss_fn = SSResidual::init();
    let linear = model.liniar_init(1, 1);
}
```
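This sets up a `Module` container, an `Sgd` optimizer over the module's parameters with a learning rate of 0.00001, an `SSResidual` loss (presumably a sum-of-squared-residuals criterion), and a linear layer with one input and one output.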
```rust
mod rotta_rs;
use rotta_rs::*;

fn main() {
    let mut model = Module::init();
    let optimizer = Sgd::init(model.parameters(), 0.00001);
    let loss_fn = SSResidual::init();

    let linear = model.liniar_init(1, 1);
    let linear_2 = model.liniar_init(1, 1);

    let input = Tensor::new([[1.0], [2.0]]);
    let actual = Tensor::new([[1.0], [4.0]]);

    for epoch in 0..100 {
        // forward pass
        let x = linear.forward(&input);
        let x = relu(&x);
        let output = linear_2.forward(&x);

        // loss
        let loss = loss_fn.forward(&output, &actual);
        println!("epoch:{epoch} | loss => {loss}");

        // backward pass and parameter update
        optimizer.zero_grad();
        loss.backward();
        optimizer.optim();
    }
}
```
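Each epoch runs the input through the two linear layers with a ReLU in between, computes the loss against the target values, clears the previously accumulated gradients with `zero_grad`, backpropagates with `loss.backward()`, and applies the SGD update with `optim`.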