99 changes: 0 additions & 99 deletions catgrad-llm/src/helpers/conv.rs
@@ -117,102 +117,3 @@ fn depthwise_conv1d_param_padded(

conv_out
}

#[cfg(test)]
mod tests {
use super::*;
use catgrad::abstract_interpreter::Value as TypeValue;
use catgrad::category::core::Shape;
use catgrad::interpreter::backend::Backend;
use catgrad::interpreter::backend::ndarray::NdArrayBackend;
use catgrad::interpreter::{
Interpreter, Parameters, TaggedTensor, TaggedTensorTuple, Value, tensor,
};
use catgrad::stdlib::{Module, stdlib};
use catgrad::typecheck::value_types::*;

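/// Fixture: a [1, 2, 4] f32 input convolved depthwise with one width-3
/// kernel per channel; the output keeps the [1, 2, 4] input shape.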
struct DepthwiseConv1dTest;

impl Module<2, 1> for DepthwiseConv1dTest {
fn ty(&self) -> ([Type; 2], [Type; 1]) {
let t_x = TypeValue::Tensor(TypeExpr::NdArrayType(NdArrayType {
dtype: DtypeExpr::Constant(Dtype::F32),
shape: ShapeExpr::Shape(vec![
NatExpr::Constant(1),
NatExpr::Constant(2),
NatExpr::Constant(4),
]),
}));
let t_w = TypeValue::Tensor(TypeExpr::NdArrayType(NdArrayType {
dtype: DtypeExpr::Constant(Dtype::F32),
shape: ShapeExpr::Shape(vec![
NatExpr::Constant(2),
NatExpr::Constant(1),
NatExpr::Constant(3),
]),
}));
let t_y = TypeValue::Tensor(TypeExpr::NdArrayType(NdArrayType {
dtype: DtypeExpr::Constant(Dtype::F32),
shape: ShapeExpr::Shape(vec![
NatExpr::Constant(1),
NatExpr::Constant(2),
NatExpr::Constant(4),
]),
}));
([t_x, t_w], [t_y])
}

fn path(&self) -> Path {
path(vec!["test", "depthwise_conv1d"]).unwrap()
}

fn def(&self, builder: &Builder, [x, w]: [Var; 2]) -> [Var; 1] {
[depthwise_conv1d_no_bias_param(builder, w, 3, 2, x)]
}
}

#[test]
fn test_depthwise_conv1d_no_bias_param_matches_reference_values() {
let typed_term = DepthwiseConv1dTest.term().unwrap();
let backend = NdArrayBackend;
let interpreter = Interpreter::new(backend, stdlib(), Parameters::default());

let x = tensor(
&interpreter.backend,
Shape(vec![1, 2, 4]),
vec![1.0f32, 2.0, 3.0, 4.0, 10.0, 20.0, 30.0, 40.0],
)
.unwrap();
let w = tensor(
&interpreter.backend,
Shape(vec![2, 1, 3]),
vec![1.0f32, 2.0, 3.0, 0.5, -1.0, 2.0],
)
.unwrap();

let mut outputs = interpreter.run(typed_term.term, vec![x, w]).unwrap();
let y = outputs.pop().expect("missing output");

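// Reference values computed by hand, assuming causal (left) padding of
// width 2: channel 0 gives [0,0,1]·[1,2,3]=3, [0,1,2]·[1,2,3]=8,
// [1,2,3]·[1,2,3]=14, [2,3,4]·[1,2,3]=20.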
let expected = tensor(
&interpreter.backend,
Shape(vec![1, 2, 4]),
vec![3.0f32, 8.0, 14.0, 20.0, 20.0, 30.0, 45.0, 60.0],
)
.unwrap();

match (y, expected) {
(
Value::Tensor(TaggedTensor::F32([actual])),
Value::Tensor(TaggedTensor::F32([exp])),
) => {
assert!(
interpreter
.backend
.compare(TaggedTensorTuple::F32([actual, exp])),
"depthwise conv output should match expected reference values"
);
}
_ => panic!("expected f32 tensor outputs"),
}
}
}
3 changes: 3 additions & 0 deletions catgrad-llm/src/helpers/mod.rs
@@ -10,6 +10,9 @@ pub use rope::*;
mod module;
pub use module::*;

#[cfg(test)]
mod tests;

use crate::config::{LLMConfig, RopeScaling};
use catgrad::prelude::ops::*;
use catgrad::prelude::*;