@@ -801,4 +801,38 @@ TYPED_TEST(NetTest, TestParamPropagateDown) {
 }
 }

+TYPED_TEST(NetTest, TestFromTo) {
+  typedef typename TypeParam::Dtype Dtype;
+  this->InitTinyNet();
+
+  // Run Forward and Backward, recording the data diff and loss.
+  Blob<Dtype> data;
+  data.ReshapeLike(*this->net_->blob_by_name("data"));
+  this->net_->ForwardPrefilled();
+  this->net_->Backward();
+  data.CopyFrom(*this->net_->blob_by_name("data"), true, true);
+  const Dtype *loss_ptr = this->net_->output_blobs()[0]->cpu_data();
+  Dtype loss = *loss_ptr;
+
+  // Check that combining partial Forwards gives the same loss.
+  for (int i = 1; i < this->net_->layers().size(); ++i) {
+    // Note that we skip layer zero to keep the same data.
+    this->net_->ForwardFromTo(1, 1);
+    if (i < this->net_->layers().size() - 1) {
+      this->net_->ForwardFrom(i + 1);
+    }
+    EXPECT_EQ(loss, *loss_ptr);
+  }
+
+  // Check that combining partial Backwards gives the same data diff.
+  for (int i = 1; i < this->net_->layers().size(); ++i) {
+    this->net_->BackwardTo(i);
+    this->net_->BackwardFrom(i - 1);
+    for (int j = 0; j < data.count(); ++j) {
+      EXPECT_EQ(data.cpu_diff()[j],
+                this->net_->blob_by_name("data")->cpu_diff()[j]);
+    }
+  }
+}
+
 } // namespace caffe
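For context on what the new test exercises: the partial-pass methods (`ForwardFromTo`, `ForwardFrom`, `BackwardTo`, `BackwardFrom`) run only the layers in a given index range, and `TestFromTo` checks that splitting a full pass at a layer boundary reproduces the same loss and the same data diff as one uninterrupted pass. The standalone sketch below illustrates the same split-and-compare idea on a toy stack of deterministic layers; it is not Caffe code, and the `Layer` alias and `forward_from_to` helper are invented for illustration only.

```cpp
// Standalone sketch of the split-forward check performed by TestFromTo.
// The Layer alias and forward_from_to helper are illustrative; they are
// not part of Caffe's Net<Dtype> interface.
#include <cassert>
#include <cstdio>
#include <functional>
#include <vector>

// Each toy "layer" maps its input activation to an output activation.
using Layer = std::function<double(double)>;

// Run layers [start, end] inclusive, threading the activation through.
double forward_from_to(const std::vector<Layer>& layers,
                       int start, int end, double x) {
  for (int i = start; i <= end; ++i) {
    x = layers[i](x);
  }
  return x;
}

int main() {
  // A toy net: three deterministic layers.
  std::vector<Layer> layers = {
      [](double x) { return 2.0 * x; },
      [](double x) { return x + 3.0; },
      [](double x) { return x * x; },
  };
  const double input = 1.5;
  const int last = static_cast<int>(layers.size()) - 1;

  // Full pass, analogous to ForwardPrefilled() in the test.
  const double full_loss = forward_from_to(layers, 0, last, input);

  // Split the pass at every layer boundary; because the layers are
  // deterministic, the result must be identical, which is the property
  // the Caffe test asserts with EXPECT_EQ.
  for (int i = 0; i < last; ++i) {
    const double part = forward_from_to(layers, 0, i, input);
    const double split_loss = forward_from_to(layers, i + 1, last, part);
    assert(split_loss == full_loss);
  }
  std::printf("split forward matches full forward: %f\n", full_loss);
  return 0;
}
```

The real test additionally verifies the backward direction: after splitting Backward into `BackwardTo(i)` followed by `BackwardFrom(i - 1)`, the diff stored in the `data` blob must match the diff recorded from the full backward pass, which the sketch above does not attempt to model.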