Skip to content

Commit

Permalink
[release/8.0-staging] Fix Vector512.CopyTo (#95026)
Browse files Browse the repository at this point in the history
* Fix Vector512.CopyTo

* Also unify Vector

* Handle float vectors too

* Fix legacy vectors

* Simplify the change

* Update Vector_1.cs

* Add tests

* Fix build

---------

Co-authored-by: Michał Petryka <35800402+MichalPetryka@users.noreply.github.com>
  • Loading branch information
github-actions[bot] and MichalPetryka committed Nov 21, 2023
1 parent b126b9a commit f7652ef
Show file tree
Hide file tree
Showing 5 changed files with 66 additions and 4 deletions.
Expand Up @@ -441,8 +441,7 @@ public static void CopyTo<T>(this Vector512<T> vector, T[] destination)
ThrowHelper.ThrowArgumentException_DestinationTooShort();
}

ref byte address = ref Unsafe.As<T, byte>(ref MemoryMarshal.GetArrayDataReference(destination));
Unsafe.WriteUnaligned(ref address, vector);
Unsafe.WriteUnaligned(ref Unsafe.As<T, byte>(ref destination[0]), vector);
}

/// <summary>Copies a <see cref="Vector512{T}" /> to a given array starting at the specified index.</summary>
Expand All @@ -468,8 +467,7 @@ public static void CopyTo<T>(this Vector512<T> vector, T[] destination, int star
ThrowHelper.ThrowArgumentException_DestinationTooShort();
}

ref byte address = ref Unsafe.As<T, byte>(ref MemoryMarshal.GetArrayDataReference(destination));
Unsafe.WriteUnaligned(ref Unsafe.Add(ref address, startIndex), vector);
Unsafe.WriteUnaligned(ref Unsafe.As<T, byte>(ref destination[startIndex]), vector);
}

/// <summary>Copies a <see cref="Vector512{T}" /> to a given span.</summary>
Expand Down
Expand Up @@ -4517,6 +4517,22 @@ public void Vector128SingleEqualsNonCanonicalNaNTest()
}
}

[Fact]
public void Vector128SingleCopyToTest()
{
    // Regression test for Vector{N}.CopyTo: copying into an exactly-sized
    // float array must fill every element of the destination.
    var destination = new float[4];

    Vector128.Create(2.0f).CopyTo(destination);

    float[] expected = { 2.0f, 2.0f, 2.0f, 2.0f };
    Assert.True(destination.AsSpan().SequenceEqual(expected));
}

[Fact]
public void Vector128SingleCopyToOffsetTest()
{
    // Copy with startIndex = 1: element 0 must stay at its default (0.0f)
    // and the vector's four lanes land in slots 1..4.
    var destination = new float[5];

    Vector128.Create(2.0f).CopyTo(destination, 1);

    float[] expected = { 0.0f, 2.0f, 2.0f, 2.0f, 2.0f };
    Assert.True(destination.AsSpan().SequenceEqual(expected));
}

[Fact]
public void IsSupportedByte()
{
    // Exercises the shared generic support-check helper for the byte element type.
    TestIsSupported<byte>();
}

Expand Down
Expand Up @@ -5539,6 +5539,22 @@ public void Vector256SingleEqualsNonCanonicalNaNTest()
}
}

[Fact]
public void Vector256SingleCopyToTest()
{
    // Regression test for Vector{N}.CopyTo: an exactly-sized destination
    // array must receive all eight float lanes.
    var destination = new float[8];

    Vector256.Create(2.0f).CopyTo(destination);

    var expected = new float[8];
    for (int i = 0; i < expected.Length; i++)
    {
        expected[i] = 2.0f;
    }
    Assert.True(destination.AsSpan().SequenceEqual(expected));
}

[Fact]
public void Vector256SingleCopyToOffsetTest()
{
    // Copy with startIndex = 1: slot 0 keeps its default value (0.0f)
    // while the eight lanes fill slots 1..8.
    var destination = new float[9];

    Vector256.Create(2.0f).CopyTo(destination, 1);

    var expected = new float[9];
    for (int i = 1; i < expected.Length; i++)
    {
        expected[i] = 2.0f;
    }
    Assert.True(destination.AsSpan().SequenceEqual(expected));
}

[Fact]
public void IsSupportedByte()
{
    // Exercises the shared generic support-check helper for the byte element type.
    TestIsSupported<byte>();
}

Expand Down
Expand Up @@ -5016,6 +5016,22 @@ public void Vector512SingleEqualsNonCanonicalNaNTest()
}
}

[Fact]
public void Vector512SingleCopyToTest()
{
    // Regression test for Vector512.CopyTo (the bug this commit fixes):
    // all sixteen float lanes must be written into the destination array.
    var destination = new float[16];

    Vector512.Create(2.0f).CopyTo(destination);

    var expected = new float[16];
    for (int i = 0; i < expected.Length; i++)
    {
        expected[i] = 2.0f;
    }
    Assert.True(destination.AsSpan().SequenceEqual(expected));
}

[Fact]
public void Vector512SingleCopyToOffsetTest()
{
    // Copy with startIndex = 1: slot 0 keeps its default value (0.0f)
    // while the sixteen lanes fill slots 1..16.
    var destination = new float[17];

    Vector512.Create(2.0f).CopyTo(destination, 1);

    var expected = new float[17];
    for (int i = 1; i < expected.Length; i++)
    {
        expected[i] = 2.0f;
    }
    Assert.True(destination.AsSpan().SequenceEqual(expected));
}

[Fact]
public void IsSupportedByte()
{
    // Exercises the shared generic support-check helper for the byte element type.
    TestIsSupported<byte>();
}

Expand Down
Expand Up @@ -3937,6 +3937,22 @@ public void Vector64SingleEqualsNonCanonicalNaNTest()
}
}

[Fact]
public void Vector64SingleCopyToTest()
{
    // Regression test for Vector{N}.CopyTo: both float lanes must be
    // written into an exactly-sized destination array.
    var destination = new float[2];

    Vector64.Create(2.0f).CopyTo(destination);

    float[] expected = { 2.0f, 2.0f };
    Assert.True(destination.AsSpan().SequenceEqual(expected));
}

[Fact]
public void Vector64SingleCopyToOffsetTest()
{
    // Copy with startIndex = 1: slot 0 keeps its default value (0.0f)
    // while the two lanes fill slots 1..2.
    var destination = new float[3];

    Vector64.Create(2.0f).CopyTo(destination, 1);

    float[] expected = { 0.0f, 2.0f, 2.0f };
    Assert.True(destination.AsSpan().SequenceEqual(expected));
}

[Fact]
public void IsSupportedByte()
{
    // Exercises the shared generic support-check helper for the byte element type.
    TestIsSupported<byte>();
}

Expand Down

0 comments on commit f7652ef

Please sign in to comment.