using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
-using Newtonsoft.Json;
-using Newtonsoft.Json.Linq;
using Xunit;
namespace System.Text.Json.Tests
{
// TestCaseType is only used to give the json strings a descriptive name.
[Theory]
// Skipping large JSON since slicing them (O(n^2)) is too slow.
[MemberData(nameof(SmallTestCases))]
public static void TestJsonReaderUtf8SegmentSizeOne(bool compactData, TestCaseType type, string jsonString)
{
    // Small payloads only: delegates to the shared helper that slices the
    // single-byte-per-segment sequence at every possible boundary.
    ReadPartialSegmentSizeOne(compactData, type, jsonString);
}
+
// TestCaseType is only used to give the json strings a descriptive name.
[Theory]
[MemberData(nameof(LargeTestCases))]
public static void TestJsonReaderLargeUtf8SegmentSizeOne(bool compactData, TestCaseType type, string jsonString)
{
    // Large payloads avoid the O(n^2) prefix-slicing loop: read the whole
    // segment-size-one sequence once and validate the extracted text.
    ReadFullySegmentSizeOne(compactData, type, jsonString);
}
+
// TestCaseType is only used to give the json strings a descriptive name.
[Theory]
[OuterLoop]
[MemberData(nameof(LargeTestCases))]
public static void TestJsonReaderLargestUtf8SegmentSizeOne(bool compactData, TestCaseType type, string jsonString)
{
    // Skipping really large JSON since slicing them (O(n^2)) is too slow.
    if (type == TestCaseType.Json40KB || type == TestCaseType.Json400KB || type == TestCaseType.ProjectLockJson)
    {
        return;
    }

    ReadPartialSegmentSizeOne(compactData, type, jsonString);
}
+
/// <summary>
/// Encodes <paramref name="jsonString"/> as UTF-8, wraps it in a multi-segment
/// sequence with one byte per segment, and constructs a reader over every
/// prefix slice with isFinalBlock: false.
/// </summary>
private static void ReadPartialSegmentSizeOne(bool compactData, TestCaseType type, string jsonString)
{
    // Remove all formatting/indentation so expected and actual text compare cleanly.
    if (compactData)
    {
        jsonString = JsonTestHelper.GetCompactString(jsonString);
    }

    byte[] dataUtf8 = Encoding.UTF8.GetBytes(jsonString);
    ReadOnlySequence<byte> sequence = JsonTestHelper.GetSequence(dataUtf8, 1);

    // `type` is unused here; it is kept so [MemberData] rows keep their shape.
    for (int j = 0; j < dataUtf8.Length; j++)
    {
        // NOTE(review): only the reader construction is visible in this view —
        // no Read() calls appear in the loop body; confirm the intended
        // coverage against the upstream version of this helper.
        var utf8JsonReader = new Utf8JsonReader(sequence.Slice(0, j), isFinalBlock: false, default);
    }
}
- [Theory]
- [MemberData(nameof(TestCases))]
- public static void TestPartialJsonReaderMultiSegment(bool compactData, TestCaseType type, string jsonString)
/// <summary>
/// Reads the entire JSON payload from a one-byte-per-segment sequence in a
/// single pass and compares the extracted text against what Json.NET produces
/// for the same input.
/// </summary>
private static void ReadFullySegmentSizeOne(bool compactData, TestCaseType type, string jsonString)
{
    // Remove all formatting/indentation so expected and actual text compare cleanly.
    if (compactData)
    {
        jsonString = JsonTestHelper.GetCompactString(jsonString);
    }

    byte[] dataUtf8 = Encoding.UTF8.GetBytes(jsonString);

    // Expected output comes from Newtonsoft.Json reading the same bytes.
    Stream stream = new MemoryStream(dataUtf8);
    TextReader reader = new StreamReader(stream, Encoding.UTF8, false, 1024, true);
    string expectedStr = JsonTestHelper.NewtonsoftReturnStringHelper(reader);

    ReadOnlySequence<byte> sequence = JsonTestHelper.GetSequence(dataUtf8, 1);

    var utf8JsonReader = new Utf8JsonReader(sequence, isFinalBlock: true, default);
    byte[] resultSequence = JsonTestHelper.ReaderLoop(dataUtf8.Length, out int length, ref utf8JsonReader);
    string actualStrSequence = Encoding.UTF8.GetString(resultSequence, 0, length);
    Assert.Equal(expectedStr, actualStrSequence);
}
+
[Theory]
[MemberData(nameof(SmallTestCases))]
public static void TestPartialJsonReaderMultiSegment(bool compactData, TestCaseType type, string jsonString)
{
    // Remove all formatting/indentation so expected and actual text compare cleanly.
    if (compactData)
    {
        jsonString = JsonTestHelper.GetCompactString(jsonString);
    }

    byte[] dataUtf8 = Encoding.UTF8.GetBytes(jsonString);
    ReadOnlyMemory<byte> dataMemory = dataUtf8;

    // Every split of the payload into segments (as produced by the helper).
    List<ReadOnlySequence<byte>> sequences = JsonTestHelper.GetSequences(dataMemory);

    for (int i = 0; i < sequences.Count; i++)
    {
        ReadOnlySequence<byte> sequence = sequences[i];
        var json = new Utf8JsonReader(sequence, isFinalBlock: true, default);
        while (json.Read())
            ;

        // The reader must consume the whole sequence regardless of segmentation,
        // and its reported positions must point at the end of the sequence.
        Assert.Equal(sequence.Length, json.BytesConsumed);
        Assert.Equal(sequence.Length, json.CurrentState.BytesConsumed);

        Assert.True(sequence.Slice(json.Position).IsEmpty);
        Assert.True(sequence.Slice(json.CurrentState.Position).IsEmpty);
    }
}
+ [Theory]
+ [OuterLoop]
+ [MemberData(nameof(SmallTestCases))]
+ public static void TestPartialJsonReaderSlicesMultiSegment(bool compactData, TestCaseType type, string jsonString)
+ {
+ // Remove all formatting/indendation
+ if (compactData)
+ {
+ jsonString = JsonTestHelper.GetCompactString(jsonString);
}
+ byte[] dataUtf8 = Encoding.UTF8.GetBytes(jsonString);
+ ReadOnlyMemory<byte> dataMemory = dataUtf8;
+
+ List<ReadOnlySequence<byte>> sequences = JsonTestHelper.GetSequences(dataMemory);
+
for (int i = 0; i < sequences.Count; i++)
{
ReadOnlySequence<byte> sequence = sequences[i];
JsonReaderState jsonState = json.CurrentState;
byte[] consumedArray = sequence.Slice(0, consumed).ToArray();
Assert.Equal(consumedArray, sequence.Slice(0, json.Position).ToArray());
- Assert.Equal(consumedArray, sequence.Slice(0, jsonState.Position).ToArray());
- json = new Utf8JsonReader(sequence.Slice(consumed), isFinalBlock: true, json.CurrentState);
+ Assert.True(json.Position.Equals(jsonState.Position));
+ json = new Utf8JsonReader(sequence.Slice(consumed), isFinalBlock: true, jsonState);
while (json.Read())
;
Assert.Equal(dataUtf8.Length - consumed, json.BytesConsumed);
Assert.Equal(expectedWithComments, builder.ToString());
Assert.Equal(inputData, sequence.Slice(0, json.Position).ToArray());
- Assert.Equal(inputData, sequence.Slice(0, json.CurrentState.Position).ToArray());
+ Assert.True(json.Position.Equals(json.CurrentState.Position));
state = new JsonReaderState(options: new JsonReaderOptions { CommentHandling = JsonCommentHandling.Skip });
json = new Utf8JsonReader(sequence, isFinalBlock: true, state);
Assert.Equal(expectedWithoutComments, builder.ToString());
Assert.Equal(inputData, sequence.Slice(0, json.Position).ToArray());
- Assert.Equal(inputData, sequence.Slice(0, json.CurrentState.Position).ToArray());
+ Assert.True(json.Position.Equals(json.CurrentState.Position));
}
[Theory]
Assert.Equal(json.BytesConsumed, json.CurrentState.BytesConsumed);
Assert.Equal(inputData, sequence.Slice(0, json.Position).ToArray());
- Assert.Equal(inputData, sequence.Slice(0, json.CurrentState.Position).ToArray());
+ Assert.True(json.Position.Equals(json.CurrentState.Position));
}
}
using System.Globalization;
using System.IO;
using Newtonsoft.Json;
-using Newtonsoft.Json.Linq;
using Xunit;
namespace System.Text.Json.Tests
// Remove all formatting/indendation
if (compactData)
{
- using (JsonTextReader jsonReader = new JsonTextReader(new StringReader(jsonString)))
- {
- jsonReader.FloatParseHandling = FloatParseHandling.Decimal;
- JToken jtoken = JToken.ReadFrom(jsonReader);
- var stringWriter = new StringWriter();
- using (JsonTextWriter jsonWriter = new JsonTextWriter(stringWriter))
- {
- jtoken.WriteTo(jsonWriter);
- jsonString = stringWriter.ToString();
- }
- }
+ jsonString = JsonTestHelper.GetCompactString(jsonString);
}
byte[] dataUtf8 = Encoding.UTF8.GetBytes(jsonString);
}
[Theory]
- [MemberData(nameof(TestCases))]
- public static void TestPartialJsonReader(bool compactData, TestCaseType type, string jsonString)
+ [MemberData(nameof(LargeTestCases))]
+ public static void TestPartialLargeJsonReader(bool compactData, TestCaseType type, string jsonString)
{
// Skipping really large JSON since slicing them (O(n^2)) is too slow.
if (type == TestCaseType.Json40KB || type == TestCaseType.Json400KB || type == TestCaseType.ProjectLockJson)
// Remove all formatting/indendation
if (compactData)
{
- using (JsonTextReader jsonReader = new JsonTextReader(new StringReader(jsonString)))
- {
- jsonReader.FloatParseHandling = FloatParseHandling.Decimal;
- JToken jtoken = JToken.ReadFrom(jsonReader);
- var stringWriter = new StringWriter();
- using (JsonTextWriter jsonWriter = new JsonTextWriter(stringWriter))
- {
- jtoken.WriteTo(jsonWriter);
- jsonString = stringWriter.ToString();
- }
- }
+ jsonString = JsonTestHelper.GetCompactString(jsonString);
}
byte[] dataUtf8 = Encoding.UTF8.GetBytes(jsonString);
Assert.Equal(consumed, json.CurrentState.BytesConsumed);
Assert.Equal(default, json.Position);
- // Skipping large JSON since slicing them (O(n^3)) is too slow.
- if (type == TestCaseType.DeepTree || type == TestCaseType.BroadTree || type == TestCaseType.LotsOfNumbers
- || type == TestCaseType.LotsOfStrings || type == TestCaseType.Json4KB)
+ json = new Utf8JsonReader(dataUtf8.AsSpan((int)consumed), isFinalBlock: true, json.CurrentState);
+ output = JsonTestHelper.ReaderLoop(outputSpan.Length - written, out int length, ref json);
+ output.AsSpan(0, length).CopyTo(outputSpan.Slice(written));
+ written += length;
+ Assert.Equal(dataUtf8.Length - consumed, json.BytesConsumed);
+ Assert.Equal(json.BytesConsumed, json.CurrentState.BytesConsumed);
+ Assert.Equal(default, json.Position);
+ Assert.Equal(default, json.CurrentState.Position);
+
+ Assert.Equal(outputSpan.Length, written);
+ string actualStr = Encoding.UTF8.GetString(outputArray);
+ Assert.Equal(expectedStr, actualStr);
+ }
+ }
+
[Theory]
// Skipping large JSON since slicing them (O(n^2)) is too slow.
[MemberData(nameof(SmallTestCases))]
public static void TestPartialJsonReader(bool compactData, TestCaseType type, string jsonString)
{
    // Remove all formatting/indentation so expected and actual text compare cleanly.
    if (compactData)
    {
        jsonString = JsonTestHelper.GetCompactString(jsonString);
    }

    byte[] dataUtf8 = Encoding.UTF8.GetBytes(jsonString);

    // outputLength sizes the destination buffer for the re-assembled token text.
    byte[] result = JsonTestHelper.ReturnBytesHelper(dataUtf8, out int outputLength);
    var outputArray = new byte[outputLength];
    Span<byte> outputSpan = outputArray;

    // Expected output comes from Newtonsoft.Json reading the same bytes.
    Stream stream = new MemoryStream(dataUtf8);
    TextReader reader = new StreamReader(stream, Encoding.UTF8, false, 1024, true);
    string expectedStr = JsonTestHelper.NewtonsoftReturnStringHelper(reader);

    for (int i = 0; i < dataUtf8.Length; i++)
    {
        // First pass: read the first i bytes as a non-final block.
        JsonReaderState state = default;
        var json = new Utf8JsonReader(dataUtf8.AsSpan(0, i), isFinalBlock: false, state);
        byte[] output = JsonTestHelper.ReaderLoop(outputSpan.Length, out int firstLength, ref json);
        output.AsSpan(0, firstLength).CopyTo(outputSpan);
        int written = firstLength;

        long consumed = json.BytesConsumed;
        Assert.Equal(consumed, json.CurrentState.BytesConsumed);
        Assert.Equal(default, json.Position);

        for (long j = consumed; j < dataUtf8.Length - consumed; j++)
        {
            // Need to re-initialize the state and reader to avoid using the previous state stack.
            state = default;
            json = new Utf8JsonReader(dataUtf8.AsSpan(0, i), isFinalBlock: false, state);
            while (json.Read())
                ;

            JsonReaderState jsonState = json.CurrentState;

            // Second pass: resume mid-payload for j bytes (still non-final)...
            written = firstLength;
            json = new Utf8JsonReader(dataUtf8.AsSpan((int)consumed, (int)j), isFinalBlock: false, jsonState);
            output = JsonTestHelper.ReaderLoop(outputSpan.Length - written, out int length, ref json);
            output.AsSpan(0, length).CopyTo(outputSpan.Slice(written));
            written += length;

            long consumedInner = json.BytesConsumed;
            Assert.Equal(consumedInner, json.CurrentState.BytesConsumed);

            // ...then finish the remainder as the final block and verify totals.
            json = new Utf8JsonReader(dataUtf8.AsSpan((int)(consumed + consumedInner)), isFinalBlock: true, json.CurrentState);
            output = JsonTestHelper.ReaderLoop(outputSpan.Length - written, out length, ref json);
            output.AsSpan(0, length).CopyTo(outputSpan.Slice(written));
            written += length;
            Assert.Equal(dataUtf8.Length - consumedInner - consumed, json.BytesConsumed);
            Assert.Equal(json.BytesConsumed, json.CurrentState.BytesConsumed);
            // Span-based readers report a default Position.
            Assert.Equal(default, json.Position);
            Assert.Equal(default, json.CurrentState.Position);

            string actualStr = Encoding.UTF8.GetString(outputArray);
            Assert.Equal(expectedStr, actualStr);
        }
    }
}
// Remove all formatting/indendation
if (compactData)
{
- using (JsonTextReader jsonReader = new JsonTextReader(new StringReader(jsonString)))
- {
- jsonReader.FloatParseHandling = FloatParseHandling.Decimal;
- JToken jtoken = JToken.ReadFrom(jsonReader);
- var stringWriter = new StringWriter();
- using (JsonTextWriter jsonWriter = new JsonTextWriter(stringWriter))
- {
- jtoken.WriteTo(jsonWriter);
- jsonString = stringWriter.ToString();
- }
- }
+ jsonString = JsonTestHelper.GetCompactString(jsonString);
}
byte[] dataUtf8 = Encoding.UTF8.GetBytes(jsonString);
long consumed = json.BytesConsumed;
Assert.Equal(consumed, json.CurrentState.BytesConsumed);
+ }
+ }
+ }
+
+ [Theory]
+ [OuterLoop]
+ [MemberData(nameof(SpecialNumTestCases))]
+ public static void TestPartialJsonReaderSlicesSpecialNumbers(TestCaseType type, string jsonString)
+ {
+ byte[] dataUtf8 = Encoding.UTF8.GetBytes(jsonString);
+
+ foreach (JsonCommentHandling commentHandling in Enum.GetValues(typeof(JsonCommentHandling)))
+ {
+ for (int i = 0; i < dataUtf8.Length; i++)
+ {
+ var state = new JsonReaderState(options: new JsonReaderOptions { CommentHandling = commentHandling });
+ var json = new Utf8JsonReader(dataUtf8.AsSpan(0, i), isFinalBlock: false, state);
+ while (json.Read())
+ ;
+
+ long consumed = json.BytesConsumed;
+ Assert.Equal(consumed, json.CurrentState.BytesConsumed);
for (long j = consumed; j < dataUtf8.Length - consumed; j++)
{
}
}
// Small JSON payloads: safe for the tests that slice at every boundary
// (O(n^2)/O(n^3) in payload size). Each row: (compactData, type, jsonString).
public static IEnumerable<object[]> SmallTestCases
{
    get
    {
        return new List<object[]>
        {
            new object[] { true, TestCaseType.Basic, SR.BasicJson},
            new object[] { true, TestCaseType.BasicLargeNum, SR.BasicJsonWithLargeNum}, // Json.NET treats numbers starting with 0 as octal (0425 becomes 277)
            new object[] { true, TestCaseType.FullSchema1, SR.FullJsonSchema1},
            new object[] { true, TestCaseType.HelloWorld, SR.HelloWorld},
            new object[] { true, TestCaseType.Json400B, SR.Json400B},

            new object[] { false, TestCaseType.Basic, SR.BasicJson},
            new object[] { false, TestCaseType.BasicLargeNum, SR.BasicJsonWithLargeNum}, // Json.NET treats numbers starting with 0 as octal (0425 becomes 277)
            new object[] { false, TestCaseType.FullSchema1, SR.FullJsonSchema1},
            new object[] { false, TestCaseType.HelloWorld, SR.HelloWorld},
            new object[] { false, TestCaseType.Json400B, SR.Json400B},
        };
    }
}
+
// Large JSON payloads: used by the single-pass tests (and by [OuterLoop]
// slicing tests that skip the very largest cases). Rows: (compactData, type, jsonString).
public static IEnumerable<object[]> LargeTestCases
{
    get
    {
        return new List<object[]>
        {
            new object[] { true, TestCaseType.BroadTree, SR.BroadTree}, // \r\n behavior is different between Json.NET and System.Text.Json
            new object[] { true, TestCaseType.DeepTree, SR.DeepTree},
            new object[] { true, TestCaseType.LotsOfNumbers, SR.LotsOfNumbers},
            new object[] { true, TestCaseType.LotsOfStrings, SR.LotsOfStrings},
            new object[] { true, TestCaseType.ProjectLockJson, SR.ProjectLockJson},
            new object[] { true, TestCaseType.Json400B, SR.Json400B},
            new object[] { true, TestCaseType.Json40KB, SR.Json40KB},
            new object[] { true, TestCaseType.Json400KB, SR.Json400KB},

            new object[] { false, TestCaseType.BroadTree, SR.BroadTree}, // \r\n behavior is different between Json.NET and System.Text.Json
            new object[] { false, TestCaseType.DeepTree, SR.DeepTree},
            new object[] { false, TestCaseType.LotsOfNumbers, SR.LotsOfNumbers},
            new object[] { false, TestCaseType.LotsOfStrings, SR.LotsOfStrings},
            new object[] { false, TestCaseType.ProjectLockJson, SR.ProjectLockJson},
            new object[] { false, TestCaseType.Json400B, SR.Json400B},
            new object[] { false, TestCaseType.Json40KB, SR.Json40KB},
            new object[] { false, TestCaseType.Json400KB, SR.Json400KB}
        };
    }
}
+
public static IEnumerable<object[]> SpecialNumTestCases
{
get