public static System.Linq.ParallelQuery<TSource> Concat<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Collections.Generic.IEnumerable<TSource> second) { throw null; }
public static System.Linq.ParallelQuery<TSource> Concat<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Linq.ParallelQuery<TSource> second) { throw null; }
public static bool Contains<TSource>(this System.Linq.ParallelQuery<TSource> source, TSource value) { throw null; }
- public static bool Contains<TSource>(this System.Linq.ParallelQuery<TSource> source, TSource value, System.Collections.Generic.IEqualityComparer<TSource> comparer) { throw null; }
+ public static bool Contains<TSource>(this System.Linq.ParallelQuery<TSource> source, TSource value, System.Collections.Generic.IEqualityComparer<TSource>? comparer) { throw null; }
public static int Count<TSource>(this System.Linq.ParallelQuery<TSource> source) { throw null; }
public static int Count<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, bool> predicate) { throw null; }
public static System.Linq.ParallelQuery<TSource> DefaultIfEmpty<TSource>(this System.Linq.ParallelQuery<TSource> source) { throw null; }
public static System.Linq.ParallelQuery<TSource> DefaultIfEmpty<TSource>(this System.Linq.ParallelQuery<TSource> source, TSource defaultValue) { throw null; }
public static System.Linq.ParallelQuery<TSource> Distinct<TSource>(this System.Linq.ParallelQuery<TSource> source) { throw null; }
- public static System.Linq.ParallelQuery<TSource> Distinct<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Collections.Generic.IEqualityComparer<TSource> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<TSource> Distinct<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Collections.Generic.IEqualityComparer<TSource>? comparer) { throw null; }
+ [return: System.Diagnostics.CodeAnalysis.MaybeNullAttribute]
public static TSource ElementAtOrDefault<TSource>(this System.Linq.ParallelQuery<TSource> source, int index) { throw null; }
public static TSource ElementAt<TSource>(this System.Linq.ParallelQuery<TSource> source, int index) { throw null; }
public static System.Linq.ParallelQuery<TResult> Empty<TResult>() { throw null; }
[System.ObsoleteAttribute("The second data source of a binary operator must be of type System.Linq.ParallelQuery<T> rather than System.Collections.Generic.IEnumerable<T>. To fix this problem, use the AsParallel() extension method to convert the right data source to System.Linq.ParallelQuery<T>.")]
public static System.Linq.ParallelQuery<TSource> Except<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Collections.Generic.IEnumerable<TSource> second) { throw null; }
[System.ObsoleteAttribute("The second data source of a binary operator must be of type System.Linq.ParallelQuery<T> rather than System.Collections.Generic.IEnumerable<T>. To fix this problem, use the AsParallel() extension method to convert the right data source to System.Linq.ParallelQuery<T>.")]
- public static System.Linq.ParallelQuery<TSource> Except<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Collections.Generic.IEnumerable<TSource> second, System.Collections.Generic.IEqualityComparer<TSource> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<TSource> Except<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Collections.Generic.IEnumerable<TSource> second, System.Collections.Generic.IEqualityComparer<TSource>? comparer) { throw null; }
public static System.Linq.ParallelQuery<TSource> Except<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Linq.ParallelQuery<TSource> second) { throw null; }
- public static System.Linq.ParallelQuery<TSource> Except<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Linq.ParallelQuery<TSource> second, System.Collections.Generic.IEqualityComparer<TSource> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<TSource> Except<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Linq.ParallelQuery<TSource> second, System.Collections.Generic.IEqualityComparer<TSource>? comparer) { throw null; }
+ [return: System.Diagnostics.CodeAnalysis.MaybeNullAttribute]
public static TSource FirstOrDefault<TSource>(this System.Linq.ParallelQuery<TSource> source) { throw null; }
+ [return: System.Diagnostics.CodeAnalysis.MaybeNullAttribute]
public static TSource FirstOrDefault<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, bool> predicate) { throw null; }
public static TSource First<TSource>(this System.Linq.ParallelQuery<TSource> source) { throw null; }
public static TSource First<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, bool> predicate) { throw null; }
public static void ForAll<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Action<TSource> action) { }
public static System.Linq.ParallelQuery<System.Linq.IGrouping<TKey, TSource>> GroupBy<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector) { throw null; }
- public static System.Linq.ParallelQuery<System.Linq.IGrouping<TKey, TSource>> GroupBy<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Collections.Generic.IEqualityComparer<TKey> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<System.Linq.IGrouping<TKey, TSource>> GroupBy<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Collections.Generic.IEqualityComparer<TKey>? comparer) { throw null; }
public static System.Linq.ParallelQuery<System.Linq.IGrouping<TKey, TElement>> GroupBy<TSource, TKey, TElement>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TSource, TElement> elementSelector) { throw null; }
- public static System.Linq.ParallelQuery<System.Linq.IGrouping<TKey, TElement>> GroupBy<TSource, TKey, TElement>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TSource, TElement> elementSelector, System.Collections.Generic.IEqualityComparer<TKey> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<System.Linq.IGrouping<TKey, TElement>> GroupBy<TSource, TKey, TElement>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TSource, TElement> elementSelector, System.Collections.Generic.IEqualityComparer<TKey>? comparer) { throw null; }
public static System.Linq.ParallelQuery<TResult> GroupBy<TSource, TKey, TResult>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TKey, System.Collections.Generic.IEnumerable<TSource>, TResult> resultSelector) { throw null; }
- public static System.Linq.ParallelQuery<TResult> GroupBy<TSource, TKey, TResult>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TKey, System.Collections.Generic.IEnumerable<TSource>, TResult> resultSelector, System.Collections.Generic.IEqualityComparer<TKey> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<TResult> GroupBy<TSource, TKey, TResult>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TKey, System.Collections.Generic.IEnumerable<TSource>, TResult> resultSelector, System.Collections.Generic.IEqualityComparer<TKey>? comparer) { throw null; }
public static System.Linq.ParallelQuery<TResult> GroupBy<TSource, TKey, TElement, TResult>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TSource, TElement> elementSelector, System.Func<TKey, System.Collections.Generic.IEnumerable<TElement>, TResult> resultSelector) { throw null; }
- public static System.Linq.ParallelQuery<TResult> GroupBy<TSource, TKey, TElement, TResult>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TSource, TElement> elementSelector, System.Func<TKey, System.Collections.Generic.IEnumerable<TElement>, TResult> resultSelector, System.Collections.Generic.IEqualityComparer<TKey> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<TResult> GroupBy<TSource, TKey, TElement, TResult>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TSource, TElement> elementSelector, System.Func<TKey, System.Collections.Generic.IEnumerable<TElement>, TResult> resultSelector, System.Collections.Generic.IEqualityComparer<TKey>? comparer) { throw null; }
[System.ObsoleteAttribute("The second data source of a binary operator must be of type System.Linq.ParallelQuery<T> rather than System.Collections.Generic.IEnumerable<T>. To fix this problem, use the AsParallel() extension method to convert the right data source to System.Linq.ParallelQuery<T>.")]
public static System.Linq.ParallelQuery<TResult> GroupJoin<TOuter, TInner, TKey, TResult>(this System.Linq.ParallelQuery<TOuter> outer, System.Collections.Generic.IEnumerable<TInner> inner, System.Func<TOuter, TKey> outerKeySelector, System.Func<TInner, TKey> innerKeySelector, System.Func<TOuter, System.Collections.Generic.IEnumerable<TInner>, TResult> resultSelector) { throw null; }
[System.ObsoleteAttribute("The second data source of a binary operator must be of type System.Linq.ParallelQuery<T> rather than System.Collections.Generic.IEnumerable<T>. To fix this problem, use the AsParallel() extension method to convert the right data source to System.Linq.ParallelQuery<T>.")]
- public static System.Linq.ParallelQuery<TResult> GroupJoin<TOuter, TInner, TKey, TResult>(this System.Linq.ParallelQuery<TOuter> outer, System.Collections.Generic.IEnumerable<TInner> inner, System.Func<TOuter, TKey> outerKeySelector, System.Func<TInner, TKey> innerKeySelector, System.Func<TOuter, System.Collections.Generic.IEnumerable<TInner>, TResult> resultSelector, System.Collections.Generic.IEqualityComparer<TKey> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<TResult> GroupJoin<TOuter, TInner, TKey, TResult>(this System.Linq.ParallelQuery<TOuter> outer, System.Collections.Generic.IEnumerable<TInner> inner, System.Func<TOuter, TKey> outerKeySelector, System.Func<TInner, TKey> innerKeySelector, System.Func<TOuter, System.Collections.Generic.IEnumerable<TInner>, TResult> resultSelector, System.Collections.Generic.IEqualityComparer<TKey>? comparer) { throw null; }
public static System.Linq.ParallelQuery<TResult> GroupJoin<TOuter, TInner, TKey, TResult>(this System.Linq.ParallelQuery<TOuter> outer, System.Linq.ParallelQuery<TInner> inner, System.Func<TOuter, TKey> outerKeySelector, System.Func<TInner, TKey> innerKeySelector, System.Func<TOuter, System.Collections.Generic.IEnumerable<TInner>, TResult> resultSelector) { throw null; }
- public static System.Linq.ParallelQuery<TResult> GroupJoin<TOuter, TInner, TKey, TResult>(this System.Linq.ParallelQuery<TOuter> outer, System.Linq.ParallelQuery<TInner> inner, System.Func<TOuter, TKey> outerKeySelector, System.Func<TInner, TKey> innerKeySelector, System.Func<TOuter, System.Collections.Generic.IEnumerable<TInner>, TResult> resultSelector, System.Collections.Generic.IEqualityComparer<TKey> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<TResult> GroupJoin<TOuter, TInner, TKey, TResult>(this System.Linq.ParallelQuery<TOuter> outer, System.Linq.ParallelQuery<TInner> inner, System.Func<TOuter, TKey> outerKeySelector, System.Func<TInner, TKey> innerKeySelector, System.Func<TOuter, System.Collections.Generic.IEnumerable<TInner>, TResult> resultSelector, System.Collections.Generic.IEqualityComparer<TKey>? comparer) { throw null; }
[System.ObsoleteAttribute("The second data source of a binary operator must be of type System.Linq.ParallelQuery<T> rather than System.Collections.Generic.IEnumerable<T>. To fix this problem, use the AsParallel() extension method to convert the right data source to System.Linq.ParallelQuery<T>.")]
public static System.Linq.ParallelQuery<TSource> Intersect<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Collections.Generic.IEnumerable<TSource> second) { throw null; }
[System.ObsoleteAttribute("The second data source of a binary operator must be of type System.Linq.ParallelQuery<T> rather than System.Collections.Generic.IEnumerable<T>. To fix this problem, use the AsParallel() extension method to convert the right data source to System.Linq.ParallelQuery<T>.")]
- public static System.Linq.ParallelQuery<TSource> Intersect<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Collections.Generic.IEnumerable<TSource> second, System.Collections.Generic.IEqualityComparer<TSource> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<TSource> Intersect<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Collections.Generic.IEnumerable<TSource> second, System.Collections.Generic.IEqualityComparer<TSource>? comparer) { throw null; }
public static System.Linq.ParallelQuery<TSource> Intersect<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Linq.ParallelQuery<TSource> second) { throw null; }
- public static System.Linq.ParallelQuery<TSource> Intersect<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Linq.ParallelQuery<TSource> second, System.Collections.Generic.IEqualityComparer<TSource> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<TSource> Intersect<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Linq.ParallelQuery<TSource> second, System.Collections.Generic.IEqualityComparer<TSource>? comparer) { throw null; }
[System.ObsoleteAttribute("The second data source of a binary operator must be of type System.Linq.ParallelQuery<T> rather than System.Collections.Generic.IEnumerable<T>. To fix this problem, use the AsParallel() extension method to convert the right data source to System.Linq.ParallelQuery<T>.")]
public static System.Linq.ParallelQuery<TResult> Join<TOuter, TInner, TKey, TResult>(this System.Linq.ParallelQuery<TOuter> outer, System.Collections.Generic.IEnumerable<TInner> inner, System.Func<TOuter, TKey> outerKeySelector, System.Func<TInner, TKey> innerKeySelector, System.Func<TOuter, TInner, TResult> resultSelector) { throw null; }
[System.ObsoleteAttribute("The second data source of a binary operator must be of type System.Linq.ParallelQuery<T> rather than System.Collections.Generic.IEnumerable<T>. To fix this problem, use the AsParallel() extension method to convert the right data source to System.Linq.ParallelQuery<T>.")]
- public static System.Linq.ParallelQuery<TResult> Join<TOuter, TInner, TKey, TResult>(this System.Linq.ParallelQuery<TOuter> outer, System.Collections.Generic.IEnumerable<TInner> inner, System.Func<TOuter, TKey> outerKeySelector, System.Func<TInner, TKey> innerKeySelector, System.Func<TOuter, TInner, TResult> resultSelector, System.Collections.Generic.IEqualityComparer<TKey> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<TResult> Join<TOuter, TInner, TKey, TResult>(this System.Linq.ParallelQuery<TOuter> outer, System.Collections.Generic.IEnumerable<TInner> inner, System.Func<TOuter, TKey> outerKeySelector, System.Func<TInner, TKey> innerKeySelector, System.Func<TOuter, TInner, TResult> resultSelector, System.Collections.Generic.IEqualityComparer<TKey>? comparer) { throw null; }
public static System.Linq.ParallelQuery<TResult> Join<TOuter, TInner, TKey, TResult>(this System.Linq.ParallelQuery<TOuter> outer, System.Linq.ParallelQuery<TInner> inner, System.Func<TOuter, TKey> outerKeySelector, System.Func<TInner, TKey> innerKeySelector, System.Func<TOuter, TInner, TResult> resultSelector) { throw null; }
- public static System.Linq.ParallelQuery<TResult> Join<TOuter, TInner, TKey, TResult>(this System.Linq.ParallelQuery<TOuter> outer, System.Linq.ParallelQuery<TInner> inner, System.Func<TOuter, TKey> outerKeySelector, System.Func<TInner, TKey> innerKeySelector, System.Func<TOuter, TInner, TResult> resultSelector, System.Collections.Generic.IEqualityComparer<TKey> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<TResult> Join<TOuter, TInner, TKey, TResult>(this System.Linq.ParallelQuery<TOuter> outer, System.Linq.ParallelQuery<TInner> inner, System.Func<TOuter, TKey> outerKeySelector, System.Func<TInner, TKey> innerKeySelector, System.Func<TOuter, TInner, TResult> resultSelector, System.Collections.Generic.IEqualityComparer<TKey>? comparer) { throw null; }
+ [return: System.Diagnostics.CodeAnalysis.MaybeNullAttribute]
public static TSource LastOrDefault<TSource>(this System.Linq.ParallelQuery<TSource> source) { throw null; }
+ [return: System.Diagnostics.CodeAnalysis.MaybeNullAttribute]
public static TSource LastOrDefault<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, bool> predicate) { throw null; }
public static TSource Last<TSource>(this System.Linq.ParallelQuery<TSource> source) { throw null; }
public static TSource Last<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, bool> predicate) { throw null; }
public static long? Max(this System.Linq.ParallelQuery<long?> source) { throw null; }
public static float? Max(this System.Linq.ParallelQuery<float?> source) { throw null; }
public static float Max(this System.Linq.ParallelQuery<float> source) { throw null; }
+ [return: System.Diagnostics.CodeAnalysis.MaybeNullAttribute]
public static TSource Max<TSource>(this System.Linq.ParallelQuery<TSource> source) { throw null; }
public static decimal Max<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, decimal> selector) { throw null; }
public static double Max<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, double> selector) { throw null; }
public static long? Max<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, long?> selector) { throw null; }
public static float? Max<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, float?> selector) { throw null; }
public static float Max<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, float> selector) { throw null; }
+ [return: System.Diagnostics.CodeAnalysis.MaybeNullAttribute]
public static TResult Max<TSource, TResult>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TResult> selector) { throw null; }
public static decimal Min(this System.Linq.ParallelQuery<decimal> source) { throw null; }
public static double Min(this System.Linq.ParallelQuery<double> source) { throw null; }
public static long? Min(this System.Linq.ParallelQuery<long?> source) { throw null; }
public static float? Min(this System.Linq.ParallelQuery<float?> source) { throw null; }
public static float Min(this System.Linq.ParallelQuery<float> source) { throw null; }
+ [return: System.Diagnostics.CodeAnalysis.MaybeNullAttribute]
public static TSource Min<TSource>(this System.Linq.ParallelQuery<TSource> source) { throw null; }
public static decimal Min<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, decimal> selector) { throw null; }
public static double Min<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, double> selector) { throw null; }
public static long? Min<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, long?> selector) { throw null; }
public static float? Min<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, float?> selector) { throw null; }
public static float Min<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, float> selector) { throw null; }
+ [return: System.Diagnostics.CodeAnalysis.MaybeNullAttribute]
public static TResult Min<TSource, TResult>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TResult> selector) { throw null; }
public static System.Linq.ParallelQuery<TResult> OfType<TResult>(this System.Linq.ParallelQuery source) { throw null; }
public static System.Linq.OrderedParallelQuery<TSource> OrderByDescending<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector) { throw null; }
- public static System.Linq.OrderedParallelQuery<TSource> OrderByDescending<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Collections.Generic.IComparer<TKey> comparer) { throw null; }
+ public static System.Linq.OrderedParallelQuery<TSource> OrderByDescending<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Collections.Generic.IComparer<TKey>? comparer) { throw null; }
public static System.Linq.OrderedParallelQuery<TSource> OrderBy<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector) { throw null; }
- public static System.Linq.OrderedParallelQuery<TSource> OrderBy<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Collections.Generic.IComparer<TKey> comparer) { throw null; }
+ public static System.Linq.OrderedParallelQuery<TSource> OrderBy<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Collections.Generic.IComparer<TKey>? comparer) { throw null; }
public static System.Linq.ParallelQuery<int> Range(int start, int count) { throw null; }
public static System.Linq.ParallelQuery<TResult> Repeat<TResult>(TResult element, int count) { throw null; }
public static System.Linq.ParallelQuery<TSource> Reverse<TSource>(this System.Linq.ParallelQuery<TSource> source) { throw null; }
[System.ObsoleteAttribute("The second data source of a binary operator must be of type System.Linq.ParallelQuery<T> rather than System.Collections.Generic.IEnumerable<T>. To fix this problem, use the AsParallel() extension method to convert the right data source to System.Linq.ParallelQuery<T>.")]
public static bool SequenceEqual<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Collections.Generic.IEnumerable<TSource> second) { throw null; }
[System.ObsoleteAttribute("The second data source of a binary operator must be of type System.Linq.ParallelQuery<T> rather than System.Collections.Generic.IEnumerable<T>. To fix this problem, use the AsParallel() extension method to convert the right data source to System.Linq.ParallelQuery<T>.")]
- public static bool SequenceEqual<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Collections.Generic.IEnumerable<TSource> second, System.Collections.Generic.IEqualityComparer<TSource> comparer) { throw null; }
+ public static bool SequenceEqual<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Collections.Generic.IEnumerable<TSource> second, System.Collections.Generic.IEqualityComparer<TSource>? comparer) { throw null; }
public static bool SequenceEqual<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Linq.ParallelQuery<TSource> second) { throw null; }
- public static bool SequenceEqual<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Linq.ParallelQuery<TSource> second, System.Collections.Generic.IEqualityComparer<TSource> comparer) { throw null; }
+ public static bool SequenceEqual<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Linq.ParallelQuery<TSource> second, System.Collections.Generic.IEqualityComparer<TSource>? comparer) { throw null; }
+ [return: System.Diagnostics.CodeAnalysis.MaybeNullAttribute]
public static TSource SingleOrDefault<TSource>(this System.Linq.ParallelQuery<TSource> source) { throw null; }
+ [return: System.Diagnostics.CodeAnalysis.MaybeNullAttribute]
public static TSource SingleOrDefault<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, bool> predicate) { throw null; }
public static TSource Single<TSource>(this System.Linq.ParallelQuery<TSource> source) { throw null; }
public static TSource Single<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, bool> predicate) { throw null; }
public static System.Linq.ParallelQuery<TSource> TakeWhile<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, int, bool> predicate) { throw null; }
public static System.Linq.ParallelQuery<TSource> Take<TSource>(this System.Linq.ParallelQuery<TSource> source, int count) { throw null; }
public static System.Linq.OrderedParallelQuery<TSource> ThenByDescending<TSource, TKey>(this System.Linq.OrderedParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector) { throw null; }
- public static System.Linq.OrderedParallelQuery<TSource> ThenByDescending<TSource, TKey>(this System.Linq.OrderedParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Collections.Generic.IComparer<TKey> comparer) { throw null; }
+ public static System.Linq.OrderedParallelQuery<TSource> ThenByDescending<TSource, TKey>(this System.Linq.OrderedParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Collections.Generic.IComparer<TKey>? comparer) { throw null; }
public static System.Linq.OrderedParallelQuery<TSource> ThenBy<TSource, TKey>(this System.Linq.OrderedParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector) { throw null; }
- public static System.Linq.OrderedParallelQuery<TSource> ThenBy<TSource, TKey>(this System.Linq.OrderedParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Collections.Generic.IComparer<TKey> comparer) { throw null; }
+ public static System.Linq.OrderedParallelQuery<TSource> ThenBy<TSource, TKey>(this System.Linq.OrderedParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Collections.Generic.IComparer<TKey>? comparer) { throw null; }
public static TSource[] ToArray<TSource>(this System.Linq.ParallelQuery<TSource> source) { throw null; }
- public static System.Collections.Generic.Dictionary<TKey, TSource> ToDictionary<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector) { throw null; }
- public static System.Collections.Generic.Dictionary<TKey, TSource> ToDictionary<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Collections.Generic.IEqualityComparer<TKey> comparer) { throw null; }
- public static System.Collections.Generic.Dictionary<TKey, TElement> ToDictionary<TSource, TKey, TElement>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TSource, TElement> elementSelector) { throw null; }
- public static System.Collections.Generic.Dictionary<TKey, TElement> ToDictionary<TSource, TKey, TElement>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TSource, TElement> elementSelector, System.Collections.Generic.IEqualityComparer<TKey> comparer) { throw null; }
+ public static System.Collections.Generic.Dictionary<TKey, TSource> ToDictionary<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector) where TKey : notnull { throw null; }
+ public static System.Collections.Generic.Dictionary<TKey, TSource> ToDictionary<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Collections.Generic.IEqualityComparer<TKey>? comparer) where TKey : notnull { throw null; }
+ public static System.Collections.Generic.Dictionary<TKey, TElement> ToDictionary<TSource, TKey, TElement>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TSource, TElement> elementSelector) where TKey : notnull { throw null; }
+ public static System.Collections.Generic.Dictionary<TKey, TElement> ToDictionary<TSource, TKey, TElement>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TSource, TElement> elementSelector, System.Collections.Generic.IEqualityComparer<TKey>? comparer) where TKey : notnull { throw null; }
public static System.Collections.Generic.List<TSource> ToList<TSource>(this System.Linq.ParallelQuery<TSource> source) { throw null; }
- public static System.Linq.ILookup<TKey, TSource> ToLookup<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector) { throw null; }
- public static System.Linq.ILookup<TKey, TSource> ToLookup<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Collections.Generic.IEqualityComparer<TKey> comparer) { throw null; }
- public static System.Linq.ILookup<TKey, TElement> ToLookup<TSource, TKey, TElement>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TSource, TElement> elementSelector) { throw null; }
- public static System.Linq.ILookup<TKey, TElement> ToLookup<TSource, TKey, TElement>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TSource, TElement> elementSelector, System.Collections.Generic.IEqualityComparer<TKey> comparer) { throw null; }
+ public static System.Linq.ILookup<TKey, TSource> ToLookup<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector) where TKey : notnull { throw null; }
+ public static System.Linq.ILookup<TKey, TSource> ToLookup<TSource, TKey>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Collections.Generic.IEqualityComparer<TKey>? comparer) where TKey : notnull { throw null; }
+ public static System.Linq.ILookup<TKey, TElement> ToLookup<TSource, TKey, TElement>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TSource, TElement> elementSelector) where TKey : notnull { throw null; }
+ public static System.Linq.ILookup<TKey, TElement> ToLookup<TSource, TKey, TElement>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, TKey> keySelector, System.Func<TSource, TElement> elementSelector, System.Collections.Generic.IEqualityComparer<TKey>? comparer) where TKey : notnull { throw null; }
[System.ObsoleteAttribute("The second data source of a binary operator must be of type System.Linq.ParallelQuery<T> rather than System.Collections.Generic.IEnumerable<T>. To fix this problem, use the AsParallel() extension method to convert the right data source to System.Linq.ParallelQuery<T>.")]
public static System.Linq.ParallelQuery<TSource> Union<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Collections.Generic.IEnumerable<TSource> second) { throw null; }
[System.ObsoleteAttribute("The second data source of a binary operator must be of type System.Linq.ParallelQuery<T> rather than System.Collections.Generic.IEnumerable<T>. To fix this problem, use the AsParallel() extension method to convert the right data source to System.Linq.ParallelQuery<T>.")]
- public static System.Linq.ParallelQuery<TSource> Union<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Collections.Generic.IEnumerable<TSource> second, System.Collections.Generic.IEqualityComparer<TSource> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<TSource> Union<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Collections.Generic.IEnumerable<TSource> second, System.Collections.Generic.IEqualityComparer<TSource>? comparer) { throw null; }
public static System.Linq.ParallelQuery<TSource> Union<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Linq.ParallelQuery<TSource> second) { throw null; }
- public static System.Linq.ParallelQuery<TSource> Union<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Linq.ParallelQuery<TSource> second, System.Collections.Generic.IEqualityComparer<TSource> comparer) { throw null; }
+ public static System.Linq.ParallelQuery<TSource> Union<TSource>(this System.Linq.ParallelQuery<TSource> first, System.Linq.ParallelQuery<TSource> second, System.Collections.Generic.IEqualityComparer<TSource>? comparer) { throw null; }
public static System.Linq.ParallelQuery<TSource> Where<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, bool> predicate) { throw null; }
public static System.Linq.ParallelQuery<TSource> Where<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Func<TSource, int, bool> predicate) { throw null; }
public static System.Linq.ParallelQuery<TSource> WithCancellation<TSource>(this System.Linq.ParallelQuery<TSource> source, System.Threading.CancellationToken cancellationToken) { throw null; }
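// Illustrative usage sketch (not part of the API diff above): how the annotated surface reads to a
// caller compiling with nullable reference types enabled. The sample data and the
// NullableSurfaceSample type are hypothetical; only ParallelEnumerable members listed above are assumed.
using System;
using System.Collections.Generic;
using System.Linq;

internal static class NullableSurfaceSample
{
    internal static void Run()
    {
        string[] words = { "a", "be", "sea" };

        // The comparer parameters are now declared nullable, so passing null is warning-free.
        bool found = words.AsParallel().Contains("be", comparer: null);

        // FirstOrDefault is annotated [return: MaybeNull]; for a reference-type source the result
        // may be null, so store it in a nullable local.
        string? missing = words.AsParallel().FirstOrDefault(w => w.Length > 10);

        // ToDictionary now carries a "where TKey : notnull" constraint, matching Dictionary<TKey, TValue>.
        Dictionary<int, string> byLength = words.AsParallel().ToDictionary(w => w.Length);

        Console.WriteLine($"{found} {missing ?? "<none>"} {byLength.Count}");
    }
}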
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<Configurations>netcoreapp-Debug;netcoreapp-Release</Configurations>
+ <Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<Compile Include="System.Linq.Parallel.cs" />
<AssemblyName>System.Linq.Parallel</AssemblyName>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<Configurations>netcoreapp-Debug;netcoreapp-Release</Configurations>
+ <Nullable>enable</Nullable>
</PropertyGroup>
<!-- Compiled Source Files -->
<ItemGroup>
using System.Threading;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
// 1 consumer thread to access this thing concurrently. It's been carefully designed
// to avoid locking, but only because of this restriction...
- private readonly T[][] _buffer; // The buffer of chunks.
+ private readonly T[]?[] _buffer; // The buffer of chunks.
private readonly int _index; // Index of this channel
private volatile int _producerBufferIndex; // Producer's current index, i.e. where to put the next chunk.
private volatile int _consumerBufferIndex; // Consumer's current index, i.e. where to get the next chunk.
private volatile bool _done; // Set to true once the producer is done.
- private T[] _producerChunk; // The temporary chunk being generated by the producer.
+ private T[]? _producerChunk; // The temporary chunk being generated by the producer.
private int _producerChunkIndex; // A producer's index into its temporary chunk.
- private T[] _consumerChunk; // The temporary chunk being enumerated by the consumer.
+ private T[]? _consumerChunk; // The temporary chunk being enumerated by the consumer.
private int _consumerChunkIndex; // A consumer's index into its temporary chunk.
private readonly int _chunkSize; // The number of elements that comprise a chunk.
// These events are used to signal a waiting producer when the consumer dequeues, and to signal a
// waiting consumer when the producer enqueues.
- private ManualResetEventSlim _producerEvent;
- private IntValueEvent _consumerEvent;
+ private ManualResetEventSlim? _producerEvent;
+ private IntValueEvent? _consumerEvent;
// These two-valued ints track whether a producer or consumer _might_ be waiting. They are marked
// volatile because they are used in synchronization critical regions of code (see usage below).
// individual elements.
//
- internal AsynchronousChannel(int index, int chunkSize, CancellationToken cancellationToken, IntValueEvent consumerEvent) :
+ internal AsynchronousChannel(int index, int chunkSize, CancellationToken cancellationToken, IntValueEvent? consumerEvent) :
this(index, Scheduling.DEFAULT_BOUNDED_BUFFER_CAPACITY, chunkSize, cancellationToken, consumerEvent)
{
}
- internal AsynchronousChannel(int index, int capacity, int chunkSize, CancellationToken cancellationToken, IntValueEvent consumerEvent)
+ internal AsynchronousChannel(int index, int capacity, int chunkSize, CancellationToken cancellationToken, IntValueEvent? consumerEvent)
{
if (chunkSize == 0) chunkSize = Scheduling.GetDefaultChunkSize<T>();
internal void Enqueue(T item)
{
+ Debug.Assert(_producerChunk != null);
+
// Store the element into our current chunk.
int producerChunkIndex = _producerChunkIndex;
_producerChunk[producerChunkIndex] = item;
if (_consumerIsWaiting == 1 && !IsChunkBufferEmpty)
{
TraceHelpers.TraceInfo("AsynchronousChannel::EnqueueChunk - producer waking consumer");
+ Debug.Assert(_consumerEvent != null);
_consumerIsWaiting = 0;
_consumerEvent.Set(_index);
}
private void WaitUntilNonFull()
{
+ Debug.Assert(_producerEvent != null);
+
// We must loop; sometimes the producer event will have been set
// prematurely due to the way waiting flags are managed. By looping,
// we will only return from this method when space is truly available.
// True if an item was found, false otherwise.
//
- internal bool TryDequeue(ref T item)
+ internal bool TryDequeue([MaybeNullWhen(false), AllowNull] ref T item)
{
// Ensure we have a chunk to work with.
if (_consumerChunk == null)
// True if a chunk was found, false otherwise.
//
- private bool TryDequeueChunk(ref T[] chunk)
+ private bool TryDequeueChunk([NotNullWhen(true)] ref T[]? chunk)
{
// This is the non-blocking version of dequeue. We first check to see
// if the queue is empty. If the caller chooses to wait later, they can
// eventually regardless of whether the caller actually waits or not.
//
- internal bool TryDequeue(ref T item, ref bool isDone)
+ internal bool TryDequeue([MaybeNullWhen(false), AllowNull] ref T item, ref bool isDone)
{
isDone = false;
// eventually regardless of whether the caller actually waits or not.
//
- private bool TryDequeueChunk(ref T[] chunk, ref bool isDone)
+ private bool TryDequeueChunk([NotNullWhen(true)] ref T[]? chunk, ref bool isDone)
{
isDone = false;
// We can safely read from the consumer index because we know no producers
// will write concurrently.
int consumerBufferIndex = _consumerBufferIndex;
- T[] chunk = _buffer[consumerBufferIndex];
+ T[] chunk = _buffer[consumerBufferIndex]!;
// Zero out contents to avoid holding on to memory for longer than necessary. This
// ensures the entire chunk is eligible for GC sooner. (More important for big chunks.)
if (_producerIsWaiting == 1 && !IsFull)
{
TraceHelpers.TraceInfo("BoundedSingleLockFreeChannel::DequeueChunk - consumer waking producer");
+ Debug.Assert(_producerEvent != null);
_producerIsWaiting = 0;
_producerEvent.Set();
}
internal sealed class SynchronousChannel<T>
{
// We currently use the BCL FIFO queue internally, although any would do.
- private Queue<T> _queue;
+ private Queue<T>? _queue;
#if DEBUG
// In debug builds, we keep track of when the producer is done (for asserts).
internal void CopyTo(T[] array, int arrayIndex)
{
- Debug.Assert(array != null);
+ Debug.Assert(_queue != null && array != null);
#if DEBUG
Debug.Assert(_done, "Can only copy from the channel after it's done being added to");
#endif
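// Illustrative sketch (not part of the diff): the [MaybeNullWhen(false)] / [AllowNull] ref-parameter
// pattern that AsynchronousChannel<T>.TryDequeue adopts above, reduced to a stand-alone type.
// SingleSlot<T> is hypothetical and exists only to show how the annotations read in isolation.
using System.Diagnostics.CodeAnalysis;

internal sealed class SingleSlot<T>
{
    [MaybeNull, AllowNull] private T _value = default!; // May legitimately hold null/default.
    private bool _hasValue;

    internal void Put(T value)
    {
        _value = value;
        _hasValue = true;
    }

    // AllowNull lets callers pass a default-initialized local by ref without a warning;
    // MaybeNullWhen(false) records that, on a false return, the ref argument may still be null
    // for reference-type T.
    internal bool TryTake([MaybeNullWhen(false), AllowNull] ref T item)
    {
        if (!_hasValue)
        {
            return false;
        }

        item = _value!;
        _hasValue = false;
        return true;
    }
}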
using System.Collections.Generic;
using System.Linq.Parallel;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq
{
// Helper method to find the minimum or maximum element in the source.
//
+ [return: MaybeNull]
private static T Reduce(IEnumerable<T> source, int sign)
{
Debug.Assert(source != null);
Func<Pair<bool, T>, T> resultSelector = MakeResultSelectorFunction();
AssociativeAggregationOperator<T, Pair<bool, T>, T> aggregation =
- new AssociativeAggregationOperator<T, Pair<bool, T>, T>(source, new Pair<bool, T>(false, default(T)), null,
- true, intermediateReduce, finalReduce, resultSelector, default(T) != null, QueryAggregationOptions.AssociativeCommutative);
+ new AssociativeAggregationOperator<T, Pair<bool, T>, T>(source, new Pair<bool, T>(false, default), null,
+ true, intermediateReduce, finalReduce, resultSelector, default(T)! != null, QueryAggregationOptions.AssociativeCommutative);
return aggregation.Aggregate();
}
// Helper method to find the minimum element in the source.
//
+ [return: MaybeNull]
internal static T ReduceMin(IEnumerable<T> source)
{
return Reduce(source, -1);
// Helper method to find the maximum element in the source.
//
+ [return: MaybeNull]
internal static T ReduceMax(IEnumerable<T> source)
{
return Reduce(source, 1);
// the existing accumulated result is equal to the sign requested by the function factory,
// we will return a new pair that contains the current element as the best item. We will
// ignore null elements (for reference and nullable types) in the input stream.
- if ((default(T) != null || element != null) &&
+ if ((default(T)! != null || element != null) &&
(!accumulator.First || Util.Sign(comparer.Compare(element, accumulator.Second)) == sign))
{
return new Pair<bool, T>(true, element);
if (element.First &&
(!accumulator.First || Util.Sign(comparer.Compare(element.Second, accumulator.Second)) == sign))
{
- Debug.Assert(default(T) != null || element.Second != null, "nulls unexpected in final reduce");
+ Debug.Assert(default(T)! != null || element.Second != null, "nulls unexpected in final reduce");
return new Pair<bool, T>(true, element.Second);
}
// empty sequences. Else, we will just return the element, which may be null for other types.
return delegate (Pair<bool, T> accumulator)
{
- Debug.Assert(accumulator.First || default(T) == null,
+ Debug.Assert(accumulator.First || default(T)! == null,
"for non-null types we expect an exception to be thrown before getting here");
return accumulator.Second;
};
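// Illustrative sketch (not part of the diff): a sequential equivalent of the Reduce helper above,
// using the same sign convention (sign = -1 selects the minimum, sign = 1 the maximum) and the same
// rule of skipping null elements for reference and nullable value types. ReduceSketch and its
// comparer parameter are hypothetical; the operator itself uses Util.Sign and a query-supplied comparer.
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;

internal static class ReduceSketch
{
    [return: MaybeNull]
    internal static T Reduce<T>(IEnumerable<T> source, int sign, IComparer<T>? comparer = null)
    {
        comparer ??= Comparer<T>.Default;
        bool seen = false;
        T best = default!;

        foreach (T element in source)
        {
            // Ignore nulls unless T is a non-nullable value type, as the intermediate reduce does.
            if (default(T) == null && element == null)
            {
                continue;
            }

            if (!seen || Math.Sign(comparer.Compare(element, best)) == sign)
            {
                best = element;
                seen = true;
            }
        }

        // For non-nullable element types an empty sequence is an error, mirroring the final reduce.
        if (!seen && default(T) != null)
        {
            throw new InvalidOperationException("Sequence contains no elements.");
        }

        return best;
    }
}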
using System.Collections;
using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
}
// A singleton cached and shared among callers.
- private static volatile EmptyEnumerable<T> s_instance;
- private static volatile EmptyEnumerator<T> s_enumeratorInstance;
+ private static volatile EmptyEnumerable<T>? s_instance;
+ private static volatile EmptyEnumerator<T>? s_enumeratorInstance;
internal static EmptyEnumerable<T> Instance
{
internal class EmptyEnumerator<T> : QueryOperatorEnumerator<T, int>, IEnumerator<T>
{
- internal override bool MoveNext(ref T currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref T currentElement, ref int currentKey)
{
return false;
}
// IEnumerator<T> methods.
- public T Current { get { return default(T); } }
- object IEnumerator.Current { get { return null; } }
+ [MaybeNull]
+ public T Current { get { return default!; } }
+ object? IEnumerator.Current { get { return null; } }
public bool MoveNext() { return false; }
void Collections.IEnumerator.Reset() { }
}
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
/// a weakly typed IEnumerable object, allowing it to be accessed as a strongly typed
/// IEnumerable{object}.
/// </summary>
- internal class EnumerableWrapperWeakToStrong : IEnumerable<object>
+ internal class EnumerableWrapperWeakToStrong : IEnumerable<object?>
{
private readonly IEnumerable _wrappedEnumerable; // The wrapped enumerable object.
return ((IEnumerable<object>)this).GetEnumerator();
}
- public IEnumerator<object> GetEnumerator()
+ public IEnumerator<object?> GetEnumerator()
{
return new WrapperEnumeratorWeakToStrong(_wrappedEnumerable.GetEnumerator());
}
// A wrapper over IEnumerator that provides IEnumerator<object> interface
//
- private class WrapperEnumeratorWeakToStrong : IEnumerator<object>
+ private class WrapperEnumeratorWeakToStrong : IEnumerator<object?>
{
private readonly IEnumerator _wrappedEnumerator; // The weakly typed enumerator we've wrapped.
// forward to the corresponding weakly typed IEnumerator methods.
//
- object IEnumerator.Current
+ object? IEnumerator.Current
{
get { return _wrappedEnumerator.Current; }
}
- object IEnumerator<object>.Current
+ object? IEnumerator<object?>.Current
{
get { return _wrappedEnumerator.Current; }
}
void IDisposable.Dispose()
{
- IDisposable disposable = _wrappedEnumerator as IDisposable;
- if (disposable != null)
+ if (_wrappedEnumerator is IDisposable disposable)
{
disposable.Dispose();
}
/// A simple implementation of the ParallelQuery{object} interface which wraps an
/// underlying IEnumerable, such that it can be used in parallel queries.
/// </summary>
- internal class ParallelEnumerableWrapper : ParallelQuery<object>
+ internal class ParallelEnumerableWrapper : ParallelQuery<object?>
{
private readonly IEnumerable _source; // The wrapped enumerable object.
return _source.GetEnumerator();
}
- public override IEnumerator<object> GetEnumerator()
+ public override IEnumerator<object?> GetEnumerator()
{
return new EnumerableWrapperWeakToStrong(_source).GetEnumerator();
}
internal sealed override ParallelQuery<TCastTo> Cast<TCastTo>()
{
- return ParallelEnumerable.Select<TSource, TCastTo>(this, elem => (TCastTo)(object)elem);
+ return ParallelEnumerable.Select<TSource, TCastTo>(this, elem => (TCastTo)(object)elem!);
}
internal sealed override ParallelQuery<TCastTo> OfType<TCastTo>()
// solution (because it results in two operators) but is simple to implement.
return this
.Where<TSource>(elem => elem is TCastTo)
- .Select<TSource, TCastTo>(elem => (TCastTo)(object)elem);
+ .Select<TSource, TCastTo>(elem => (TCastTo)(object)elem!);
}
internal override IEnumerator GetEnumeratorUntyped()
private readonly int _from; // The initial value.
private readonly int _count; // How many values to yield.
private readonly int _initialIndex; // The ordinal index of the first value in the range.
- private Shared<int> _currentCount; // The 0-based index of the current value. [allocate in moveNext to avoid false-sharing]
+ private Shared<int>? _currentCount; // The 0-based index of the current value. [allocate in moveNext to avoid false-sharing]
//-----------------------------------------------------------------------------------
// Creates a new enumerator.
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
private readonly TResult _element; // The element to repeat.
private readonly int _count; // The number of times to repeat it.
private readonly int _indexOffset; // Our index offset.
- private Shared<int> _currentIndex; // The number of times we have already repeated it. [allocate in moveNext to avoid false-sharing]
+ private Shared<int>? _currentIndex; // The number of times we have already repeated it. [allocate in moveNext to avoid false-sharing]
//-----------------------------------------------------------------------------------
// Creates a new enumerator.
// Basic IEnumerator<T> methods. These produce the repeating sequence.
//
- internal override bool MoveNext(ref TResult currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TResult currentElement, ref int currentKey)
{
if (_currentIndex == null)
_currentIndex = new Shared<int>(-1);
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
private const int InitialSize = 7;
private const int HashCodeMask = 0x7FFFFFFF;
- public Set(IEqualityComparer<TElement> comparer)
+ public Set(IEqualityComparer<TElement>? comparer)
{
if (comparer == null) comparer = EqualityComparer<TElement>.Default;
_comparer = comparer;
int last = -1;
for (int i = _buckets[bucket] - 1; i >= 0; last = i, i = _slots[i].next)
{
- if (_slots[i].hashCode == hashCode && _comparer.Equals(_slots[i].value, value))
+ if (_slots[i].hashCode == hashCode && _comparer.Equals(_slots[i].value!, value)) // TODO-NULLABLE: https://github.com/dotnet/csharplang/issues/2872
{
if (last < 0)
{
_slots[last].next = _slots[i].next;
}
_slots[i].hashCode = -1;
- _slots[i].value = default(TElement);
+ _slots[i].value = default;
_slots[i].next = -1;
return true;
}
int hashCode = InternalGetHashCode(value);
for (int i = _buckets[hashCode % _buckets.Length] - 1; i >= 0; i = _slots[i].next)
{
- if (_slots[i].hashCode == hashCode && _comparer.Equals(_slots[i].value, value)) return true;
+ if (_slots[i].hashCode == hashCode && _comparer.Equals(_slots[i].value!, value)) return true; // TODO-NULLABLE: https://github.com/dotnet/csharplang/issues/2872
}
if (add)
{
{
internal int hashCode;
internal int next;
- internal TElement value;
+ [MaybeNull, AllowNull] internal TElement value;
}
}
}
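// Illustrative sketch (not part of the diff): the "Add returns true only the first time" contract
// that the distinct/union-style operators rely on, shown with HashSet<T> standing in for the
// internal Set<TElement>. DistinctSketch is hypothetical; the comparer parameter mirrors the
// nullable-annotated Set constructor above, including the defaulting rule.
using System.Collections.Generic;

internal static class DistinctSketch
{
    internal static IEnumerable<T> DistinctSequential<T>(IEnumerable<T> source, IEqualityComparer<T>? comparer)
    {
        // Same defaulting rule as Set<TElement>: a null comparer means EqualityComparer<T>.Default.
        var seen = new HashSet<T>(comparer ?? EqualityComparer<T>.Default);
        foreach (T item in source)
        {
            if (seen.Add(item))
            {
                yield return item;
            }
        }
    }
}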
using System.Threading;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
internal sealed class AsynchronousChannelMergeEnumerator<T> : MergeEnumerator<T>
{
private readonly AsynchronousChannel<T>[] _channels; // The channels being enumerated.
- private IntValueEvent _consumerEvent; // The consumer event.
+ private IntValueEvent? _consumerEvent; // The consumer event.
private readonly bool[] _done; // Tracks which channels are done.
private int _channelIndex; // The next channel from which we'll dequeue.
- private T _currentElement; // The remembered element from the previous MoveNext.
+ [MaybeNull, AllowNull] private T _currentElement = default; // The remembered element from the previous MoveNext. TODO-NULLABLE: https://github.com/dotnet/roslyn/issues/37511
//-----------------------------------------------------------------------------------
// Allocates a new enumerator over a set of one-to-one channels.
//
internal AsynchronousChannelMergeEnumerator(
- QueryTaskGroupState taskGroupState, AsynchronousChannel<T>[] channels, IntValueEvent consumerEvent)
+ QueryTaskGroupState taskGroupState, AsynchronousChannel<T>[] channels, IntValueEvent? consumerEvent)
: base(taskGroupState)
{
Debug.Assert(channels != null);
throw new InvalidOperationException(SR.PLINQ_CommonEnumerator_Current_NotStarted);
}
- return _currentElement;
+ return _currentElement!;
}
}
}
// Else try the fast path.
- if (!_done[index] && _channels[index].TryDequeue(ref _currentElement))
+ if (!_done[index] && _channels[index].TryDequeue(ref _currentElement!)) // TODO-NULLABLE: https://github.com/dotnet/csharplang/issues/2872
{
_channelIndex = (index + 1) % _channels.Length;
return true;
AsynchronousChannel<T> current = _channels[currChannelIndex];
bool isDone = _done[currChannelIndex];
- if (!isDone && current.TryDequeue(ref _currentElement))
+ if (!isDone && current.TryDequeue(ref _currentElement!)) // TODO-NULLABLE: https://github.com/dotnet/csharplang/issues/2872
{
// The channel has an item to be processed. We already remembered the current
// element (Dequeue stores it as an out-parameter), so we just return true
// we still need to continue processing them.
if (!current.IsChunkBufferEmpty)
{
- bool dequeueResult = current.TryDequeue(ref _currentElement);
+ bool dequeueResult = current.TryDequeue(ref _currentElement!); // TODO-NULLABLE: https://github.com/dotnet/csharplang/issues/2872
Debug.Assert(dequeueResult, "channel isn't empty, yet the dequeue failed, hmm");
return true;
}
for (int i = 0; i < _channels.Length; i++)
{
bool channelIsDone = false;
- if (!_done[i] && _channels[i].TryDequeue(ref _currentElement, ref channelIsDone))
+ if (!_done[i] && _channels[i].TryDequeue(ref _currentElement!, ref channelIsDone)) // TODO-NULLABLE: https://github.com/dotnet/csharplang/issues/2872
{
// The channel has received an item since the last time we checked.
// Just return and let the consumer process the element returned.
break;
}
+ Debug.Assert(_consumerEvent != null);
//This Wait() does not require cancellation support as it will wake up when all the producers into the
//channel have finished. Hence, if all the producers wake up on cancellation, so will this.
_consumerEvent.Wait();
{
private readonly QueryTaskGroupState _taskGroupState; // State shared among tasks.
private readonly PartitionedStream<TInputOutput, TIgnoreKey> _partitions; // Source partitions.
- private readonly AsynchronousChannel<TInputOutput>[] _asyncChannels; // Destination channels (async).
- private readonly SynchronousChannel<TInputOutput>[] _syncChannels; // Destination channels (sync).
- private readonly IEnumerator<TInputOutput> _channelEnumerator; // Output enumerator.
+ private readonly AsynchronousChannel<TInputOutput>[]? _asyncChannels; // Destination channels (async).
+ private readonly SynchronousChannel<TInputOutput>[]? _syncChannels; // Destination channels (sync).
+ private readonly IEnumerator<TInputOutput>? _channelEnumerator; // Output enumerator.
private readonly TaskScheduler _taskScheduler; // The task manager to execute the query.
private readonly bool _ignoreOutput; // Whether we're enumerating "for effect".
IEnumerator<TInputOutput> IMergeHelper<TInputOutput>.GetEnumerator()
{
Debug.Assert(_ignoreOutput || _channelEnumerator != null);
- return _channelEnumerator;
+ return _channelEnumerator!;
}
//-----------------------------------------------------------------------------------
IEnumerator<TInputOutput> GetEnumerator();
// Returns the merged output as an array.
- TInputOutput[] GetResultsAsArray();
+ TInputOutput[]? GetResultsAsArray();
}
}
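// Illustrative sketch (not part of the diff): the Debug.Assert-then-suppress pattern used above
// (for example around _channelEnumerator! and _mergeHelper!). The assert documents the invariant
// in debug builds; the null-forgiving operator states it for the nullable analysis. LazyHolder<T>
// is hypothetical.
using System.Diagnostics;

internal sealed class LazyHolder<T> where T : class
{
    private T? _value; // Set exactly once before any reader calls Get().

    internal void Set(T value) => _value = value;

    internal T Get()
    {
        Debug.Assert(_value != null, "Set must be called before Get");
        return _value!; // Invariant established above; suppress the remaining nullable warning.
    }
}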
// Straightforward IEnumerator<T> methods. So subclasses needn't bother.
//
- object IEnumerator.Current
+ object? IEnumerator.Current
{
get { return ((IEnumerator<TInputOutput>)this).Current; }
}
{
// Many internal algorithms are parameterized based on the data. The IMergeHelper
// is the pluggable interface whose implementations perform those algorithms.
- private IMergeHelper<TInputOutput> _mergeHelper;
+ private IMergeHelper<TInputOutput>? _mergeHelper;
// Private constructor. MergeExecutor should only be constructed via the
// MergeExecutor.Execute static method.
// Returns the merged results as an array.
//
- internal TInputOutput[] GetResultsAsArray()
+ internal TInputOutput[]? GetResultsAsArray()
{
+ Debug.Assert(_mergeHelper != null);
return _mergeHelper.GetResultsAsArray();
}
// An array of asynchronous channels, one for each partition.
//
- internal static AsynchronousChannel<TInputOutput>[] MakeAsynchronousChannels(int partitionCount, ParallelMergeOptions options, IntValueEvent consumerEvent, CancellationToken cancellationToken)
+ internal static AsynchronousChannel<TInputOutput>[] MakeAsynchronousChannels(int partitionCount, ParallelMergeOptions options, IntValueEvent? consumerEvent, CancellationToken cancellationToken)
{
AsynchronousChannel<TInputOutput>[] channels = new AsynchronousChannel<TInputOutput>[partitionCount];
{
private readonly QueryTaskGroupState _taskGroupState; // State shared among tasks.
private readonly PartitionedStream<TInputOutput, TKey> _partitions; // Source partitions.
- private readonly Shared<TInputOutput[]> _results; // The array where results are stored.
+ private readonly Shared<TInputOutput[]?> _results; // The array where results are stored.
private readonly TaskScheduler _taskScheduler; // The task manager to execute the query.
//-----------------------------------------------------------------------------------
_taskGroupState = new QueryTaskGroupState(cancellationState, queryId);
_partitions = partitions;
- _results = new Shared<TInputOutput[]>(null);
+ _results = new Shared<TInputOutput[]?>(null);
_taskScheduler = taskScheduler;
}
// Returns the results as an array.
//
- public TInputOutput[] GetResultsAsArray()
+ public TInputOutput[]? GetResultsAsArray()
{
return _results.Value;
}
///
/// Read and written by the consumer only.
/// </summary>
- private readonly Queue<Pair<TKey, TOutput>>[] _privateBuffer;
+ private readonly Queue<Pair<TKey, TOutput>>?[] _privateBuffer;
/// <summary>
/// Tracks whether MoveNext() has already been called previously.
return true;
}
- Debug.Assert(_privateBuffer[producer].Count == 0);
+ Debug.Assert(_privateBuffer[producer]!.Count == 0);
_privateBuffer[producer] = null;
}
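// Illustrative sketch (not part of the diff): the "array of nullable elements" declarations used
// above (T[]?[] _buffer, Queue<Pair<TKey, TOutput>>?[] _privateBuffer) put the ? on the element
// type, not on the array, so individual slots may be null while the array reference never is.
// NullableSlots is hypothetical.
using System.Collections.Generic;

internal static class NullableSlots
{
    internal static int CountBufferedItems<T>(Queue<T>?[] buffers)
    {
        int total = 0;
        foreach (Queue<T>? buffer in buffers)
        {
            // A slot may already have been handed off and cleared (set to null), as the merge helper does above.
            if (buffer != null)
            {
                total += buffer.Count;
            }
        }
        return total;
    }
}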
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
{
private readonly SynchronousChannel<T>[] _channels; // The channel array we will enumerate, from left-to-right.
private int _channelIndex; // The current channel index. This moves through the array as we enumerate.
- private T _currentElement; // The last element remembered during enumeration.
+ private T _currentElement = default!; // The last element remembered during enumeration.
//-----------------------------------------------------------------------------------
// Instantiates a new enumerator for a set of channels.
using System.Collections.Generic;
using System.Threading;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
private readonly int _partitionCount; // The number of partitions.
private readonly int _partitionIndex; // Our unique partition index.
- private readonly Func<TInputOutput, THashKey> _keySelector; // A key-selector function.
+ private readonly Func<TInputOutput, THashKey>? _keySelector; // A key-selector function.
private readonly HashRepartitionStream<TInputOutput, THashKey, int> _repartitionStream; // A repartitioning stream.
private readonly ListChunk<Pair<TInputOutput, THashKey>>[][] _valueExchangeMatrix; // Matrix to do inter-task communication.
private readonly QueryOperatorEnumerator<TInputOutput, TIgnoreKey> _source; // The immediate source of data.
private CountdownEvent _barrier; // Used to signal and wait for repartitions to complete.
private readonly CancellationToken _cancellationToken; // A token for canceling the process.
- private Mutables _mutables; // Mutable fields for this enumerator.
+ private Mutables? _mutables; // Mutable fields for this enumerator.
private class Mutables
{
internal int _currentBufferIndex; // Current buffer index.
- internal ListChunk<Pair<TInputOutput, THashKey>> _currentBuffer; // The buffer we're currently enumerating.
+ internal ListChunk<Pair<TInputOutput, THashKey>>? _currentBuffer; // The buffer we're currently enumerating.
internal int _currentIndex; // Current index into the buffer.
internal Mutables()
internal HashRepartitionEnumerator(
QueryOperatorEnumerator<TInputOutput, TIgnoreKey> source, int partitionCount, int partitionIndex,
- Func<TInputOutput, THashKey> keySelector, HashRepartitionStream<TInputOutput, THashKey, int> repartitionStream,
+ Func<TInputOutput, THashKey>? keySelector, HashRepartitionStream<TInputOutput, THashKey, int> repartitionStream,
CountdownEvent barrier, ListChunk<Pair<TInputOutput, THashKey>>[][] valueExchangeMatrix, CancellationToken cancellationToken)
{
Debug.Assert(source != null);
if (_partitionCount == 1)
{
// If there's only one partition, no need to do any sort of exchanges.
- TIgnoreKey keyUnused = default(TIgnoreKey);
- TInputOutput current = default(TInputOutput);
+ TIgnoreKey keyUnused = default(TIgnoreKey)!;
+ TInputOutput current = default(TInputOutput)!;
#if DEBUG
currentKey = unchecked((int)0xdeadbeef);
#endif
- if (_source.MoveNext(ref current, ref keyUnused))
+ if (_source.MoveNext(ref current!, ref keyUnused))
{
currentElement = new Pair<TInputOutput, THashKey>(
- current, _keySelector == null ? default(THashKey) : _keySelector(current));
+ current, _keySelector == null ? default : _keySelector(current));
return true;
}
return false;
}
- Mutables mutables = _mutables;
+ Mutables? mutables = _mutables;
if (mutables == null)
mutables = _mutables = new Mutables();
private void EnumerateAndRedistributeElements()
{
- Mutables mutables = _mutables;
+ Mutables? mutables = _mutables;
Debug.Assert(mutables != null);
ListChunk<Pair<TInputOutput, THashKey>>[] privateBuffers = new ListChunk<Pair<TInputOutput, THashKey>>[_partitionCount];
- TInputOutput element = default(TInputOutput);
- TIgnoreKey ignoreKey = default(TIgnoreKey);
+ TInputOutput element = default(TInputOutput)!;
+ TIgnoreKey ignoreKey = default(TIgnoreKey)!;
int loopCount = 0;
- while (_source.MoveNext(ref element, ref ignoreKey))
+ while (_source.MoveNext(ref element!, ref ignoreKey))
{
if ((loopCount++ & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
// Calculate the element's destination partition index, placing it into the
// appropriate buffer from which partitions will later enumerate.
int destinationIndex;
- THashKey elementHashKey = default(THashKey);
+ THashKey elementHashKey = default(THashKey)!;
if (_keySelector != null)
{
elementHashKey = _keySelector(element);
if (_mutables == null || (_mutables._currentBufferIndex == ENUMERATION_NOT_STARTED))
{
_barrier.Signal();
- _barrier = null;
+ _barrier = null!;
}
_source.Dispose();
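// Sketch (hypothetical ReductionSketch helper) of the default(T)! placeholder idiom used for the
// scratch locals above: an unconstrained T may be null, so a local that exists only to be
// overwritten inside the loop is seeded with default! to satisfy definite assignment and avoid a
// possible-null warning, rather than widening its type to T?.
using System;
using System.Collections.Generic;

internal static class ReductionSketch
{
    internal static T LastOrThrow<T>(IEnumerator<T> source)
    {
        T last = default(T)!;        // Placeholder; only returned if hadElements became true.
        bool hadElements = false;
        while (source.MoveNext())
        {
            last = source.Current;
            hadElements = true;
        }
        if (!hadElements)
            throw new InvalidOperationException("Sequence contains no elements.");
        return last;
    }
}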
/// <typeparam name="TOrderKey"></typeparam>
internal abstract class HashRepartitionStream<TInputOutput, THashKey, TOrderKey> : PartitionedStream<Pair<TInputOutput, THashKey>, TOrderKey>
{
- private readonly IEqualityComparer<THashKey> _keyComparer; // The optional key comparison routine.
- private readonly IEqualityComparer<TInputOutput> _elementComparer; // The optional element comparison routine.
+ private readonly IEqualityComparer<THashKey>? _keyComparer; // The optional key comparison routine.
+ private readonly IEqualityComparer<TInputOutput>? _elementComparer; // The optional element comparison routine.
private readonly int _distributionMod; // The distribution value we'll use to scramble input.
//---------------------------------------------------------------------------------------
//
internal HashRepartitionStream(
- int partitionsCount, IComparer<TOrderKey> orderKeyComparer, IEqualityComparer<THashKey> hashKeyComparer,
- IEqualityComparer<TInputOutput> elementComparer)
+ int partitionsCount, IComparer<TOrderKey> orderKeyComparer, IEqualityComparer<THashKey>? hashKeyComparer,
+ IEqualityComparer<TInputOutput>? elementComparer)
: base(partitionsCount, orderKeyComparer, OrdinalIndexState.Shuffled)
{
// elementComparer is used by operators that use elements themselves as the hash keys.
using System.Collections.Generic;
using System.Threading;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
private readonly int _partitionCount; // The number of partitions.
private readonly int _partitionIndex; // Our unique partition index.
- private readonly Func<TInputOutput, THashKey> _keySelector; // A key-selector function.
+ private readonly Func<TInputOutput, THashKey>? _keySelector; // A key-selector function.
private readonly HashRepartitionStream<TInputOutput, THashKey, TOrderKey> _repartitionStream; // A repartitioning stream.
private readonly ListChunk<Pair<TInputOutput, THashKey>>[][] _valueExchangeMatrix; // Matrix to do inter-task communication of values.
private readonly ListChunk<TOrderKey>[][] _keyExchangeMatrix; // Matrix to do inter-task communication of order keys.
private readonly QueryOperatorEnumerator<TInputOutput, TOrderKey> _source; // The immediate source of data.
private CountdownEvent _barrier; // Used to signal and wait for repartitions to complete.
private readonly CancellationToken _cancellationToken; // A token for canceling the process.
- private Mutables _mutables; // Mutable fields for this enumerator.
+ private Mutables? _mutables; // Mutable fields for this enumerator.
private class Mutables
{
internal int _currentBufferIndex; // Current buffer index.
- internal ListChunk<Pair<TInputOutput, THashKey>> _currentBuffer; // The buffer we're currently enumerating.
- internal ListChunk<TOrderKey> _currentKeyBuffer; // The buffer we're currently enumerating.
+ internal ListChunk<Pair<TInputOutput, THashKey>>? _currentBuffer; // The buffer we're currently enumerating.
+ internal ListChunk<TOrderKey>? _currentKeyBuffer; // The buffer we're currently enumerating.
internal int _currentIndex; // Current index into the buffer.
internal Mutables()
internal OrderedHashRepartitionEnumerator(
QueryOperatorEnumerator<TInputOutput, TOrderKey> source, int partitionCount, int partitionIndex,
- Func<TInputOutput, THashKey> keySelector, OrderedHashRepartitionStream<TInputOutput, THashKey, TOrderKey> repartitionStream, CountdownEvent barrier,
+ Func<TInputOutput, THashKey>? keySelector, OrderedHashRepartitionStream<TInputOutput, THashKey, TOrderKey> repartitionStream, CountdownEvent barrier,
ListChunk<Pair<TInputOutput, THashKey>>[][] valueExchangeMatrix, ListChunk<TOrderKey>[][] keyExchangeMatrix, CancellationToken cancellationToken)
{
Debug.Assert(source != null);
{
if (_partitionCount == 1)
{
- TInputOutput current = default(TInputOutput);
+ TInputOutput current = default(TInputOutput)!;
// If there's only one partition, no need to do any sort of exchanges.
- if (_source.MoveNext(ref current, ref currentKey))
+ if (_source.MoveNext(ref current!, ref currentKey))
{
currentElement = new Pair<TInputOutput, THashKey>(
- current, _keySelector == null ? default(THashKey) : _keySelector(current));
+ current, _keySelector == null ? default : _keySelector(current));
return true;
}
return false;
}
- Mutables mutables = _mutables;
+ Mutables? mutables = _mutables;
if (mutables == null)
mutables = _mutables = new Mutables();
mutables._currentKeyBuffer = mutables._currentKeyBuffer.Next;
Debug.Assert(mutables._currentBuffer == null || mutables._currentBuffer.Count > 0);
Debug.Assert((mutables._currentBuffer == null) == (mutables._currentKeyBuffer == null));
- Debug.Assert(mutables._currentBuffer == null || mutables._currentBuffer.Count == mutables._currentKeyBuffer.Count);
+ Debug.Assert(mutables._currentBuffer == null || mutables._currentBuffer.Count == mutables._currentKeyBuffer!.Count);
continue; // Go back around and invoke this same logic.
}
}
private void EnumerateAndRedistributeElements()
{
- Mutables mutables = _mutables;
+ Mutables? mutables = _mutables;
Debug.Assert(mutables != null);
ListChunk<Pair<TInputOutput, THashKey>>[] privateBuffers = new ListChunk<Pair<TInputOutput, THashKey>>[_partitionCount];
ListChunk<TOrderKey>[] privateKeyBuffers = new ListChunk<TOrderKey>[_partitionCount];
- TInputOutput element = default(TInputOutput);
- TOrderKey key = default(TOrderKey);
+ TInputOutput element = default(TInputOutput)!;
+ TOrderKey key = default(TOrderKey)!;
int loopCount = 0;
- while (_source.MoveNext(ref element, ref key))
+ while (_source.MoveNext(ref element!, ref key))
{
if ((loopCount++ & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
// Calculate the element's destination partition index, placing it into the
// appropriate buffer from which partitions will later enumerate.
int destinationIndex;
- THashKey elementHashKey = default(THashKey);
+ THashKey elementHashKey = default(THashKey)!;
if (_keySelector != null)
{
elementHashKey = _keySelector(element);
if (_mutables == null || (_mutables._currentBufferIndex == ENUMERATION_NOT_STARTED))
{
_barrier.Signal();
- _barrier = null;
+ _barrier = null!;
}
_source.Dispose();
internal class OrderedHashRepartitionStream<TInputOutput, THashKey, TOrderKey> : HashRepartitionStream<TInputOutput, THashKey, TOrderKey>
{
internal OrderedHashRepartitionStream(
- PartitionedStream<TInputOutput, TOrderKey> inputStream, Func<TInputOutput, THashKey> hashKeySelector,
- IEqualityComparer<THashKey> hashKeyComparer, IEqualityComparer<TInputOutput> elementComparer, CancellationToken cancellationToken)
+ PartitionedStream<TInputOutput, TOrderKey> inputStream, Func<TInputOutput, THashKey>? hashKeySelector,
+ IEqualityComparer<THashKey>? hashKeyComparer, IEqualityComparer<TInputOutput>? elementComparer, CancellationToken cancellationToken)
: base(inputStream.PartitionCount, inputStream.KeyComparer, hashKeyComparer, elementComparer)
{
_partitions =
using System.Collections.Generic;
using System.Threading;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
Debug.Assert(partitionCount > 0);
// If this is a wrapper, grab the internal wrapped data source so we can uncover its real type.
- ParallelEnumerableWrapper<T> wrapper = source as ParallelEnumerableWrapper<T>;
- if (wrapper != null)
+ if (source is ParallelEnumerableWrapper<T> wrapper)
{
source = wrapper.WrappedEnumerable;
Debug.Assert(source != null);
}
// Check whether we have an indexable data source.
- IList<T> sourceAsList = source as IList<T>;
- if (sourceAsList != null)
+ if (source is IList<T> sourceAsList)
{
QueryOperatorEnumerator<T, int>[] partitions = new QueryOperatorEnumerator<T, int>[partitionCount];
// We use this below to specialize enumerators when possible.
- T[] sourceAsArray = source as T[];
+ T[]? sourceAsArray = source as T[];
// If range partitioning is used, chunk size will be unlimited, i.e. -1.
int maxChunkSize = -1;
private readonly int _partitionIndex; // The index of the current partition.
private readonly int _maxChunkSize; // The maximum size of a chunk. -1 if unlimited.
private readonly int _sectionCount; // Precomputed in ctor: the number of sections the range is split into.
- private Mutables _mutables; // Lazily allocated mutable variables.
+ private Mutables? _mutables; // Lazily allocated mutable variables.
private class Mutables
{
((_elementCount % sectionSize) == 0 ? 0 : 1);
}
- internal override bool MoveNext(ref T currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref T currentElement, ref int currentKey)
{
// Lazily allocate the mutable holder.
- Mutables mutables = _mutables;
+ Mutables? mutables = _mutables;
if (mutables == null)
{
mutables = _mutables = new Mutables();
private bool MoveNextSlowPath()
{
- Mutables mutables = _mutables;
+ Mutables? mutables = _mutables;
Debug.Assert(mutables != null);
Debug.Assert(mutables._currentPositionInChunk >= mutables._currentChunkSize);
private readonly T[] _data; // The elements to iterate over.
private readonly int _startIndex; // Where to begin iterating.
private readonly int _maximumIndex; // The maximum index to iterate over.
- private Shared<int> _currentIndex; // The current index (lazily allocated).
+ private Shared<int>? _currentIndex; // The current index (lazily allocated).
internal ArrayContiguousIndexRangeEnumerator(T[] data, int partitionCount, int partitionIndex)
{
Debug.Assert(_currentIndex == null, "Expected deferred allocation to ensure it happens on correct thread");
}
- internal override bool MoveNext(ref T currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref T currentElement, ref int currentKey)
{
// Lazily allocate the current index if needed.
if (_currentIndex == null)
private readonly int _partitionIndex; // The index of the current partition.
private readonly int _maxChunkSize; // The maximum size of a chunk. -1 if unlimited.
private readonly int _sectionCount; // Precomputed in ctor: the number of sections the range is split into.
- private Mutables _mutables; // Lazily allocated mutable variables.
+ private Mutables? _mutables; // Lazily allocated mutable variables.
private class Mutables
{
((_elementCount % sectionSize) == 0 ? 0 : 1);
}
- internal override bool MoveNext(ref T currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref T currentElement, ref int currentKey)
{
// Lazily allocate the mutable holder.
- Mutables mutables = _mutables;
+ Mutables? mutables = _mutables;
if (mutables == null)
{
mutables = _mutables = new Mutables();
private bool MoveNextSlowPath()
{
- Mutables mutables = _mutables;
+ Mutables? mutables = _mutables;
Debug.Assert(mutables != null);
Debug.Assert(mutables._currentPositionInChunk >= mutables._currentChunkSize);
private readonly IList<T> _data; // The elements to iterate over.
private readonly int _startIndex; // Where to begin iterating.
private readonly int _maximumIndex; // The maximum index to iterate over.
- private Shared<int> _currentIndex; // The current index (lazily allocated).
+ private Shared<int>? _currentIndex; // The current index (lazily allocated).
internal ListContiguousIndexRangeEnumerator(IList<T> data, int partitionCount, int partitionIndex)
{
Debug.Assert(_currentIndex == null, "Expected deferred allocation to ensure it happens on correct thread");
}
- internal override bool MoveNext(ref T currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref T currentElement, ref int currentKey)
{
// Lazily allocate the current index if needed.
if (_currentIndex == null)
private readonly Shared<int> _currentIndex; // The index shared by all.
private readonly Shared<int> _activeEnumeratorsCount; // How many enumerators over the same source have not been disposed yet?
private readonly Shared<bool> _exceptionTracker;
- private Mutables _mutables; // Any mutable fields on this enumerator. These mutables are local and persistent
+ private Mutables? _mutables; // Any mutable fields on this enumerator. These mutables are local and persistent
private class Mutables
{
// Just retrieves the current element from our current chunk.
//
- internal override bool MoveNext(ref T currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref T currentElement, ref int currentKey)
{
- Mutables mutables = _mutables;
+ Mutables? mutables = _mutables;
if (mutables == null)
{
mutables = _mutables = new Mutables();
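// Sketch (hypothetical ChunkReader, not from this patch) of the [MaybeNullWhen(false), AllowNull]
// ref-parameter contract applied to the MoveNext overrides above: [AllowNull] lets the caller pass
// in a default value, and [MaybeNullWhen(false)] records that the variable may still be default
// when the method returns false; it is only guaranteed non-null on a true return.
using System.Diagnostics.CodeAnalysis;

internal sealed class ChunkReader<T>
{
    private readonly T[] _data;
    private int _pos;

    internal ChunkReader(T[] data) => _data = data;

    internal bool MoveNext([MaybeNullWhen(false), AllowNull] ref T currentElement, ref int currentKey)
    {
        if (_pos >= _data.Length) return false;
        currentElement = _data[_pos];
        currentKey = _pos++;
        return true;
    }
}

// Caller side, mirroring the enumerators in this patch:
//     T element = default(T)!;
//     int key = 0;
//     while (reader.MoveNext(ref element!, ref key)) { /* use element */ }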
internal UnorderedHashRepartitionStream(
PartitionedStream<TInputOutput, TIgnoreKey> inputStream,
- Func<TInputOutput, THashKey> keySelector, IEqualityComparer<THashKey> keyComparer, IEqualityComparer<TInputOutput> elementComparer,
+ Func<TInputOutput, THashKey>? keySelector, IEqualityComparer<THashKey>? keyComparer, IEqualityComparer<TInputOutput>? elementComparer,
CancellationToken cancellationToken)
: base(inputStream.PartitionCount, Util.GetDefaultComparer<int>(), keyComparer, elementComparer)
{
private readonly Func<TIntermediate, TOutput> _resultSelector;
// A function that constructs seed instances
- private readonly Func<TIntermediate> _seedFactory;
+ private readonly Func<TIntermediate>? _seedFactory;
//---------------------------------------------------------------------------------------
// Constructs a new instance of an associative operator.
// This operator must be associative.
//
- internal AssociativeAggregationOperator(IEnumerable<TInput> child, TIntermediate seed, Func<TIntermediate> seedFactory, bool seedIsSpecified,
+ internal AssociativeAggregationOperator(IEnumerable<TInput> child, TIntermediate seed, Func<TIntermediate>? seedFactory, bool seedIsSpecified,
Func<TIntermediate, TInput, TIntermediate> intermediateReduce,
Func<TIntermediate, TIntermediate, TIntermediate> finalReduce,
Func<TIntermediate, TOutput> resultSelector, bool throwIfEmpty, QueryAggregationOptions options)
Debug.Assert(_finalReduce != null);
Debug.Assert(_resultSelector != null);
- TIntermediate accumulator = default(TIntermediate);
+ TIntermediate accumulator = default(TIntermediate)!;
bool hadElements = false;
// Because the final reduction is typically much cheaper than the intermediate
// the end, we will have our intermediate result, ready for final aggregation.
//
- internal override bool MoveNext(ref TIntermediate currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TIntermediate currentElement, ref int currentKey)
{
Debug.Assert(_reduceOperator != null);
Debug.Assert(_reduceOperator._intermediateReduce != null, "expected a compiled operator");
_accumulated = true;
bool hadNext = false;
- TIntermediate accumulator = default(TIntermediate);
+ TIntermediate accumulator = default(TIntermediate)!;
// Initialize the accumulator.
if (_reduceOperator._seedIsSpecified)
// Seed may be unspecified only if TInput is the same as TIntermediate.
Debug.Assert(typeof(TInput) == typeof(TIntermediate));
- TInput acc = default(TInput);
- TKey accKeyUnused = default(TKey);
- if (!_source.MoveNext(ref acc, ref accKeyUnused)) return false;
+ TInput acc = default(TInput)!;
+ TKey accKeyUnused = default(TKey)!;
+ if (!_source.MoveNext(ref acc!, ref accKeyUnused)) return false;
hadNext = true;
- accumulator = (TIntermediate)((object)acc);
+ accumulator = (TIntermediate)((object?)acc!);
}
// Scan through the source and accumulate the result.
- TInput input = default(TInput);
- TKey keyUnused = default(TKey);
+ TInput input = default(TInput)!;
+ TKey keyUnused = default(TKey)!;
int i = 0;
- while (_source.MoveNext(ref input, ref keyUnused))
+ while (_source.MoveNext(ref input!, ref keyUnused))
{
if ((i++ & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
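// Sketch (hypothetical Identity helper) of the cast-through-object trick used to seed the
// accumulator above when TInput and TIntermediate are asserted to be the same type: C# does not
// allow a direct cast between unrelated type parameters, so the value is boxed to object and then
// converted to the target parameter, with '!' asserting the boxed value is usable.
internal static class Identity
{
    internal static TTo Reinterpret<TFrom, TTo>(TFrom value)
    {
        System.Diagnostics.Debug.Assert(typeof(TFrom) == typeof(TTo));
        return (TTo)((object?)value!);   // Same shape as the accumulator seeding in this patch.
    }
}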
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
// index offset.
//
- internal override bool MoveNext(ref TSource currentElement, ref ConcatKey<TLeftKey, TRightKey> currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TSource currentElement, ref ConcatKey<TLeftKey, TRightKey> currentKey)
{
Debug.Assert(_firstSource != null);
Debug.Assert(_secondSource != null);
if (!_begunSecond)
{
// If elements remain, just return true and continue enumerating the left.
- TLeftKey leftKey = default(TLeftKey);
- if (_firstSource.MoveNext(ref currentElement, ref leftKey))
+ TLeftKey leftKey = default(TLeftKey)!;
+ if (_firstSource.MoveNext(ref currentElement!, ref leftKey))
{
currentKey = ConcatKey<TLeftKey, TRightKey>.MakeLeft(leftKey);
return true;
}
// Now either move on to, or continue, enumerating the right data source.
- TRightKey rightKey = default(TRightKey);
- if (_secondSource.MoveNext(ref currentElement, ref rightKey))
+ TRightKey rightKey = default(TRightKey)!;
+ if (_secondSource.MoveNext(ref currentElement!, ref rightKey))
{
currentKey = ConcatKey<TLeftKey, TRightKey>.MakeRight(rightKey);
return true;
private readonly TRightKey _rightKey;
private readonly bool _isLeft;
- private ConcatKey(TLeftKey leftKey, TRightKey rightKey, bool isLeft)
+ private ConcatKey([AllowNull] TLeftKey leftKey, [AllowNull] TRightKey rightKey, bool isLeft)
{
_leftKey = leftKey;
_rightKey = rightKey;
_isLeft = isLeft;
}
- internal static ConcatKey<TLeftKey, TRightKey> MakeLeft(TLeftKey leftKey)
+ internal static ConcatKey<TLeftKey, TRightKey> MakeLeft([AllowNull] TLeftKey leftKey)
{
- return new ConcatKey<TLeftKey, TRightKey>(leftKey, default(TRightKey), isLeft: true);
+ return new ConcatKey<TLeftKey, TRightKey>(leftKey, default, isLeft: true);
}
- internal static ConcatKey<TLeftKey, TRightKey> MakeRight(TRightKey rightKey)
+ internal static ConcatKey<TLeftKey, TRightKey> MakeRight([AllowNull] TRightKey rightKey)
{
- return new ConcatKey<TLeftKey, TRightKey>(default(TLeftKey), rightKey, isLeft: false);
+ return new ConcatKey<TLeftKey, TRightKey>(default, rightKey, isLeft: false);
}
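// Sketch (hypothetical EitherKey struct) of the [AllowNull]-parameter plus target-typed default
// shape used by ConcatKey above: only one side of the key is meaningful, the other side is stored
// as default and may therefore be null for reference-type key types, which [AllowNull] permits
// without changing the declared field types.
using System.Diagnostics.CodeAnalysis;

internal readonly struct EitherKey<TLeft, TRight>
{
    private readonly TLeft _left;
    private readonly TRight _right;
    internal readonly bool IsLeft;

    private EitherKey([AllowNull] TLeft left, [AllowNull] TRight right, bool isLeft)
    {
        _left = left;
        _right = right;
        IsLeft = isLeft;
    }

    internal static EitherKey<TLeft, TRight> Left([AllowNull] TLeft left)
        => new EitherKey<TLeft, TRight>(left, default, isLeft: true);

    internal static EitherKey<TLeft, TRight> Right([AllowNull] TRight right)
        => new EitherKey<TLeft, TRight>(default, right, isLeft: false);
}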
internal static IComparer<ConcatKey<TLeftKey, TRightKey>> MakeComparer(
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
internal sealed class ExceptQueryOperator<TInputOutput> :
BinaryQueryOperator<TInputOutput, TInputOutput, TInputOutput>
{
- private readonly IEqualityComparer<TInputOutput> _comparer; // An equality comparer.
+ private readonly IEqualityComparer<TInputOutput>? _comparer; // An equality comparer.
//---------------------------------------------------------------------------------------
// Constructs a new set except operator.
//
- internal ExceptQueryOperator(ParallelQuery<TInputOutput> left, ParallelQuery<TInputOutput> right, IEqualityComparer<TInputOutput> comparer)
+ internal ExceptQueryOperator(ParallelQuery<TInputOutput> left, ParallelQuery<TInputOutput> right, IEqualityComparer<TInputOutput>? comparer)
: base(left, right)
{
Debug.Assert(left != null && right != null, "child data sources cannot be null");
{
private readonly QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> _leftSource; // Left data source.
private readonly QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, int> _rightSource; // Right data source.
- private readonly IEqualityComparer<TInputOutput> _comparer; // A comparer used for equality checks/hash-coding.
- private Set<TInputOutput> _hashLookup; // The hash lookup, used to produce the distinct set.
+ private readonly IEqualityComparer<TInputOutput>? _comparer; // A comparer used for equality checks/hash-coding.
+ private Set<TInputOutput>? _hashLookup; // The hash lookup, used to produce the distinct set.
private readonly CancellationToken _cancellationToken;
- private Shared<int> _outputLoopCount;
+ private Shared<int>? _outputLoopCount;
//---------------------------------------------------------------------------------------
// Instantiates a new except query operator enumerator.
internal ExceptQueryOperatorEnumerator(
QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> leftSource,
QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, int> rightSource,
- IEqualityComparer<TInputOutput> comparer,
+ IEqualityComparer<TInputOutput>? comparer,
CancellationToken cancellationToken)
{
Debug.Assert(leftSource != null);
// Walks the two data sources, left and then right, to produce the distinct set
//
- internal override bool MoveNext(ref TInputOutput currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TInputOutput currentElement, ref int currentKey)
{
Debug.Assert(_leftSource != null);
Debug.Assert(_rightSource != null);
// Now iterate over the right data source, looking for matches.
Pair<TInputOutput, NoKeyMemoizationRequired> leftElement = default(Pair<TInputOutput, NoKeyMemoizationRequired>);
- TLeftKey leftKeyUnused = default(TLeftKey);
+ TLeftKey leftKeyUnused = default!;
+ Debug.Assert(_outputLoopCount != null);
while (_leftSource.MoveNext(ref leftElement, ref leftKeyUnused))
{
if ((_outputLoopCount.Value++ & CancellationState.POLL_INTERVAL) == 0)
{
private readonly QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> _leftSource; // Left data source.
private readonly QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, int> _rightSource; // Right data source.
- private readonly IEqualityComparer<TInputOutput> _comparer; // A comparer used for equality checks/hash-coding.
+ private readonly IEqualityComparer<TInputOutput>? _comparer; // A comparer used for equality checks/hash-coding.
private readonly IComparer<TLeftKey> _leftKeyComparer; // A comparer for order keys.
- private IEnumerator<KeyValuePair<Wrapper<TInputOutput>, Pair<TInputOutput, TLeftKey>>> _outputEnumerator; // The enumerator output elements + order keys.
+ private IEnumerator<KeyValuePair<Wrapper<TInputOutput>, Pair<TInputOutput, TLeftKey>>>? _outputEnumerator; // The enumerator output elements + order keys.
private readonly CancellationToken _cancellationToken;
//---------------------------------------------------------------------------------------
internal OrderedExceptQueryOperatorEnumerator(
QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> leftSource,
QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, int> rightSource,
- IEqualityComparer<TInputOutput> comparer, IComparer<TLeftKey> leftKeyComparer,
+ IEqualityComparer<TInputOutput>? comparer, IComparer<TLeftKey> leftKeyComparer,
CancellationToken cancellationToken)
{
Debug.Assert(leftSource != null);
// Walks the two data sources, left and then right, to produce the distinct set
//
- internal override bool MoveNext(ref TInputOutput currentElement, ref TLeftKey currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TInputOutput currentElement, ref TLeftKey currentKey)
{
Debug.Assert(_leftSource != null);
Debug.Assert(_rightSource != null);
new WrapperEqualityComparer<TInputOutput>(_comparer));
Pair<TInputOutput, NoKeyMemoizationRequired> leftElement = default(Pair<TInputOutput, NoKeyMemoizationRequired>);
- TLeftKey leftKey = default(TLeftKey);
+ TLeftKey leftKey = default!;
while (_leftSource.MoveNext(ref leftElement, ref leftKey))
{
if ((i++ & CancellationState.POLL_INTERVAL) == 0)
private readonly Func<TLeftInput, TKey> _leftKeySelector; // The key selection routine for the outer (left) data source.
private readonly Func<TRightInput, TKey> _rightKeySelector; // The key selection routine for the inner (right) data source.
private readonly Func<TLeftInput, IEnumerable<TRightInput>, TOutput> _resultSelector; // The result selection routine.
- private readonly IEqualityComparer<TKey> _keyComparer; // An optional key comparison object.
+ private readonly IEqualityComparer<TKey>? _keyComparer; // An optional key comparison object.
//---------------------------------------------------------------------------------------
// Constructs a new join operator.
Func<TLeftInput, TKey> leftKeySelector,
Func<TRightInput, TKey> rightKeySelector,
Func<TLeftInput, IEnumerable<TRightInput>, TOutput> resultSelector,
- IEqualityComparer<TKey> keyComparer)
+ IEqualityComparer<TKey>? keyComparer)
: base(left, right)
{
Debug.Assert(left != null && right != null, "child data sources cannot be null");
private void WrapPartitionedStreamHelper<TLeftKey, TRightKey>(
PartitionedStream<Pair<TLeftInput, TKey>, TLeftKey> leftHashStream,
HashLookupBuilder<IEnumerable<TRightInput>, TRightKey, TKey>[] rightLookupBuilders,
- IComparer<TRightKey> rightKeyComparer, IPartitionedStreamRecipient<TOutput> outputRecipient,
+ IComparer<TRightKey>? rightKeyComparer, IPartitionedStreamRecipient<TOutput> outputRecipient,
int partitionCount, CancellationToken cancellationToken)
{
if (RightChild.OutputOrdered && LeftChild.OutputOrdered)
internal class GroupJoinHashLookupBuilder<TElement, TOrderKey, THashKey> : HashLookupBuilder<IEnumerable<TElement>, int, THashKey>
{
private readonly QueryOperatorEnumerator<Pair<TElement, THashKey>, TOrderKey> _dataSource; // data source. For building.
- private readonly IEqualityComparer<THashKey> _keyComparer; // An optional key comparison object.
+ private readonly IEqualityComparer<THashKey>? _keyComparer; // An optional key comparison object.
- internal GroupJoinHashLookupBuilder(QueryOperatorEnumerator<Pair<TElement, THashKey>, TOrderKey> dataSource, IEqualityComparer<THashKey> keyComparer)
+ internal GroupJoinHashLookupBuilder(QueryOperatorEnumerator<Pair<TElement, THashKey>, TOrderKey> dataSource, IEqualityComparer<THashKey>? keyComparer)
{
Debug.Assert(dataSource != null);
{
bool hasCollision = true;
- ListChunk<TElement> currentValue = default(ListChunk<TElement>);
+ ListChunk<TElement>? currentValue = default(ListChunk<TElement>);
if (!_base.TryGetValue(hashKey, ref currentValue))
{
const int INITIAL_CHUNK_SIZE = 2;
internal sealed class OrderedGroupJoinHashLookupBuilder<TElement, TOrderKey, THashKey> : HashLookupBuilder<IEnumerable<TElement>, Pair<bool, TOrderKey>, THashKey>
{
private readonly QueryOperatorEnumerator<Pair<TElement, THashKey>, TOrderKey> _dataSource; // data source. For building.
- private readonly IEqualityComparer<THashKey> _keyComparer; // An optional key comparison object.
+ private readonly IEqualityComparer<THashKey>? _keyComparer; // An optional key comparison object.
private readonly IComparer<TOrderKey> _orderKeyComparer;
internal OrderedGroupJoinHashLookupBuilder(
QueryOperatorEnumerator<Pair<TElement, THashKey>, TOrderKey> dataSource,
- IEqualityComparer<THashKey> keyComparer,
+ IEqualityComparer<THashKey>? keyComparer,
IComparer<TOrderKey> orderKeyComparer)
{
Debug.Assert(dataSource != null);
{
bool hasCollision = true;
- GroupKeyData currentValue = default(GroupKeyData);
+ GroupKeyData? currentValue = default(GroupKeyData);
if (!_base.TryGetValue(hashKey, ref currentValue))
{
currentValue = new GroupKeyData(orderKey, hashKey, _orderKeyComparer);
private Pair<IEnumerable<TElement>, TOrderKey> GetValueList(THashKey key)
{
- TBaseElement baseValue = default(TBaseElement);
- if (_base.TryGetValue(key, ref baseValue))
+ TBaseElement baseValue = default(TBaseElement)!;
+ if (_base.TryGetValue(key, ref baseValue!))
{
return CreateValuePair(baseValue);
}
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
private readonly Func<TLeftInput, TRightInput, TOutput> _resultSelector; // Result selector.
private readonly HashJoinOutputKeyBuilder<TLeftKey, TRightKey, TOutputKey> _outputKeyBuilder;
private readonly CancellationToken _cancellationToken;
- private Mutables _mutables;
+ private Mutables? _mutables;
private class Mutables
{
- internal TLeftInput _currentLeft; // The current matching left element.
- internal TLeftKey _currentLeftKey; // The current index of the matching left element.
- internal HashJoinHashLookup<THashKey, TRightInput, TRightKey> _rightHashLookup; // The hash lookup.
- internal ListChunk<Pair<TRightInput, TRightKey>> _currentRightMatches; // Current right matches (if any).
+ internal TLeftInput _currentLeft = default!; // The current matching left element.
+ internal TLeftKey _currentLeftKey = default!; // The current index of the matching left element.
+ internal HashJoinHashLookup<THashKey, TRightInput, TRightKey>? _rightHashLookup; // The hash lookup.
+ internal ListChunk<Pair<TRightInput, TRightKey>>? _currentRightMatches; // Current right matches (if any).
internal int _currentRightMatchesIndex; // Current index in the set of right matches.
internal int _outputLoopCount;
}
// as we do for inner joins.
//
- internal override bool MoveNext(ref TOutput currentElement, ref TOutputKey currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TOutput currentElement, ref TOutputKey currentKey)
{
Debug.Assert(_resultSelector != null, "expected a compiled result selector");
Debug.Assert(_leftSource != null);
Debug.Assert(_rightLookupBuilder != null);
// BUILD phase: If we haven't built the hash-table yet, create that first.
- Mutables mutables = _mutables;
+ Mutables? mutables = _mutables;
if (mutables == null)
{
mutables = _mutables = new Mutables();
}
// PROBE phase: So long as the source has a next element, return the match.
- ListChunk<Pair<TRightInput, TRightKey>> currentRightChunk = mutables._currentRightMatches;
+ ListChunk<Pair<TRightInput, TRightKey>>? currentRightChunk = mutables._currentRightMatches;
if (currentRightChunk != null && mutables._currentRightMatchesIndex == currentRightChunk.Count)
{
mutables._currentRightMatches = currentRightChunk.Next;
{
// We have to look up the next list of matches in the hash-table.
Pair<TLeftInput, THashKey> leftPair = default(Pair<TLeftInput, THashKey>);
- TLeftKey leftKey = default(TLeftKey);
+ TLeftKey leftKey = default(TLeftKey)!;
while (_leftSource.MoveNext(ref leftPair, ref leftKey))
{
if ((mutables._outputLoopCount++ & CancellationState.POLL_INTERVAL) == 0)
// Ignore null keys.
if (leftHashKey != null)
{
+ Debug.Assert(mutables._rightHashLookup != null);
if (mutables._rightHashLookup.TryGetValue(leftHashKey, ref matchValue))
{
// We found a new match. We remember the list in case there are multiple
#endif
Pair<TBaseElement, THashKey> currentPair = default(Pair<TBaseElement, THashKey>);
- TBaseOrderKey orderKey = default(TBaseOrderKey);
+ TBaseOrderKey orderKey = default(TBaseOrderKey)!;
int i = 0;
while (dataSource.MoveNext(ref currentPair, ref orderKey))
{
}
private readonly Pair<TElement, TOrderKey> _head;
- internal ListChunk<Pair<TElement, TOrderKey>> Tail
+ internal ListChunk<Pair<TElement, TOrderKey>>? Tail
{
get
{
return _tail;
}
}
- private ListChunk<Pair<TElement, TOrderKey>> _tail;
+ private ListChunk<Pair<TElement, TOrderKey>>? _tail;
private const int INITIAL_CHUNK_SIZE = 2;
{
_tail = new ListChunk<Pair<TElement, TOrderKey>>(INITIAL_CHUNK_SIZE);
}
- _tail.Add(CreatePair(value, orderKey));
+ _tail!.Add(CreatePair(value, orderKey));
return requiresMemoryChange;
}
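// Sketch (hypothetical ChunkedList, not from this patch) of the lazily-allocated-tail idiom above:
// the tail chunk is nullable because it does not exist until the first Add, and '!' is used where
// the surrounding logic guarantees allocation has happened through a guard that nullable flow
// analysis cannot correlate with the field itself.
using System.Collections.Generic;

internal sealed class ChunkedList<T>
{
    private List<T>? _tail;              // Created on first use.
    private int _count;

    internal bool Add(T value)
    {
        bool allocated = _count == 0;    // The guard is a separate flag, not a null check on _tail.
        if (allocated)
        {
            _tail = new List<T>(2);
        }
        _tail!.Add(value);               // Invariant: _count == 0 implies _tail was just created.
        _count++;
        return allocated;
    }
}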
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
internal sealed class IntersectQueryOperator<TInputOutput> :
BinaryQueryOperator<TInputOutput, TInputOutput, TInputOutput>
{
- private readonly IEqualityComparer<TInputOutput> _comparer; // An equality comparer.
+ private readonly IEqualityComparer<TInputOutput>? _comparer; // An equality comparer.
//---------------------------------------------------------------------------------------
// Constructs a new intersection operator.
//
- internal IntersectQueryOperator(ParallelQuery<TInputOutput> left, ParallelQuery<TInputOutput> right, IEqualityComparer<TInputOutput> comparer)
+ internal IntersectQueryOperator(ParallelQuery<TInputOutput> left, ParallelQuery<TInputOutput> right, IEqualityComparer<TInputOutput>? comparer)
: base(left, right)
{
Debug.Assert(left != null && right != null, "child data sources cannot be null");
{
private readonly QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> _leftSource; // Left data source.
private readonly QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, int> _rightSource; // Right data source.
- private readonly IEqualityComparer<TInputOutput> _comparer; // Comparer to use for equality/hash-coding.
- private Set<TInputOutput> _hashLookup; // The hash lookup, used to produce the intersection.
+ private readonly IEqualityComparer<TInputOutput>? _comparer; // Comparer to use for equality/hash-coding.
+ private Set<TInputOutput>? _hashLookup; // The hash lookup, used to produce the intersection.
private readonly CancellationToken _cancellationToken;
- private Shared<int> _outputLoopCount;
+ private Shared<int>? _outputLoopCount;
//---------------------------------------------------------------------------------------
// Instantiates a new intersection operator.
internal IntersectQueryOperatorEnumerator(
QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> leftSource,
QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, int> rightSource,
- IEqualityComparer<TInputOutput> comparer, CancellationToken cancellationToken)
+ IEqualityComparer<TInputOutput>? comparer, CancellationToken cancellationToken)
{
Debug.Assert(leftSource != null);
Debug.Assert(rightSource != null);
// Walks the two data sources, left and then right, to produce the intersection.
//
- internal override bool MoveNext(ref TInputOutput currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TInputOutput currentElement, ref int currentKey)
{
Debug.Assert(_leftSource != null);
Debug.Assert(_rightSource != null);
// Now iterate over the left data source, looking for matches.
Pair<TInputOutput, NoKeyMemoizationRequired> leftElement = default(Pair<TInputOutput, NoKeyMemoizationRequired>);
- TLeftKey keyUnused = default(TLeftKey);
+ TLeftKey keyUnused = default(TLeftKey)!;
while (_leftSource.MoveNext(ref leftElement, ref keyUnused))
{
+ Debug.Assert(_outputLoopCount != null);
if ((_outputLoopCount.Value++ & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
private readonly QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, int> _rightSource; // Right data source.
private readonly IEqualityComparer<Wrapper<TInputOutput>> _comparer; // Comparer to use for equality/hash-coding.
private readonly IComparer<TLeftKey> _leftKeyComparer; // Comparer to use to determine ordering of order keys.
- private Dictionary<Wrapper<TInputOutput>, Pair<TInputOutput, TLeftKey>> _hashLookup; // The hash lookup, used to produce the intersection.
+ private Dictionary<Wrapper<TInputOutput>, Pair<TInputOutput, TLeftKey>>? _hashLookup; // The hash lookup, used to produce the intersection.
private readonly CancellationToken _cancellationToken;
//---------------------------------------------------------------------------------------
internal OrderedIntersectQueryOperatorEnumerator(
QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> leftSource,
QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, int> rightSource,
- IEqualityComparer<TInputOutput> comparer, IComparer<TLeftKey> leftKeyComparer,
+ IEqualityComparer<TInputOutput>? comparer, IComparer<TLeftKey> leftKeyComparer,
CancellationToken cancellationToken)
{
Debug.Assert(leftSource != null);
// Walks the two data sources, left and then right, to produce the intersection.
//
- internal override bool MoveNext(ref TInputOutput currentElement, ref TLeftKey currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TInputOutput currentElement, ref TLeftKey currentKey)
{
Debug.Assert(_leftSource != null);
Debug.Assert(_rightSource != null);
_hashLookup = new Dictionary<Wrapper<TInputOutput>, Pair<TInputOutput, TLeftKey>>(_comparer);
Pair<TInputOutput, NoKeyMemoizationRequired> leftElement = default(Pair<TInputOutput, NoKeyMemoizationRequired>);
- TLeftKey leftKey = default(TLeftKey);
+ TLeftKey leftKey = default(TLeftKey)!;
while (_leftSource.MoveNext(ref leftElement, ref leftKey))
{
if ((i++ & CancellationState.POLL_INTERVAL) == 0)
private readonly Func<TLeftInput, TKey> _leftKeySelector; // The key selection routine for the outer (left) data source.
private readonly Func<TRightInput, TKey> _rightKeySelector; // The key selection routine for the inner (right) data source.
private readonly Func<TLeftInput, TRightInput, TOutput> _resultSelector; // The result selection routine.
- private readonly IEqualityComparer<TKey> _keyComparer; // An optional key comparison object.
+ private readonly IEqualityComparer<TKey>? _keyComparer; // An optional key comparison object.
//---------------------------------------------------------------------------------------
// Constructs a new join operator.
Func<TLeftInput, TKey> leftKeySelector,
Func<TRightInput, TKey> rightKeySelector,
Func<TLeftInput, TRightInput, TOutput> resultSelector,
- IEqualityComparer<TKey> keyComparer)
+ IEqualityComparer<TKey>? keyComparer)
: base(left, right)
{
Debug.Assert(left != null && right != null, "child data sources cannot be null");
internal class JoinHashLookupBuilder<TElement, TOrderKey, THashKey> : HashLookupBuilder<TElement, TOrderKey, THashKey>
{
private readonly QueryOperatorEnumerator<Pair<TElement, THashKey>, TOrderKey> _dataSource; // data source. For building.
- private readonly IEqualityComparer<THashKey> _keyComparer; // An optional key comparison object.
+ private readonly IEqualityComparer<THashKey>? _keyComparer; // An optional key comparison object.
- internal JoinHashLookupBuilder(QueryOperatorEnumerator<Pair<TElement, THashKey>, TOrderKey> dataSource, IEqualityComparer<THashKey> keyComparer)
+ internal JoinHashLookupBuilder(QueryOperatorEnumerator<Pair<TElement, THashKey>, TOrderKey> dataSource, IEqualityComparer<THashKey>? keyComparer)
{
Debug.Assert(dataSource != null);
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
internal sealed class UnionQueryOperator<TInputOutput> :
BinaryQueryOperator<TInputOutput, TInputOutput, TInputOutput>
{
- private readonly IEqualityComparer<TInputOutput> _comparer; // An equality comparer.
+ private readonly IEqualityComparer<TInputOutput>? _comparer; // An equality comparer.
//---------------------------------------------------------------------------------------
// Constructs a new union operator.
//
- internal UnionQueryOperator(ParallelQuery<TInputOutput> left, ParallelQuery<TInputOutput> right, IEqualityComparer<TInputOutput> comparer)
+ internal UnionQueryOperator(ParallelQuery<TInputOutput> left, ParallelQuery<TInputOutput> right, IEqualityComparer<TInputOutput>? comparer)
: base(left, right)
{
Debug.Assert(left != null && right != null, "child data sources cannot be null");
private class UnionQueryOperatorEnumerator<TLeftKey, TRightKey> : QueryOperatorEnumerator<TInputOutput, int>
{
- private QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> _leftSource; // Left data source.
- private QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TRightKey> _rightSource; // Right data source.
- private Set<TInputOutput> _hashLookup; // The hash lookup, used to produce the union.
+ private QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey>? _leftSource; // Left data source.
+ private QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TRightKey>? _rightSource; // Right data source.
+ private Set<TInputOutput>? _hashLookup; // The hash lookup, used to produce the union.
private readonly CancellationToken _cancellationToken;
- private Shared<int> _outputLoopCount;
- private readonly IEqualityComparer<TInputOutput> _comparer;
+ private Shared<int>? _outputLoopCount;
+ private readonly IEqualityComparer<TInputOutput>? _comparer;
//---------------------------------------------------------------------------------------
// Instantiates a new union operator.
internal UnionQueryOperatorEnumerator(
QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> leftSource,
QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TRightKey> rightSource,
- IEqualityComparer<TInputOutput> comparer,
+ IEqualityComparer<TInputOutput>? comparer,
CancellationToken cancellationToken)
{
Debug.Assert(leftSource != null);
// Walks the two data sources, left and then right, to produce the union.
//
- internal override bool MoveNext(ref TInputOutput currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TInputOutput currentElement, ref int currentKey)
{
if (_hashLookup == null)
{
if (_leftSource != null)
{
// Iterate over this set's elements until we find a unique element.
- TLeftKey keyUnused = default(TLeftKey);
+ TLeftKey keyUnused = default(TLeftKey)!;
Pair<TInputOutput, NoKeyMemoizationRequired> currentLeftElement = default(Pair<TInputOutput, NoKeyMemoizationRequired>);
int i = 0;
if (_rightSource != null)
{
// Iterate over this set's elements until we find a unique element.
- TRightKey keyUnused = default(TRightKey);
+ TRightKey keyUnused = default(TRightKey)!;
Pair<TInputOutput, NoKeyMemoizationRequired> currentRightElement = default(Pair<TInputOutput, NoKeyMemoizationRequired>);
while (_rightSource.MoveNext(ref currentRightElement, ref keyUnused))
{
+ Debug.Assert(_outputLoopCount != null);
if ((_outputLoopCount.Value++ & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
private readonly QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> _leftSource; // Left data source.
private readonly QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TRightKey> _rightSource; // Right data source.
private readonly IComparer<ConcatKey<TLeftKey, TRightKey>> _keyComparer; // Comparer for compound order keys.
- private IEnumerator<KeyValuePair<Wrapper<TInputOutput>, Pair<TInputOutput, ConcatKey<TLeftKey, TRightKey>>>> _outputEnumerator; // Enumerator over the output of the union.
+ private IEnumerator<KeyValuePair<Wrapper<TInputOutput>, Pair<TInputOutput, ConcatKey<TLeftKey, TRightKey>>>>? _outputEnumerator; // Enumerator over the output of the union.
private readonly bool _leftOrdered; // Whether the left data source is ordered.
private readonly bool _rightOrdered; // Whether the right data source is ordered.
- private readonly IEqualityComparer<TInputOutput> _comparer; // Comparer for the elements.
+ private readonly IEqualityComparer<TInputOutput>? _comparer; // Comparer for the elements.
private readonly CancellationToken _cancellationToken;
//---------------------------------------------------------------------------------------
internal OrderedUnionQueryOperatorEnumerator(
QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> leftSource,
QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TRightKey> rightSource,
- bool leftOrdered, bool rightOrdered, IEqualityComparer<TInputOutput> comparer, IComparer<ConcatKey<TLeftKey, TRightKey>> keyComparer,
+ bool leftOrdered, bool rightOrdered, IEqualityComparer<TInputOutput>? comparer, IComparer<ConcatKey<TLeftKey, TRightKey>> keyComparer,
CancellationToken cancellationToken)
{
Debug.Assert(leftSource != null);
// Walks the two data sources, left and then right, to produce the union.
//
- internal override bool MoveNext(ref TInputOutput currentElement, ref ConcatKey<TLeftKey, TRightKey> currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TInputOutput currentElement, ref ConcatKey<TLeftKey, TRightKey> currentKey)
{
Debug.Assert(_leftSource != null);
Debug.Assert(_rightSource != null);
new Dictionary<Wrapper<TInputOutput>, Pair<TInputOutput, ConcatKey<TLeftKey, TRightKey>>>(wrapperComparer);
Pair<TInputOutput, NoKeyMemoizationRequired> elem = default(Pair<TInputOutput, NoKeyMemoizationRequired>);
- TLeftKey leftKey = default(TLeftKey);
+ TLeftKey leftKey = default(TLeftKey)!;
int i = 0;
while (_leftSource.MoveNext(ref elem, ref leftKey))
CancellationState.ThrowIfCanceled(_cancellationToken);
ConcatKey<TLeftKey, TRightKey> key =
- ConcatKey<TLeftKey, TRightKey>.MakeLeft(_leftOrdered ? leftKey : default(TLeftKey));
+ ConcatKey<TLeftKey, TRightKey>.MakeLeft(_leftOrdered ? leftKey : default);
Pair<TInputOutput, ConcatKey<TLeftKey, TRightKey>> oldEntry;
Wrapper<TInputOutput> wrappedElem = new Wrapper<TInputOutput>(elem.First);
}
}
- TRightKey rightKey = default(TRightKey);
+ TRightKey rightKey = default(TRightKey)!;
while (_rightSource.MoveNext(ref elem, ref rightKey))
{
if ((i++ & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
ConcatKey<TLeftKey, TRightKey> key =
- ConcatKey<TLeftKey, TRightKey>.MakeRight(_rightOrdered ? rightKey : default(TRightKey));
+ ConcatKey<TLeftKey, TRightKey>.MakeRight(_rightOrdered ? rightKey : default);
Pair<TInputOutput, ConcatKey<TLeftKey, TRightKey>> oldEntry;
Wrapper<TInputOutput> wrappedElem = new Wrapper<TInputOutput>(elem.First);
QueryResults<TLeftInput> leftChildResults = _leftChild.Open(settings, preferStriping);
QueryResults<TRightInput> rightChildResults = _rightChild.Open(settings, preferStriping);
+ Debug.Assert(settings.DegreeOfParallelism != null);
int partitionCount = settings.DegreeOfParallelism.Value;
+ Debug.Assert(settings.TaskScheduler != null);
if (_prematureMergeLeft)
{
PartitionedStreamMerger<TLeftInput> merger = new PartitionedStreamMerger<TLeftInput>(
false, ParallelMergeOptions.FullyBuffered, settings.TaskScheduler, _leftChild.OutputOrdered,
settings.CancellationState, settings.QueryId);
leftChildResults.GivePartitionedStream(merger);
+ Debug.Assert(merger.MergeExecutor != null);
leftChildResults = new ListQueryResults<TLeftInput>(
- merger.MergeExecutor.GetResultsAsArray(), partitionCount, preferStriping);
+ merger.MergeExecutor.GetResultsAsArray()!, partitionCount, preferStriping);
}
if (_prematureMergeRight)
false, ParallelMergeOptions.FullyBuffered, settings.TaskScheduler, _rightChild.OutputOrdered,
settings.CancellationState, settings.QueryId);
rightChildResults.GivePartitionedStream(merger);
+ Debug.Assert(merger.MergeExecutor != null);
rightChildResults = new ListQueryResults<TRightInput>(
- merger.MergeExecutor.GetResultsAsArray(), partitionCount, preferStriping);
+ merger.MergeExecutor.GetResultsAsArray()!, partitionCount, preferStriping);
}
return new ZipQueryOperatorResults(leftChildResults, rightChildResults, _resultSelector, partitionCount, preferStriping);
internal override void GivePartitionedStream(IPartitionedStreamRecipient<TOutput> recipient)
{
Debug.Assert(IsIndexible == (_op.OrdinalIndexState == OrdinalIndexState.Indexable));
+ Debug.Assert(_settings.ExecutionMode != null);
if (_settings.ExecutionMode.Value == ParallelExecutionMode.Default && _op.LimitsParallelism)
{
+ Debug.Assert(_settings.DegreeOfParallelism != null);
// We need to run the query sequentially up to and including this operator
IEnumerable<TOutput> opSequential = _op.AsSequentialQuery(_settings.CancellationState.ExternalCancellationToken);
PartitionedStream<TOutput, int> result = ExchangeUtilities.PartitionDataSource(
}
else if (IsIndexible)
{
+ Debug.Assert(_settings.DegreeOfParallelism != null);
// The output of this operator is indexable. Pass the partitioned output into the IPartitionedStreamRecipient.
PartitionedStream<TOutput, int> result = ExchangeUtilities.PartitionDataSource(this, _settings.DegreeOfParallelism.Value, _preferStriping);
recipient.Receive<int>(result);
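// Sketch of the assert-then-use shape applied to the query settings above; QuerySettingsSketch is
// a hypothetical stand-in, and the assumption here is that optional settings stay null while a
// query is being composed and are filled in with defaults before execution, so the asserts both
// document that invariant and (for reference-typed members) satisfy nullable flow analysis.
using System;
using System.Diagnostics;
using System.Threading.Tasks;

internal struct QuerySettingsSketch
{
    internal int? DegreeOfParallelism;     // Null while the query is still being composed.
    internal TaskScheduler? Scheduler;     // Null while the query is still being composed.

    internal QuerySettingsSketch WithDefaults()
    {
        DegreeOfParallelism ??= Environment.ProcessorCount;
        Scheduler ??= TaskScheduler.Default;
        return this;
    }

    internal int GetPartitionCount()
    {
        Debug.Assert(DegreeOfParallelism != null, "defaults must be applied before execution");
        return DegreeOfParallelism.Value;
    }

    internal TaskScheduler GetScheduler()
    {
        Debug.Assert(Scheduler != null);
        return Scheduler;                  // Non-null here as far as flow analysis is concerned.
    }
}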
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
// The single result of aggregation.
//
- protected override int InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override int InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<int, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<TSource, TKey> source, object sharedData,
+ int index, int count, QueryOperatorEnumerator<TSource, TKey> source, object? sharedData,
CancellationToken cancellationToken)
{
return new CountAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
protected override bool MoveNextCore(ref int currentElement)
{
- TSource elementUnused = default(TSource);
- TKey keyUnused = default(TKey);
+ TSource elementUnused = default(TSource)!;
+ TKey keyUnused = default(TKey)!;
QueryOperatorEnumerator<TSource, TKey> source = _source;
- if (source.MoveNext(ref elementUnused, ref keyUnused))
+ if (source.MoveNext(ref elementUnused!, ref keyUnused))
{
// We just scroll through the enumerator and keep a running count.
int count = 0;
count++;
}
}
- while (source.MoveNext(ref elementUnused, ref keyUnused));
+ while (source.MoveNext(ref elementUnused!, ref keyUnused));
currentElement = count;
return true;
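// Sketch (hypothetical AggregationSketch helper) of the ref Exception? convention used by the
// InternalAggregate overrides below: the override appears to hand a single exception back through
// the nullable ref parameter for the caller to throw, instead of throwing from inside the
// reduction itself; the names and shape here are illustrative assumptions, not the PLINQ code.
using System;
using System.Collections.Generic;

internal static class AggregationSketch
{
    internal static int SumOrReport(IReadOnlyList<int> partitionResults, ref Exception? singularExceptionToThrow)
    {
        if (partitionResults.Count == 0)
        {
            singularExceptionToThrow = new InvalidOperationException("Sequence contains no elements.");
            return default;
        }

        int total = 0;
        for (int i = 0; i < partitionResults.Count; i++)
        {
            total += partitionResults[i];
        }
        return total;
    }
}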
// The single result of aggregation.
//
- protected override decimal InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override decimal InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<Pair<decimal, long>, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<decimal, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<decimal, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new DecimalAverageAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
QueryOperatorEnumerator<decimal, TKey> source = _source;
decimal current = default(decimal);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref current, ref keyUnused))
{
// The single result of aggregation.
//
- protected override decimal InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override decimal InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<decimal, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<decimal, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<decimal, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new DecimalMinMaxAggregationOperatorEnumerator<TKey>(source, index, _sign, cancellationToken);
}
{
// Based on the sign, do either a min or max reduction.
QueryOperatorEnumerator<decimal, TKey> source = _source;
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref currentElement, ref keyUnused))
{
// The single result of aggregation.
//
- protected override decimal InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override decimal InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<decimal, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<decimal, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<decimal, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new DecimalSumAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
protected override bool MoveNextCore(ref decimal currentElement)
{
decimal element = default(decimal);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
QueryOperatorEnumerator<decimal, TKey> source = _source;
if (source.MoveNext(ref element, ref keyUnused))
// The single result of aggregation.
//
- protected override double InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override double InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<Pair<double, long>, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<double, TKey> source, object sharedData,
+ int index, int count, QueryOperatorEnumerator<double, TKey> source, object? sharedData,
CancellationToken cancellationToken)
{
return new DoubleAverageAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
QueryOperatorEnumerator<double, TKey> source = _source;
double current = default(double);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref current, ref keyUnused))
{
// The single result of aggregation.
//
- protected override double InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override double InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<double, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<double, TKey> source, object sharedData,
+ int index, int count, QueryOperatorEnumerator<double, TKey> source, object? sharedData,
CancellationToken cancellationToken)
{
return new DoubleMinMaxAggregationOperatorEnumerator<TKey>(source, index, _sign, cancellationToken);
{
// Based on the sign, do either a min or max reduction.
QueryOperatorEnumerator<double, TKey> source = _source;
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref currentElement, ref keyUnused))
{
// The single result of aggregation.
//
- protected override double InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override double InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<double, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<double, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<double, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new DoubleSumAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
protected override bool MoveNextCore(ref double currentElement)
{
double element = default(double);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
QueryOperatorEnumerator<double, TKey> source = _source;
if (source.MoveNext(ref element, ref keyUnused))
// The single result of aggregation.
//
- protected override float InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override float InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<Pair<double, long>, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<float, TKey> source, object sharedData,
+ int index, int count, QueryOperatorEnumerator<float, TKey> source, object? sharedData,
CancellationToken cancellationToken)
{
return new FloatAverageAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
QueryOperatorEnumerator<float, TKey> source = _source;
float current = default(float);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref current, ref keyUnused))
{
// The single result of aggregation.
//
- protected override float InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override float InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<float, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<float, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<float, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new FloatMinMaxAggregationOperatorEnumerator<TKey>(source, index, _sign, cancellationToken);
}
{
// Based on the sign, do either a min or max reduction.
QueryOperatorEnumerator<float, TKey> source = _source;
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref currentElement, ref keyUnused))
{
// The single result of aggregation.
//
- protected override float InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override float InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<double, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<float, TKey> source, object sharedData,
+ int index, int count, QueryOperatorEnumerator<float, TKey> source, object? sharedData,
CancellationToken cancellationToken)
{
return new FloatSumAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
protected override bool MoveNextCore(ref double currentElement)
{
float element = default(float);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
QueryOperatorEnumerator<float, TKey> source = _source;
if (source.MoveNext(ref element, ref keyUnused))
internal TResult Aggregate()
{
TResult tr;
- Exception toThrow = null;
+ Exception? toThrow = null;
try
{
// - We find the external CancellationToken for this query in the OperationCanceledException
// - The externalToken is actually in the canceled state.
- OperationCanceledException cancelEx = ex as OperationCanceledException;
- if (cancelEx != null
+ if (ex is OperationCanceledException cancelEx
&& cancelEx.CancellationToken == SpecifiedQuerySettings.CancellationState.ExternalCancellationToken
&& SpecifiedQuerySettings.CancellationState.ExternalCancellationToken.IsCancellationRequested)
{
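// [Sketch, not part of this diff] The pattern-matching rewrite above feeds PLINQ's usual
// convention for telling external cancellation apart from other faults. A minimal,
// self-contained illustration of that convention, using a hypothetical RunAggregation
// helper rather than the real operator types:
using System;
using System.Threading;

internal static class CancellationExample
{
    internal static T RunAggregation<T>(Func<T> body, CancellationToken externalToken)
    {
        try
        {
            return body();
        }
        catch (Exception ex)
        {
            // Only treat the failure as "the caller canceled" when the exception is an
            // OperationCanceledException carrying the caller's own token and that token
            // has actually been signaled.
            if (ex is OperationCanceledException cancelEx
                && cancelEx.CancellationToken == externalToken
                && externalToken.IsCancellationRequested)
            {
                throw new OperationCanceledException(externalToken);
            }
            throw;
        }
    }
}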
// The single result of aggregation.
//
- protected abstract TResult InternalAggregate(ref Exception singularExceptionToThrow);
+ protected abstract TResult InternalAggregate(ref Exception? singularExceptionToThrow);
//---------------------------------------------------------------------------------------
// Just opens the current operator, including opening the child and wrapping it with
}
protected abstract QueryOperatorEnumerator<TIntermediate, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<TSource, TKey> source, object sharedData, CancellationToken cancellationToken);
+ int index, int count, QueryOperatorEnumerator<TSource, TKey> source, object? sharedData, CancellationToken cancellationToken);
[ExcludeFromCodeCoverage]
internal override IEnumerable<TIntermediate> AsSequentialQuery(CancellationToken token)
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
// and then one that is used for extensibility by subclasses.
//
- internal sealed override bool MoveNext(ref TIntermediate currentElement, ref int currentKey)
+ internal sealed override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TIntermediate currentElement, ref int currentKey)
{
if (!_done && MoveNextCore(ref currentElement))
{
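// [Sketch, not part of this diff] The [MaybeNullWhen(false), AllowNull] pair above tells the
// compiler that a possibly-null value may be passed in by reference and that the value is
// only guaranteed non-null when the method returns true. The same annotation shape on a
// hypothetical Cursor<T>, together with the caller-side default(T)! idiom the enumerators in
// this diff use for their scratch variables:
using System.Diagnostics.CodeAnalysis;

internal sealed class Cursor<T>
{
    private readonly T[] _items;
    private int _index = -1;

    internal Cursor(T[] items) => _items = items;

    // On a true return 'current' holds a real element; on a false return it may still be
    // whatever (possibly null) value the caller passed in.
    internal bool MoveNext([MaybeNullWhen(false), AllowNull] ref T current)
    {
        if (++_index >= _items.Length) return false;
        current = _items[_index];
        return true;
    }
}

internal static class CursorUsage
{
    internal static int Count<T>(Cursor<T> cursor)
    {
        T current = default!;   // scratch slot; '!' is needed because T is unconstrained
        int count = 0;
        while (cursor.MoveNext(ref current)) count++;
        return count;
    }
}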
// The single result of aggregation.
//
- protected override double InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override double InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<Pair<long, long>, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<int, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<int, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new IntAverageAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
QueryOperatorEnumerator<int, TKey> source = _source;
int current = default(int);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref current, ref keyUnused))
{
// The single result of aggregation.
//
- protected override int InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override int InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<int, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<int, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<int, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new IntMinMaxAggregationOperatorEnumerator<TKey>(source, index, _sign, cancellationToken);
}
{
// Based on the sign, do either a min or max reduction.
QueryOperatorEnumerator<int, TKey> source = _source;
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref currentElement, ref keyUnused))
{
// The single result of aggregation.
//
- protected override int InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override int InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<int, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<int, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<int, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new IntSumAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
protected override bool MoveNextCore(ref int currentElement)
{
int element = default(int);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
QueryOperatorEnumerator<int, TKey> source = _source;
if (source.MoveNext(ref element, ref keyUnused))
// The single result of aggregation.
//
- protected override double InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override double InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<Pair<long, long>, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<long, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<long, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new LongAverageAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
QueryOperatorEnumerator<long, TKey> source = _source;
long current = default(long);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref current, ref keyUnused))
{
// The single result of aggregation.
//
- protected override long InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override long InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<long, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<TSource, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<TSource, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new LongCountAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
protected override bool MoveNextCore(ref long currentElement)
{
- TSource elementUnused = default(TSource);
- TKey keyUnused = default(TKey);
+ TSource elementUnused = default(TSource)!;
+ TKey keyUnused = default(TKey)!;
QueryOperatorEnumerator<TSource, TKey> source = _source;
if (source.MoveNext(ref elementUnused, ref keyUnused))
// The single result of aggregation.
//
- protected override long InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override long InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<long, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<long, TKey> source, object sharedData,
+ int index, int count, QueryOperatorEnumerator<long, TKey> source, object? sharedData,
CancellationToken cancellationToken)
{
return new LongMinMaxAggregationOperatorEnumerator<TKey>(source, index, _sign, cancellationToken);
{
// Based on the sign, do either a min or max reduction.
QueryOperatorEnumerator<long, TKey> source = _source;
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref currentElement, ref keyUnused))
{
// The single result of aggregation.
//
- protected override long InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override long InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<long, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<long, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<long, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new LongSumAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
protected override bool MoveNextCore(ref long currentElement)
{
long element = default(long);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
QueryOperatorEnumerator<long, TKey> source = _source;
if (source.MoveNext(ref element, ref keyUnused))
// The single result of aggregation.
//
- protected override decimal? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override decimal? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<Pair<decimal, long>, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<decimal?, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<decimal?, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new NullableDecimalAverageAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
QueryOperatorEnumerator<decimal?, TKey> source = _source;
decimal? current = default(decimal?);
- TKey currentKey = default(TKey);
+ TKey currentKey = default(TKey)!;
int i = 0;
while (source.MoveNext(ref current, ref currentKey))
{
// The single result of aggregation.
//
- protected override decimal? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override decimal? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<decimal?, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<decimal?, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<decimal?, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new NullableDecimalMinMaxAggregationOperatorEnumerator<TKey>(source, index, _sign, cancellationToken);
}
{
// Based on the sign, do either a min or max reduction.
QueryOperatorEnumerator<decimal?, TKey> source = _source;
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref currentElement, ref keyUnused))
{
// The single result of aggregation.
//
- protected override decimal? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override decimal? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<decimal?, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<decimal?, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<decimal?, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new NullableDecimalSumAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
protected override bool MoveNextCore(ref decimal? currentElement)
{
decimal? element = default(decimal?);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
QueryOperatorEnumerator<decimal?, TKey> source = _source;
if (source.MoveNext(ref element, ref keyUnused))
// The single result of aggregation.
//
- protected override double? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override double? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<Pair<double, long>, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<double?, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<double?, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new NullableDoubleAverageAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
QueryOperatorEnumerator<double?, TKey> source = _source;
double? current = default(double?);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
int i = 0;
while (source.MoveNext(ref current, ref keyUnused))
{
// The single result of aggregation.
//
- protected override double? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override double? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<double?, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<double?, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<double?, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new NullableDoubleMinMaxAggregationOperatorEnumerator<TKey>(source, index, _sign, cancellationToken);
}
{
// Based on the sign, do either a min or max reduction.
QueryOperatorEnumerator<double?, TKey> source = _source;
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref currentElement, ref keyUnused))
{
// The single result of aggregation.
//
- protected override double? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override double? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<double?, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<double?, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<double?, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new NullableDoubleSumAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
protected override bool MoveNextCore(ref double? currentElement)
{
double? element = default(double?);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
QueryOperatorEnumerator<double?, TKey> source = _source;
if (source.MoveNext(ref element, ref keyUnused))
// The single result of aggregation.
//
- protected override float? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override float? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<Pair<double, long>, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<float?, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<float?, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new NullableFloatAverageAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
QueryOperatorEnumerator<float?, TKey> source = _source;
float? current = default(float?);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
int i = 0;
while (source.MoveNext(ref current, ref keyUnused))
// The single result of aggregation.
//
- protected override float? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override float? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<float?, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<float?, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<float?, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new NullableFloatMinMaxAggregationOperatorEnumerator<TKey>(source, index, _sign, cancellationToken);
}
{
// Based on the sign, do either a min or max reduction.
QueryOperatorEnumerator<float?, TKey> source = _source;
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref currentElement, ref keyUnused))
{
// The single result of aggregation.
//
- protected override float? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override float? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<double?, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<float?, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<float?, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new NullableFloatSumAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
protected override bool MoveNextCore(ref double? currentElement)
{
float? element = default(float?);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
QueryOperatorEnumerator<float?, TKey> source = _source;
if (source.MoveNext(ref element, ref keyUnused))
// The single result of aggregation.
//
- protected override double? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override double? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<Pair<long, long>, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<int?, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<int?, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new NullableIntAverageAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
QueryOperatorEnumerator<int?, TKey> source = _source;
int? current = default(int?);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
int i = 0;
while (source.MoveNext(ref current, ref keyUnused))
// The single result of aggregation.
//
- protected override int? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override int? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<int?, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<int?, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<int?, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new NullableIntMinMaxAggregationOperatorEnumerator<TKey>(source, index, _sign, cancellationToken);
}
{
// Based on the sign, do either a min or max reduction.
QueryOperatorEnumerator<int?, TKey> source = _source;
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref currentElement, ref keyUnused))
{
// The single result of aggregation.
//
- protected override int? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override int? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<int?, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<int?, TKey> source, object sharedData, CancellationToken cancellationToken)
+ int index, int count, QueryOperatorEnumerator<int?, TKey> source, object? sharedData, CancellationToken cancellationToken)
{
return new NullableIntSumAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
}
protected override bool MoveNextCore(ref int? currentElement)
{
int? element = default(int?);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
QueryOperatorEnumerator<int?, TKey> source = _source;
if (source.MoveNext(ref element, ref keyUnused))
// The single result of aggregation.
//
- protected override double? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override double? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<Pair<long, long>, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<long?, TKey> source, object sharedData,
+ int index, int count, QueryOperatorEnumerator<long?, TKey> source, object? sharedData,
CancellationToken cancellationToken)
{
return new NullableLongAverageAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
QueryOperatorEnumerator<long?, TKey> source = _source;
long? current = default(long?);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
int i = 0;
while (source.MoveNext(ref current, ref keyUnused))
// The single result of aggregation.
//
- protected override long? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override long? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<long?, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<long?, TKey> source, object sharedData,
+ int index, int count, QueryOperatorEnumerator<long?, TKey> source, object? sharedData,
CancellationToken cancellationToken)
{
return new NullableLongMinMaxAggregationOperatorEnumerator<TKey>(source, index, _sign, cancellationToken);
{
// Based on the sign, do either a min or max reduction.
QueryOperatorEnumerator<long?, TKey> source = _source;
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
if (source.MoveNext(ref currentElement, ref keyUnused))
{
// The single result of aggregation.
//
- protected override long? InternalAggregate(ref Exception singularExceptionToThrow)
+ protected override long? InternalAggregate(ref Exception? singularExceptionToThrow)
{
// Because the final reduction is typically much cheaper than the intermediate
// reductions over the individual partitions, and because each parallel partition
//
protected override QueryOperatorEnumerator<long?, int> CreateEnumerator<TKey>(
- int index, int count, QueryOperatorEnumerator<long?, TKey> source, object sharedData,
+ int index, int count, QueryOperatorEnumerator<long?, TKey> source, object? sharedData,
CancellationToken cancellationToken)
{
return new NullableLongSumAggregationOperatorEnumerator<TKey>(source, index, cancellationToken);
protected override bool MoveNextCore(ref long? currentElement)
{
long? element = default(long?);
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default(TKey)!;
QueryOperatorEnumerator<long?, TKey> source = _source;
if (source.MoveNext(ref element, ref keyUnused))
internal override IEnumerator<TSource> GetEnumerator(ParallelMergeOptions? mergeOptions, bool suppressOrderPreservation)
{
- ScanQueryOperator<TSource> childAsScan = _child as ScanQueryOperator<TSource>;
- if (childAsScan != null)
+ if (_child is ScanQueryOperator<TSource> childAsScan)
{
return childAsScan.Data.GetEnumerator();
}
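// [Sketch, not part of this diff] The rewrite above is the recurring modernization in this
// change set: an 'as' cast followed by a null check becomes a single 'is' pattern, and the
// compiler then tracks the pattern variable as non-null inside the block. A tiny example:
internal static class PatternExample
{
    internal static void Describe(object value)
    {
        // Before: string? text = value as string; if (text != null) { ... }
        // After: one pattern match; 'text' is known non-null inside the block.
        if (value is string text)
        {
            System.Console.WriteLine(text.Length);
        }
    }
}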
private readonly bool _forEffectMerge;
private readonly ParallelMergeOptions _mergeOptions;
private readonly bool _isOrdered;
- private MergeExecutor<TOutput> _mergeExecutor = null;
+ private MergeExecutor<TOutput>? _mergeExecutor = null;
private readonly TaskScheduler _taskScheduler;
private readonly int _queryId; // ID of the current query execution
private bool _received = false;
#endif
// Returns the merge executor which merges the received partitioned stream.
- internal MergeExecutor<TOutput> MergeExecutor
+ internal MergeExecutor<TOutput>? MergeExecutor
{
get
{
using System.Linq.Parallel;
using System.Diagnostics;
using System.Threading;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
/// </summary>
internal static OrdinalIndexState GetOrdinalIndexState(Partitioner<TElement> partitioner)
{
- OrderablePartitioner<TElement> orderablePartitioner = partitioner as OrderablePartitioner<TElement>;
+ OrderablePartitioner<TElement>? orderablePartitioner = partitioner as OrderablePartitioner<TElement>;
if (orderablePartitioner == null)
{
Debug.Assert(_settings.DegreeOfParallelism.HasValue);
int partitionCount = _settings.DegreeOfParallelism.Value;
- OrderablePartitioner<TElement> orderablePartitioner = _partitioner as OrderablePartitioner<TElement>;
+ OrderablePartitioner<TElement>? orderablePartitioner = _partitioner as OrderablePartitioner<TElement>;
// If the partitioner is not orderable, it will yield zeros as order keys. The order index state
// is irrelevant.
_sourceEnumerator = sourceEnumerator;
}
- internal override bool MoveNext(ref TElement currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TElement currentElement, ref int currentKey)
{
if (!_sourceEnumerator.MoveNext()) return false;
_sourceEnumerator = sourceEnumerator;
}
- internal override bool MoveNext(ref TElement currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TElement currentElement, ref int currentKey)
{
if (!_sourceEnumerator.MoveNext()) return false;
internal class QueryOpeningEnumerator<TOutput> : IEnumerator<TOutput>
{
private readonly QueryOperator<TOutput> _queryOperator;
- private IEnumerator<TOutput> _openedQueryEnumerator;
+ private IEnumerator<TOutput>? _openedQueryEnumerator;
private QuerySettings _querySettings;
private readonly ParallelMergeOptions? _mergeOptions;
private readonly bool _suppressOrderPreservation;
QueryLifecycle.LogicalQueryExecutionEnd(_querySettings.QueryId);
}
- object IEnumerator.Current
+ object? IEnumerator.Current
{
get { return ((IEnumerator<TOutput>)this).Current; }
}
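// [Sketch, not part of this diff] The non-generic IEnumerator.Current is annotated as
// 'object?' in the nullable-aware BCL, so explicit implementations mirror that, as above.
// A minimal enumerator showing the same shape (hypothetical SingleItemEnumerator type):
using System.Collections;
using System.Collections.Generic;

internal sealed class SingleItemEnumerator<T> : IEnumerator<T>
{
    private readonly T _item;
    private bool _moved;

    internal SingleItemEnumerator(T item) => _item = item;

    public T Current => _item;

    // Mirrors the interface's 'object?' return type; a null element is legitimate here.
    object? IEnumerator.Current => Current;

    public bool MoveNext() => !_moved && (_moved = true);
    public void Reset() => _moved = false;
    public void Dispose() { }
}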
OpenQuery();
}
- bool innerMoveNextResult = _openedQueryEnumerator.MoveNext();
+ bool innerMoveNextResult = _openedQueryEnumerator!.MoveNext();
// This provides cancellation-testing for the consumer-side of the buffers that appears in each scenario:
// Non-order-preserving (defaultMergeHelper)
// The enumerator will be "opened", which means that PLINQ will start executing the query
// immediately, even before the user calls MoveNext() for the first time.
//
- internal IEnumerator<TOutput> GetOpenedEnumerator(ParallelMergeOptions? mergeOptions, bool suppressOrder, bool forEffect,
+ internal IEnumerator<TOutput>? GetOpenedEnumerator(ParallelMergeOptions? mergeOptions, bool suppressOrder, bool forEffect,
QuerySettings querySettings)
{
+ Debug.Assert(querySettings.ExecutionMode != null);
// If the top-level enumerator forces a premature merge, run the query sequentially.
if (querySettings.ExecutionMode.Value == ParallelExecutionMode.Default && LimitsParallelism)
{
bool orderedMerge = OutputOrdered && !suppressOrder;
+ Debug.Assert(querySettings.TaskScheduler != null);
PartitionedStreamMerger<TOutput> merger = new PartitionedStreamMerger<TOutput>(forEffect, mergeOptions.GetValueOrDefault(),
querySettings.TaskScheduler,
orderedMerge,
return null;
}
+ Debug.Assert(merger.MergeExecutor != null);
return merger.MergeExecutor.GetEnumerator();
}
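// [Sketch, not part of this diff] The Debug.Assert(... != null) lines added above are not
// just documentation: Debug.Assert's condition parameter is annotated [DoesNotReturnIf(false)],
// so after the assert the compiler treats the asserted reference as non-null and the
// following dereference needs no '!'. Illustrated with a hypothetical settings type:
using System.Diagnostics;
using System.Threading.Tasks;

internal sealed class MySettings
{
    internal TaskScheduler? Scheduler { get; set; }   // filled in by a WithDefaults-style step
}

internal static class AssertExample
{
    internal static TaskScheduler GetScheduler(MySettings settings)
    {
        // Defaults are expected to have been applied by the time we get here.
        Debug.Assert(settings.Scheduler != null);

        // No warning: flow analysis knows Scheduler is non-null after the assert.
        return settings.Scheduler;
    }
}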
QueryLifecycle.LogicalQueryExecutionBegin(querySettings.QueryId);
try
{
+ Debug.Assert(querySettings.ExecutionMode != null);
if (querySettings.ExecutionMode.Value == ParallelExecutionMode.Default && LimitsParallelism)
{
IEnumerable<TOutput> opSequential = AsSequentialQuery(querySettings.CancellationState.ExternalCancellationToken);
}
else
{
+ Debug.Assert(querySettings.TaskScheduler != null);
PartitionedStreamMerger<TOutput> merger =
new PartitionedStreamMerger<TOutput>(false, ParallelMergeOptions.FullyBuffered, querySettings.TaskScheduler,
OutputOrdered, querySettings.CancellationState, querySettings.QueryId);
results.GivePartitionedStream(merger);
- TOutput[] output = merger.MergeExecutor.GetResultsAsArray();
+ Debug.Assert(merger.MergeExecutor != null);
+ TOutput[]? output = merger.MergeExecutor.GetResultsAsArray();
querySettings.CleanStateAtQueryEnd();
+ Debug.Assert(output != null);
return output;
}
}
bool useStriping,
QuerySettings settings)
{
- TaskScheduler taskScheduler = settings.TaskScheduler;
-
-
+ TaskScheduler? taskScheduler = settings.TaskScheduler;
+ Debug.Assert(taskScheduler != null);
MergeExecutor<TOutput> executor = MergeExecutor<TOutput>.Execute<TKey>(
openedChild, false, ParallelMergeOptions.FullyBuffered, taskScheduler, outputOrdered,
settings.CancellationState, settings.QueryId);
- return new ListQueryResults<TOutput>(executor.GetResultsAsArray(), partitionCount, useStriping);
+ return new ListQueryResults<TOutput>(executor.GetResultsAsArray()!, partitionCount, useStriping);
}
// Just try casting the data source to a query operator, in the case that
// our child is just another query operator.
- QueryOperator<TOutput> sourceAsOperator = source as QueryOperator<TOutput>;
+ QueryOperator<TOutput>? sourceAsOperator = source as QueryOperator<TOutput>;
if (sourceAsOperator == null)
{
- OrderedParallelQuery<TOutput> orderedQuery = source as OrderedParallelQuery<TOutput>;
- if (orderedQuery != null)
+ if (source is OrderedParallelQuery<TOutput> orderedQuery)
{
// We have to handle OrderedParallelQuery<T> specially. In all other cases,
// ParallelQuery *is* the QueryOperator<T>. But, OrderedParallelQuery<T>
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
{
// Moves the position of the enumerator forward by one, and simultaneously returns
// the (new) current element and key. If empty, false is returned.
- internal abstract bool MoveNext(ref TElement currentElement, ref TKey currentKey);
+ internal abstract bool MoveNext([MaybeNullWhen(false), AllowNull] ref TElement currentElement, ref TKey currentKey);
// Standard implementation of the disposable pattern.
public void Dispose()
private class QueryOperatorClassicEnumerator : IEnumerator<TElement>
{
private QueryOperatorEnumerator<TElement, TKey> _operatorEnumerator;
- private TElement _current;
+ private TElement _current = default!;
internal QueryOperatorClassicEnumerator(QueryOperatorEnumerator<TElement, TKey> operatorEnumerator)
{
public bool MoveNext()
{
- TKey keyUnused = default(TKey);
- return _operatorEnumerator.MoveNext(ref _current, ref keyUnused);
+ TKey keyUnused = default(TKey)!;
+ return _operatorEnumerator.MoveNext(ref _current!, ref keyUnused);
}
public TElement Current
get { return _current; }
}
- object IEnumerator.Current
+ object? IEnumerator.Current
{
get { return _current; }
}
public void Dispose()
{
_operatorEnumerator.Dispose();
- _operatorEnumerator = null;
+ _operatorEnumerator = null!;
}
public void Reset()
/// </summary>
internal struct QuerySettings
{
- private TaskScheduler _taskScheduler;
+ private TaskScheduler? _taskScheduler;
private int? _degreeOfParallelism;
private CancellationState _cancellationState;
private ParallelExecutionMode? _executionMode;
}
// The task manager on which to execute the query.
- internal TaskScheduler TaskScheduler
+ internal TaskScheduler? TaskScheduler
{
get { return _taskScheduler; }
set { _taskScheduler = value; }
//-----------------------------------------------------------------------------------
// Constructs a new settings structure.
//
- internal QuerySettings(TaskScheduler taskScheduler, int? degreeOfParallelism,
+ internal QuerySettings(TaskScheduler? taskScheduler, int? degreeOfParallelism,
CancellationToken externalCancellationToken, ParallelExecutionMode? executionMode,
ParallelMergeOptions? mergeOptions)
{
throw new InvalidOperationException(SR.ParallelQuery_DuplicateMergeOptions);
}
- TaskScheduler tm = (this.TaskScheduler == null) ? settings2.TaskScheduler : this.TaskScheduler;
+ TaskScheduler? tm = (this.TaskScheduler == null) ? settings2.TaskScheduler : this.TaskScheduler;
int? dop = this.DegreeOfParallelism.HasValue ? this.DegreeOfParallelism : settings2.DegreeOfParallelism;
CancellationToken externalCancellationToken = (this.CancellationState.ExternalCancellationToken.CanBeCanceled) ? this.CancellationState.ExternalCancellationToken : settings2.CancellationState.ExternalCancellationToken;
ParallelExecutionMode? executionMode = this.ExecutionMode.HasValue ? this.ExecutionMode : settings2.ExecutionMode;
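// [Sketch, not part of this diff] The merge above prefers the locally specified value and
// falls back to the second settings instance; with the scheduler now nullable, each field is
// simply "first non-null wins". A hypothetical MergedSettings type showing the same idea
// with the null-coalescing operator (equivalent to the ternaries used here):
using System.Threading.Tasks;

internal readonly struct MergedSettings
{
    internal readonly TaskScheduler? Scheduler;
    internal readonly int? DegreeOfParallelism;

    internal MergedSettings(TaskScheduler? scheduler, int? degreeOfParallelism)
    {
        Scheduler = scheduler;
        DegreeOfParallelism = degreeOfParallelism;
    }

    // Each field takes the first non-null value, mirroring the per-field merges above.
    internal static MergedSettings Merge(in MergedSettings primary, in MergedSettings fallback) =>
        new MergedSettings(
            primary.Scheduler ?? fallback.Scheduler,
            primary.DegreeOfParallelism ?? fallback.DegreeOfParallelism);
}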
// uninterrupted for the duration of the full query.)
public void CleanStateAtQueryEnd()
{
+ Debug.Assert(_cancellationState.MergedCancellationTokenSource != null);
_cancellationState.MergedCancellationTokenSource.Dispose();
}
}
{
Debug.Assert(data != null);
- ParallelEnumerableWrapper<TElement> wrapper = data as ParallelEnumerableWrapper<TElement>;
- if (wrapper != null)
+ if (data is ParallelEnumerableWrapper<TElement> wrapper)
{
data = wrapper.WrappedEnumerable;
}
{
Debug.Assert(settings.DegreeOfParallelism.HasValue);
- IList<TElement> dataAsList = _data as IList<TElement>;
- if (dataAsList != null)
+ if (_data is IList<TElement> dataAsList)
{
return new ListQueryResults<TElement>(dataAsList, settings.DegreeOfParallelism.GetValueOrDefault(), preferStriping);
}
internal override void GivePartitionedStream(IPartitionedStreamRecipient<TElement> recipient)
{
+ Debug.Assert(_settings.DegreeOfParallelism != null);
// Since we are not using _data as an IList, we can pass useStriping = false.
PartitionedStream<TElement, int> partitionedStream = ExchangeUtilities.PartitionDataSource(
_data, _settings.DegreeOfParallelism.Value, false);
return false;
// We just scroll through the enumerator and accumulate the result.
- TInput element = default(TInput);
- TKey keyUnused = default(TKey);
+ TInput element = default(TInput)!;
+ TKey keyUnused = default(TKey)!;
- if (_source.MoveNext(ref element, ref keyUnused))
+ if (_source.MoveNext(ref element!, ref keyUnused))
{
currentElement = !_qualification;
currentKey = _partitionIndex;
break;
}
}
- while (_source.MoveNext(ref element, ref keyUnused));
+ while (_source.MoveNext(ref element!, ref keyUnused));
return true;
}
// comparer - a comparison routine used to test equality.
//
- internal ContainsSearchOperator(IEnumerable<TInput> child, TInput searchValue, IEqualityComparer<TInput> comparer)
+ internal ContainsSearchOperator(IEnumerable<TInput> child, TInput searchValue, IEqualityComparer<TInput>? comparer)
: base(child)
{
Debug.Assert(child != null, "child data source cannot be null");
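// [Sketch, not part of this diff] Several operators in this change set accept an optional
// IEqualityComparer<T>?. Here the comparer stays nullable end-to-end, presumably because the
// downstream hash structures handle a null comparer themselves; an equally common alternative
// is to normalize to EqualityComparer<T>.Default at the boundary, as in this hypothetical
// container:
using System.Collections.Generic;

internal sealed class DistinctBag<T>
{
    private readonly IEqualityComparer<T> _comparer;
    private readonly HashSet<T> _seen;

    // A null argument means "use default equality"; the stored comparer is never null.
    internal DistinctBag(IEqualityComparer<T>? comparer)
    {
        _comparer = comparer ?? EqualityComparer<T>.Default;
        _seen = new HashSet<T>(_comparer);
    }

    internal bool Add(T item) => _seen.Add(item);
}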
return false;
// We just scroll through the enumerator and accumulate the result.
- TInput element = default(TInput);
- TKey keyUnused = default(TKey);
- if (_source.MoveNext(ref element, ref keyUnused))
+ TInput element = default(TInput)!;
+ TKey keyUnused = default(TKey)!;
+ if (_source.MoveNext(ref element!, ref keyUnused))
{
currentElement = false;
currentKey = _partitionIndex;
break;
}
}
- while (_source.MoveNext(ref element, ref keyUnused));
+ while (_source.MoveNext(ref element!, ref keyUnused));
return true;
}
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
// Straightforward IEnumerator<T> methods.
//
- internal override bool MoveNext(ref TSource currentElement, ref TKey currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TSource currentElement, ref TKey currentKey)
{
Debug.Assert(_source != null);
- bool moveNextResult = _source.MoveNext(ref currentElement, ref currentKey);
+ bool moveNextResult = _source.MoveNext(ref currentElement!, ref currentKey);
// There is special logic the first time this function is called.
if (!_lookedForEmpty)
{
// No data, we will yield the default value.
currentElement = _defaultValue;
- currentKey = default(TKey);
+ currentKey = default(TKey)!;
return true;
}
else
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
/// <typeparam name="TInputOutput"></typeparam>
internal sealed class DistinctQueryOperator<TInputOutput> : UnaryQueryOperator<TInputOutput, TInputOutput>
{
- private readonly IEqualityComparer<TInputOutput> _comparer; // An (optional) equality comparer.
+ private readonly IEqualityComparer<TInputOutput>? _comparer; // An (optional) equality comparer.
//---------------------------------------------------------------------------------------
// Constructs a new distinction operator.
//
- internal DistinctQueryOperator(IEnumerable<TInputOutput> source, IEqualityComparer<TInputOutput> comparer)
+ internal DistinctQueryOperator(IEnumerable<TInputOutput> source, IEqualityComparer<TInputOutput>? comparer)
: base(source)
{
Debug.Assert(source != null, "child data source cannot be null");
private readonly QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TKey> _source; // The data source.
private readonly Set<TInputOutput> _hashLookup; // The hash lookup, used to produce the distinct set.
private readonly CancellationToken _cancellationToken;
- private Shared<int> _outputLoopCount; // Allocated in MoveNext to avoid false sharing.
+ private Shared<int>? _outputLoopCount; // Allocated in MoveNext to avoid false sharing.
//---------------------------------------------------------------------------------------
// Instantiates a new distinction operator.
//
internal DistinctQueryOperatorEnumerator(
- QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TKey> source, IEqualityComparer<TInputOutput> comparer,
+ QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TKey> source, IEqualityComparer<TInputOutput>? comparer,
CancellationToken cancellationToken)
{
Debug.Assert(source != null);
// Walks the single data source, skipping elements it has already seen.
//
- internal override bool MoveNext(ref TInputOutput currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TInputOutput currentElement, ref int currentKey)
{
Debug.Assert(_source != null);
Debug.Assert(_hashLookup != null);
// Iterate over this set's elements until we find a unique element.
- TKey keyUnused = default(TKey);
+ TKey keyUnused = default!;
Pair<TInputOutput, NoKeyMemoizationRequired> current = default(Pair<TInputOutput, NoKeyMemoizationRequired>);
if (_outputLoopCount == null)
private readonly QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TKey> _source; // The data source.
private readonly Dictionary<Wrapper<TInputOutput>, TKey> _hashLookup; // The hash lookup, used to produce the distinct set.
private readonly IComparer<TKey> _keyComparer; // Comparer to decide the key order.
- private IEnumerator<KeyValuePair<Wrapper<TInputOutput>, TKey>> _hashLookupEnumerator; // Enumerates over _hashLookup.
+ private IEnumerator<KeyValuePair<Wrapper<TInputOutput>, TKey>>? _hashLookupEnumerator; // Enumerates over _hashLookup.
private readonly CancellationToken _cancellationToken;
//---------------------------------------------------------------------------------------
internal OrderedDistinctQueryOperatorEnumerator(
QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TKey> source,
- IEqualityComparer<TInputOutput> comparer, IComparer<TKey> keyComparer,
+ IEqualityComparer<TInputOutput>? comparer, IComparer<TKey> keyComparer,
CancellationToken cancellationToken)
{
Debug.Assert(source != null);
// Walks the single data source, skipping elements it has already seen.
//
- internal override bool MoveNext(ref TInputOutput currentElement, ref TKey currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TInputOutput currentElement, ref TKey currentKey)
{
Debug.Assert(_source != null);
Debug.Assert(_hashLookup != null);
if (_hashLookupEnumerator == null)
{
Pair<TInputOutput, NoKeyMemoizationRequired> elem = default(Pair<TInputOutput, NoKeyMemoizationRequired>);
- TKey orderKey = default(TKey);
+ TKey orderKey = default!;
int i = 0;
while (_source.MoveNext(ref elem, ref orderKey))
// If this is the first occurrence of this element, or the order key is lower than all keys we saw previously,
// update the order key for this element.
- if (!_hashLookup.TryGetValue(wrappedElem, out oldEntry) || _keyComparer.Compare(orderKey, oldEntry) < 0)
+ if (!_hashLookup.TryGetValue(wrappedElem, out oldEntry!) || _keyComparer.Compare(orderKey, oldEntry) < 0)
{
// For each "elem" value, we store the smallest key, and the element value that had that key.
// Note that even though two element values are "equal" according to the EqualityComparer,
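// [Sketch, not part of this diff] On the caller side, 'out oldEntry!' above uses the
// null-forgiving operator: Dictionary<,>.TryGetValue declares its out parameter
// [MaybeNullWhen(false)], and the short-circuiting '||' guarantees the value is only read
// after a successful lookup, a relationship the flow analysis cannot see. The same situation
// with hypothetical data:
using System.Collections.Generic;

internal static class TryGetExample
{
    internal static void KeepShortest(Dictionary<int, string> shortest, int key, string candidate)
    {
        string previous;
        // 'previous' is only read when TryGetValue returned true (the '||' short-circuits),
        // but the compiler cannot connect the two, so '!' silences the maybe-null warning.
        if (!shortest.TryGetValue(key, out previous!) || candidate.Length < previous.Length)
        {
            shortest[key] = candidate;
        }
    }
}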
/// <param name="result">result</param>
/// <param name="withDefaultValue">withDefaultValue</param>
/// <returns>whether an element with this index exists</returns>
- internal bool Aggregate(out TSource result, bool withDefaultValue)
+ internal bool Aggregate([MaybeNullWhen(false)] out TSource result, bool withDefaultValue)
{
// If we were to insert a premature merge before this ElementAt, and we are executing in conservative mode, run the whole query
// sequentially.
- if (LimitsParallelism && SpecifiedQuerySettings.WithDefaults().ExecutionMode.Value != ParallelExecutionMode.ForceParallelism)
+ if (LimitsParallelism && SpecifiedQuerySettings.WithDefaults().ExecutionMode!.Value != ParallelExecutionMode.ForceParallelism)
{
CancellationState cancelState = SpecifiedQuerySettings.CancellationState;
if (withDefaultValue)
{
IEnumerable<TSource> childAsSequential = Child.AsSequentialQuery(cancelState.ExternalCancellationToken);
IEnumerable<TSource> childWithCancelChecks = CancellableEnumerable.Wrap(childAsSequential, cancelState.ExternalCancellationToken);
- result = ExceptionAggregator.WrapEnumerable(childWithCancelChecks, cancelState).ElementAtOrDefault(_index);
+ result = ExceptionAggregator.WrapEnumerable(childWithCancelChecks, cancelState).ElementAtOrDefault(_index)!;
}
else
{
}
}
- result = default(TSource);
+ result = default(TSource)!;
return false;
}
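// [Sketch, not part of this diff] The Aggregate signature above is the declaration side of
// that contract: the out result is meaningful only when the method returns true, so it is
// annotated [MaybeNullWhen(false)] and the failure path can assign default(TSource)!. A
// minimal Try-style method with a hypothetical finder:
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;

internal static class TryPatternExample
{
    // Callers get full null tracking: for reference types, 'result' is treated as non-null
    // only on the branch where this method returned true.
    internal static bool TryFirst<T>(IEnumerable<T> source, [MaybeNullWhen(false)] out T result)
    {
        foreach (T item in source)
        {
            result = item;
            return true;
        }

        result = default!;   // only observed by callers that ignore the false return
        return false;
    }
}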
// partition has signaled that it found the element.
//
- internal override bool MoveNext(ref TSource currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TSource currentElement, ref int currentKey)
{
// Just walk the enumerator until we've found the element.
int i = 0;
- while (_source.MoveNext(ref currentElement, ref currentKey))
+ while (_source.MoveNext(ref currentElement!, ref currentKey))
{
if ((i++ & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
/// <typeparam name="TSource"></typeparam>
internal sealed class FirstQueryOperator<TSource> : UnaryQueryOperator<TSource, TSource>
{
- private readonly Func<TSource, bool> _predicate; // The optional predicate used during the search.
+ private readonly Func<TSource, bool>? _predicate; // The optional predicate used during the search.
private readonly bool _prematureMergeNeeded; // Whether to prematurely merge the input of this operator.
//---------------------------------------------------------------------------------------
// child - the child whose data we will reverse
//
- internal FirstQueryOperator(IEnumerable<TSource> child, Func<TSource, bool> predicate)
+ internal FirstQueryOperator(IEnumerable<TSource> child, Func<TSource, bool>? predicate)
: base(child)
{
Debug.Assert(child != null, "child data source cannot be null");
private class FirstQueryOperatorEnumerator<TKey> : QueryOperatorEnumerator<TSource, int>
{
private readonly QueryOperatorEnumerator<TSource, TKey> _source; // The data source to enumerate.
- private readonly Func<TSource, bool> _predicate; // The optional predicate used during the search.
+ private readonly Func<TSource, bool>? _predicate; // The optional predicate used during the search.
private bool _alreadySearched; // Set once the enumerator has performed the search.
private readonly int _partitionId; // ID of this partition
//
internal FirstQueryOperatorEnumerator(
- QueryOperatorEnumerator<TSource, TKey> source, Func<TSource, bool> predicate,
+ QueryOperatorEnumerator<TSource, TKey> source, Func<TSource, bool>? predicate,
FirstQueryOperatorState<TKey> operatorState, CountdownEvent sharedBarrier, CancellationToken cancellationToken,
IComparer<TKey> keyComparer, int partitionId)
{
// Straightforward IEnumerator<T> methods.
//
- internal override bool MoveNext(ref TSource currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TSource currentElement, ref int currentKey)
{
Debug.Assert(_source != null);
}
// Look for the lowest element.
- TSource candidate = default(TSource);
- TKey candidateKey = default(TKey);
+ TSource candidate = default(TSource)!;
+ TKey candidateKey = default(TKey)!;
try
{
- TSource value = default(TSource);
- TKey key = default(TKey);
+ TSource value = default(TSource)!;
+ TKey key = default(TKey)!;
int i = 0;
- while (_source.MoveNext(ref value, ref key))
+ while (_source.MoveNext(ref value!, ref key))
{
if ((i++ & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
private class FirstQueryOperatorState<TKey>
{
- internal TKey _key;
+ internal TKey _key = default!;
internal int _partitionId = -1;
}
}
QueryLifecycle.LogicalQueryExecutionBegin(settingsWithDefaults.QueryId);
- IEnumerator<TInput> enumerator = GetOpenedEnumerator(ParallelMergeOptions.FullyBuffered, true, true,
+ IEnumerator<TInput>? enumerator = GetOpenedEnumerator(ParallelMergeOptions.FullyBuffered, true, true,
settingsWithDefaults);
settingsWithDefaults.CleanStateAtQueryEnd();
Debug.Assert(enumerator == null);
// element action for each element.
//
- internal override bool MoveNext(ref TInput currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNull, AllowNull] ref TInput currentElement, ref int currentKey)
{
Debug.Assert(_elementAction != null, "expected a compiled operator");
// Cancellation testing must be performed here as full enumeration occurs within this method.
// We only need to throw a simple exception here.. marshalling logic handled via QueryTaskGroupState.QueryEnd (called by ForAllSpoolingTask)
- TInput element = default(TInput);
- TKey keyUnused = default(TKey);
+ TInput element = default(TInput)!;
+ TKey keyUnused = default(TKey)!;
int i = 0;
- while (_source.MoveNext(ref element, ref keyUnused))
+ while (_source.MoveNext(ref element!, ref keyUnused))
{
if ((i++ & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
using IEnumerator = System.Collections.IEnumerator;
UnaryQueryOperator<TSource, IGrouping<TGroupKey, TElement>>
{
private readonly Func<TSource, TGroupKey> _keySelector; // Key selection function.
- private readonly Func<TSource, TElement> _elementSelector; // Optional element selection function.
- private readonly IEqualityComparer<TGroupKey> _keyComparer; // An optional key comparison object.
+ private readonly Func<TSource, TElement>? _elementSelector; // Optional element selection function.
+ private readonly IEqualityComparer<TGroupKey>? _keyComparer; // An optional key comparison object.
//---------------------------------------------------------------------------------------
// Initializes a new group by operator.
internal GroupByQueryOperator(IEnumerable<TSource> child,
Func<TSource, TGroupKey> keySelector,
- Func<TSource, TElement> elementSelector,
- IEqualityComparer<TGroupKey> keyComparer)
+ Func<TSource, TElement>? elementSelector,
+ IEqualityComparer<TGroupKey>? keyComparer)
: base(child)
{
Debug.Assert(child != null, "child data source cannot be null");
QueryOperatorEnumerator<IGrouping<TGroupKey, TElement>, TOrderKey>
{
protected readonly QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> _source; // The data source to enumerate.
- protected readonly IEqualityComparer<TGroupKey> _keyComparer; // A key comparer.
+ protected readonly IEqualityComparer<TGroupKey>? _keyComparer; // A key comparer.
protected readonly CancellationToken _cancellationToken;
- private Mutables _mutables; // All of the mutable state.
+ private Mutables? _mutables; // All of the mutable state.
private class Mutables
{
- internal HashLookup<Wrapper<TGroupKey>, ListChunk<TElement>> _hashLookup; // The lookup with key-value mappings.
+ internal HashLookup<Wrapper<TGroupKey>, ListChunk<TElement>>? _hashLookup; // The lookup with key-value mappings.
internal int _hashLookupIndex; // The current index within the lookup.
}
protected GroupByQueryOperatorEnumerator(
QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
- IEqualityComparer<TGroupKey> keyComparer, CancellationToken cancellationToken)
+ IEqualityComparer<TGroupKey>? keyComparer, CancellationToken cancellationToken)
{
Debug.Assert(source != null);
// just enumerate the key-set from the hash-table, retrieving groupings of key-elements.
//
- internal override bool MoveNext(ref IGrouping<TGroupKey, TElement> currentElement, ref TOrderKey currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref IGrouping<TGroupKey, TElement> currentElement, ref TOrderKey currentKey)
{
Debug.Assert(_source != null);
// Lazy-init the mutable state. This also means we haven't yet built our lookup of
// groupings, so we can go ahead and do that too.
- Mutables mutables = _mutables;
+ Mutables? mutables = _mutables;
if (mutables == null)
{
mutables = _mutables = new Mutables();
// Build the hash lookup and start enumerating the lookup at the beginning.
mutables._hashLookup = BuildHashLookup();
- Debug.Assert(mutables._hashLookup != null);
mutables._hashLookupIndex = -1;
}
+ Debug.Assert(mutables._hashLookup != null);
// Now, with a hash lookup in hand, we just enumerate the keys. So long
// as the key-value lookup has elements, we have elements.
internal GroupByIdentityQueryOperatorEnumerator(
QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
- IEqualityComparer<TGroupKey> keyComparer, CancellationToken cancellationToken)
+ IEqualityComparer<TGroupKey>? keyComparer, CancellationToken cancellationToken)
: base(source, keyComparer, cancellationToken)
{
}
new HashLookup<Wrapper<TGroupKey>, ListChunk<TSource>>(new WrapperEqualityComparer<TGroupKey>(_keyComparer));
Pair<TSource, TGroupKey> sourceElement = default(Pair<TSource, TGroupKey>);
- TOrderKey sourceKeyUnused = default(TOrderKey);
+ TOrderKey sourceKeyUnused = default(TOrderKey)!;
int i = 0;
while (_source.MoveNext(ref sourceElement, ref sourceKeyUnused))
{
// If the key already exists, we just append it to the existing list --
// otherwise we will create a new one and add it to that instead.
- ListChunk<TSource> currentValue = null;
+ ListChunk<TSource>? currentValue = null;
if (!hashlookup.TryGetValue(key, ref currentValue))
{
const int INITIAL_CHUNK_SIZE = 2;
internal GroupByElementSelectorQueryOperatorEnumerator(
QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
- IEqualityComparer<TGroupKey> keyComparer, Func<TSource, TElement> elementSelector, CancellationToken cancellationToken) :
+ IEqualityComparer<TGroupKey>? keyComparer, Func<TSource, TElement> elementSelector, CancellationToken cancellationToken) :
base(source, keyComparer, cancellationToken)
{
Debug.Assert(elementSelector != null);
new HashLookup<Wrapper<TGroupKey>, ListChunk<TElement>>(new WrapperEqualityComparer<TGroupKey>(_keyComparer));
Pair<TSource, TGroupKey> sourceElement = default(Pair<TSource, TGroupKey>);
- TOrderKey sourceKeyUnused = default(TOrderKey);
+ TOrderKey sourceKeyUnused = default(TOrderKey)!;
int i = 0;
while (_source.MoveNext(ref sourceElement, ref sourceKeyUnused))
{
// If the key already exists, we just append it to the existing list --
// otherwise we will create a new one and add it to that instead.
- ListChunk<TElement> currentValue = null;
+ ListChunk<TElement>? currentValue = null;
if (!hashlookup.TryGetValue(key, ref currentValue))
{
const int INITIAL_CHUNK_SIZE = 2;
{
protected readonly QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> _source; // The data source to enumerate.
private readonly Func<TSource, TGroupKey> _keySelector; // The key selection routine.
- protected readonly IEqualityComparer<TGroupKey> _keyComparer; // The key comparison routine.
+ protected readonly IEqualityComparer<TGroupKey>? _keyComparer; // The key comparison routine.
protected readonly IComparer<TOrderKey> _orderComparer; // The comparison routine for order keys.
protected readonly CancellationToken _cancellationToken;
- private Mutables _mutables; // All the mutable state.
+ private Mutables? _mutables; // All the mutable state.
private class Mutables
{
- internal HashLookup<Wrapper<TGroupKey>, GroupKeyData> _hashLookup; // The lookup with key-value mappings.
+ internal HashLookup<Wrapper<TGroupKey>, GroupKeyData>? _hashLookup; // The lookup with key-value mappings.
internal int _hashLookupIndex; // The current index within the lookup.
}
//
protected OrderedGroupByQueryOperatorEnumerator(QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
- Func<TSource, TGroupKey> keySelector, IEqualityComparer<TGroupKey> keyComparer, IComparer<TOrderKey> orderComparer,
+ Func<TSource, TGroupKey> keySelector, IEqualityComparer<TGroupKey>? keyComparer, IComparer<TOrderKey> orderComparer,
CancellationToken cancellationToken)
{
Debug.Assert(source != null);
// just enumerate the key-set from the hash-table, retrieving groupings of key-elements.
//
- internal override bool MoveNext(ref IGrouping<TGroupKey, TElement> currentElement, ref TOrderKey currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref IGrouping<TGroupKey, TElement> currentElement, ref TOrderKey currentKey)
{
Debug.Assert(_source != null);
Debug.Assert(_keySelector != null);
// Lazy-init the mutable state. This also means we haven't yet built our lookup of
// groupings, so we can go ahead and do that too.
- Mutables mutables = _mutables;
+ Mutables? mutables = _mutables;
if (mutables == null)
{
mutables = _mutables = new Mutables();
// Build the hash lookup and start enumerating the lookup at the beginning.
mutables._hashLookup = BuildHashLookup();
- Debug.Assert(mutables._hashLookup != null);
mutables._hashLookupIndex = -1;
}
+ Debug.Assert(mutables._hashLookup != null);
// Now, with a hash lookup in hand, we just enumerate the keys. So long
// as the key-value lookup has elements, we have elements.
//
internal OrderedGroupByIdentityQueryOperatorEnumerator(QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
- Func<TSource, TGroupKey> keySelector, IEqualityComparer<TGroupKey> keyComparer, IComparer<TOrderKey> orderComparer,
+ Func<TSource, TGroupKey> keySelector, IEqualityComparer<TGroupKey>? keyComparer, IComparer<TOrderKey> orderComparer,
CancellationToken cancellationToken)
: base(source, keySelector, keyComparer, orderComparer, cancellationToken)
{
new WrapperEqualityComparer<TGroupKey>(_keyComparer));
Pair<TSource, TGroupKey> sourceElement = default(Pair<TSource, TGroupKey>);
- TOrderKey sourceOrderKey = default(TOrderKey);
+ TOrderKey sourceOrderKey = default(TOrderKey)!;
int i = 0;
while (_source.MoveNext(ref sourceElement, ref sourceOrderKey))
{
// If the key already exists, we just append it to the existing list --
// otherwise we will create a new one and add it to that instead.
- GroupKeyData currentValue = null;
+ GroupKeyData? currentValue = null;
if (hashLookup.TryGetValue(key, ref currentValue))
{
if (_orderComparer.Compare(sourceOrderKey, currentValue._orderKey) < 0)
//
internal OrderedGroupByElementSelectorQueryOperatorEnumerator(QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
- Func<TSource, TGroupKey> keySelector, Func<TSource, TElement> elementSelector, IEqualityComparer<TGroupKey> keyComparer, IComparer<TOrderKey> orderComparer,
+ Func<TSource, TGroupKey> keySelector, Func<TSource, TElement> elementSelector, IEqualityComparer<TGroupKey>? keyComparer, IComparer<TOrderKey> orderComparer,
CancellationToken cancellationToken) :
base(source, keySelector, keyComparer, orderComparer, cancellationToken)
{
new WrapperEqualityComparer<TGroupKey>(_keyComparer));
Pair<TSource, TGroupKey> sourceElement = default(Pair<TSource, TGroupKey>);
- TOrderKey sourceOrderKey = default(TOrderKey);
+ TOrderKey sourceOrderKey = default(TOrderKey)!;
int i = 0;
while (_source.MoveNext(ref sourceElement, ref sourceOrderKey))
{
// If the key already exists, we just append it to the existing list --
// otherwise we will create a new one and add it to that instead.
- GroupKeyData currentValue = null;
+ GroupKeyData? currentValue = null;
if (hashLookup.TryGetValue(key, ref currentValue))
{
if (_orderComparer.Compare(sourceOrderKey, currentValue._orderKey) < 0)
private const int INITIAL_CHUNK_SIZE = 2;
private readonly TGroupKey _groupKey; // The group key for this grouping
- private ListChunk<Pair<TOrderKey, TElement>> _values; // Values in this group
- private TElement[] _sortedValues; // Sorted values (allocated in DoneAdding)
+ private ListChunk<Pair<TOrderKey, TElement>>? _values; // Values in this group
+ private TElement[]? _sortedValues; // Sorted values (allocated in DoneAdding)
private readonly IComparer<TOrderKey> _orderComparer; // Comparer for order keys
/// <summary>
Debug.Assert(_values != null);
int count = _values.Count;
- ListChunk<Pair<TOrderKey, TElement>> curChunk = _values;
+ ListChunk<Pair<TOrderKey, TElement>>? curChunk = _values;
while ((curChunk = curChunk.Next) != null)
{
count += curChunk.Count;
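The GroupBy enumerators above allocate their Mutables bag lazily and hoist the Debug.Assert out of the initialization branch so the non-null fact also covers later calls that skip the branch. A compact sketch of that shape, assuming Debug.Assert carries [DoesNotReturnIf(false)] as it does in recent .NET (the GroupCounter type below is illustrative only):

using System.Collections.Generic;
using System.Diagnostics;

internal sealed class GroupCounter
{
    private sealed class Mutables
    {
        internal Dictionary<string, int>? _lookup;   // built lazily on first use
        internal int _index;
    }

    private Mutables? _mutables;

    internal int Step()
    {
        Mutables? mutables = _mutables;
        if (mutables == null)
        {
            mutables = _mutables = new Mutables();
            mutables._lookup = new Dictionary<string, int>();
            mutables._index = -1;
        }
        // Placed after the if, as in the diff, so flow analysis also knows the
        // lookup is non-null on calls that find it already built.
        Debug.Assert(mutables._lookup != null);
        return ++mutables._index + mutables._lookup.Count;
    }
}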
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
// Straightforward IEnumerator<T> methods.
//
- internal override bool MoveNext(ref TOutput currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TOutput currentElement, ref int currentKey)
{
// So long as the source has a next element, we have an element.
- TInput element = default(TInput);
- if (_source.MoveNext(ref element, ref currentKey))
+ TInput element = default(TInput)!;
+ if (_source.MoveNext(ref element!, ref currentKey))
{
Debug.Assert(_selector != null, "expected a compiled selection function");
currentElement = _selector(element, currentKey);
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
private readonly QueryOperatorEnumerator<TInputOutput, int> _source; // The data source to enumerate.
private readonly Func<TInputOutput, int, bool> _predicate; // The predicate used for filtering.
private readonly CancellationToken _cancellationToken;
- private Shared<int> _outputLoopCount;
+ private Shared<int>? _outputLoopCount;
//-----------------------------------------------------------------------------------
// Instantiates a new enumerator.
//
// Moves to the next matching element in the underlying data stream.
//
- internal override bool MoveNext(ref TInputOutput currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TInputOutput currentElement, ref int currentKey)
{
Debug.Assert(_predicate != null, "expected a compiled operator");
if (_outputLoopCount == null)
_outputLoopCount = new Shared<int>(0);
- while (_source.MoveNext(ref currentElement, ref currentKey))
+ while (_source.MoveNext(ref currentElement!, ref currentKey))
{
if ((_outputLoopCount.Value++ & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
/// <typeparam name="TSource"></typeparam>
internal sealed class LastQueryOperator<TSource> : UnaryQueryOperator<TSource, TSource>
{
- private readonly Func<TSource, bool> _predicate; // The optional predicate used during the search.
+ private readonly Func<TSource, bool>? _predicate; // The optional predicate used during the search.
private readonly bool _prematureMergeNeeded; // Whether to prematurely merge the input of this operator.
//---------------------------------------------------------------------------------------
// child - the child whose data we will search
//
- internal LastQueryOperator(IEnumerable<TSource> child, Func<TSource, bool> predicate)
+ internal LastQueryOperator(IEnumerable<TSource> child, Func<TSource, bool>? predicate)
: base(child)
{
Debug.Assert(child != null, "child data source cannot be null");
private class LastQueryOperatorEnumerator<TKey> : QueryOperatorEnumerator<TSource, int>
{
private readonly QueryOperatorEnumerator<TSource, TKey> _source; // The data source to enumerate.
- private readonly Func<TSource, bool> _predicate; // The optional predicate used during the search.
+ private readonly Func<TSource, bool>? _predicate; // The optional predicate used during the search.
private bool _alreadySearched; // Set once the enumerator has performed the search.
private readonly int _partitionId; // ID of this partition
//
internal LastQueryOperatorEnumerator(
- QueryOperatorEnumerator<TSource, TKey> source, Func<TSource, bool> predicate,
+ QueryOperatorEnumerator<TSource, TKey> source, Func<TSource, bool>? predicate,
LastQueryOperatorState<TKey> operatorState, CountdownEvent sharedBarrier, CancellationToken cancelToken,
IComparer<TKey> keyComparer, int partitionId)
{
// Straightforward IEnumerator<T> methods.
//
- internal override bool MoveNext(ref TSource currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TSource currentElement, ref int currentKey)
{
Debug.Assert(_source != null);
}
// Look for the greatest element.
- TSource candidate = default(TSource);
- TKey candidateKey = default(TKey);
+ TSource candidate = default(TSource)!;
+ TKey candidateKey = default(TKey)!;
bool candidateFound = false;
try
{
int loopCount = 0; //counter to help with cancellation
- TSource value = default(TSource);
- TKey key = default(TKey);
- while (_source.MoveNext(ref value, ref key))
+ TSource value = default(TSource)!;
+ TKey key = default(TKey)!;
+ while (_source.MoveNext(ref value!, ref key))
{
if ((loopCount & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
private class LastQueryOperatorState<TKey>
{
- internal TKey _key;
+ internal TKey _key = default!;
internal int _partitionId = -1;
}
}
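LastQueryOperator treats a null predicate as "match every element" and tracks the best candidate in default!-initialized locals. A sequential, self-contained sketch of that convention (TryGetLast is an illustrative name, not a PLINQ member):

using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;

internal static class LastMatch
{
    internal static bool TryGetLast<T>(
        IEnumerable<T> source, Func<T, bool>? predicate, [MaybeNullWhen(false)] out T last)
    {
        bool found = false;
        last = default!;                         // same default! idiom as the candidate locals
        foreach (T item in source)
        {
            // A null predicate means every element qualifies.
            if (predicate == null || predicate(item))
            {
                last = item;
                found = true;
            }
        }
        return found;
    }
}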
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
{
private readonly QueryOperatorEnumerator<TSource, TKey> _source; // The data source to reverse.
private readonly CancellationToken _cancellationToken;
- private List<Pair<TSource, TKey>> _buffer; // Our buffer. [allocate in moveNext to avoid false-sharing]
- private Shared<int> _bufferIndex; // Our current index within the buffer. [allocate in moveNext to avoid false-sharing]
+ private List<Pair<TSource, TKey>>? _buffer; // Our buffer. [allocate in moveNext to avoid false-sharing]
+ private Shared<int>? _bufferIndex; // Our current index within the buffer. [allocate in moveNext to avoid false-sharing]
//---------------------------------------------------------------------------------------
// Instantiates a new select enumerator.
// Straightforward IEnumerator<T> methods.
//
- internal override bool MoveNext(ref TSource currentElement, ref TKey currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TSource currentElement, ref TKey currentKey)
{
// If the buffer has not been created, we will generate it lazily on demand.
if (_buffer == null)
_bufferIndex = new Shared<int>(0);
// Buffer all of our data.
_buffer = new List<Pair<TSource, TKey>>();
- TSource current = default(TSource);
- TKey key = default(TKey);
+ TSource current = default(TSource)!;
+ TKey key = default(TKey)!;
int i = 0;
- while (_source.MoveNext(ref current, ref key))
+ while (_source.MoveNext(ref current!, ref key))
{
if ((i++ & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
_bufferIndex.Value++;
}
}
+ Debug.Assert(_bufferIndex != null);
// Continue yielding elements from our buffer.
if (--_bufferIndex.Value >= 0)
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
/// <typeparam name="TOutput"></typeparam>
internal sealed class SelectManyQueryOperator<TLeftInput, TRightInput, TOutput> : UnaryQueryOperator<TLeftInput, TOutput>
{
- private readonly Func<TLeftInput, IEnumerable<TRightInput>> _rightChildSelector; // To select a new child each iteration.
- private readonly Func<TLeftInput, int, IEnumerable<TRightInput>> _indexedRightChildSelector; // To select a new child each iteration.
- private readonly Func<TLeftInput, TRightInput, TOutput> _resultSelector; // An optional result selection function.
+ private readonly Func<TLeftInput, IEnumerable<TRightInput>>? _rightChildSelector; // To select a new child each iteration.
+ private readonly Func<TLeftInput, int, IEnumerable<TRightInput>>? _indexedRightChildSelector; // To select a new child each iteration.
+ private readonly Func<TLeftInput, TRightInput, TOutput>? _resultSelector; // An optional result selection function.
private bool _prematureMerge = false; // Whether to prematurely merge the input of this operator.
private bool _limitsParallelism = false; // Whether this operator limits parallelism.
//
internal SelectManyQueryOperator(IEnumerable<TLeftInput> leftChild,
- Func<TLeftInput, IEnumerable<TRightInput>> rightChildSelector,
- Func<TLeftInput, int, IEnumerable<TRightInput>> indexedRightChildSelector,
- Func<TLeftInput, TRightInput, TOutput> resultSelector)
+ Func<TLeftInput, IEnumerable<TRightInput>>? rightChildSelector,
+ Func<TLeftInput, int, IEnumerable<TRightInput>>? indexedRightChildSelector,
+ Func<TLeftInput, TRightInput, TOutput>? resultSelector)
: base(leftChild)
{
Debug.Assert(leftChild != null, "left child data source cannot be null");
{
private readonly QueryOperatorEnumerator<TLeftInput, int> _leftSource; // The left data source to enumerate.
private readonly SelectManyQueryOperator<TLeftInput, TRightInput, TOutput> _selectManyOperator; // The select many operator to use.
- private IEnumerator<TRightInput> _currentRightSource; // The current enumerator we're using.
- private IEnumerator<TOutput> _currentRightSourceAsOutput; // If we need to access the enumerator for output directly (no result selector).
- private Mutables _mutables; // bag of frequently mutated value types [allocate in moveNext to avoid false-sharing]
+ private IEnumerator<TRightInput>? _currentRightSource; // The current enumerator we're using.
+ private IEnumerator<TOutput>? _currentRightSourceAsOutput; // If we need to access the enumerator for output directly (no result selector).
+ private Mutables? _mutables; // bag of frequently mutated value types [allocate in moveNext to avoid false-sharing]
private readonly CancellationToken _cancellationToken;
private class Mutables
{
internal int _currentRightSourceIndex = -1; // The index for the right data source.
- internal TLeftInput _currentLeftElement; // The current element in the left data source.
+ internal TLeftInput _currentLeftElement = default!; // The current element in the left data source.
internal int _currentLeftSourceIndex; // The current key in the left data source.
internal int _lhsCount; //counts the number of lhs elements enumerated. used for cancellation testing.
}
// Straightforward IEnumerator<T> methods.
//
- internal override bool MoveNext(ref TOutput currentElement, ref Pair<int, int> currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TOutput currentElement, ref Pair<int, int> currentKey)
{
while (true)
{
// We don't have a "current" right enumerator to use. We have to fetch the next
// one. If the left has run out of elements, however, we're done and just return
// false right away.
- if (!_leftSource.MoveNext(ref _mutables._currentLeftElement, ref _mutables._currentLeftSourceIndex))
+ if (!_leftSource.MoveNext(ref _mutables._currentLeftElement!, ref _mutables._currentLeftSourceIndex))
{
return false;
}
+ Debug.Assert(_selectManyOperator._indexedRightChildSelector != null);
// Use the source selection routine to create a right child.
IEnumerable<TRightInput> rightChild =
_selectManyOperator._indexedRightChildSelector(_mutables._currentLeftElement, _mutables._currentLeftSourceIndex);
if (_currentRightSource.MoveNext())
{
+ Debug.Assert(_mutables != null);
_mutables._currentRightSourceIndex++;
// If the inner data source has an element, we can yield it.
{
private readonly QueryOperatorEnumerator<TLeftInput, TLeftKey> _leftSource; // The left data source to enumerate.
private readonly SelectManyQueryOperator<TLeftInput, TRightInput, TOutput> _selectManyOperator; // The select many operator to use.
- private IEnumerator<TRightInput> _currentRightSource; // The current enumerator we're using.
- private IEnumerator<TOutput> _currentRightSourceAsOutput; // If we need to access the enumerator for output directly (no result selector).
- private Mutables _mutables; // bag of frequently mutated value types [allocate in moveNext to avoid false-sharing]
+ private IEnumerator<TRightInput>? _currentRightSource; // The current enumerator we're using.
+ private IEnumerator<TOutput>? _currentRightSourceAsOutput; // If we need to access the enumerator for output directly (no result selector).
+ private Mutables? _mutables; // bag of frequently mutated value types [allocate in moveNext to avoid false-sharing]
private readonly CancellationToken _cancellationToken;
private class Mutables
{
internal int _currentRightSourceIndex = -1; // The index for the right data source.
- internal TLeftInput _currentLeftElement; // The current element in the left data source.
- internal TLeftKey _currentLeftKey; // The current key in the left data source.
+ internal TLeftInput _currentLeftElement = default!; // The current element in the left data source.
+ internal TLeftKey _currentLeftKey = default!; // The current key in the left data source.
internal int _lhsCount; // Counts the number of lhs elements enumerated. used for cancellation testing.
}
// Straightforward IEnumerator<T> methods.
//
- internal override bool MoveNext(ref TOutput currentElement, ref Pair<TLeftKey, int> currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TOutput currentElement, ref Pair<TLeftKey, int> currentKey)
{
while (true)
{
// one. If the left has run out of elements, however, we're done and just return
// false right away.
- if (!_leftSource.MoveNext(ref _mutables._currentLeftElement, ref _mutables._currentLeftKey))
+ if (!_leftSource.MoveNext(ref _mutables._currentLeftElement!, ref _mutables._currentLeftKey))
{
return false;
}
+ Debug.Assert(_selectManyOperator._rightChildSelector != null);
// Use the source selection routine to create a right child.
IEnumerable<TRightInput> rightChild = _selectManyOperator._rightChildSelector(_mutables._currentLeftElement);
if (_currentRightSource.MoveNext())
{
+ Debug.Assert(_mutables != null);
_mutables._currentRightSourceIndex++;
// If the inner data source has an element, we can yield it.
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
// Straightforward IEnumerator<T> methods.
//
- internal override bool MoveNext(ref TOutput currentElement, ref TKey currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TOutput currentElement, ref TKey currentKey)
{
// So long as the source has a next element, we have an element.
- TInput element = default(TInput);
- if (_source.MoveNext(ref element, ref currentKey))
+ TInput element = default(TInput)!;
+ if (_source.MoveNext(ref element!, ref currentKey))
{
Debug.Assert(_selector != null, "expected a compiled operator");
currentElement = _selector(element);
/// <typeparam name="TSource"></typeparam>
internal sealed class SingleQueryOperator<TSource> : UnaryQueryOperator<TSource, TSource>
{
- private readonly Func<TSource, bool> _predicate; // The optional predicate used during the search.
+ private readonly Func<TSource, bool>? _predicate; // The optional predicate used during the search.
//---------------------------------------------------------------------------------------
// Initializes a new Single operator.
// child - the child whose data we will search
//
- internal SingleQueryOperator(IEnumerable<TSource> child, Func<TSource, bool> predicate)
+ internal SingleQueryOperator(IEnumerable<TSource> child, Func<TSource, bool>? predicate)
: base(child)
{
Debug.Assert(child != null, "child data source cannot be null");
private class SingleQueryOperatorEnumerator<TKey> : QueryOperatorEnumerator<TSource, int>
{
private readonly QueryOperatorEnumerator<TSource, TKey> _source; // The data source to enumerate.
- private readonly Func<TSource, bool> _predicate; // The optional predicate used during the search.
+ private readonly Func<TSource, bool>? _predicate; // The optional predicate used during the search.
private bool _alreadySearched; // Whether we have searched our input already.
private bool _yieldExtra; // Whether we found more than one element.
//
internal SingleQueryOperatorEnumerator(QueryOperatorEnumerator<TSource, TKey> source,
- Func<TSource, bool> predicate, Shared<int> totalElementCount)
+ Func<TSource, bool>? predicate, Shared<int> totalElementCount)
{
Debug.Assert(source != null);
Debug.Assert(totalElementCount != null);
// Straightforward IEnumerator<T> methods.
//
- internal override bool MoveNext(ref TSource currentElement, ref int currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TSource currentElement, ref int currentKey)
{
Debug.Assert(_source != null);
if (_yieldExtra)
{
_yieldExtra = false;
- currentElement = default(TSource);
+ currentElement = default(TSource)!;
currentKey = 0;
return true;
}
// Scan our input, looking for a match.
bool found = false;
- TSource current = default(TSource);
- TKey keyUnused = default(TKey);
+ TSource current = default(TSource)!;
+ TKey keyUnused = default(TKey)!;
- while (_source.MoveNext(ref current, ref keyUnused))
+ while (_source.MoveNext(ref current!, ref keyUnused))
{
// If the predicate is null or the current element satisfies it, we will remember
// it so that we can yield it later. We then proceed with scanning the input
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
//
internal SortQueryOperator(IEnumerable<TInputOutput> source, Func<TInputOutput, TSortKey> keySelector,
- IComparer<TSortKey> comparer, bool descending)
+ IComparer<TSortKey>? comparer, bool descending)
: base(source, true)
{
Debug.Assert(keySelector != null, "key selector must not be null");
//
IOrderedEnumerable<TInputOutput> IOrderedEnumerable<TInputOutput>.CreateOrderedEnumerable<TKey2>(
- Func<TInputOutput, TKey2> key2Selector, IComparer<TKey2> key2Comparer, bool descending)
+ Func<TInputOutput, TKey2> key2Selector, IComparer<TKey2>? key2Comparer, bool descending)
{
key2Comparer = key2Comparer ?? Util.GetDefaultComparer<TKey2>();
internal class SortQueryOperatorEnumerator<TInputOutput, TKey, TSortKey> : QueryOperatorEnumerator<TInputOutput, TSortKey>
{
- private readonly QueryOperatorEnumerator<TInputOutput, TKey> _source; // Data source to sort.
+ private readonly QueryOperatorEnumerator<TInputOutput, TKey>? _source; // Data source to sort.
private readonly Func<TInputOutput, TSortKey> _keySelector; // Key selector used when sorting.
//---------------------------------------------------------------------------------------
// in memory, and the data sorted.
//
- internal override bool MoveNext(ref TInputOutput currentElement, ref TSortKey currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TInputOutput currentElement, ref TSortKey currentKey)
{
Debug.Assert(_source != null);
- TKey keyUnused = default(TKey);
- if (!_source.MoveNext(ref currentElement, ref keyUnused))
+ TKey keyUnused = default(TKey)!;
+ if (!_source.MoveNext(ref currentElement!, ref keyUnused))
{
return false;
}
using System.Collections.Generic;
using System.Threading;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
private readonly CountdownEvent _sharedBarrier; // To separate the search/yield phases.
private readonly CancellationToken _cancellationToken; // Indicates that cancellation has occurred.
- private List<Pair<TResult, TKey>> _buffer; // Our buffer.
- private Shared<int> _bufferIndex; // Our current index within the buffer. [allocate in moveNext to avoid false-sharing]
+ private List<Pair<TResult, TKey>>? _buffer; // Our buffer.
+ private Shared<int>? _bufferIndex; // Our current index within the buffer. [allocate in moveNext to avoid false-sharing]
//---------------------------------------------------------------------------------------
// Instantiates a new select enumerator.
// Straightforward IEnumerator<T> methods.
//
- internal override bool MoveNext(ref TResult currentElement, ref TKey currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TResult currentElement, ref TKey currentKey)
{
Debug.Assert(_sharedIndices != null);
// Enter the search phase. In this phase, all partitions race to populate
// the shared indices with their first 'count' contiguous elements.
- TResult current = default(TResult);
- TKey index = default(TKey);
+ TResult current = default(TResult)!;
+ TKey index = default(TKey)!;
int i = 0; //counter to help with cancellation
- while (buffer.Count < _count && _source.MoveNext(ref current, ref index))
+ while (buffer.Count < _count && _source.MoveNext(ref current!, ref index))
{
if ((i++ & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
// index of the 'count'-th input element.
if (_take)
{
+ Debug.Assert(_buffer != null && _bufferIndex != null);
// In the case of a Take, we will yield each element from our buffer for which
// the element is lesser than the 'count'-th index found.
if (_count == 0 || _bufferIndex.Value >= _buffer.Count - 1)
}
else
{
- TKey minKey = default(TKey);
+ TKey minKey = default(TKey)!;
// If the count to skip was greater than 0, look at the buffer.
if (_count > 0)
minKey = _sharedIndices.MaxValue;
+ Debug.Assert(_buffer != null && _bufferIndex != null);
// In the case of a skip, we must skip over elements whose index is lesser than the
// 'count'-th index found. Once we've exhausted the buffer, we must go back and continue
// enumerating the data source until it is empty.
}
// Lastly, so long as our input still has elements, they will be yieldable.
- if (_source.MoveNext(ref currentElement, ref currentKey))
+ if (_source.MoveNext(ref currentElement!, ref currentKey))
{
Debug.Assert(_count <= 0 || _keyComparer.Compare(currentKey, minKey) > 0,
"expected remaining element indices to be greater than smallest");
using System.Collections.Generic;
using System.Threading;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
{
// Predicate function used to decide when to stop yielding elements. One pair is used for
// index-based evaluation (i.e. it is passed the index as well as the element's value).
- private readonly Func<TResult, bool> _predicate;
- private readonly Func<TResult, int, bool> _indexedPredicate;
+ private readonly Func<TResult, bool>? _predicate;
+ private readonly Func<TResult, int, bool>? _indexedPredicate;
private readonly bool _take; // Whether to take (true) or skip (false).
private bool _prematureMerge = false; // Whether to prematurely merge the input of this operator.
//
internal TakeOrSkipWhileQueryOperator(IEnumerable<TResult> child,
- Func<TResult, bool> predicate,
- Func<TResult, int, bool> indexedPredicate, bool take)
+ Func<TResult, bool>? predicate,
+ Func<TResult, int, bool>? indexedPredicate, bool take)
: base(child)
{
Debug.Assert(child != null, "child data source cannot be null");
CountdownEvent sharedBarrier = new CountdownEvent(partitionCount);
Debug.Assert(_indexedPredicate == null || typeof(TKey) == typeof(int));
- Func<TResult, TKey, bool> convertedIndexedPredicate = (Func<TResult, TKey, bool>)(object)_indexedPredicate;
+ Func<TResult, TKey, bool>? convertedIndexedPredicate = (Func<TResult, TKey, bool>?)(object?)_indexedPredicate;
PartitionedStream<TResult, TKey> partitionedStream =
new PartitionedStream<TResult, TKey>(partitionCount, inputStream.KeyComparer, OrdinalIndexState);
return Child.AsSequentialQuery(token).TakeWhile(_indexedPredicate);
}
+ Debug.Assert(_predicate != null);
return Child.AsSequentialQuery(token).TakeWhile(_predicate);
}
return wrappedIndexedChild.SkipWhile(_indexedPredicate);
}
+ Debug.Assert(_predicate != null);
IEnumerable<TResult> wrappedChild = CancellableEnumerable.Wrap(Child.AsSequentialQuery(token), token);
return wrappedChild.SkipWhile(_predicate);
}
private class TakeOrSkipWhileQueryOperatorEnumerator<TKey> : QueryOperatorEnumerator<TResult, TKey>
{
private readonly QueryOperatorEnumerator<TResult, TKey> _source; // The data source to enumerate.
- private readonly Func<TResult, bool> _predicate; // The actual predicate function.
- private readonly Func<TResult, TKey, bool> _indexedPredicate; // The actual index-based predicate function.
+ private readonly Func<TResult, bool>? _predicate; // The actual predicate function.
+ private readonly Func<TResult, TKey, bool>? _indexedPredicate; // The actual index-based predicate function.
private readonly bool _take; // Whether to execute a take- (true) or skip-while (false).
private readonly IComparer<TKey> _keyComparer; // Comparer for the order keys.
private readonly CountdownEvent _sharedBarrier; // To separate the search/yield phases.
private readonly CancellationToken _cancellationToken; // Token used to cancel this operator.
- private List<Pair<TResult, TKey>> _buffer; // Our buffer.
- private Shared<int> _bufferIndex; // Our current index within the buffer. [allocate in moveNext to avoid false-sharing]
+ private List<Pair<TResult, TKey>>? _buffer; // Our buffer.
+ private Shared<int>? _bufferIndex; // Our current index within the buffer. [allocate in moveNext to avoid false-sharing]
private int _updatesSeen; // How many updates has this enumerator observed? (Each other enumerator will contribute one update.)
- private TKey _currentLowKey; // The lowest key rejected by one of the other enumerators.
+ private TKey _currentLowKey = default!; // The lowest key rejected by one of the other enumerators.
//---------------------------------------------------------------------------------------
//
internal TakeOrSkipWhileQueryOperatorEnumerator(
- QueryOperatorEnumerator<TResult, TKey> source, Func<TResult, bool> predicate, Func<TResult, TKey, bool> indexedPredicate, bool take,
+ QueryOperatorEnumerator<TResult, TKey> source, Func<TResult, bool>? predicate, Func<TResult, TKey, bool>? indexedPredicate, bool take,
OperatorState<TKey> operatorState, CountdownEvent sharedBarrier, CancellationToken cancelToken, IComparer<TKey> keyComparer)
{
Debug.Assert(source != null);
// Straightforward IEnumerator<T> methods.
//
- internal override bool MoveNext(ref TResult currentElement, ref TKey currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TResult currentElement, ref TKey currentKey)
{
// If the buffer has not been created, we will generate it lazily on demand.
if (_buffer == null)
try
{
- TResult current = default(TResult);
- TKey key = default(TKey);
+ TResult current = default(TResult)!;
+ TKey key = default(TKey)!;
int i = 0; //counter to help with cancellation
- while (_source.MoveNext(ref current, ref key))
+ while (_source.MoveNext(ref current!, ref key))
{
if ((i++ & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
_bufferIndex = new Shared<int>(-1);
}
+ Debug.Assert(_bufferIndex != null);
// Now either enter (or continue) the yielding phase. As soon as we reach this, we know the
// current shared "low false" value is the absolute lowest with a false.
if (_take)
}
// Lastly, so long as our input still has elements, they will be yieldable.
- if (_source.MoveNext(ref currentElement, ref currentKey))
+ if (_source.MoveNext(ref currentElement!, ref currentKey))
{
Debug.Assert(_keyComparer.Compare(currentKey, _operatorState._currentLowKey) > 0,
"expected remaining element indices to be greater than smallest");
private class OperatorState<TKey>
{
internal volatile int _updatesDone = 0;
- internal TKey _currentLowKey;
+ internal TKey _currentLowKey = default!;
}
}
}
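The partition-opening code above reinterprets the indexed predicate from Func<TResult, int, bool>? to Func<TResult, TKey, bool>? through an (object?) cast, justified by the preceding Debug.Assert that TKey is int. A self-contained sketch of that cast (Convert below is an illustrative helper):

using System;
using System.Diagnostics;

internal static class DelegateReinterpret
{
    // Valid at runtime only when TKey really is int; otherwise the cast throws,
    // which is why the assert guards it.
    internal static Func<T, TKey, bool>? Convert<T, TKey>(Func<T, int, bool>? indexed)
    {
        Debug.Assert(indexed == null || typeof(TKey) == typeof(int));
        return (Func<T, TKey, bool>?)(object?)indexed;
    }
}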
using System.Collections.Generic;
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Linq.Parallel
private readonly QueryOperatorEnumerator<TInputOutput, TKey> _source; // The data source to enumerate.
private readonly Func<TInputOutput, bool> _predicate; // The predicate used for filtering.
private readonly CancellationToken _cancellationToken;
- private Shared<int> _outputLoopCount;
+ private Shared<int>? _outputLoopCount;
//-----------------------------------------------------------------------------------
// Instantiates a new enumerator.
// Moves to the next matching element in the underlying data stream.
//
- internal override bool MoveNext(ref TInputOutput currentElement, ref TKey currentKey)
+ internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TInputOutput currentElement, ref TKey currentKey)
{
Debug.Assert(_predicate != null, "expected a compiled operator");
if (_outputLoopCount == null)
_outputLoopCount = new Shared<int>(0);
- while (_source.MoveNext(ref currentElement, ref currentKey))
+ while (_source.MoveNext(ref currentElement!, ref currentKey))
{
if ((_outputLoopCount.Value++ & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(_cancellationToken);
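The loops above only poll for cancellation when (counter & CancellationState.POLL_INTERVAL) == 0, so POLL_INTERVAL acts as a power-of-two-minus-one mask and the token is consulted once every mask + 1 iterations. A sketch of the same throttled poll using the public CancellationToken API (the mask value below is an assumption, not the PLINQ constant):

using System.Threading;

internal static class ThrottledPolling
{
    private const int PollIntervalMask = 63;     // assumed value; must be 2^n - 1

    internal static long Sum(int[] values, CancellationToken token)
    {
        long sum = 0;
        for (int i = 0; i < values.Length; i++)
        {
            // Check the token only every (PollIntervalMask + 1) iterations.
            if ((i & PollIntervalMask) == 0)
                token.ThrowIfCancellationRequested();
            sum += values[i];
        }
        return sum;
    }
}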
{
Debug.Assert(IsIndexible == (_op.OrdinalIndexState == OrdinalIndexState.Indexable));
+ Debug.Assert(_settings.ExecutionMode != null && _settings.DegreeOfParallelism != null);
if (_settings.ExecutionMode.Value == ParallelExecutionMode.Default && _op.LimitsParallelism)
{
// We need to run the query sequentially, up to and including this operator
internal class CancellationState
{
// a cancellation signal that can be set internally to prompt early query termination.
- internal CancellationTokenSource InternalCancellationTokenSource;
+ internal CancellationTokenSource? InternalCancellationTokenSource;
// the external cancellationToken that the user sets to ask for the query to terminate early.
// this has to be tracked explicitly so that an OCE(externalToken) can be thrown as the query
internal CancellationToken ExternalCancellationToken;
// A combined token Source for internal/external cancellation, defining the total cancellation state.
- internal CancellationTokenSource MergedCancellationTokenSource;
+ internal CancellationTokenSource? MergedCancellationTokenSource;
// A combined token for internal/external cancellation, defining the total cancellation state.
internal CancellationToken MergedCancellationToken
/// </summary>
protected override void SpoolingWork()
{
- TOutput element = default(TOutput);
- TKey key = default(TKey);
+ TOutput element = default(TOutput)!;
+ TKey key = default(TKey)!;
int chunkSize = _autoBuffered ? PRODUCER_BUFFER_AUTO_SIZE : 1;
Pair<TKey, TOutput>[] chunk = new Pair<TKey, TOutput>[chunkSize];
do
{
lastChunkSize = 0;
- while (lastChunkSize < chunkSize && partition.MoveNext(ref element, ref key))
+ while (lastChunkSize < chunkSize && partition.MoveNext(ref element!, ref key))
{
chunk[lastChunkSize] = new Pair<TKey, TOutput>(key, element);
lastChunkSize++;
/// <typeparam name="TKey"></typeparam>
internal class OrderPreservingSpoolingTask<TInputOutput, TKey> : SpoolingTaskBase
{
- private readonly Shared<TInputOutput[]> _results; // The destination array cell into which data is placed.
+ private readonly Shared<TInputOutput[]?> _results; // The destination array cell into which data is placed.
private readonly SortHelper<TInputOutput> _sortHelper; // A helper that performs the sorting.
//-----------------------------------------------------------------------------------
private OrderPreservingSpoolingTask(
int taskIndex, QueryTaskGroupState groupState,
- Shared<TInputOutput[]> results, SortHelper<TInputOutput> sortHelper) :
+ Shared<TInputOutput[]?> results, SortHelper<TInputOutput> sortHelper) :
base(taskIndex, groupState)
{
Debug.Assert(groupState != null);
internal static void Spool(
QueryTaskGroupState groupState, PartitionedStream<TInputOutput, TKey> partitions,
- Shared<TInputOutput[]> results, TaskScheduler taskScheduler)
+ Shared<TInputOutput[]?> results, TaskScheduler taskScheduler)
{
Debug.Assert(groupState != null);
Debug.Assert(partitions != null);
// A static function used by s_runTaskSynchronouslyDelegate, which is used by RunSynchronously
//
- private static void RunTaskSynchronously(object o)
+ private static void RunTaskSynchronously(object? o)
{
+ Debug.Assert(o != null);
((QueryTask)o).BaseWork(null);
}
// A static delegate used by RunSynchronously
//
- private static readonly Action<object> s_runTaskSynchronouslyDelegate = RunTaskSynchronously;
+ private static readonly Action<object?> s_runTaskSynchronouslyDelegate = RunTaskSynchronously;
//-----------------------------------------------------------------------------------
// Executes the task synchronously (on the current thread).
// Executes the task asynchronously (elsewhere, unspecified).
//
- private static readonly Action<object> s_baseWorkDelegate = delegate (object o)
+ private static readonly Action<object?> s_baseWorkDelegate = delegate (object? o)
{
+ Debug.Assert(o != null);
((QueryTask)o).BaseWork(null);
};
// amount of tracing around the call to the real work API.
//
- private void BaseWork(object unused)
+ private void BaseWork(object? unused)
{
Debug.Assert(unused == null);
TraceHelpers.TraceInfo("[timing]: {0}: Start work {1}", DateTime.Now.Ticks, _taskIndex);
/// </summary>
internal class QueryTaskGroupState
{
- private Task _rootTask; // The task under which all query tasks root.
+ private Task? _rootTask; // The task under which all query tasks root.
private int _alreadyEnded; // Whether the tasks have been waited on already.
private readonly CancellationState _cancellationState; // The cancellation state.
private readonly int _queryId; // Id of this query execution.
bool allOCEsOnTrackedExternalCancellationToken = true;
for (int i = 0; i < flattenedAE.InnerExceptions.Count; i++)
{
- OperationCanceledException oce = flattenedAE.InnerExceptions[i] as OperationCanceledException;
+ OperationCanceledException? oce = flattenedAE.InnerExceptions[i] as OperationCanceledException;
// we only let it pass through iff:
// it is not null, not default, and matches the exact token we were given as being the external token
// Because of the lack of typeof(T).IsValueType we need two pieces of information
// to determine this. default(T) will return a non null for Value Types, except those
// using Nullable<>, that is why we need a second condition.
- if (default(T) != null || Nullable.GetUnderlyingType(typeof(T)) != null)
+ if (default(T)! != null || Nullable.GetUnderlyingType(typeof(T)) != null)
{
// Marshal.SizeOf fails for value types that don't have explicit layouts. We
// just fall back to some arbitrary constant in that case. Is there a better way?
{
// We just enumerate over the entire source data stream, placing each element
// into the destination channel.
- TInputOutput current = default(TInputOutput);
- TIgnoreKey keyUnused = default(TIgnoreKey);
+ TInputOutput current = default(TInputOutput)!;
+ TIgnoreKey keyUnused = default(TIgnoreKey)!;
QueryOperatorEnumerator<TInputOutput, TIgnoreKey> source = _source;
SynchronousChannel<TInputOutput> destination = _destination;
CancellationToken cancelToken = _groupState.CancellationState.MergedCancellationToken;
destination.Init();
- while (source.MoveNext(ref current, ref keyUnused))
+ while (source.MoveNext(ref current!, ref keyUnused))
{
// If an abort has been requested, stop this worker immediately.
if (cancelToken.IsCancellationRequested)
{
// We just enumerate over the entire source data stream, placing each element
// into the destination channel.
- TInputOutput current = default(TInputOutput);
- TIgnoreKey keyUnused = default(TIgnoreKey);
+ TInputOutput current = default(TInputOutput)!;
+ TIgnoreKey keyUnused = default(TIgnoreKey)!;
QueryOperatorEnumerator<TInputOutput, TIgnoreKey> source = _source;
AsynchronousChannel<TInputOutput> destination = _destination;
CancellationToken cancelToken = _groupState.CancellationState.MergedCancellationToken;
- while (source.MoveNext(ref current, ref keyUnused))
+ while (source.MoveNext(ref current!, ref keyUnused))
{
// If an abort has been requested, stop this worker immediately.
if (cancelToken.IsCancellationRequested)
protected override void SpoolingWork()
{
// We just enumerate over the entire source data stream for effect.
- TInputOutput currentUnused = default(TInputOutput);
- TIgnoreKey keyUnused = default(TIgnoreKey);
+ TInputOutput currentUnused = default(TInputOutput)!;
+ TIgnoreKey keyUnused = default(TIgnoreKey)!;
//Note: this only ever runs with a ForAll operator, and ForAllEnumerator performs cancellation checks
- while (_source.MoveNext(ref currentUnused, ref keyUnused))
+ while (_source.MoveNext(ref currentUnused!, ref keyUnused))
;
}
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
using System.Collections.Generic;
+using System.Diagnostics;
using System.Threading;
namespace System.Linq.Parallel
}
catch (Exception ex)
{
- OperationCanceledException oce = ex as OperationCanceledException;
- if (oce != null &&
+ if (ex is OperationCanceledException oce &&
oce.CancellationToken == _groupState.CancellationState.MergedCancellationToken
&& _groupState.CancellationState.MergedCancellationToken.IsCancellationRequested)
{
// TPL will catch and store the exception on the task object. We'll then later
// turn around and wait on it, having the effect of propagating it. In the meantime,
// we want to cooperative cancel all workers.
+ Debug.Assert(_groupState.CancellationState.InternalCancellationTokenSource != null);
_groupState.CancellationState.InternalCancellationTokenSource.Cancel();
// And then repropagate to let TPL catch it.
{
while (true)
{
- TElement elem = default(TElement);
+ TElement elem = default(TElement)!;
try
{
if (!enumerator.MoveNext())
internal static IEnumerable<TElement> WrapQueryEnumerator<TElement, TIgnoreKey>(QueryOperatorEnumerator<TElement, TIgnoreKey> source,
CancellationState cancellationState)
{
- TElement elem = default(TElement);
- TIgnoreKey ignoreKey = default(TIgnoreKey);
+ TElement elem = default(TElement)!;
+ TIgnoreKey ignoreKey = default(TIgnoreKey)!;
try
{
{
try
{
- if (!source.MoveNext(ref elem, ref ignoreKey))
+ if (!source.MoveNext(ref elem!, ref ignoreKey))
{
yield break;
}
{
return t =>
{
- U retval = default(U);
+ U retval = default(U)!;
try
{
retval = f(t);
// See QueryTaskGroupState.WaitAll for the main plinq exception handling logic.
// check for co-operative cancellation.
- OperationCanceledException cancelEx = ex as OperationCanceledException;
- if (cancelEx != null &&
- cancelEx.CancellationToken == cancellationState.ExternalCancellationToken
- && cancellationState.ExternalCancellationToken.IsCancellationRequested)
+ if (ex is OperationCanceledException cancelEx)
{
- return true; // let the OCE(extCT) be rethrown.
- }
+ if (cancelEx.CancellationToken == cancellationState.ExternalCancellationToken
+ && cancellationState.ExternalCancellationToken.IsCancellationRequested)
+ {
+ return true; // let the OCE(extCT) be rethrown.
+ }
- // check for external cancellation which triggered the mergedToken.
- if (cancelEx != null &&
- cancelEx.CancellationToken == cancellationState.MergedCancellationToken
- && cancellationState.MergedCancellationToken.IsCancellationRequested
- && cancellationState.ExternalCancellationToken.IsCancellationRequested)
- {
- return true; // convert internal cancellation back to OCE(extCT).
+ // check for external cancellation which triggered the mergedToken.
+ if (cancelEx.CancellationToken == cancellationState.MergedCancellationToken
+ && cancellationState.MergedCancellationToken.IsCancellationRequested
+ && cancellationState.ExternalCancellationToken.IsCancellationRequested)
+ {
+ return true; // convert internal cancellation back to OCE(extCT).
+ }
}
-
return false;
}
}
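Several hunks in this change, including the cancellation check just above, replace an as cast plus null test with a declaration pattern. A side-by-side sketch of the two equivalent styles (IsCancellationOld/IsCancellationNew are illustrative names):

using System;

internal static class NarrowingStyles
{
    // Old style: as-cast followed by an explicit null check.
    internal static bool IsCancellationOld(Exception ex)
    {
        OperationCanceledException? oce = ex as OperationCanceledException;
        return oce != null && oce.CancellationToken.IsCancellationRequested;
    }

    // New style: the declaration pattern scopes a non-null oce to the expression.
    internal static bool IsCancellationNew(Exception ex)
    {
        return ex is OperationCanceledException oce && oce.CancellationToken.IsCancellationRequested;
    }
}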
// The partitioned stream to return.
PartitionedStream<T, int> returnValue;
- IParallelPartitionable<T> sourceAsPartitionable = source as IParallelPartitionable<T>;
- if (sourceAsPartitionable != null)
+ if (source is IParallelPartitionable<T> sourceAsPartitionable)
{
// The type overrides the partitioning algorithm, so we will use it instead of the default.
// The returned enumerator must be the same size that we requested, otherwise we throw.
//
internal static PartitionedStream<Pair<TElement, THashKey>, int> HashRepartition<TElement, THashKey, TIgnoreKey>(
- PartitionedStream<TElement, TIgnoreKey> source, Func<TElement, THashKey> keySelector, IEqualityComparer<THashKey> keyComparer,
- IEqualityComparer<TElement> elementComparer, CancellationToken cancellationToken)
+ PartitionedStream<TElement, TIgnoreKey> source, Func<TElement, THashKey>? keySelector, IEqualityComparer<THashKey>? keyComparer,
+ IEqualityComparer<TElement>? elementComparer, CancellationToken cancellationToken)
{
TraceHelpers.TraceInfo("PartitionStream<..>.HashRepartitionStream(..):: creating **RE**partitioned stream for nested operator");
return new UnorderedHashRepartitionStream<TElement, THashKey, TIgnoreKey>(source, keySelector, keyComparer, elementComparer, cancellationToken);
}
internal static PartitionedStream<Pair<TElement, THashKey>, TOrderKey> HashRepartitionOrdered<TElement, THashKey, TOrderKey>(
- PartitionedStream<TElement, TOrderKey> source, Func<TElement, THashKey> keySelector, IEqualityComparer<THashKey> keyComparer,
- IEqualityComparer<TElement> elementComparer, CancellationToken cancellationToken)
+ PartitionedStream<TElement, TOrderKey> source, Func<TElement, THashKey>? keySelector, IEqualityComparer<THashKey>? keyComparer,
+ IEqualityComparer<TElement>? elementComparer, CancellationToken cancellationToken)
{
TraceHelpers.TraceInfo("PartitionStream<..>.HashRepartitionStream(..):: creating **RE**partitioned stream for nested operator");
return new OrderedHashRepartitionStream<TElement, THashKey, TOrderKey>(source, keySelector, keyComparer, elementComparer, cancellationToken);
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Parallel
{
private Slot[] slots;
private int count;
private int freeList;
- private readonly IEqualityComparer<TKey> comparer;
+ private readonly IEqualityComparer<TKey>? comparer;
private const int HashCodeMask = 0x7fffffff;
{
}
- internal HashLookup(IEqualityComparer<TKey> comparer)
+ internal HashLookup(IEqualityComparer<TKey>? comparer)
{
this.comparer = comparer;
buckets = new int[7];
}
// Check whether value is in set
- internal bool TryGetValue(TKey key, ref TValue value)
+ internal bool TryGetValue(TKey key, [MaybeNullWhen(false), AllowNull] ref TValue value)
{
return Find(key, false, false, ref value);
}
{
internal TInputOutput[] _chunk;
private int _chunkCount;
- private ListChunk<TInputOutput> _nextChunk;
+ private ListChunk<TInputOutput>? _nextChunk;
private ListChunk<TInputOutput> _tailChunk;
/// <summary>
/// <summary>
/// The next chunk in the linked chain.
/// </summary>
- internal ListChunk<TInputOutput> Next
+ internal ListChunk<TInputOutput>? Next
{
get { return _nextChunk; }
}
/// </summary>
public IEnumerator<TInputOutput> GetEnumerator()
{
- ListChunk<TInputOutput> curr = this;
+ ListChunk<TInputOutput>? curr = this;
while (curr != null)
{
for (int i = 0; i < curr._chunkCount; i++)
/// </summary>
/// <typeparam name="TKey"></typeparam>
/// <typeparam name="TElement"></typeparam>
- internal class Lookup<TKey, TElement> : ILookup<TKey, TElement>
+ internal class Lookup<TKey, TElement> : ILookup<TKey, TElement> where TKey : notnull
{
private readonly IDictionary<TKey, IGrouping<TKey, TElement>> _dict;
private readonly IEqualityComparer<TKey> _comparer;
- private IGrouping<TKey, TElement> _defaultKeyGrouping = null;
+ private IGrouping<TKey, TElement>? _defaultKeyGrouping = null;
internal Lookup(IEqualityComparer<TKey> comparer)
{
{
get
{
- if (_comparer.Equals(key, default(TKey)))
+ if (_comparer.Equals(key, default))
{
if (_defaultKeyGrouping != null)
{
}
else
{
- IGrouping<TKey, TElement> grouping;
+ IGrouping<TKey, TElement>? grouping;
if (_dict.TryGetValue(key, out grouping))
{
return grouping;
public bool Contains(TKey key)
{
- if (_comparer.Equals(key, default(TKey)))
+ if (_comparer.Equals(key, default))
{
return _defaultKeyGrouping != null;
}
internal void Add(IGrouping<TKey, TElement> grouping)
{
- if (_comparer.Equals(grouping.Key, default(TKey)))
+ if (_comparer.Equals(grouping.Key, default))
{
Debug.Assert(_defaultKeyGrouping == null, "Cannot insert two groupings with the default key into a lookup.");
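Lookup<TKey, TElement> now requires TKey : notnull and keeps the grouping whose key equals default in a dedicated field, presumably because the backing dictionary cannot hold a null key. A minimal, self-contained sketch of that arrangement (TinyLookup is illustrative, not the PLINQ type):

using System.Collections.Generic;

internal sealed class TinyLookup<TKey, TElement> where TKey : notnull
{
    private readonly Dictionary<TKey, List<TElement>> _groups = new Dictionary<TKey, List<TElement>>();
    private readonly IEqualityComparer<TKey> _comparer = EqualityComparer<TKey>.Default;
    private List<TElement>? _defaultKeyGroup;    // grouping for the default/null key

    internal void Add(TKey key, TElement element)
    {
        if (_comparer.Equals(key, default!))
        {
            (_defaultKeyGroup ??= new List<TElement>()).Add(element);
            return;
        }

        if (!_groups.TryGetValue(key, out List<TElement>? group))
        {
            _groups[key] = group = new List<TElement>();
        }
        group.Add(element);
    }
}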
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
+using System.Diagnostics.CodeAnalysis;
+
namespace System.Linq.Parallel
{
/// <summary>
// A simple constructor that initializes the first/second fields.
//
- public Pair(T first, U second)
+ public Pair(T first, [MaybeNull, AllowNull] U second)
{
_first = first;
_second = second;
set { _second = value; }
}
}
-}
\ No newline at end of file
+}
internal sealed class PairComparer<T, U> : IComparer<Pair<T, U>>
{
private readonly IComparer<T> _comparer1;
- private readonly IComparer<U> _comparer2;
+ private readonly IComparer<U>? _comparer2;
- public PairComparer(IComparer<T> comparer1, IComparer<U> comparer2)
+ public PairComparer(IComparer<T> comparer1, IComparer<U>? comparer2)
{
_comparer1 = comparer1;
_comparer2 = comparer2;
return result1;
}
+ if (_comparer2 == null)
+ return result1;
+
return _comparer2.Compare(x.Second, y.Second);
}
}
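PairComparer's second comparer is now optional: when it is null, elements that tie on the primary key simply compare as equal. A self-contained sketch of that design on a value tuple (PrimaryThenSecondaryComparer is an illustrative name):

using System.Collections.Generic;

internal sealed class PrimaryThenSecondaryComparer<T, U> : IComparer<(T First, U Second)>
{
    private readonly IComparer<T> _primary;
    private readonly IComparer<U>? _secondary;   // null means "no tie-breaker"

    internal PrimaryThenSecondaryComparer(IComparer<T> primary, IComparer<U>? secondary)
    {
        _primary = primary;
        _secondary = secondary;
    }

    public int Compare((T First, U Second) x, (T First, U Second) y)
    {
        int result = _primary.Compare(x.First, y.First);
        if (result != 0 || _secondary == null)
            return result;                       // ties stay ties without a secondary comparer
        return _secondary.Compare(x.Second, y.Second);
    }
}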
// This data is shared among all partitions.
private readonly QueryTaskGroupState _groupState; // To communicate status, e.g. cancellation.
- private readonly int[][] _sharedIndices; // Shared set of indices used during sorting.
+ private readonly int[]?[] _sharedIndices; // Shared set of indices used during sorting.
private readonly GrowingArray<TKey>[] _sharedKeys; // Shared keys with which to compare elements.
private readonly TInputOutput[][] _sharedValues; // The actual values used for comparisons.
private readonly Barrier[][] _sharedBarriers; // A matrix of barriers used for synchronizing during merges.
internal override TInputOutput[] Sort()
{
// Step 1. Accumulate this partitions' worth of input.
- GrowingArray<TKey> sourceKeys = null;
- List<TInputOutput> sourceValues = null;
+ GrowingArray<TKey>? sourceKeys = null;
+ List<TInputOutput>? sourceValues = null;
BuildKeysFromSource(ref sourceKeys, ref sourceValues);
// Should only be called once per sort helper.
//
- private void BuildKeysFromSource(ref GrowingArray<TKey> keys, ref List<TInputOutput> values)
+ private void BuildKeysFromSource(ref GrowingArray<TKey>? keys, ref List<TInputOutput>? values)
{
values = new List<TInputOutput>();
CancellationToken cancelToken = _groupState.CancellationState.MergedCancellationToken;
try
{
- TInputOutput current = default(TInputOutput);
- TKey currentKey = default(TKey);
- bool hadNext = _source.MoveNext(ref current, ref currentKey);
+ TInputOutput current = default(TInputOutput)!;
+ TKey currentKey = default(TKey)!;
+ bool hadNext = _source.MoveNext(ref current!, ref currentKey);
if (keys == null)
{
keys.Add(currentKey);
values.Add(current);
}
- while (_source.MoveNext(ref current, ref currentKey));
+ while (_source.MoveNext(ref current!, ref currentKey));
}
}
finally
if (partnerIndex < _partitionCount)
{
// Cache references to our local data.
- int[] myIndices = _sharedIndices[_partitionIndex];
+ int[]? myIndices = _sharedIndices[_partitionIndex];
GrowingArray<TKey> myKeys = _sharedKeys[_partitionIndex];
TKey[] myKeysArr = myKeys.InternalArray;
// to hold the merged indices and key/value pairs.
// First, remember a copy of all of the partner's lists.
- int[] rightIndices = _sharedIndices[partnerIndex];
+ int[]? rightIndices = _sharedIndices[partnerIndex];
TKey[] rightKeys = _sharedKeys[partnerIndex].InternalArray;
TInputOutput[] rightValues = _sharedValues[partnerIndex];
// Now allocate the lists into which the merged data will go. Share this
// with the other thread so that it can place data into it as well.
- int[] mergedIndices = null;
+ int[]? mergedIndices = null;
TInputOutput[] mergedValues = new TInputOutput[totalCount];
// Only on the last phase do we need to remember indices and keys.
// copy the values and not the indices or keys.
int m = (totalCount + 1) / 2;
int i = 0, j0 = 0, j1 = 0;
+ Debug.Assert(myIndices != null);
while (i < m)
{
if ((i & CancellationState.POLL_INTERVAL) == 0)
if (j0 < leftCount && (j1 >= rightCount ||
_keyComparer.Compare(myKeysArr[myIndices[j0]],
- rightKeys[rightIndices[j1]]) <= 0))
+ rightKeys[rightIndices![j1]]) <= 0))
{
if (isLastPhase)
{
}
else
{
+ Debug.Assert(mergedIndices != null);
mergedIndices[i] = myIndices[j0];
}
j0++;
}
else
{
+ Debug.Assert(rightIndices != null);
if (isLastPhase)
{
mergedValues[i] = rightValues[rightIndices[j1]];
}
else
{
+ Debug.Assert(mergedIndices != null);
mergedIndices[i] = leftCount + rightIndices[j1];
}
j1++;
// (1) its own indices, keys, and values, stored in the cell that used to hold our data,
// and (2) the arrays into which merged data will go, stored in its shared array cells.
// We will snag references to all of these things.
- int[] leftIndices = _sharedIndices[_partitionIndex];
+ int[]? leftIndices = _sharedIndices[_partitionIndex];
TKey[] leftKeys = _sharedKeys[_partitionIndex].InternalArray;
TInputOutput[] leftValues = _sharedValues[_partitionIndex];
- int[] mergedIndices = _sharedIndices[partnerIndex];
+ int[]? mergedIndices = _sharedIndices[partnerIndex];
GrowingArray<TKey> mergedKeys = _sharedKeys[partnerIndex];
TInputOutput[] mergedValues = _sharedValues[partnerIndex];
// copy the values and not the indices or keys.
int m = (totalCount + 1) / 2;
int i = totalCount - 1, j0 = leftCount - 1, j1 = rightCount - 1;
+ Debug.Assert(myIndices != null);
while (i >= m)
{
if ((i & CancellationState.POLL_INTERVAL) == 0)
CancellationState.ThrowIfCanceled(cancelToken);
if (j0 >= 0 && (j1 < 0 ||
- _keyComparer.Compare(leftKeys[leftIndices[j0]],
+ _keyComparer.Compare(leftKeys[leftIndices![j0]],
myKeysArr[myIndices[j1]]) > 0))
{
+ Debug.Assert(leftIndices != null);
if (isLastPhase)
{
mergedValues[i] = leftValues[leftIndices[j0]];
}
else
{
+ Debug.Assert(mergedIndices != null);
mergedIndices[i] = leftIndices[j0];
}
j0--;
}
else
{
+ Debug.Assert(mergedIndices != null);
mergedIndices[i] = leftCount + myIndices[j1];
}
j1--;
{
private readonly IEqualityComparer<T> _comparer;
- internal WrapperEqualityComparer(IEqualityComparer<T> comparer)
+ internal WrapperEqualityComparer(IEqualityComparer<T>? comparer)
{
if (comparer == null)
{
_comparer = EqualityComparer<T>.Default;
}
else
{
_comparer = comparer;
}
}
using System.Collections.Concurrent;
using System.Collections;
using System.Threading.Tasks;
+using System.Diagnostics.CodeAnalysis;
namespace System.Linq
{
if (!(source is ParallelEnumerableWrapper<TSource> || source is IParallelPartitionable<TSource>))
{
- PartitionerQueryOperator<TSource> partitionerOp = source as PartitionerQueryOperator<TSource>;
- if (partitionerOp != null)
+ if (source is PartitionerQueryOperator<TSource> partitionerOp)
{
if (!partitionerOp.Orderable)
{
throw new InvalidOperationException(SR.PartitionerQueryOperator_OrderingNotSupported);
}
- ParallelEnumerableWrapper wrapper = source as ParallelEnumerableWrapper;
+ ParallelEnumerableWrapper? wrapper = source as ParallelEnumerableWrapper;
if (wrapper == null)
{
throw new InvalidOperationException(SR.ParallelQuery_InvalidNonGenericAsOrderedCall);
}
- return new OrderingQueryOperator<object>(QueryOperator<object>.AsQueryOperator(wrapper), true);
+ return new OrderingQueryOperator<object?>(QueryOperator<object?>.AsQueryOperator(wrapper), true);
}
/// <summary>
if (source == null) throw new ArgumentNullException(nameof(source));
// Ditch the wrapper, if there is one.
- ParallelEnumerableWrapper<TSource> wrapper = source as ParallelEnumerableWrapper<TSource>;
- if (wrapper != null)
+ if (source is ParallelEnumerableWrapper<TSource> wrapper)
{
return wrapper.WrappedEnumerable;
}
public static ParallelQuery<TResult> Join<TOuter, TInner, TKey, TResult>(
this ParallelQuery<TOuter> outer, ParallelQuery<TInner> inner,
Func<TOuter, TKey> outerKeySelector, Func<TInner, TKey> innerKeySelector,
- Func<TOuter, TInner, TResult> resultSelector, IEqualityComparer<TKey> comparer)
+ Func<TOuter, TInner, TResult> resultSelector, IEqualityComparer<TKey>? comparer)
{
if (outer == null) throw new ArgumentNullException(nameof(outer));
if (inner == null) throw new ArgumentNullException(nameof(inner));
public static ParallelQuery<TResult> Join<TOuter, TInner, TKey, TResult>(
this ParallelQuery<TOuter> outer, IEnumerable<TInner> inner,
Func<TOuter, TKey> outerKeySelector, Func<TInner, TKey> innerKeySelector,
- Func<TOuter, TInner, TResult> resultSelector, IEqualityComparer<TKey> comparer)
+ Func<TOuter, TInner, TResult> resultSelector, IEqualityComparer<TKey>? comparer)
{
throw new NotSupportedException(SR.ParallelEnumerable_BinaryOpMustUseAsParallel);
}
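// Usage sketch for the pair of Join overloads above: the IEnumerable<TInner> overload always
// throws, so the right-hand source must be converted with AsParallel(), and the comparer
// argument may now be null to get EqualityComparer<TKey>.Default. The customer/order tuples
// below are hypothetical sample data.
var customers = new[] { (Id: 1, Name: "Ada"), (Id: 2, Name: "Grace") };
var orders = new[] { (CustomerId: 1, Total: 12.5m), (CustomerId: 1, Total: 3.0m) };
var joined = customers.AsParallel()
                      .Join(orders.AsParallel(),
                            c => c.Id,
                            o => o.CustomerId,
                            (c, o) => (c.Name, o.Total),
                            comparer: null);             // null => default key comparer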
public static ParallelQuery<TResult> GroupJoin<TOuter, TInner, TKey, TResult>(
this ParallelQuery<TOuter> outer, ParallelQuery<TInner> inner,
Func<TOuter, TKey> outerKeySelector, Func<TInner, TKey> innerKeySelector,
- Func<TOuter, IEnumerable<TInner>, TResult> resultSelector, IEqualityComparer<TKey> comparer)
+ Func<TOuter, IEnumerable<TInner>, TResult> resultSelector, IEqualityComparer<TKey>? comparer)
{
if (outer == null) throw new ArgumentNullException(nameof(outer));
if (inner == null) throw new ArgumentNullException(nameof(inner));
public static ParallelQuery<TResult> GroupJoin<TOuter, TInner, TKey, TResult>(
this ParallelQuery<TOuter> outer, IEnumerable<TInner> inner,
Func<TOuter, TKey> outerKeySelector, Func<TInner, TKey> innerKeySelector,
- Func<TOuter, IEnumerable<TInner>, TResult> resultSelector, IEqualityComparer<TKey> comparer)
+ Func<TOuter, IEnumerable<TInner>, TResult> resultSelector, IEqualityComparer<TKey>? comparer)
{
throw new NotSupportedException(SR.ParallelEnumerable_BinaryOpMustUseAsParallel);
}
/// <paramref name="source"/> or <paramref name="keySelector"/> is a null reference (Nothing in Visual Basic).
/// </exception>
public static OrderedParallelQuery<TSource> OrderBy<TSource, TKey>(
- this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, IComparer<TKey> comparer)
+ this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, IComparer<TKey>? comparer)
{
if (source == null) throw new ArgumentNullException(nameof(source));
if (keySelector == null) throw new ArgumentNullException(nameof(keySelector));
/// <paramref name="source"/> or <paramref name="keySelector"/> is a null reference (Nothing in Visual Basic).
/// </exception>
public static OrderedParallelQuery<TSource> OrderByDescending<TSource, TKey>(
- this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, IComparer<TKey> comparer)
+ this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, IComparer<TKey>? comparer)
{
if (source == null) throw new ArgumentNullException(nameof(source));
if (keySelector == null) throw new ArgumentNullException(nameof(keySelector));
///
public static OrderedParallelQuery<TSource> ThenBy<TSource, TKey>(
- this OrderedParallelQuery<TSource> source, Func<TSource, TKey> keySelector, IComparer<TKey> comparer)
+ this OrderedParallelQuery<TSource> source, Func<TSource, TKey> keySelector, IComparer<TKey>? comparer)
{
if (source == null) throw new ArgumentNullException(nameof(source));
if (keySelector == null) throw new ArgumentNullException(nameof(keySelector));
///
public static OrderedParallelQuery<TSource> ThenByDescending<TSource, TKey>(
- this OrderedParallelQuery<TSource> source, Func<TSource, TKey> keySelector, IComparer<TKey> comparer)
+ this OrderedParallelQuery<TSource> source, Func<TSource, TKey> keySelector, IComparer<TKey>? comparer)
{
if (source == null) throw new ArgumentNullException(nameof(source));
if (keySelector == null) throw new ArgumentNullException(nameof(keySelector));
/// <paramref name="source"/> or <paramref name="keySelector"/> is a null reference (Nothing in Visual Basic).
/// </exception>
public static ParallelQuery<IGrouping<TKey, TSource>> GroupBy<TSource, TKey>(
- this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, IEqualityComparer<TKey> comparer)
+ this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, IEqualityComparer<TKey>? comparer)
{
if (source == null) throw new ArgumentNullException(nameof(source));
if (keySelector == null) throw new ArgumentNullException(nameof(keySelector));
/// <paramref name="elementSelector"/> is a null reference (Nothing in Visual Basic).
/// </exception>
public static ParallelQuery<IGrouping<TKey, TElement>> GroupBy<TSource, TKey, TElement>(
- this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector, IEqualityComparer<TKey> comparer)
+ this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector, IEqualityComparer<TKey>? comparer)
{
if (source == null) throw new ArgumentNullException(nameof(source));
if (keySelector == null) throw new ArgumentNullException(nameof(keySelector));
/// <paramref name="resultSelector"/> is a null reference (Nothing in Visual Basic).
/// </exception>
public static ParallelQuery<TResult> GroupBy<TSource, TKey, TResult>(
- this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, Func<TKey, IEnumerable<TSource>, TResult> resultSelector, IEqualityComparer<TKey> comparer)
+ this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, Func<TKey, IEnumerable<TSource>, TResult> resultSelector, IEqualityComparer<TKey>? comparer)
{
if (resultSelector == null) throw new ArgumentNullException(nameof(resultSelector));
/// <paramref name="elementSelector"/> or <paramref name="resultSelector"/> is a null reference (Nothing in Visual Basic).
/// </exception>
public static ParallelQuery<TResult> GroupBy<TSource, TKey, TElement, TResult>(
- this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector, Func<TKey, IEnumerable<TElement>, TResult> resultSelector, IEqualityComparer<TKey> comparer)
+ this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector, Func<TKey, IEnumerable<TElement>, TResult> resultSelector, IEqualityComparer<TKey>? comparer)
{
if (resultSelector == null) throw new ArgumentNullException(nameof(resultSelector));
// Return Value:
// The result of aggregation.
//
-
private static T PerformAggregation<T>(this ParallelQuery<T> source,
Func<T, T, T> reduce, T seed, bool seedIsSpecified, bool throwIfEmpty, QueryAggregationOptions options)
{
return op.Aggregate();
}
-
/// <summary>
/// Run an aggregation sequentially. If the user-provided reduction function throws an exception, wrap
/// it with an AggregateException.
throw new InvalidOperationException(SR.NoElements);
}
- acc = (TAccumulate)(object)enumerator.Current;
+ acc = (TAccumulate)(object)enumerator.Current!;
}
while (enumerator.MoveNext())
{
// Non associative aggregations must be run sequentially. We run the query in parallel
// and then perform the reduction over the resulting list.
- return source.PerformSequentialAggregation(default(TSource), false, func);
+ return source.PerformSequentialAggregation(default!, false, func);
}
else
{
// If associative, we can run this aggregation in parallel. The logic of the aggregation
// operator depends on whether the operator is commutative, so we also pass that information
// down to the query planning/execution engine.
- return source.PerformAggregation<TSource>(func, default(TSource), false, true, options);
+ return source.PerformAggregation<TSource>(func, default!, false, true, options);
}
}
if (resultSelector == null) throw new ArgumentNullException(nameof(resultSelector));
return new AssociativeAggregationOperator<TSource, TAccumulate, TResult>(
- source, default(TAccumulate), seedFactory, true, updateAccumulatorFunc, combineAccumulatorsFunc, resultSelector,
+ source, default!, seedFactory, true, updateAccumulatorFunc, combineAccumulatorsFunc, resultSelector,
false, QueryAggregationOptions.AssociativeCommutative).Aggregate();
}
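// Usage sketch for the seed-factory Aggregate overload wired up above: seedFactory produces a
// fresh accumulator per partition, updateAccumulatorFunc folds elements within a partition,
// combineAccumulatorsFunc merges the per-partition results, and resultSelector projects the
// final value. The input array is hypothetical.
int[] samples = Enumerable.Range(1, 1000).ToArray();
double rootMeanSquare = samples.AsParallel().Aggregate(
    seedFactory: () => 0L,
    updateAccumulatorFunc: (acc, x) => acc + (long)x * x,
    combineAccumulatorsFunc: (a, b) => a + b,
    resultSelector: sumOfSquares => Math.Sqrt((double)sumOfSquares / samples.Length));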
if (source == null) throw new ArgumentNullException(nameof(source));
// If the data source is a collection, we can just return the count right away.
- ParallelEnumerableWrapper<TSource> sourceAsWrapper = source as ParallelEnumerableWrapper<TSource>;
- if (sourceAsWrapper != null)
+ if (source is ParallelEnumerableWrapper<TSource> sourceAsWrapper)
{
- ICollection<TSource> sourceAsCollection = sourceAsWrapper.WrappedEnumerable as ICollection<TSource>;
- if (sourceAsCollection != null)
+ if (sourceAsWrapper.WrappedEnumerable is ICollection<TSource> sourceAsCollection)
{
return sourceAsCollection.Count;
}
if (source == null) throw new ArgumentNullException(nameof(source));
// If the data source is a collection, we can just return the count right away.
- ParallelEnumerableWrapper<TSource> sourceAsWrapper = source as ParallelEnumerableWrapper<TSource>;
- if (sourceAsWrapper != null)
+ if (source is ParallelEnumerableWrapper<TSource> sourceAsWrapper)
{
- ICollection<TSource> sourceAsCollection = sourceAsWrapper.WrappedEnumerable as ICollection<TSource>;
- if (sourceAsCollection != null)
+ if (sourceAsWrapper.WrappedEnumerable is ICollection<TSource> sourceAsCollection)
{
return sourceAsCollection.Count;
}
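// Usage note (sketch, hypothetical input): because of the collection fast path above, counting
// a query that merely wraps an ICollection<T> returns Count without forking any work, while
// adding an operator forces real parallel execution.
int[] data = new int[1024];
int cheap = data.AsParallel().Count();                       // fast path: 1024, no query executed
int computed = data.AsParallel().Where(x => x == 0).Count(); // runs the query in parallel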
/// <exception cref="System.OperationCanceledException">
/// The query was canceled.
/// </exception>
+ [return: MaybeNull]
public static TSource Min<TSource>(this ParallelQuery<TSource> source)
{
if (source == null) throw new ArgumentNullException(nameof(source));
/// <exception cref="System.OperationCanceledException">
/// The query was canceled.
/// </exception>
+ [return: MaybeNull]
public static TResult Min<TSource, TResult>(this ParallelQuery<TSource> source, Func<TSource, TResult> selector)
{
return source.Select<TSource, TResult>(selector).Min<TResult>();
/// <exception cref="System.OperationCanceledException">
/// The query was canceled.
/// </exception>
+ [return: MaybeNull]
public static TSource Max<TSource>(this ParallelQuery<TSource> source)
{
if (source == null) throw new ArgumentNullException(nameof(source));
/// <exception cref="System.OperationCanceledException">
/// The query was canceled.
/// </exception>
+ [return: MaybeNull]
public static TResult Max<TSource, TResult>(this ParallelQuery<TSource> source, Func<TSource, TResult> selector)
{
return source.Select<TSource, TResult>(selector).Max<TResult>();
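// Usage note (sketch, hypothetical inputs): with nullable reference types enabled, the
// [return: MaybeNull] annotations on Min/Max above surface as a possibly-null result for
// reference-type elements when the sequence is empty; value-type sequences still throw.
string? longest = Array.Empty<string>().AsParallel().Max();  // null, no exception
// Array.Empty<int>().AsParallel().Max() would still throw InvalidOperationException.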
/// <exception cref="System.OperationCanceledException">
/// The query was canceled.
/// </exception>
- public static bool Contains<TSource>(this ParallelQuery<TSource> source, TSource value, IEqualityComparer<TSource> comparer)
+ public static bool Contains<TSource>(this ParallelQuery<TSource> source, TSource value, IEqualityComparer<TSource>? comparer)
{
if (source == null) throw new ArgumentNullException(nameof(source));
/// <exception cref="System.OperationCanceledException">
/// The query was canceled.
/// </exception>
- public static bool SequenceEqual<TSource>(this ParallelQuery<TSource> first, ParallelQuery<TSource> second, IEqualityComparer<TSource> comparer)
+ public static bool SequenceEqual<TSource>(this ParallelQuery<TSource> first, ParallelQuery<TSource> second, IEqualityComparer<TSource>? comparer)
{
if (first == null) throw new ArgumentNullException(nameof(first));
if (second == null) throw new ArgumentNullException(nameof(second));
/// but would in reality bind to the sequential implementation.
/// </remarks>
[Obsolete(RIGHT_SOURCE_NOT_PARALLEL_STR)]
- public static bool SequenceEqual<TSource>(this ParallelQuery<TSource> first, IEnumerable<TSource> second, IEqualityComparer<TSource> comparer)
+ public static bool SequenceEqual<TSource>(this ParallelQuery<TSource> first, IEnumerable<TSource> second, IEqualityComparer<TSource>? comparer)
{
throw new NotSupportedException(SR.ParallelEnumerable_BinaryOpMustUseAsParallel);
}
/// <paramref name="source"/> is a null reference (Nothing in Visual Basic).
/// </exception>
public static ParallelQuery<TSource> Distinct<TSource>(
- this ParallelQuery<TSource> source, IEqualityComparer<TSource> comparer)
+ this ParallelQuery<TSource> source, IEqualityComparer<TSource>? comparer)
{
if (source == null) throw new ArgumentNullException(nameof(source));
/// <paramref name="first"/> or <paramref name="second"/> is a null reference (Nothing in Visual Basic).
/// </exception>
public static ParallelQuery<TSource> Union<TSource>(
- this ParallelQuery<TSource> first, ParallelQuery<TSource> second, IEqualityComparer<TSource> comparer)
+ this ParallelQuery<TSource> first, ParallelQuery<TSource> second, IEqualityComparer<TSource>? comparer)
{
if (first == null) throw new ArgumentNullException(nameof(first));
if (second == null) throw new ArgumentNullException(nameof(second));
/// </remarks>
[Obsolete(RIGHT_SOURCE_NOT_PARALLEL_STR)]
public static ParallelQuery<TSource> Union<TSource>(
- this ParallelQuery<TSource> first, IEnumerable<TSource> second, IEqualityComparer<TSource> comparer)
+ this ParallelQuery<TSource> first, IEnumerable<TSource> second, IEqualityComparer<TSource>? comparer)
{
throw new NotSupportedException(SR.ParallelEnumerable_BinaryOpMustUseAsParallel);
}
/// <paramref name="first"/> or <paramref name="second"/> is a null reference (Nothing in Visual Basic).
/// </exception>
public static ParallelQuery<TSource> Intersect<TSource>(
- this ParallelQuery<TSource> first, ParallelQuery<TSource> second, IEqualityComparer<TSource> comparer)
+ this ParallelQuery<TSource> first, ParallelQuery<TSource> second, IEqualityComparer<TSource>? comparer)
{
if (first == null) throw new ArgumentNullException(nameof(first));
if (second == null) throw new ArgumentNullException(nameof(second));
/// </remarks>
[Obsolete(RIGHT_SOURCE_NOT_PARALLEL_STR)]
public static ParallelQuery<TSource> Intersect<TSource>(
- this ParallelQuery<TSource> first, IEnumerable<TSource> second, IEqualityComparer<TSource> comparer)
+ this ParallelQuery<TSource> first, IEnumerable<TSource> second, IEqualityComparer<TSource>? comparer)
{
throw new NotSupportedException(SR.ParallelEnumerable_BinaryOpMustUseAsParallel);
}
/// <paramref name="first"/> or <paramref name="second"/> is a null reference (Nothing in Visual Basic).
/// </exception>
public static ParallelQuery<TSource> Except<TSource>(
- this ParallelQuery<TSource> first, ParallelQuery<TSource> second, IEqualityComparer<TSource> comparer)
+ this ParallelQuery<TSource> first, ParallelQuery<TSource> second, IEqualityComparer<TSource>? comparer)
{
if (first == null) throw new ArgumentNullException(nameof(first));
if (second == null) throw new ArgumentNullException(nameof(second));
/// </remarks>
[Obsolete(RIGHT_SOURCE_NOT_PARALLEL_STR)]
public static ParallelQuery<TSource> Except<TSource>(
- this ParallelQuery<TSource> first, IEnumerable<TSource> second, IEqualityComparer<TSource> comparer)
+ this ParallelQuery<TSource> first, IEnumerable<TSource> second, IEqualityComparer<TSource>? comparer)
{
throw new NotSupportedException(SR.ParallelEnumerable_BinaryOpMustUseAsParallel);
}
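// Usage sketch for the set operators above: the comparer parameter is explicitly optional
// (null falls back to EqualityComparer<TSource>.Default), and the IEnumerable<TSource>
// overloads always throw, so both sides must be ParallelQuery<TSource>. Inputs are hypothetical.
string[] left = { "red", "Green", "BLUE" };
string[] right = { "green", "blue", "yellow" };
string[] onlyLeft = left.AsParallel()
                        .Except(right.AsParallel(), StringComparer.OrdinalIgnoreCase)
                        .ToArray();                          // { "red" }, order not guaranteed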
{
if (source == null) throw new ArgumentNullException(nameof(source));
- QueryOperator<TSource> asOperator = source as QueryOperator<TSource>;
-
- if (asOperator != null)
+ if (source is QueryOperator<TSource> asOperator)
{
return asOperator.ExecuteAndGetResultsAsArray();
}
// Allocate a growable list (optionally passing the length as the initial size).
List<TSource> list = new List<TSource>();
IEnumerator<TSource> input;
- QueryOperator<TSource> asOperator = source as QueryOperator<TSource>;
- if (asOperator != null)
+ if (source is QueryOperator<TSource> asOperator)
{
if (asOperator.OrdinalIndexState == OrdinalIndexState.Indexable && asOperator.OutputOrdered)
{
/// The query was canceled.
/// </exception>
public static Dictionary<TKey, TSource> ToDictionary<TSource, TKey>(
- this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector)
+ this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector) where TKey : notnull
{
return ToDictionary(source, keySelector, EqualityComparer<TKey>.Default);
}
/// The query was canceled.
/// </exception>
public static Dictionary<TKey, TSource> ToDictionary<TSource, TKey>(
- this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, IEqualityComparer<TKey> comparer)
+ this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, IEqualityComparer<TKey>? comparer) where TKey : notnull
{
if (source == null) throw new ArgumentNullException(nameof(source));
if (keySelector == null) throw new ArgumentNullException(nameof(keySelector));
// comparer may be null. In that case, the Dictionary constructor will use the default comparer.
Dictionary<TKey, TSource> result = new Dictionary<TKey, TSource>(comparer);
- QueryOperator<TSource> op = source as QueryOperator<TSource>;
+ QueryOperator<TSource>? op = source as QueryOperator<TSource>;
IEnumerator<TSource> input = (op == null) ? source.GetEnumerator() : op.GetEnumerator(ParallelMergeOptions.FullyBuffered, true);
using (input)
/// The query was canceled.
/// </exception>
public static Dictionary<TKey, TElement> ToDictionary<TSource, TKey, TElement>(
- this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector)
+ this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector) where TKey : notnull
{
return ToDictionary(source, keySelector, elementSelector, EqualityComparer<TKey>.Default);
}
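// Usage sketch (hypothetical input): the new 'where TKey : notnull' constraints mirror
// Dictionary<TKey, TValue>, so a key selector that can produce null keys is flagged at compile
// time under nullable reference types; a null comparer still means the default comparer.
string[] names = { "ada", "grace", "linus" };
Dictionary<char, string> byInitial =
    names.AsParallel().ToDictionary(n => n[0], comparer: null);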
/// The query was canceled.
/// </exception>
public static Dictionary<TKey, TElement> ToDictionary<TSource, TKey, TElement>(
- this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector, IEqualityComparer<TKey> comparer)
+ this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector, IEqualityComparer<TKey>? comparer) where TKey : notnull
{
if (source == null) throw new ArgumentNullException(nameof(source));
if (keySelector == null) throw new ArgumentNullException(nameof(keySelector));
// comparer may be null. In that case, the Dictionary constructor will use the default comparer.
Dictionary<TKey, TElement> result = new Dictionary<TKey, TElement>(comparer);
- QueryOperator<TSource> op = source as QueryOperator<TSource>;
+ QueryOperator<TSource>? op = source as QueryOperator<TSource>;
IEnumerator<TSource> input = (op == null) ? source.GetEnumerator() : op.GetEnumerator(ParallelMergeOptions.FullyBuffered, true);
using (input)
/// The query was canceled.
/// </exception>
public static ILookup<TKey, TSource> ToLookup<TSource, TKey>(
- this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector)
+ this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector) where TKey : notnull
{
return ToLookup(source, keySelector, EqualityComparer<TKey>.Default);
}
/// The query was canceled.
/// </exception>
public static ILookup<TKey, TSource> ToLookup<TSource, TKey>(
- this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, IEqualityComparer<TKey> comparer)
+ this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, IEqualityComparer<TKey>? comparer) where TKey : notnull
{
if (source == null) throw new ArgumentNullException(nameof(source));
if (keySelector == null) throw new ArgumentNullException(nameof(keySelector));
Parallel.Lookup<TKey, TSource> lookup = new Parallel.Lookup<TKey, TSource>(comparer);
Debug.Assert(groupings is QueryOperator<IGrouping<TKey, TSource>>);
- QueryOperator<IGrouping<TKey, TSource>> op = groupings as QueryOperator<IGrouping<TKey, TSource>>;
+ QueryOperator<IGrouping<TKey, TSource>>? op = groupings as QueryOperator<IGrouping<TKey, TSource>>;
IEnumerator<IGrouping<TKey, TSource>> input = (op == null) ? groupings.GetEnumerator() : op.GetEnumerator(ParallelMergeOptions.FullyBuffered);
/// The query was canceled.
/// </exception>
public static ILookup<TKey, TElement> ToLookup<TSource, TKey, TElement>(
- this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector)
+ this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector) where TKey : notnull
{
return ToLookup(source, keySelector, elementSelector, EqualityComparer<TKey>.Default);
}
/// The query was canceled.
/// </exception>
public static ILookup<TKey, TElement> ToLookup<TSource, TKey, TElement>(
- this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector, IEqualityComparer<TKey> comparer)
+ this ParallelQuery<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector, IEqualityComparer<TKey>? comparer) where TKey : notnull
{
if (source == null) throw new ArgumentNullException(nameof(source));
if (keySelector == null) throw new ArgumentNullException(nameof(keySelector));
Parallel.Lookup<TKey, TElement> lookup = new Parallel.Lookup<TKey, TElement>(comparer);
Debug.Assert(groupings is QueryOperator<IGrouping<TKey, TElement>>);
- QueryOperator<IGrouping<TKey, TElement>> op = groupings as QueryOperator<IGrouping<TKey, TElement>>;
+ QueryOperator<IGrouping<TKey, TElement>>? op = groupings as QueryOperator<IGrouping<TKey, TElement>>;
IEnumerator<IGrouping<TKey, TElement>> input = (op == null) ? groupings.GetEnumerator() : op.GetEnumerator(ParallelMergeOptions.FullyBuffered);
// defaultIfEmpty - whether to return a default value (true) or throw an
// exception if the output of the query operator is empty
//
-
+ [return: MaybeNull]
private static TSource GetOneWithPossibleDefault<TSource>(
QueryOperator<TSource> queryOp, bool throwIfTwo, bool defaultIfEmpty)
{
if (defaultIfEmpty)
{
- return default(TSource);
+ return default(TSource)!;
}
else
{
.First();
}
- return GetOneWithPossibleDefault(queryOp, false, false);
+ return GetOneWithPossibleDefault(queryOp, false, false)!;
}
/// <summary>
.First(ExceptionAggregator.WrapFunc<TSource, bool>(predicate, settings.CancellationState));
}
- return GetOneWithPossibleDefault(queryOp, false, false);
+ return GetOneWithPossibleDefault(queryOp, false, false)!;
}
/// <summary>
/// <exception cref="System.OperationCanceledException">
/// The query was canceled.
/// </exception>
+ [return: MaybeNull]
public static TSource FirstOrDefault<TSource>(this ParallelQuery<TSource> source)
{
if (source == null) throw new ArgumentNullException(nameof(source));
/// <exception cref="System.OperationCanceledException">
/// The query was canceled.
/// </exception>
+ [return: MaybeNull]
public static TSource FirstOrDefault<TSource>(this ParallelQuery<TSource> source, Func<TSource, bool> predicate)
{
if (source == null) throw new ArgumentNullException(nameof(source));
return ExceptionAggregator.WrapEnumerable(childWithCancelChecks, settings.CancellationState).Last();
}
- return GetOneWithPossibleDefault(queryOp, false, false);
+ return GetOneWithPossibleDefault(queryOp, false, false)!;
}
/// <summary>
.Last(ExceptionAggregator.WrapFunc<TSource, bool>(predicate, settings.CancellationState));
}
- return GetOneWithPossibleDefault(queryOp, false, false);
+ return GetOneWithPossibleDefault(queryOp, false, false)!;
}
/// <summary>
/// <exception cref="System.OperationCanceledException">
/// The query was canceled.
/// </exception>
+ [return: MaybeNull]
public static TSource LastOrDefault<TSource>(this ParallelQuery<TSource> source)
{
// @PERF: optimize for seekable data sources. E.g. if an array, we can
/// <exception cref="System.OperationCanceledException">
/// The query was canceled.
/// </exception>
+ [return: MaybeNull]
public static TSource LastOrDefault<TSource>(this ParallelQuery<TSource> source, Func<TSource, bool> predicate)
{
if (source == null) throw new ArgumentNullException(nameof(source));
// check the Count property and avoid costly fork/join/synchronization.
if (source == null) throw new ArgumentNullException(nameof(source));
- return GetOneWithPossibleDefault(new SingleQueryOperator<TSource>(source, null), true, false);
+ return GetOneWithPossibleDefault(new SingleQueryOperator<TSource>(source, null), true, false)!;
}
/// <summary>
if (source == null) throw new ArgumentNullException(nameof(source));
if (predicate == null) throw new ArgumentNullException(nameof(predicate));
- return GetOneWithPossibleDefault(new SingleQueryOperator<TSource>(source, predicate), true, false);
+ return GetOneWithPossibleDefault(new SingleQueryOperator<TSource>(source, predicate), true, false)!;
}
/// <summary>
/// <exception cref="System.OperationCanceledException">
/// The query was canceled.
/// </exception>
+ [return: MaybeNull]
public static TSource SingleOrDefault<TSource>(this ParallelQuery<TSource> source)
{
// @PERF: optimize for ICollection-typed data sources, i.e. we can just
/// <exception cref="System.OperationCanceledException">
/// The query was canceled.
/// </exception>
+ [return: MaybeNull]
public static TSource SingleOrDefault<TSource>(this ParallelQuery<TSource> source, Func<TSource, bool> predicate)
{
if (source == null) throw new ArgumentNullException(nameof(source));
/// </exception>
public static ParallelQuery<TSource> DefaultIfEmpty<TSource>(this ParallelQuery<TSource> source)
{
- return DefaultIfEmpty<TSource>(source, default(TSource));
+ return DefaultIfEmpty<TSource>(source, default!);
}
/// <summary>
ElementAtQueryOperator<TSource> op = new ElementAtQueryOperator<TSource>(source, index);
TSource result;
- if (op.Aggregate(out result, false))
+ if (op.Aggregate(out result!, false))
{
return result;
}
/// <exception cref="System.OperationCanceledException">
/// The query was canceled.
/// </exception>
+ [return: MaybeNull]
public static TSource ElementAtOrDefault<TSource>(this ParallelQuery<TSource> source, int index)
{
if (source == null) throw new ArgumentNullException(nameof(source));
ElementAtQueryOperator<TSource> op = new ElementAtQueryOperator<TSource>(source, index);
TSource result;
- if (op.Aggregate(out result, true))
+ if (op.Aggregate(out result!, true))
{
return result;
}
}
- return default(TSource);
+ return default(TSource)!;
}
}
}
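// Usage note (sketch, hypothetical inputs): under nullable reference types, the
// [return: MaybeNull] annotations on the *OrDefault operators above surface as possibly-null
// results for reference-type element types, while value-type elements keep returning default(T).
string[] none = Array.Empty<string>();
string? first = none.AsParallel().FirstOrDefault();          // null
string? third = none.AsParallel().ElementAtOrDefault(2);     // null
int missing = Array.Empty<int>().AsParallel().ElementAtOrDefault(2);   // 0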