repo_name
stringlengths 1
52
| repo_creator
stringclasses 6
values | programming_language
stringclasses 4
values | code
stringlengths 0
9.68M
| num_lines
int64 1
234k
|
---|---|---|---|---|
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
namespace DataStructures.LinkedList.SinglyLinkedList
{
public class SinglyLinkedList<T>
{
    // First node of the list; null when the list is empty.
    private SinglyLinkedListNode<T>? head;

    /// <summary>
    /// Adds a new node to the start of the list.
    /// Time complexity: O(1), space complexity: O(1).
    /// </summary>
    /// <param name="data">Contents of the newly added node.</param>
    /// <returns>The node that was added.</returns>
    public SinglyLinkedListNode<T> AddFirst(T data)
    {
        var node = new SinglyLinkedListNode<T>(data) { Next = head };
        head = node;
        return node;
    }

    /// <summary>
    /// Adds a new node to the end of the list.
    /// Time complexity: O(n), space complexity: O(1),
    /// where n is the number of nodes in the list.
    /// </summary>
    /// <param name="data">Contents of the newly added node.</param>
    /// <returns>The node that was added.</returns>
    public SinglyLinkedListNode<T> AddLast(T data)
    {
        var node = new SinglyLinkedListNode<T>(data);

        // Empty list: the new node becomes the head.
        if (head is null)
        {
            head = node;
            return node;
        }

        // Walk to the current tail and append.
        var cursor = head;
        while (cursor.Next is not null)
        {
            cursor = cursor.Next;
        }

        cursor.Next = node;
        return node;
    }

    /// <summary>
    /// Returns the element at position <paramref name="index" /> in the list.
    /// </summary>
    /// <param name="index">Zero-based index of the element to return.</param>
    /// <returns>Element at position <paramref name="index" />.</returns>
    /// <exception cref="ArgumentOutOfRangeException">
    /// Thrown when <paramref name="index" /> is negative or past the end of the list.
    /// </exception>
    public T GetElementByIndex(int index)
    {
        if (index < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(index));
        }

        var cursor = head;
        while (index > 0 && cursor is not null)
        {
            cursor = cursor.Next;
            index--;
        }

        if (cursor is null)
        {
            throw new ArgumentOutOfRangeException(nameof(index));
        }

        return cursor.Data;
    }

    /// <summary>
    /// Counts the nodes in the list. Time complexity: O(n).
    /// </summary>
    /// <returns>Number of nodes currently in the list.</returns>
    public int Length()
    {
        var count = 0;
        for (var cursor = head; cursor is not null; cursor = cursor.Next)
        {
            count++;
        }

        return count;
    }

    /// <summary>
    /// Enumerates the node contents from head to tail.
    /// </summary>
    /// <returns>A lazy sequence of the stored values in list order.</returns>
    public IEnumerable<T> GetListData()
    {
        for (var cursor = head; cursor is not null; cursor = cursor.Next)
        {
            yield return cursor.Data;
        }
    }

    /// <summary>
    /// Deletes the first node whose contents equal <paramref name="element" />.
    /// A null search value matches a node holding null data.
    /// </summary>
    /// <param name="element">Value to delete from the list.</param>
    /// <returns>True when a node was removed, false when no match was found.</returns>
    public bool DeleteElement(T element)
    {
        SinglyLinkedListNode<T>? previous = null;
        for (var current = head; current is not null; previous = current, current = current.Next)
        {
            // Null data only matches a null search value; otherwise defer to Equals.
            var matches = current.Data is null
                ? element is null
                : current.Data.Equals(element);
            if (!matches)
            {
                continue;
            }

            // Matched node is the head: advance the head pointer.
            if (ReferenceEquals(current, head))
            {
                head = head!.Next;
                return true;
            }

            // Matched node is interior: unlink it from its predecessor.
            if (previous is not null)
            {
                previous.Next = current.Next;
                return true;
            }
        }

        return false;
    }
}
}
| 158 |
C-Sharp | TheAlgorithms | C# | namespace DataStructures.LinkedList.SinglyLinkedList
{
public class SinglyLinkedListNode<T>
{
    /// <summary>
    /// Initializes the node with its payload; the node starts detached
    /// (its <see cref="Next"/> reference is null).
    /// </summary>
    /// <param name="data">Value stored in this node.</param>
    public SinglyLinkedListNode(T data) => Data = data;

    /// <summary>Gets the value stored in this node.</summary>
    public T Data { get; }

    /// <summary>Gets or sets the following node, or null when this is the tail.</summary>
    public SinglyLinkedListNode<T>? Next { get; set; }
}
}
| 16 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
namespace DataStructures.LinkedList.SkipList
{
/// <summary>
/// Skip list implementation that is based on the singly linked list,
/// but offers O(log n) time complexity on most operations.
/// </summary>
/// <typeparam name="TValue">The type of the values in the list.</typeparam>
/// <remarks>
/// Skip list nodes sorted by key.
/// The "skip lanes" allow searching for a node in O(log n) time on average.
/// The worst case performance is O(n) when the height of all nodes is 1 (very
/// unlikely to happen on any decent list size).
/// These two properties make the skip list an excellent data structure for
/// implementing additional operations like finding min/max value in the list,
/// finding values with the key in a given range, etc.
///
/// Sources:
/// - "Skip Lists: A Probabilistic Alternative to Balanced Trees" by William Pugh.
/// - https://en.wikipedia.org/wiki/Skip_list
/// - https://iq.opengenus.org/skip-list/
/// - https://medium.com/simple-computer-science/data-structures-basics-skip-list-8b8c69f9a044
/// - https://github.com/TheAlgorithms/Java/blob/master/src/main/java/com/thealgorithms/datastructures/lists/SkipList.java
///
/// The key is hardcoded to be of type <c>int</c> to simplify the implementation,
/// but it can easily be any generic type that implements <c>IComparable</c>.
/// NOTE(review): the keys int.MinValue and int.MaxValue are reserved by the
/// head/tail sentinels, so user keys must lie strictly between them — confirm
/// whether callers are expected to respect this.
/// </remarks>
[DebuggerDisplay("Count = {Count}")]
public class SkipList<TValue>
{
    // Chance that a node's height grows by one more level (see GetRandomHeight).
    private const double Probability = 0.5;

    // Number of skip lanes; derived from the expected capacity at construction.
    private readonly int maxLevels;

    // Sentinel with key int.MinValue; entry point for every search.
    private readonly SkipListNode<TValue> head;

    // Sentinel with key int.MaxValue; terminates every lane.
    private readonly SkipListNode<TValue> tail;

    private readonly Random random = new Random();

    /// <summary>
    /// Initializes a new instance of the <see cref="SkipList{TValue}"/> class.
    /// </summary>
    /// <param name="capacity">Expected number of elements the list might contain.</param>
    public SkipList(int capacity = 255)
    {
        // log2(capacity) + 1 lanes keep the expected search cost logarithmic.
        maxLevels = (int)Math.Log2(capacity) + 1;
        head = new(int.MinValue, default(TValue), maxLevels);
        tail = new(int.MaxValue, default(TValue), maxLevels);

        // An empty list is head pointing directly at tail on every lane.
        for(int i = 0; i < maxLevels; i++)
        {
            head.Next[i] = tail;
        }
    }

    /// <summary>
    /// Gets the number of elements currently in the list.
    /// </summary>
    public int Count { get; private set; }

    /// <summary>
    /// Gets or sets the element with the specified key.
    /// </summary>
    /// <exception cref="KeyNotFoundException">The key is not present in the list.</exception>
    public TValue this[int key]
    {
        get
        {
            // skipNodes[0] is the closest node smaller than key; its successor
            // on lane 0 is the node with the key, if it exists.
            var previousNode = GetSkipNodes(key).First();
            if(previousNode.Next[0].Key == key)
            {
                return previousNode.Next[0].Value!;
            }
            else
            {
                throw new KeyNotFoundException();
            }
        }

        set => AddOrUpdate(key, value);
    }

    /// <summary>
    /// Adds an element with the specified key and value to the list.
    /// If an element with the same key already exists, updates its value.
    /// </summary>
    /// <param name="key">The key of the element to add.</param>
    /// <param name="value">The value of the element to add.</param>
    /// <remarks>
    /// Time complexity: O(log n) where n is the number of elements in the list.
    /// </remarks>
    public void AddOrUpdate(int key, TValue value)
    {
        var skipNodes = GetSkipNodes(key);
        var previousNode = skipNodes.First();
        if (previousNode.Next[0].Key == key)
        {
            // Node with the given key already exists.
            // Update its value.
            previousNode.Next[0].Value = value;
            return;
        }

        // Node with the given key does not exist.
        // Insert the new one and update the skip nodes.
        // Splice the new node into every lane up to its random height.
        var newNode = new SkipListNode<TValue>(key, value, GetRandomHeight());
        for (var level = 0; level < newNode.Height; level++)
        {
            newNode.Next[level] = skipNodes[level].Next[level];
            skipNodes[level].Next[level] = newNode;
        }

        Count++;
    }

    /// <summary>
    /// Returns whether a value with the given key exists in the list.
    /// </summary>
    /// <remarks>
    /// Time complexity: O(log n) where n is the number of elements in the list.
    /// </remarks>
    public bool Contains(int key)
    {
        var previousNode = GetSkipNodes(key).First();
        return previousNode.Next[0].Key == key;
    }

    /// <summary>
    /// Removes the value with the given key from the list.
    /// </summary>
    /// <returns>
    /// <c>true</c> if the value was removed; otherwise, <c>false</c>.
    /// </returns>
    /// <remarks>
    /// Time complexity: O(log n) where n is the number of elements in the list.
    /// </remarks>
    public bool Remove(int key)
    {
        var skipNodes = GetSkipNodes(key);
        var previousNode = skipNodes.First();
        if (previousNode.Next[0].Key != key)
        {
            return false;
        }

        // Key exists in the list, remove it and update the skip nodes.
        // Bypass the removed node on every lane it participates in.
        var nodeToRemove = previousNode.Next[0];
        for (var level = 0; level < nodeToRemove.Height; level++)
        {
            skipNodes[level].Next[level] = nodeToRemove.Next[level];
        }

        Count--;
        return true;
    }

    /// <summary>
    /// Returns an enumerator that iterates through the list.
    /// </summary>
    /// <remarks>
    /// Order of values is the ascending order of their keys.
    /// Time complexity: O(n) where n is the number of elements in the list.
    /// </remarks>
    public IEnumerable<TValue> GetValues()
    {
        // Walk the bottom lane (lane 0), which links every node in key order.
        var current = head.Next[0];
        while (current.Key != tail.Key)
        {
            yield return current.Value!;
            current = current.Next[0];
        }
    }

    /// <summary>
    /// Builds a list of skip nodes on each level that
    /// are closest, but smaller than the given key.
    /// </summary>
    /// <remarks>
    /// The node on level 0 will point to the node with the given key, if it exists.
    /// Time complexity: O(log n) where n is the number of elements in the list.
    /// </remarks>
    private SkipListNode<TValue>[] GetSkipNodes(int key)
    {
        var skipNodes = new SkipListNode<TValue>[maxLevels];
        var current = head;

        // Descend from the sparsest lane to lane 0, recording the last node
        // strictly smaller than key on each lane.
        for (var level = head.Height - 1; level >= 0; level--)
        {
            while (current.Next[level].Key < key)
            {
                current = current.Next[level];
            }

            skipNodes[level] = current;
        }

        return skipNodes;
    }

    /// <summary>
    /// Determines the height of skip levels for the new node.
    /// </summary>
    /// <remarks>
    /// Probability of the next level is 1/(2^level).
    /// </remarks>
    private int GetRandomHeight()
    {
        int height = 1;

        // Flip a fair coin until it lands tails or the lane cap is reached.
        while (random.NextDouble() < Probability && height < maxLevels)
        {
            height++;
        }

        return height;
    }
}
}
| 221 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Diagnostics;
namespace DataStructures.LinkedList.SkipList
{
[DebuggerDisplay("Key = {Key}, Height = {Height}, Value = {Value}")]
internal class SkipListNode<TValue>
{
    /// <summary>
    /// Creates a node participating in <paramref name="height"/> skip lanes;
    /// all lane pointers start out null.
    /// </summary>
    /// <param name="key">Sort key of the node.</param>
    /// <param name="value">Payload stored in the node (may be null for sentinels).</param>
    /// <param name="height">Number of lanes this node participates in.</param>
    public SkipListNode(int key, TValue? value, int height)
    {
        Key = key;
        Value = value;
        Height = height;
        Next = new SkipListNode<TValue>[height];
    }

    /// <summary>Gets the sort key.</summary>
    public int Key { get; }

    /// <summary>Gets or sets the stored value.</summary>
    public TValue? Value { get; set; }

    /// <summary>Gets the per-lane successor pointers; index 0 is the dense bottom lane.</summary>
    public SkipListNode<TValue>[] Next { get; }

    /// <summary>Gets the number of lanes this node participates in.</summary>
    public int Height { get; }
}
}
| 26 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.Json;
namespace DataStructures.Probabilistic
{
public class BloomFilter<T> where T : notnull
{
    private readonly byte[] filter;
    private readonly int numHashes;
    private readonly int sizeBits;

    /// <summary>
    /// Initializes a new instance of the <see cref="BloomFilter{T}"/> class. This constructor will create a Bloom Filter
    /// of an optimal size with the optimal number of hashes to minimize the error rate.
    /// The filter is sized at 8 bits per expected element.
    /// </summary>
    /// <param name="expectedNumElements">Expected number of unique elements that could be added to the filter.</param>
    /// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="expectedNumElements"/> is not positive.</exception>
    public BloomFilter(int expectedNumElements)
    {
        if (expectedNumElements <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(expectedNumElements));
        }

        // Optimal hash count is k = ln(2) * m / n; with m fixed at 8n bits this is
        // ln(2) * 8. The previous form multiplied and then divided by
        // expectedNumElements, which evaluated to NaN (and numHashes = 0) when the
        // argument was 0, silently making Search() return true for everything.
        numHashes = (int)Math.Ceiling(0.693 * 8);
        filter = new byte[expectedNumElements]; // 8 bits per expected element
        sizeBits = expectedNumElements * 8; // number of bit slots in the filter
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="BloomFilter{T}"/> class.
    /// This constructor lets you decide how large you want the filter to be as well as allowing you to specify
    /// how many hashes it will use. Only use if you don't care to optimize false positivity.
    /// </summary>
    /// <param name="sizeBits">Size in bits you want the filter to be.</param>
    /// <param name="numHashes">Number of hash functions to be used.</param>
    /// <exception cref="ArgumentOutOfRangeException">Thrown when either argument is not positive.</exception>
    public BloomFilter(int sizeBits, int numHashes)
    {
        if (sizeBits <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(sizeBits));
        }

        if (numHashes <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(numHashes));
        }

        filter = new byte[sizeBits / 8 + 1];
        this.numHashes = numHashes;
        this.sizeBits = sizeBits;
    }

    /// <summary>
    /// Inserts an item into the bloom filter.
    /// </summary>
    /// <param name="item">The item being inserted into the Bloom Filter.</param>
    public void Insert(T item)
    {
        foreach (var slot in GetSlots(item))
        {
            filter[slot / 8] |= (byte)(1 << (slot % 8)); // set the bit for the decided slot to 1.
        }
    }

    /// <summary>
    /// Searches the Bloom Filter to determine if the item exists in the Bloom Filter.
    /// </summary>
    /// <param name="item">The item being searched for in the Bloom Filter.</param>
    /// <returns>true if the item has (probably) been added to the Bloom Filter, false otherwise.</returns>
    public bool Search(T item)
    {
        foreach (var slot in GetSlots(item))
        {
            var @byte = filter[slot / 8]; // Extract the byte in the filter.
            var mask = 1 << (slot % 8); // Build the mask for the slot number.
            if ((@byte & mask) != mask)
            {
                // Any unset bit proves the item was never inserted.
                return false;
            }
        }

        return true;
    }

    /// <summary>
    /// Yields the appropriate slots for the given item.
    /// </summary>
    /// <param name="item">The item to determine the slots for.</param>
    /// <returns>The slots of the filter to flip or check.</returns>
    private IEnumerable<int> GetSlots(T item)
    {
        var hash = item.GetHashCode();
        for (var i = 0; i < numHashes; i++)
        {
            // Use 64-bit arithmetic: (i + 1) * hash can overflow Int32, and
            // Math.Abs(int.MinValue) throws OverflowException.
            yield return (int)(Math.Abs((long)(i + 1) * hash) % sizeBits);
        }
    }
}
}
| 91 |
C-Sharp | TheAlgorithms | C# | using System;
namespace DataStructures.Probabilistic
{
public class CountMinSketch<T> where T : notnull
{
    private readonly int[][] sketch;
    private readonly int numHashes;

    /// <summary>
    /// Initializes a new instance of the <see cref="CountMinSketch{T}"/> class based off dimensions
    /// passed by the user.
    /// </summary>
    /// <param name="width">The width of the sketch (slots per hash row).</param>
    /// <param name="numHashes">The number of hashes (rows) to use in the sketch.</param>
    public CountMinSketch(int width, int numHashes)
    {
        sketch = new int[numHashes][];
        for (var i = 0; i < numHashes; i++)
        {
            sketch[i] = new int[width];
        }

        this.numHashes = numHashes;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="CountMinSketch{T}"/> class based off the optimizing error rate
    /// and error probability formula width = e/errorRate, numHashes = ln(1.0/errorProb).
    /// </summary>
    /// <param name="errorRate">The amount of acceptable over counting for the sketch.</param>
    /// <param name="errorProb">The probability that an item will be over counted.</param>
    public CountMinSketch(double errorRate, double errorProb)
    {
        var width = (int)Math.Ceiling(Math.E / errorRate);
        numHashes = (int)Math.Ceiling(Math.Log(1.0 / errorProb));
        sketch = new int[numHashes][];
        for (var i = 0; i < numHashes; i++)
        {
            sketch[i] = new int[width];
        }
    }

    /// <summary>
    /// Inserts the provided item into the sketch by incrementing one slot per hash row.
    /// </summary>
    /// <param name="item">Item to insert.</param>
    public void Insert(T item)
    {
        var initialHash = item.GetHashCode();
        for (int i = 0; i < numHashes; i++)
        {
            var slot = GetSlot(i, initialHash);
            sketch[i][slot]++;
        }
    }

    /// <summary>
    /// Queries the count of the given item that have been inserted into the sketch.
    /// The minimum across rows bounds over-counting from hash collisions.
    /// </summary>
    /// <param name="item">Item to look up in the sketch.</param>
    /// <returns>The (possibly over-counted) number of times the provided item has been inserted.</returns>
    public int Query(T item)
    {
        var initialHash = item.GetHashCode();
        var min = int.MaxValue;
        for (int i = 0; i < numHashes; i++)
        {
            var slot = GetSlot(i, initialHash);
            min = Math.Min(sketch[i][slot], min);
        }

        return min;
    }

    // Use 64-bit arithmetic: (i + 1) * initialHash can overflow Int32, and
    // Math.Abs(int.MinValue) throws OverflowException.
    private int GetSlot(int i, int initialHash) =>
        (int)(Math.Abs((long)(i + 1) * initialHash) % sketch[0].Length);
}
}
| 79 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using System.Linq;
namespace DataStructures.Probabilistic
{
public class HyperLogLog<T> where T : notnull
{
    // Number of leading hash bits used to select a register (2^P registers).
    private const int P = 16;

    // Bias-correction constant for the estimator.
    // NOTE(review): 0.673 is the published alpha for m = 16 registers; for
    // m = 2^16 the standard constant is ~0.7213/(1 + 1.079/m). Confirm which
    // estimate this implementation intends.
    private const double Alpha = .673;

    // registers[j] holds the maximum "rank" (lowest-set-bit position) seen for bucket j.
    private readonly int[] registers;

    // Indices of registers that have been touched at least once; Cardinality()
    // sums over these instead of all 2^P registers.
    private readonly HashSet<int> setRegisters;

    /// <summary>
    /// Initializes a new instance of the <see cref="HyperLogLog{T}"/> class
    /// with 2^P zeroed registers.
    /// </summary>
    public HyperLogLog()
    {
        var m = 1 << P;
        registers = new int[m];
        setRegisters = new HashSet<int>();
    }

    /// <summary>
    /// Merges two HyperLogLogs together to form a union HLL.
    /// The merged registers take the element-wise maximum, so the result
    /// estimates the cardinality of the union of both input sets.
    /// </summary>
    /// <param name="first">The first HLL.</param>
    /// <param name="second">The second HLL.</param>
    /// <returns>A HyperLogLog with the combined values of the two sets of registers.</returns>
    public static HyperLogLog<T> Merge(HyperLogLog<T> first, HyperLogLog<T> second)
    {
        var output = new HyperLogLog<T>();

        // Both inputs have the same fixed register count (2^P), so iterating
        // over second's length covers first as well.
        for (var i = 0; i < second.registers.Length; i++)
        {
            output.registers[i] = Math.Max(first.registers[i], second.registers[i]);
        }

        output.setRegisters.UnionWith(first.setRegisters);
        output.setRegisters.UnionWith(second.setRegisters);
        return output;
    }

    /// <summary>
    /// Adds an item to the HyperLogLog.
    /// </summary>
    /// <param name="item">The item to be added.</param>
    public void Add(T item)
    {
        var x = item.GetHashCode();

        // NOTE(review): Convert.ToString(x, 2) emits no leading zeros, so positive
        // hashes produce strings shorter than 32 chars while negative hashes always
        // produce 32 — the register index is therefore not drawn uniformly from the
        // hash bits. Verify this is acceptable for the intended accuracy.
        var binString = Convert.ToString(x, 2); // converts hash to binary
        var j = Convert.ToInt32(binString.Substring(0, Math.Min(P, binString.Length)), 2); // convert first b bits to register index

        // x ^ (x & (x - 1)) isolates the lowest set bit; Log2 yields its position.
        // NOTE(review): when x == 0 this computes Log2(0) = -infinity, and the cast
        // to int is then platform-defined — confirm hash code 0 cannot reach here
        // or guard against it.
        var w = (int)Math.Log2(x ^ (x & (x - 1))); // find position of the right most 1.
        registers[j] = Math.Max(registers[j], w); // set the appropriate register to the appropriate value.
        setRegisters.Add(j);
    }

    /// <summary>
    /// Determines the approximate cardinality of the HyperLogLog.
    /// </summary>
    /// <returns>The approximate cardinality.</returns>
    public int Cardinality()
    {
        // calculate the bottom part of the harmonic mean of the registers
        double z = setRegisters.Sum(index => Math.Pow(2, -1 * registers[index]));

        // calculate the harmonic mean of the set registers, scaled by Alpha.
        // NOTE(review): standard HLL uses m (total registers) here rather than
        // the count of touched registers — confirm this variant is intentional.
        return (int)Math.Ceiling(Alpha * setRegisters.Count * (setRegisters.Count / z));
    }
}
}
| 71 |
C-Sharp | TheAlgorithms | C# | using System;
namespace DataStructures.Queue
{
/// <summary>
/// Implementation of an array based queue. FIFO style.
/// </summary>
/// <typeparam name="T">Generic Type.</typeparam>
public class ArrayBasedQueue<T>
{
    private readonly T[] items;

    // Index of the next element to dequeue (front of the queue).
    private int frontIndex;

    // Index where the next element will be enqueued (back of the queue).
    private int backIndex;

    private bool empty;
    private bool full;

    /// <summary>
    /// Initializes a new instance of the <see cref="ArrayBasedQueue{T}" /> class
    /// with a fixed capacity backed by a circular buffer.
    /// </summary>
    public ArrayBasedQueue(int capacity)
    {
        items = new T[capacity];
        Clear();
    }

    /// <summary>
    /// Clears the queue.
    /// </summary>
    public void Clear()
    {
        frontIndex = 0;
        backIndex = 0;
        empty = true;
        full = false;
    }

    /// <summary>
    /// Returns the first item in the queue and removes it from the queue.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown if the queue is empty.</exception>
    public T Dequeue()
    {
        if (IsEmpty())
        {
            throw new InvalidOperationException("There are no items in the queue.");
        }

        var value = items[frontIndex];
        frontIndex = (frontIndex + 1) % items.Length; // advance with wraparound
        full = false;
        empty = frontIndex == backIndex; // pointers meet only when drained
        return value;
    }

    /// <summary>
    /// Returns a boolean indicating whether the queue is empty.
    /// </summary>
    public bool IsEmpty() => empty;

    /// <summary>
    /// Returns a boolean indicating whether the queue is full.
    /// </summary>
    public bool IsFull() => full;

    /// <summary>
    /// Returns the first item in the queue and keeps it in the queue.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown if the queue is empty.</exception>
    public T Peek()
    {
        if (IsEmpty())
        {
            throw new InvalidOperationException("There are no items in the queue.");
        }

        return items[frontIndex];
    }

    /// <summary>
    /// Adds an item at the last position in the queue.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown if the queue is full.</exception>
    public void Enqueue(T item)
    {
        if (IsFull())
        {
            throw new InvalidOperationException("The queue has reached its capacity.");
        }

        items[backIndex] = item;
        backIndex = (backIndex + 1) % items.Length; // advance with wraparound
        empty = false;
        full = backIndex == frontIndex; // pointers meet only when every slot is used
    }
}
}
| 109 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using System.Linq;
namespace DataStructures.Queue
{
/// <summary>
/// Implementation of a list based queue. FIFO style.
/// </summary>
/// <typeparam name="T">Generic Type.</typeparam>
public class ListBasedQueue<T>
{
    private readonly LinkedList<T> items;

    /// <summary>
    /// Initializes a new instance of the <see cref="ListBasedQueue{T}" /> class.
    /// </summary>
    public ListBasedQueue()
    {
        items = new LinkedList<T>();
    }

    /// <summary>
    /// Clears the queue.
    /// </summary>
    public void Clear() => items.Clear();

    /// <summary>
    /// Returns the first item in the queue and removes it from the queue.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown if the queue is empty.</exception>
    public T Dequeue()
    {
        var first = items.First
            ?? throw new InvalidOperationException("There are no items in the queue.");
        items.RemoveFirst();
        return first.Value;
    }

    /// <summary>
    /// Returns a boolean indicating whether the queue is empty.
    /// </summary>
    public bool IsEmpty() => items.Count == 0;

    /// <summary>
    /// Returns a boolean indicating whether the queue is full.
    /// A linked-list queue grows without bound, so this is always false.
    /// </summary>
    public bool IsFull() => false;

    /// <summary>
    /// Returns the first item in the queue and keeps it in the queue.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown if the queue is empty.</exception>
    public T Peek()
    {
        var first = items.First
            ?? throw new InvalidOperationException("There are no items in the queue.");
        return first.Value;
    }

    /// <summary>
    /// Adds an item at the last position in the queue.
    /// </summary>
    public void Enqueue(T item) => items.AddLast(item);
}
}
| 78 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
namespace DataStructures.Queue
{
/// <summary>
/// Implementation of a stack based queue. FIFO style.
/// </summary>
/// <remarks>
/// Enqueue is O(1) and Dequeue is amortized O(1).
/// </remarks>
/// <typeparam name="T">Generic Type.</typeparam>
public class StackBasedQueue<T>
{
    // New items are pushed here; drained into output when a front is needed.
    private readonly Stack<T> input;

    // Holds items in FIFO order (reversed from input); front is on top.
    private readonly Stack<T> output;

    /// <summary>
    /// Initializes a new instance of the <see cref="StackBasedQueue{T}" /> class.
    /// </summary>
    public StackBasedQueue()
    {
        input = new Stack<T>();
        output = new Stack<T>();
    }

    /// <summary>
    /// Clears the queue.
    /// </summary>
    public void Clear()
    {
        input.Clear();
        output.Clear();
    }

    /// <summary>
    /// Returns the first item in the queue and removes it from the queue.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown if the queue is empty.</exception>
    public T Dequeue()
    {
        ThrowIfEmpty();
        ShiftIntoOutputIfNeeded();
        return output.Pop();
    }

    /// <summary>
    /// Returns a boolean indicating whether the queue is empty.
    /// </summary>
    public bool IsEmpty() => input.Count == 0 && output.Count == 0;

    /// <summary>
    /// Returns a boolean indicating whether the queue is full.
    /// The backing stacks grow without bound, so this is always false.
    /// </summary>
    public bool IsFull() => false;

    /// <summary>
    /// Returns the first item in the queue and keeps it in the queue.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown if the queue is empty.</exception>
    public T Peek()
    {
        ThrowIfEmpty();
        ShiftIntoOutputIfNeeded();
        return output.Peek();
    }

    /// <summary>
    /// Adds an item at the last position in the queue.
    /// </summary>
    public void Enqueue(T item) => input.Push(item);

    // Guard shared by Dequeue and Peek.
    private void ThrowIfEmpty()
    {
        if (IsEmpty())
        {
            throw new InvalidOperationException("The queue contains no items.");
        }
    }

    // Reverses input into output (amortized O(1) per element) so the oldest
    // item ends up on top; only runs when output has been exhausted.
    private void ShiftIntoOutputIfNeeded()
    {
        if (output.Count > 0)
        {
            return;
        }

        while (input.Count > 0)
        {
            output.Push(input.Pop());
        }
    }
}
}
| 98 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
namespace DataStructures.RedBlackTree
{
/// <summary>
/// A self-balancing binary tree.
/// </summary>
/// <remarks>
/// A red-black tree is a self-balancing binary search tree (BST) that
/// stores a color with each node. A node's color can either be red or
/// black. Several properties are maintained to ensure the tree remains
/// balanced.
/// <list type="number">
/// <item>
/// <term>A red node does not have a red child.</term>
/// </item>
/// <item>
/// <term>All null nodes are considered black.</term>
/// </item>
/// <item>
/// <term>
/// Every path from a node to its descendant leaf nodes
/// has the same number of black nodes.
/// </term>
/// </item>
/// <item>
/// <term>(Optional) The root is always black.</term>
/// </item>
/// </list>
/// Red-black trees are generally slightly more unbalanced than an
/// AVL tree, but insertion and deletion is generally faster.
/// See https://en.wikipedia.org/wiki/Red%E2%80%93black_tree for more information.
/// </remarks>
/// <typeparam name="TKey">Type of key for the tree.</typeparam>
public class RedBlackTree<TKey>
{
/// <summary>
/// Gets the number of nodes in the tree.
/// </summary>
public int Count { get; private set; }

/// <summary>
/// Comparer to use when comparing key values.
/// </summary>
private readonly Comparer<TKey> comparer;

/// <summary>
/// Reference to the root node; null when the tree is empty.
/// </summary>
private RedBlackTreeNode<TKey>? root;

/// <summary>
/// Initializes a new instance of the <see cref="RedBlackTree{TKey}"/> class.
/// Keys are ordered using the default comparer for <typeparamref name="TKey"/>.
/// </summary>
public RedBlackTree()
{
    comparer = Comparer<TKey>.Default;
}

/// <summary>
/// Initializes a new instance of the <see cref="RedBlackTree{TKey}"/> class
/// using the specified comparer.
/// </summary>
/// <param name="customComparer">Comparer to use when comparing keys.</param>
public RedBlackTree(Comparer<TKey> customComparer)
{
    comparer = customComparer;
}
/// <summary>
/// Add a single node to the tree.
/// Performs a plain BST insertion and then walks upward repairing
/// red-red violations until the tree is valid again.
/// </summary>
/// <param name="key">Key value to add.</param>
/// <exception cref="ArgumentException">Thrown (by the private Add) when the key already exists.</exception>
public void Add(TKey key)
{
    if (root is null)
    {
        // Case 3
        // New node is root: paint it black to keep the root-is-black invariant.
        root = new RedBlackTreeNode<TKey>(key, null)
        {
            Color = NodeColor.Black,
        };
        Count++;
        return;
    }

    // Regular binary tree insertion
    var node = Add(root, key);

    // Get which side child was added to (negative = left child, positive = right)
    var childDir = comparer.Compare(node.Key, node.Parent!.Key);

    // Set node to be new node's parent for easier handling
    node = node.Parent;

    // Return tree to valid state
    int addCase;
    do
    {
        addCase = GetAddCase(node);
        switch(addCase)
        {
            case 1:
                // Parent is black: no violation, nothing to fix.
                break;
            case 2:
                // Parent and uncle are both red: push blackness down from the
                // grandparent and continue the fix-up two levels higher.
                var oldParent = node.Parent;
                node = AddCase2(node);
                if (node is not null)
                {
                    // Recompute the direction relative to the new, higher node.
                    childDir = comparer.Compare(oldParent!.Key, oldParent.Parent!.Key);
                }

                break;
            case 4:
                // Parent is a red root: simply recolor it black.
                node.Color = NodeColor.Black;
                break;
            case 56:
                // Red parent with a black (or missing) uncle: rotate (cases 5 and 6).
                AddCase56(node, comparer.Compare(node.Key, node.Parent!.Key), childDir);
                break;
            default:
                throw new InvalidOperationException("It should not be possible to get here!");
        }
    }
    while (addCase == 2 && node is not null); // only case 2 propagates upward

    Count++;
}
/// <summary>
/// Add multiple nodes to the tree, one at a time, in enumeration order.
/// </summary>
/// <param name="keys">Key values to add.</param>
public void AddRange(IEnumerable<TKey> keys)
{
    using var enumerator = keys.GetEnumerator();
    while (enumerator.MoveNext())
    {
        Add(enumerator.Current);
    }
}
/// <summary>
/// Remove a node from the tree.
/// </summary>
/// <param name="key">Key value to remove.</param>
/// <exception cref="InvalidOperationException">Thrown (by the private Remove) when the tree is empty.</exception>
/// <exception cref="KeyNotFoundException">Thrown (by the private Remove) when the key is not present.</exception>
public void Remove(TKey key)
{
    // Search for node
    var node = Remove(root, key);

    // Simple cases are handled by a helper defined elsewhere in this class;
    // it returns null when no further rebalancing is required.
    node = RemoveSimpleCases(node);

    // Exit if deleted node was not non-root black leaf
    if (node is null)
    {
        return;
    }

    // Delete node (negative comparison result = node is its parent's left child)
    DeleteLeaf(node.Parent!, comparer.Compare(node.Key, node.Parent!.Key));

    // Recolor tree, walking upward until the black-height violation is resolved
    do
    {
        node = RemoveRecolor(node);
    }
    while (node is not null && node.Parent is not null); // Case 2: Reached root

    Count--;
}
/// <summary>
/// Check if given node is in the tree.
/// </summary>
/// <param name="key">Key value to search for.</param>
/// <returns>Whether or not the node is in the tree.</returns>
public bool Contains(TKey key)
{
    var current = root;
    while (current is not null)
    {
        var cmp = comparer.Compare(key, current.Key);
        if (cmp == 0)
        {
            return true;
        }

        // Descend left for smaller keys, right for larger ones.
        current = cmp < 0 ? current.Left : current.Right;
    }

    return false;
}
/// <summary>
/// Get the minimum value in the tree.
/// </summary>
/// <returns>Minimum value in tree.</returns>
/// <exception cref="InvalidOperationException">Thrown when the tree is empty.</exception>
public TKey GetMin() =>
    root is null
        ? throw new InvalidOperationException("Tree is empty!")
        : GetMin(root).Key;
/// <summary>
/// Get the maximum value in the tree.
/// </summary>
/// <returns>Maximum value in tree.</returns>
/// <exception cref="InvalidOperationException">Thrown when the tree is empty.</exception>
public TKey GetMax() =>
    root is null
        ? throw new InvalidOperationException("Tree is empty!")
        : GetMax(root).Key;
/// <summary>
/// Get keys in order from smallest to largest as defined by the comparer.
/// </summary>
/// <returns>Keys in tree in order from smallest to largest.</returns>
public IEnumerable<TKey> GetKeysInOrder()
{
    var keys = new List<TKey>();
    var pending = new Stack<RedBlackTreeNode<TKey>>();
    var current = root;

    // Iterative in-order traversal: dive left, visit, then move right.
    while (current is not null || pending.Count > 0)
    {
        while (current is not null)
        {
            pending.Push(current);
            current = current.Left;
        }

        current = pending.Pop();
        keys.Add(current.Key);
        current = current.Right;
    }

    return keys;
}
/// <summary>
/// Get keys in the pre-order order.
/// </summary>
/// <returns>Keys in pre-order order.</returns>
public IEnumerable<TKey> GetKeysPreOrder()
{
    var keys = new List<TKey>();
    if (root is null)
    {
        return keys;
    }

    // Iterative pre-order traversal: push right before left so the left
    // subtree is popped (and visited) first.
    var pending = new Stack<RedBlackTreeNode<TKey>>();
    pending.Push(root);
    while (pending.Count > 0)
    {
        var node = pending.Pop();
        keys.Add(node.Key);
        if (node.Right is not null)
        {
            pending.Push(node.Right);
        }

        if (node.Left is not null)
        {
            pending.Push(node.Left);
        }
    }

    return keys;
}
/// <summary>
/// Get keys in the post-order order.
/// </summary>
/// <returns>Keys in the post-order order.</returns>
public IEnumerable<TKey> GetKeysPostOrder()
{
    var keys = new List<TKey>();
    Walk(root);
    return keys;

    // Recursive post-order: both subtrees before the node itself.
    void Walk(RedBlackTreeNode<TKey>? node)
    {
        if (node is not null)
        {
            Walk(node.Left);
            Walk(node.Right);
            keys.Add(node.Key);
        }
    }
}
/// <summary>
/// Perform binary tree insertion.
/// </summary>
/// <param name="node">Root of subtree to search from.</param>
/// <param name="key">Key value to insert.</param>
/// <returns>Node that was added.</returns>
/// <exception cref="ArgumentException">Thrown when the key already exists in the tree.</exception>
private RedBlackTreeNode<TKey> Add(RedBlackTreeNode<TKey> node, TKey key)
{
    while (true)
    {
        var cmp = comparer.Compare(key, node.Key);
        if (cmp == 0)
        {
            throw new ArgumentException($"Key \"{key}\" already exists in tree!");
        }

        // Pick the side the key belongs on.
        var child = cmp < 0 ? node.Left : node.Right;
        if (child is null)
        {
            // Found a free slot: attach the new node here.
            var created = new RedBlackTreeNode<TKey>(key, node);
            if (cmp < 0)
            {
                node.Left = created;
            }
            else
            {
                node.Right = created;
            }

            return created;
        }

        node = child;
    }
}
/// <summary>
/// Perform case 2 of insertion by pushing blackness down from parent.
/// Both the parent and its uncle are red: recolor them black, the grandparent
/// red, and continue the fix-up from the grandparent.
/// </summary>
/// <param name="node">Parent of inserted node.</param>
/// <returns>Grandparent of inserted node.</returns>
private RedBlackTreeNode<TKey>? AddCase2(RedBlackTreeNode<TKey> node)
{
    var grandparent = node.Parent;

    // Negative = node is a left child, so the uncle is on the right (and vice versa).
    var parentDir = comparer.Compare(node.Key, node.Parent!.Key);
    var uncle = parentDir < 0 ? grandparent!.Right : grandparent!.Left;

    // The uncle is non-null here: GetAddCase only reports case 2 for a red uncle.
    node.Color = NodeColor.Black;
    uncle!.Color = NodeColor.Black;
    grandparent.Color = NodeColor.Red;

    // Keep root black
    if (node.Parent.Parent is null)
    {
        node.Parent.Color = NodeColor.Black;
    }

    // Set current node as parent to move up tree
    return node.Parent.Parent;
}
/// <summary>
/// Perform rotations needed for cases 5 and 6 of insertion.
/// Case 5 (the new node is an "inner" grandchild) first rotates the parent so
/// the two red nodes line up; case 6 then rotates the grandparent and swaps colors.
/// </summary>
/// <param name="node">Parent of node just inserted.</param>
/// <param name="parentDir">The side node is on of its parent (negative = left).</param>
/// <param name="childDir">The side the child node is on (negative = left).</param>
private void AddCase56(RedBlackTreeNode<TKey> node, int parentDir, int childDir)
{
    if (parentDir < 0)
    {
        // Case 5: inner (right) grandchild — straighten the path first.
        if (childDir > 0)
        {
            node = RotateLeft(node);
        }

        // Case 6: rotate the grandparent and exchange colors.
        node = RotateRight(node.Parent!);
        node.Color = NodeColor.Black;
        node.Right!.Color = NodeColor.Red;
    }
    else
    {
        // Case 5: inner (left) grandchild — straighten the path first.
        if (childDir < 0)
        {
            node = RotateRight(node);
        }

        // Case 6: rotate the grandparent and exchange colors.
        node = RotateLeft(node.Parent!);
        node.Color = NodeColor.Black;
        node.Left!.Color = NodeColor.Red;
    }
}
/// <summary>
/// Determine which add case applies to inserted node.
/// </summary>
/// <param name="node">Parent of inserted node.</param>
/// <returns>Case number needed to get tree in valid state. Cases 5 and 6 are represented by 56.</returns>
private int GetAddCase(RedBlackTreeNode<TKey> node)
{
    // Black parent: the tree is already valid.
    if (node.Color == NodeColor.Black)
    {
        return 1;
    }

    // Red parent that is also the root.
    if (node.Parent is null)
    {
        return 4;
    }

    // Remaining cases depend on the uncle's color.
    var grandparent = node.Parent;
    var uncle = comparer.Compare(node.Key, grandparent.Key) < 0
        ? grandparent.Right
        : grandparent.Left;

    // Black (or missing) uncle -> rotation cases 5/6; red uncle -> recolor case 2.
    return uncle is null || uncle.Color == NodeColor.Black ? 56 : 2;
}
/// <summary>
/// Search for the node to be deleted.
/// </summary>
/// <param name="node">Node to start search from.</param>
/// <param name="key">Key to search for.</param>
/// <returns>Node to be deleted.</returns>
private RedBlackTreeNode<TKey> Remove(RedBlackTreeNode<TKey>? node, TKey key)
{
if (node is null)
{
throw new InvalidOperationException("Tree is empty!");
}
else if (!Contains(key))
{
throw new KeyNotFoundException($"Key {key} is not in the tree!");
}
else
{
// Find node
int dir;
while (true)
{
dir = comparer.Compare(key, node!.Key);
if (dir < 0)
{
node = node.Left;
}
else if (dir > 0)
{
node = node.Right;
}
else
{
break;
}
}
return node;
}
}
        /// <summary>
        /// Get the tree back into a valid state after removing non-root black leaf.
        /// </summary>
        /// <param name="node">Non-root black leaf being removed.</param>
        /// <returns>Parent to continue recoloring from (case 1), or null when rebalancing is done.</returns>
        private RedBlackTreeNode<TKey>? RemoveRecolor(RedBlackTreeNode<TKey> node)
        {
            var removeCase = GetRemoveCase(node);

            var dir = comparer.Compare(node.Key, node.Parent!.Key);

            // Determine current node's sibling and nephews
            var sibling = dir < 0 ? node.Parent.Right : node.Parent.Left;
            var closeNewphew = dir < 0 ? sibling!.Left : sibling!.Right;
            var distantNephew = dir < 0 ? sibling!.Right : sibling!.Left;

            switch (removeCase)
            {
                case 1:
                    // Everything nearby is black: recolor the sibling and push the
                    // black-height deficit one level up by returning the parent.
                    sibling.Color = NodeColor.Red;
                    return node.Parent;
                case 3:
                    RemoveCase3(node, closeNewphew, dir);
                    break;
                case 4:
                    RemoveCase4(sibling);
                    break;
                case 5:
                    RemoveCase5(node, sibling, dir);
                    break;
                case 6:
                    RemoveCase6(node, distantNephew!, dir);
                    break;
                default:
                    // GetRemoveCase only ever returns 1, 3, 4, 5 or 6.
                    throw new InvalidOperationException("It should not be possible to get here!");
            }

            return null;
        }
        /// <summary>
        /// Simple removal cases where black height doesn't change.
        /// </summary>
        /// <param name="node">Node to remove.</param>
        /// <returns>Non-root black leaf node or null. Null indicates that removal was performed.</returns>
        private RedBlackTreeNode<TKey>? RemoveSimpleCases(RedBlackTreeNode<TKey> node)
        {
            // Node to delete is root and has no children
            if (node.Parent is null && node.Left is null && node.Right is null)
            {
                root = null;
                Count--;
                return null;
            }

            // Node has two children: copy the in-order successor's key into this node
            // and delete the successor instead (it has at most one child).
            if (node.Left is not null && node.Right is not null)
            {
                var successor = GetMin(node.Right);
                node.Key = successor.Key;
                node = successor;
            }

            // At this point node should have at most one child
            if (node.Color == NodeColor.Red)
            {
                // Node is red so it must have no children since it doesn't have two children
                DeleteLeaf(node.Parent!, comparer.Compare(node.Key, node.Parent!.Key));
                Count--;
                return null;
            }
            else
            {
                // Node is black: either splice it out (single red child) or hand it to
                // the caller for recoloring (leaf).
                return RemoveBlackNode(node);
            }
        }
        /// <summary>
        /// Node to delete is black. If it is a leaf then we need to recolor, otherwise remove it.
        /// </summary>
        /// <param name="node">Black node to examine.</param>
        /// <returns>Node to start recoloring from. Null if deletion occurred.</returns>
        private RedBlackTreeNode<TKey>? RemoveBlackNode(RedBlackTreeNode<TKey> node)
        {
            // Node is black and has at most one child. If it has a child it must be red.
            var child = node.Left ?? node.Right;

            // Continue to recoloring if node is leaf
            if (child is null)
            {
                return node;
            }

            // Recolor child: painting the red child black keeps the black height intact
            // after the black node is spliced out.
            child.Color = NodeColor.Black;
            child.Parent = node.Parent;
            var childDir = node.Parent is null ? 0 : comparer.Compare(node.Key, node.Parent.Key);

            // Replace node with child
            Transplant(node.Parent, child, childDir);
            Count--;
            return null;
        }
        /// <summary>
        /// Perform case 3 of removal: the sibling is red. A rotation gives the removed
        /// node a black sibling, after which one of cases 4, 5 or 6 finishes the repair.
        /// </summary>
        /// <param name="node">Node that was removed.</param>
        /// <param name="closeNephew">Close nephew of removed node.</param>
        /// <param name="childDir">Side of parent the removed node was.</param>
        private void RemoveCase3(RedBlackTreeNode<TKey> node, RedBlackTreeNode<TKey>? closeNephew, int childDir)
        {
            // Rotate and recolor
            var sibling = childDir < 0 ? RotateLeft(node.Parent!) : RotateRight(node.Parent!);
            sibling.Color = NodeColor.Black;
            if (childDir < 0)
            {
                sibling.Left!.Color = NodeColor.Red;
            }
            else
            {
                sibling.Right!.Color = NodeColor.Red;
            }

            // Get new distant nephew; after the rotation the old close nephew is the new sibling.
            sibling = closeNephew!;
            var distantNephew = childDir < 0 ? sibling.Right : sibling.Left;

            // Parent is red, sibling is black
            if (distantNephew is not null && distantNephew.Color == NodeColor.Red)
            {
                RemoveCase6(node, distantNephew, childDir);
                return;
            }

            // Get new close nephew
            closeNephew = childDir < 0 ? sibling!.Left : sibling!.Right;

            // Sibling is black, distant nephew is black
            if (closeNephew is not null && closeNephew.Color == NodeColor.Red)
            {
                RemoveCase5(node, sibling!, childDir);
                return;
            }

            // Final recoloring
            RemoveCase4(sibling!);
        }
/// <summary>
/// Perform case 4 of removal.
/// </summary>
/// <param name="sibling">Sibling of removed node.</param>
private void RemoveCase4(RedBlackTreeNode<TKey> sibling)
{
sibling.Color = NodeColor.Red;
sibling.Parent!.Color = NodeColor.Black;
}
        /// <summary>
        /// Perform case 5 of removal.
        /// </summary>
        /// <param name="node">Node that was removed.</param>
        /// <param name="sibling">Sibling of removed node.</param>
        /// <param name="childDir">Side of parent removed node was on.</param>
        private void RemoveCase5(RedBlackTreeNode<TKey> node, RedBlackTreeNode<TKey> sibling, int childDir)
        {
            // Rotate the sibling so the red nephew ends up in the distant position,
            // recolor, then case 6 completes the repair.
            sibling = childDir < 0 ? RotateRight(sibling) : RotateLeft(sibling);
            var distantNephew = childDir < 0 ? sibling.Right! : sibling.Left!;
            sibling.Color = NodeColor.Black;
            distantNephew.Color = NodeColor.Red;
            RemoveCase6(node, distantNephew, childDir);
        }
        /// <summary>
        /// Perform case 6 of removal: rotate about the parent, give the new subtree
        /// root the old parent's color, and paint the parent and distant nephew black.
        /// </summary>
        /// <param name="node">Node that was removed.</param>
        /// <param name="distantNephew">Distant nephew of removed node.</param>
        /// <param name="childDir">Side of parent removed node was on.</param>
        private void RemoveCase6(RedBlackTreeNode<TKey> node, RedBlackTreeNode<TKey> distantNephew, int childDir)
        {
            var oldParent = node.Parent!;
            node = childDir < 0 ? RotateLeft(oldParent) : RotateRight(oldParent);
            node.Color = oldParent.Color;
            oldParent.Color = NodeColor.Black;
            distantNephew.Color = NodeColor.Black;
        }
/// <summary>
/// Determine which removal case is required.
/// </summary>
/// <param name="node">Node being removed.</param>
/// <returns>Which removal case should be performed.</returns>
private int GetRemoveCase(RedBlackTreeNode<TKey> node)
{
var dir = comparer.Compare(node.Key, node.Parent!.Key);
// Determine current node's sibling and nephews
var sibling = dir < 0 ? node.Parent.Right : node.Parent.Left;
var closeNewphew = dir < 0 ? sibling!.Left : sibling!.Right;
var distantNephew = dir < 0 ? sibling!.Right : sibling!.Left;
if (sibling.Color == NodeColor.Red)
{
return 3;
}
else if (distantNephew is not null && distantNephew.Color == NodeColor.Red)
{
return 6;
}
else if (closeNewphew is not null && closeNewphew.Color == NodeColor.Red)
{
return 5;
}
else if (node.Parent.Color == NodeColor.Red)
{
return 4;
}
else
{
return 1;
}
}
/// <summary>
/// Set child of node or delete leaf.
/// </summary>
/// <param name="node">Node to set child of. Set to null for root.</param>
/// <param name="child">Node to set as child.</param>
/// <param name="dir">Which side of node to place child.</param>
private void Transplant(RedBlackTreeNode<TKey>? node, RedBlackTreeNode<TKey>? child, int dir)
{
if (node is null)
{
root = child;
}
else if (child is null)
{
DeleteLeaf(node, dir);
}
else if (dir < 0)
{
node.Left = child;
}
else
{
node.Right = child;
}
}
/// <summary>
/// Delete leaf node.
/// </summary>
/// <param name="node">Parent of leaf node to delete.</param>
/// <param name="dir">Side of parent leaf is on.</param>
private void DeleteLeaf(RedBlackTreeNode<TKey> node, int dir)
{
if (dir < 0)
{
node.Left = null;
}
else
{
node.Right = null;
}
}
        /// <summary>
        /// Perform a left (counter-clockwise) rotation.
        /// </summary>
        /// <param name="node">Node to rotate about.</param>
        /// <returns>New node with rotation applied.</returns>
        private RedBlackTreeNode<TKey> RotateLeft(RedBlackTreeNode<TKey> node)
        {
            var temp1 = node; // old subtree root
            var temp2 = node!.Right!.Left; // inner child that changes parents
            node = node.Right; // right child becomes the new subtree root
            node.Parent = temp1.Parent;
            if (node.Parent is not null)
            {
                // Reattach the new subtree root on the correct side of its parent;
                // the side is derived from key order, which relies on keys being unique.
                var nodeDir = comparer.Compare(node.Key, node.Parent.Key);
                if (nodeDir < 0)
                {
                    node.Parent.Left = node;
                }
                else
                {
                    node.Parent.Right = node;
                }
            }

            // Old root becomes the left child; the inner child moves across.
            node.Left = temp1;
            node.Left.Parent = node;
            node.Left.Right = temp2;
            if (temp2 is not null)
            {
                node.Left.Right!.Parent = temp1;
            }

            // Rotating about the root changes the root of the whole tree.
            if (node.Parent is null)
            {
                root = node;
            }

            return node;
        }
        /// <summary>
        /// Perform a right (clockwise) rotation.
        /// </summary>
        /// <param name="node">Node to rotate about.</param>
        /// <returns>New node with rotation applied.</returns>
        private RedBlackTreeNode<TKey> RotateRight(RedBlackTreeNode<TKey> node)
        {
            var temp1 = node; // old subtree root
            var temp2 = node!.Left!.Right; // inner child that changes parents
            node = node.Left; // left child becomes the new subtree root
            node.Parent = temp1.Parent;
            if (node.Parent is not null)
            {
                // Reattach the new subtree root on the correct side of its parent;
                // the side is derived from key order, which relies on keys being unique.
                var nodeDir = comparer.Compare(node.Key, node.Parent.Key);
                if (nodeDir < 0)
                {
                    node.Parent.Left = node;
                }
                else
                {
                    node.Parent.Right = node;
                }
            }

            // Old root becomes the right child; the inner child moves across.
            node.Right = temp1;
            node.Right.Parent = node;
            node.Right.Left = temp2;
            if (temp2 is not null)
            {
                node.Right.Left!.Parent = temp1;
            }

            // Rotating about the root changes the root of the whole tree.
            if (node.Parent is null)
            {
                root = node;
            }

            return node;
        }
/// <summary>
/// Helper function to get node instance with minimum key value
/// in the specified subtree.
/// </summary>
/// <param name="node">Node specifying root of subtree.</param>
/// <returns>Minimum value in node's subtree.</returns>
private RedBlackTreeNode<TKey> GetMin(RedBlackTreeNode<TKey> node)
{
while (node.Left is not null)
{
node = node.Left;
}
return node;
}
/// <summary>
/// Helper function to get node instance with maximum key value
/// in the specified subtree.
/// </summary>
/// <param name="node">Node specifyng root of subtree.</param>
/// <returns>Maximum value in node's subtree.</returns>
private RedBlackTreeNode<TKey> GetMax(RedBlackTreeNode<TKey> node)
{
while (node.Right is not null)
{
node = node.Right;
}
return node;
}
}
}
| 873 |
namespace DataStructures.RedBlackTree
{
    /// <summary>
    /// Enum to represent node colors.
    /// </summary>
    public enum NodeColor : byte
    {
        /// <summary>
        /// Represents red node. First member, so this is the default color of a new node.
        /// </summary>
        Red,

        /// <summary>
        /// Represents black node
        /// </summary>
        Black,
    }

    /// <summary>
    /// Generic class to represent nodes in an <see cref="RedBlackTree{TKey}"/> instance.
    /// </summary>
    /// <typeparam name="TKey">The type of key for the node.</typeparam>
    public class RedBlackTreeNode<TKey>
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="RedBlackTreeNode{TKey}"/> class.
        /// </summary>
        /// <param name="key">Key value for node.</param>
        /// <param name="parent">Parent of node.</param>
        public RedBlackTreeNode(TKey key, RedBlackTreeNode<TKey>? parent)
        {
            Key = key;
            Parent = parent;
        }

        /// <summary>
        /// Gets or sets key value of node.
        /// </summary>
        public TKey Key { get; set; }

        /// <summary>
        /// Gets or sets the color of the node.
        /// </summary>
        public NodeColor Color { get; set; }

        /// <summary>
        /// Gets or sets the parent of the node.
        /// </summary>
        public RedBlackTreeNode<TKey>? Parent { get; set; }

        /// <summary>
        /// Gets or sets left child of the node.
        /// </summary>
        public RedBlackTreeNode<TKey>? Left { get; set; }

        /// <summary>
        /// Gets or sets the right child of the node.
        /// </summary>
        public RedBlackTreeNode<TKey>? Right { get; set; }
    }
}
| 62 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
namespace DataStructures.ScapegoatTree
{
public static class Extensions
{
/// <summary>
/// Flattens scapegoat tree into a list of nodes.
/// </summary>
/// <param name="root">Scapegoat tree provided as root node.</param>
/// <param name="list">An empty list.</param>
/// <typeparam name="TKey">Scapegoat tree node key type.</typeparam>
public static void FlattenTree<TKey>(Node<TKey> root, List<Node<TKey>> list) where TKey : IComparable
{
if (root.Left != null)
{
FlattenTree(root.Left, list);
}
list.Add(root);
if (root.Right != null)
{
FlattenTree(root.Right, list);
}
}
/// <summary>
/// Rebuilds a scapegoat tree from list of nodes.
/// Use with <see cref="FlattenTree{TKey}"/> method.
/// </summary>
/// <param name="list">Flattened tree.</param>
/// <param name="start">Start index.</param>
/// <param name="end">End index.</param>
/// <typeparam name="TKey">Scapegoat tree node key type.</typeparam>
/// <returns>Scapegoat tree root node.</returns>
/// <exception cref="ArgumentException">Thrown if start index is invalid.</exception>
public static Node<TKey> RebuildFromList<TKey>(IList<Node<TKey>> list, int start, int end)
where TKey : IComparable
{
if (start > end)
{
throw new ArgumentException("The parameter's value is invalid.", nameof(start));
}
var pivot = Convert.ToInt32(Math.Ceiling(start + (end - start) / 2.0));
return new Node<TKey>(list[pivot].Key)
{
Left = start > (pivot - 1) ? null : RebuildFromList(list, start, pivot - 1),
Right = (pivot + 1) > end ? null : RebuildFromList(list, pivot + 1, end),
};
}
}
}
| 57 |
C-Sharp | TheAlgorithms | C# | using System;
namespace DataStructures.ScapegoatTree
{
/// <summary>
/// Scapegoat tree node class.
/// </summary>
/// <typeparam name="TKey">Scapegoat tree node key type.</typeparam>
public class Node<TKey> where TKey : IComparable
{
private Node<TKey>? right;
private Node<TKey>? left;
public TKey Key { get; }
public Node<TKey>? Right
{
get => right;
set
{
if (value != null && !value.IsGreaterThanOrSameAs(Key))
{
throw new ArgumentException("The value's key is smaller than or equal to node's right child's key.", nameof(value));
}
right = value;
}
}
public Node<TKey>? Left
{
get => left;
set
{
if (value != null && value.IsGreaterThanOrSameAs(Key))
{
throw new ArgumentException("The value's key is greater than or equal to node's left child's key.", nameof(value));
}
left = value;
}
}
public Node(TKey key) => Key = key;
public Node(TKey key, Node<TKey>? right, Node<TKey>? left)
: this(key)
{
Right = right;
Left = left;
}
/// <summary>
/// Returns number of elements in the tree.
/// </summary>
/// <returns>Number of elements in the tree.</returns>
public int GetSize() => (Left?.GetSize() ?? 0) + 1 + (Right?.GetSize() ?? 0);
/// <summary>
/// Gets alpha height of the current node.
/// </summary>
/// <param name="alpha">Alpha value.</param>
/// <returns>Alpha height value.</returns>
public double GetAlphaHeight(double alpha) => Math.Floor(Math.Log(GetSize(), 1.0 / alpha));
public Node<TKey> GetSmallestKeyNode() => Left?.GetSmallestKeyNode() ?? this;
public Node<TKey> GetLargestKeyNode() => Right?.GetLargestKeyNode() ?? this;
/// <summary>
/// Checks if the current node is alpha weight balanced.
/// </summary>
/// <param name="a">Alpha value.</param>
/// <returns>True - if node is alpha weight balanced. If not - false.</returns>
public bool IsAlphaWeightBalanced(double a)
{
var isLeftBalanced = (Left?.GetSize() ?? 0) <= a * GetSize();
var isRightBalanced = (Right?.GetSize() ?? 0) <= a * GetSize();
return isLeftBalanced && isRightBalanced;
}
private bool IsGreaterThanOrSameAs(TKey key)
{
return Key.CompareTo(key) >= 0;
}
}
}
| 89 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
namespace DataStructures.ScapegoatTree
{
/// <summary>
/// A scapegoat implementation class.
/// See https://en.wikipedia.org/wiki/Scapegoat_tree for more information about scapegoat tree.
/// </summary>
/// <typeparam name="TKey">The scapegoat tree key type.</typeparam>
    public class ScapegoatTree<TKey> where TKey : IComparable
    {
        /// <summary>
        /// Gets the α (alpha) value of the tree.
        /// </summary>
        public double Alpha { get; private set; }

        /// <summary>
        /// Gets the root node of the tree.
        /// </summary>
        public Node<TKey>? Root { get; private set; }

        /// <summary>
        /// Gets the number of nodes in the tree.
        /// </summary>
        public int Size { get; private set; }

        /// <summary>
        /// Gets the maximal value of the tree Size since the last time the tree was completely rebuilt.
        /// </summary>
        public int MaxSize { get; private set; }

        /// <summary>
        /// Gets an event handler which will fire when tree is being balanced.
        /// </summary>
        public event EventHandler? TreeIsUnbalanced;

        /// <summary>
        /// Initializes a new empty instance with the default alpha value of 0.5.
        /// </summary>
        public ScapegoatTree()
            : this(alpha: 0.5, size: 0)
        {
        }

        /// <summary>
        /// Initializes a new empty instance with the given alpha value.
        /// </summary>
        /// <param name="alpha">Alpha value; must lie in the 0.5..1.0 range.</param>
        public ScapegoatTree(double alpha)
            : this(alpha, size: 0)
        {
        }

        /// <summary>
        /// Initializes a new instance that adopts an existing subtree.
        /// </summary>
        /// <param name="node">Root node of the subtree to adopt.</param>
        /// <param name="alpha">Alpha value; must lie in the 0.5..1.0 range.</param>
        public ScapegoatTree(Node<TKey> node, double alpha)
            : this(alpha, size: node.GetSize())
        {
            Root = node;
        }

        /// <summary>
        /// Initializes a new instance containing a single key.
        /// </summary>
        /// <param name="key">Key of the root node.</param>
        /// <param name="alpha">Alpha value; must lie in the 0.5..1.0 range.</param>
        public ScapegoatTree(TKey key, double alpha = 0.5)
            : this(alpha, size: 1)
        {
            Root = new Node<TKey>(key);
        }

        // Common constructor: validates alpha and seeds the size counters.
        private ScapegoatTree(double alpha, int size)
        {
            CheckAlpha(alpha);

            Alpha = alpha;

            Size = size;
            MaxSize = size;
        }

        /// <summary>
        /// Checks if current instance of the scapegoat tree is alpha weight balanced.
        /// </summary>
        /// <returns>True - if tree is alpha weight balanced. Otherwise, false.</returns>
        public bool IsAlphaWeightBalanced()
        {
            return Root?.IsAlphaWeightBalanced(Alpha) ?? true;
        }

        /// <summary>
        /// Check if any node in the tree has specified key value.
        /// </summary>
        /// <param name="key">Key value.</param>
        /// <returns>Returns true if node exists, false if not.</returns>
        public bool Contains(TKey key)
        {
            return Search(key) != null;
        }

        /// <summary>
        /// Searches current instance of the scapegoat tree for specified key.
        /// </summary>
        /// <param name="key">Key value.</param>
        /// <returns>Node with the specified key or null.</returns>
        public Node<TKey>? Search(TKey key)
        {
            if (Root == null)
            {
                return null;
            }

            var current = Root;

            // Plain binary-search descent; the default arm is hit when the search
            // would step into a missing child, i.e. the key is absent.
            while (true)
            {
                var result = current.Key.CompareTo(key);

                switch (result)
                {
                    case 0:
                        return current;
                    case > 0 when current.Left != null:
                        current = current.Left;
                        break;
                    case < 0 when current.Right != null:
                        current = current.Right;
                        break;
                    default:
                        return null;
                }
            }
        }

        /// <summary>
        /// Inserts a new key into current instance of the scapegoat tree. Rebuilds tree if it's unbalanced.
        /// </summary>
        /// <param name="key">Key value.</param>
        /// <returns>True - if insertion is successful, false - if the key is already present in the tree.</returns>
        public bool Insert(TKey key)
        {
            var node = new Node<TKey>(key);

            if (Root == null)
            {
                Root = node;

                UpdateSizes();

                return true;
            }

            // Remember the descent path so a scapegoat can be located if the
            // insertion makes the tree too deep.
            var path = new Stack<Node<TKey>>();

            var current = Root;

            var found = false;

            while (!found)
            {
                path.Push(current);

                var result = current.Key.CompareTo(node.Key);

                switch (result)
                {
                    case < 0 when current.Right != null:
                        current = current.Right;
                        continue;
                    case < 0:
                        current.Right = node;
                        found = true;
                        break;
                    case > 0 when current.Left != null:
                        current = current.Left;
                        continue;
                    case > 0:
                        current.Left = node;
                        found = true;
                        break;
                    default:
                        // Duplicate key: nothing was inserted.
                        return false;
                }
            }

            UpdateSizes();

            // The new node sits deeper than the alpha height allows: the tree is
            // unbalanced, so rebuild the subtree rooted at a scapegoat node.
            if (path.Count > Root.GetAlphaHeight(Alpha))
            {
                TreeIsUnbalanced?.Invoke(this, EventArgs.Empty);

                BalanceFromPath(path);

                MaxSize = Math.Max(MaxSize, Size);
            }

            return true;
        }

        /// <summary>
        /// Removes the specified key from the current instance of the scapegoat tree. Rebuilds tree if it's unbalanced.
        /// </summary>
        /// <param name="key">Key value.</param>
        /// <returns>True - if key was successfully removed, false - if the key wasn't found in the tree.</returns>
        public bool Delete(TKey key)
        {
            if (Root == null)
            {
                return false;
            }

            if (Remove(Root, Root, key))
            {
                Size--;

                // Too many deletions since the last full rebuild: flatten the whole
                // tree and rebuild it perfectly balanced.
                if (Root != null && Size < Alpha * MaxSize)
                {
                    TreeIsUnbalanced?.Invoke(this, EventArgs.Empty);

                    var list = new List<Node<TKey>>();

                    Extensions.FlattenTree(Root, list);

                    Root = Extensions.RebuildFromList(list, 0, list.Count - 1);

                    MaxSize = Size;
                }

                return true;
            }

            return false;
        }

        /// <summary>
        /// Clears the tree.
        /// </summary>
        public void Clear()
        {
            Size = 0;
            MaxSize = 0;
            Root = null;
        }

        /// <summary>
        /// Changes <see cref="Alpha"/> value to adjust balancing.
        /// </summary>
        /// <param name="value">New alpha value.</param>
        public void Tune(double value)
        {
            CheckAlpha(value);
            Alpha = value;
        }

        /// <summary>
        /// Searches for a scapegoat node in provided stack.
        /// </summary>
        /// <param name="path">Stack instance with nodes, starting with root node.</param>
        /// <returns>Scapegoat node with its parent node. Parent can be null if scapegoat node is root node.</returns>
        /// <exception cref="ArgumentException">Thrown if path stack is empty.</exception>
        /// <exception cref="InvalidOperationException">Thrown if scapegoat wasn't found.</exception>
        public (Node<TKey>? parent, Node<TKey> scapegoat) FindScapegoatInPath(Stack<Node<TKey>> path)
        {
            if (path.Count == 0)
            {
                throw new ArgumentException("The path collection should not be empty.", nameof(path));
            }

            var depth = 1;

            // Walk up from the insertion point: the first node whose depth exceeds
            // its own alpha height is the scapegoat.
            while (path.TryPop(out var next))
            {
                if (depth > next.GetAlphaHeight(Alpha))
                {
                    return path.TryPop(out var parent) ? (parent, next) : (null, next);
                }

                depth++;
            }

            throw new InvalidOperationException("Scapegoat node wasn't found. The tree should be unbalanced.");
        }

        /// <summary>
        /// Validates that the alpha value is within the supported range.
        /// </summary>
        /// <param name="alpha">Alpha value to validate.</param>
        /// <exception cref="ArgumentException">Thrown if alpha is outside 0.5..1.0.</exception>
        private static void CheckAlpha(double alpha)
        {
            if (alpha is < 0.5 or > 1.0)
            {
                throw new ArgumentException("The alpha parameter's value should be in 0.5..1.0 range.", nameof(alpha));
            }
        }

        /// <summary>
        /// Recursively removes the node with the specified key from the subtree.
        /// </summary>
        /// <param name="parent">Parent of <paramref name="node"/>.</param>
        /// <param name="node">Root of the subtree to search.</param>
        /// <param name="key">Key to remove.</param>
        /// <returns>True if a node was removed, false if the key was not found.</returns>
        private bool Remove(Node<TKey>? parent, Node<TKey>? node, TKey key)
        {
            if (node is null || parent is null)
            {
                return false;
            }

            var compareResult = node.Key.CompareTo(key);

            if (compareResult > 0)
            {
                return Remove(node, node.Left, key);
            }

            if (compareResult < 0)
            {
                return Remove(node, node.Right, key);
            }

            Node<TKey>? replacementNode;

            // Case 0: Node has no children.
            // Case 1: Node has one child.
            if (node.Left is null || node.Right is null)
            {
                replacementNode = node.Left ?? node.Right;
            }

            // Case 2: Node has two children. (This implementation uses the in-order predecessor to replace node.)
            else
            {
                var predecessorNode = node.Left.GetLargestKeyNode();
                Remove(Root, Root, predecessorNode.Key);
                replacementNode = new Node<TKey>(predecessorNode.Key)
                {
                    Left = node.Left,
                    Right = node.Right,
                };
            }

            // Replace the relevant node with a replacement found in the previous stages.
            // Special case for replacing the root node.
            if (node == Root)
            {
                Root = replacementNode;
            }
            else if (parent.Left == node)
            {
                parent.Left = replacementNode;
            }
            else
            {
                parent.Right = replacementNode;
            }

            return true;
        }

        /// <summary>
        /// Rebuilds the subtree rooted at the scapegoat found on the insertion path
        /// and reattaches the rebuilt subtree to the scapegoat's parent.
        /// </summary>
        /// <param name="path">Nodes visited during insertion, deepest node on top.</param>
        private void BalanceFromPath(Stack<Node<TKey>> path)
        {
            var (parent, scapegoat) = FindScapegoatInPath(path);

            var list = new List<Node<TKey>>();

            Extensions.FlattenTree(scapegoat, list);

            var tree = Extensions.RebuildFromList(list, 0, list.Count - 1);

            if (parent == null)
            {
                Root = tree;
            }
            else
            {
                var result = parent.Key.CompareTo(tree.Key);

                if (result < 0)
                {
                    parent.Right = tree;
                }
                else
                {
                    parent.Left = tree;
                }
            }
        }

        // Increments Size after an insertion and tracks the historical maximum.
        private void UpdateSizes()
        {
            Size += 1;
            MaxSize = Math.Max(Size, MaxSize);
        }
    }
}
| 374 |
C-Sharp | TheAlgorithms | C# | using System;
namespace DataStructures.SegmentTrees
{
/// <summary>
/// Goal: Data structure with which you can quickly perform queries on an array (i.e. sum of subarray)
/// and at the same time efficiently update an entry
/// or apply a distributive operation to a subarray.
/// Idea: Preprocessing special queries
/// Hint: The query operation HAS to be associative (in this example addition).
/// </summary>
public class SegmentTree
{
/// <summary>
/// Initializes a new instance of the <see cref="SegmentTree" /> class.
/// Runtime complexity: O(n) where n equals the array-length.
/// </summary>
/// <param name="arr">Array on which the queries should be made.</param>
public SegmentTree(int[] arr)
{
// Calculates next power of two
var pow = (int)Math.Pow(2, Math.Ceiling(Math.Log(arr.Length, 2)));
Tree = new int[2 * pow];
// Transfers the input array into the last half of the segment tree array
Array.Copy(arr, 0, Tree, pow, arr.Length);
// Calculates the first half
for (var i = pow - 1; i > 0; --i)
{
Tree[i] = Tree[Left(i)] + Tree[Right(i)];
}
}
/// <summary>Gets the segment tree array.</summary>
public int[] Tree { get; }
/// <summary>
/// Starts a query.
/// Runtime complexity: O(logN) where n equals the array-length.
/// </summary>
/// <param name="l">Left border of the query.</param>
/// <param name="r">Right border of the query.</param>
/// <returns>Sum of the subarray between <c>l</c> and <c>r</c> (including <c>l</c> and <c>r</c>).</returns>
// Editing of query start at node with 1.
// Node with index 1 includes the whole input subarray.
public int Query(int l, int r) =>
Query(++l, ++r, 1, Tree.Length / 2, 1);
/// <summary>
/// Calculates the right child of a node.
/// </summary>
/// <param name="node">Current node.</param>
/// <returns>Index of the right child.</returns>
protected int Right(int node) => 2 * node + 1;
/// <summary>
/// Calculates the left child of a node.
/// </summary>
/// <param name="node">Current node.</param>
/// <returns>Index of the left child.</returns>
protected int Left(int node) => 2 * node;
/// <summary>
/// Calculates the parent of a node.
/// </summary>
/// <param name="node">Current node.</param>
/// <returns>Index of the parent node.</returns>
protected int Parent(int node) => node / 2;
/// <summary>
/// Edits a query.
/// </summary>
/// <param name="l">Left border of the query.</param>
/// <param name="r">Right border of the query.</param>
/// <param name="a">Left end of the subarray enclosed by <c>i</c>.</param>
/// <param name="b">Right end of the subarray enclosed by <c>i</c>.</param>
/// <param name="i">Current node.</param>
/// <returns>Sum of a subarray between <c>l</c> and <c>r</c> (including <c>l</c> and <c>r</c>).</returns>
protected virtual int Query(int l, int r, int a, int b, int i)
{
// If a and b are in the (by l and r) specified subarray
if (l <= a && b <= r)
{
return Tree[i];
}
// If a or b are out of the by l and r specified subarray
if (r < a || b < l)
{
// Returns the neutral value of the operation
// (in this case 0, because x + 0 = x)
return 0;
}
// Calculates index m of the node that cuts the current subarray in half
var m = (a + b) / 2;
// Start query of new two subarrays a:m and m+1:b
// The right and left child cover this intervals
return Query(l, r, a, m, Left(i)) + Query(l, r, m + 1, b, Right(i));
}
}
}
| 105 |
C-Sharp | TheAlgorithms | C# | using System;
namespace DataStructures.SegmentTrees
{
/// <summary>
/// This is an extension of a segment tree, which allows applying distributive operations to a subarray
/// (in this case multiplication).
/// </summary>
    public class SegmentTreeApply : SegmentTree
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="SegmentTreeApply" /> class.
        /// Runtime complexity: O(n) where n equals the array-length.
        /// </summary>
        /// <param name="arr">Array on which the operations should be made.</param>
        public SegmentTreeApply(int[] arr)
            : base(arr)
        {
            // Initializes and fills "operand" array with neutral element (in this case 1, because value * 1 = value)
            Operand = new int[Tree.Length];
            Array.Fill(Operand, 1);
        }

        /// <summary>
        /// Gets an array that stores for each node an operand,
        /// which must be applied to all direct and indirect child nodes of this node
        /// (but not to the node itself).
        /// </summary>
        public int[] Operand { get; }

        /// <summary>
        /// Applies a distributive operation to a subarray defined by <c>l</c> and <c>r</c>
        /// (in this case multiplication by <c>value</c>).
        /// Runtime complexity: O(logN) where N equals the initial array-length.
        /// </summary>
        /// <param name="l">Left border of the subarray.</param>
        /// <param name="r">Right border of the subarray.</param>
        /// <param name="value">Value with which each element of the interval is calculated.</param>
        public void Apply(int l, int r, int value)
        {
            // The Application start at node with 1
            // Node with index 1 includes the whole input subarray
            Apply(++l, ++r, value, 1, Tree.Length / 2, 1);
        }

        /// <summary>
        /// Edits a query.
        /// </summary>
        /// <param name="l">Left border of the query.</param>
        /// <param name="r">Right border of the query.</param>
        /// <param name="a">Left end of the subarray enclosed by <c>i</c>.</param>
        /// <param name="b">Right end of the subarray enclosed by <c>i</c>.</param>
        /// <param name="i">Current node.</param>
        /// <returns>Sum of a subarray between <c>l</c> and <c>r</c> (including <c>l</c> and <c>r</c>).</returns>
        protected override int Query(int l, int r, int a, int b, int i)
        {
            // Node interval fully inside the query: the stored (already scaled) sum answers it.
            if (l <= a && b <= r)
            {
                return Tree[i];
            }

            // Disjoint intervals contribute the neutral element of addition (0).
            if (r < a || b < l)
            {
                return 0;
            }

            var m = (a + b) / 2;

            // Application of the saved operand to the direct and indirect child nodes
            return Operand[i] * (Query(l, r, a, m, Left(i)) + Query(l, r, m + 1, b, Right(i)));
        }

        /// <summary>
        /// Applies the operation.
        /// </summary>
        /// <param name="l">Left border of the Application.</param>
        /// <param name="r">Right border of the Application.</param>
        /// <param name="value">Multiplier by which the subarray is to be multiplied.</param>
        /// <param name="a">Left end of the subarray enclosed by <c>i</c>.</param>
        /// <param name="b">Right end of the subarray enclosed by <c>i</c>.</param>
        /// <param name="i">Current node.</param>
        private void Apply(int l, int r, int value, int a, int b, int i)
        {
            // If a and b are in the (by l and r) specified subarray
            if (l <= a && b <= r)
            {
                // Applies the operation to the current node and saves it for the direct and indirect child nodes
                Operand[i] = value * Operand[i];
                Tree[i] = value * Tree[i];
                return;
            }

            // If a or b are out of the by l and r specified subarray stop application at this node
            if (r < a || b < l)
            {
                return;
            }

            // Calculates index m of the node that cuts the current subarray in half
            var m = (a + b) / 2;

            // Applies the operation to both halves
            Apply(l, r, value, a, m, Left(i));
            Apply(l, r, value, m + 1, b, Right(i));

            // Recalculates the value of this node by its (possibly new) children.
            Tree[i] = Operand[i] * (Tree[Left(i)] + Tree[Right(i)]);
        }
    }
}
| 111 |
C-Sharp | TheAlgorithms | C# | namespace DataStructures.SegmentTrees
{
/// <summary>
/// This is an extension of a segment tree, which allows the update of a single element.
/// </summary>
public class SegmentTreeUpdate : SegmentTree
{
/// <summary>
/// Initializes a new instance of the <see cref="SegmentTreeUpdate" /> class.
/// Runtime complexity: O(n) where n equals the array-length.
/// </summary>
/// <param name="arr">Array on which the queries should be made.</param>
public SegmentTreeUpdate(int[] arr)
: base(arr)
{
}
/// <summary>
/// Updates a single element of the input array.
/// Changes the leaf first and updates its parents afterwards.
/// Runtime complexity: O(logN) where N equals the initial array-length.
/// </summary>
/// <param name="node">Index of the node that should be updated.</param>
/// <param name="value">New Value of the element.</param>
public void Update(int node, int value)
{
Tree[node + Tree.Length / 2] = value;
Propagate(Parent(node + Tree.Length / 2));
}
/// <summary>
/// Recalculates the value of node by its children.
/// Calls its parent to do the same.
/// </summary>
/// <param name="node">Index of current node.</param>
private void Propagate(int node)
{
if (node == 0)
{
// passed root
return;
}
Tree[node] = Tree[Left(node)] + Tree[Right(node)];
Propagate(Parent(node));
}
}
}
| 49 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
namespace DataStructures.Stack
{
/// <summary>
/// Implementation of an array-based stack. LIFO style.
/// </summary>
/// <typeparam name="T">Generic Type.</typeparam>
public class ArrayBasedStack<T>
{
private const int DefaultCapacity = 10;
private const string StackEmptyErrorMessage = "Stack is empty";
/// <summary>
/// <see cref="Array" /> based stack.
/// </summary>
private T[] stack;
/// <summary>
/// How many items are in the stack right now.
/// </summary>
private int top;
/// <summary>
/// Initializes a new instance of the <see cref="ArrayBasedStack{T}" /> class.
/// </summary>
public ArrayBasedStack()
{
stack = new T[DefaultCapacity];
top = -1;
}
/// <summary>
/// Initializes a new instance of the <see cref="ArrayBasedStack{T}" /> class.
/// </summary>
/// <param name="item">Item to push onto the <see cref="ArrayBasedStack{T}" />.</param>
public ArrayBasedStack(T item)
: this() => Push(item);
/// <summary>
/// Initializes a new instance of the <see cref="ArrayBasedStack{T}" /> class.
/// </summary>
/// <param name="items">Items to push onto the <see cref="ArrayBasedStack{T}" />.</param>
public ArrayBasedStack(T[] items)
{
stack = items;
top = items.Length - 1;
}
/// <summary>
/// Gets the number of elements on the <see cref="ArrayBasedStack{T}" />.
/// </summary>
public int Top => top;
/// <summary>
/// Gets or sets the Capacity of the <see cref="ArrayBasedStack{T}" />.
/// </summary>
public int Capacity
{
get => stack.Length;
set => Array.Resize(ref stack, value);
}
/// <summary>
/// Removes all items from the <see cref="ArrayBasedStack{T}" />.
/// </summary>
public void Clear()
{
top = -1;
Capacity = DefaultCapacity;
}
/// <summary>
/// Determines whether an element is in the <see cref="ArrayBasedStack{T}" />.
/// </summary>
/// <param name="item">The item to locate in the <see cref="ArrayBasedStack{T}" />.</param>
/// <returns>True, if the item is in the stack.</returns>
public bool Contains(T item) => Array.IndexOf(stack, item, 0, top + 1) > -1;
/// <summary>
/// Returns the item at the top of the <see cref="ArrayBasedStack{T}" /> without removing it.
/// </summary>
/// <returns>The item at the top of the <see cref="ArrayBasedStack{T}" />.</returns>
public T Peek()
{
if (top == -1)
{
throw new InvalidOperationException(StackEmptyErrorMessage);
}
return stack[top];
}
/// <summary>
/// Removes and returns the item at the top of the <see cref="ArrayBasedStack{T}" />.
/// </summary>
/// <returns>The item removed from the top of the <see cref="ArrayBasedStack{T}" />.</returns>
public T Pop()
{
if (top == -1)
{
throw new InvalidOperationException(StackEmptyErrorMessage);
}
return stack[top--];
}
/// <summary>
/// Inserts an item at the top of the <see cref="ArrayBasedStack{T}" />.
/// </summary>
/// <param name="item">The item to push onto the <see cref="ArrayBasedStack{T}" />.</param>
public void Push(T item)
{
if (top == Capacity - 1)
{
Capacity *= 2;
}
stack[++top] = item;
}
}
}
| 124 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
namespace DataStructures.Stack
{
/// <summary>
/// Implementation of a list based stack. FILO style.
/// </summary>
/// <typeparam name="T">Generic Type.</typeparam>
public class ListBasedStack<T>
{
/// <summary>
/// <see cref="List{T}" /> based stack.
/// </summary>
private readonly LinkedList<T> stack;
/// <summary>
/// Initializes a new instance of the <see cref="ListBasedStack{T}" /> class.
/// </summary>
public ListBasedStack() => stack = new LinkedList<T>();
/// <summary>
/// Initializes a new instance of the <see cref="ListBasedStack{T}" /> class.
/// </summary>
/// <param name="item">Item to push onto the <see cref="ListBasedStack{T}" />.</param>
public ListBasedStack(T item)
: this() => Push(item);
/// <summary>
/// Initializes a new instance of the <see cref="ListBasedStack{T}" /> class.
/// </summary>
/// <param name="items">Items to push onto the <see cref="ListBasedStack{T}" />.</param>
public ListBasedStack(IEnumerable<T> items)
: this()
{
foreach (var item in items)
{
Push(item);
}
}
/// <summary>
/// Gets the number of elements on the <see cref="ListBasedStack{T}" />.
/// </summary>
public int Count => stack.Count;
/// <summary>
/// Removes all items from the <see cref="ListBasedStack{T}" />.
/// </summary>
public void Clear() => stack.Clear();
/// <summary>
/// Determines whether an element is in the <see cref="ListBasedStack{T}" />.
/// </summary>
/// <param name="item">The item to locate in the <see cref="ListBasedStack{T}" />.</param>
/// <returns>True, if the item is in the stack.</returns>
public bool Contains(T item) => stack.Contains(item);
/// <summary>
/// Returns the item at the top of the <see cref="ListBasedStack{T}" /> without removing it.
/// </summary>
/// <returns>The item at the top of the <see cref="ListBasedStack{T}" />.</returns>
public T Peek()
{
if (stack.First is null)
{
throw new InvalidOperationException("Stack is empty");
}
return stack.First.Value;
}
/// <summary>
/// Removes and returns the item at the top of the <see cref="ListBasedStack{T}" />.
/// </summary>
/// <returns>The item removed from the top of the <see cref="ListBasedStack{T}" />.</returns>
public T Pop()
{
if (stack.First is null)
{
throw new InvalidOperationException("Stack is empty");
}
var item = stack.First.Value;
stack.RemoveFirst();
return item;
}
/// <summary>
/// Inserts an item at the top of the <see cref="ListBasedStack{T}" />.
/// </summary>
/// <param name="item">The item to push onto the <see cref="ListBasedStack{T}" />.</param>
public void Push(T item) => stack.AddFirst(item);
}
}
| 96 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
namespace DataStructures.Tries
{
/// <summary>
/// A Trie is a data structure (particular case of m-ary tree) used to efficiently represent strings with common prefixes.
/// Originally posed by E. Fredkin in 1960.
/// Fredkin, Edward (Sept. 1960), "Trie Memory", Communications of the ACM 3 (9): 490-499.
/// Its name is due to retrieval because its main application is in the field of "Information Retrieval" (information retrieval).
/// </summary>
    public class Trie
    {
        /// <summary>
        /// Sentinel character appended to every stored string; its presence as a
        /// leaf marks the end of a complete word (distinguishing stored words
        /// from mere prefixes).
        /// </summary>
        private const char Mark = '$';
        /// <summary>
        /// This property represents the root node of the trie.
        /// </summary>
        private readonly TrieNode root;
        /// <summary>
        /// Initializes a new instance of the <see cref="Trie"/> class. The instance is created without text strings, generating the root node of the trie, without children.
        /// </summary>
        public Trie()
        {
            root = new TrieNode(Mark);
        }
        /// <summary>
        /// Initializes a new instance of the <see cref="Trie"/> class. Given a set of text strings, inserts each of them into the trie using the Insert(string) method.
        /// </summary>
        /// <param name="words">The array with text strings to insert in the trie.</param>
        public Trie(IEnumerable<string> words)
            : this()
        {
            foreach (string s in words)
            {
                Insert(s);
            }
        }
        /// <summary>
        /// Insert a string s to the trie. The $ mark is added to the end of the chain and then it is added, this in order to indicate the end of the chain in the trie.
        /// </summary>
        /// <param name="s">The string to insert into the trie.</param>
        public void Insert(string s)
        {
            s += Mark;
            int index = 0;
            // PrefixQuery returns the deepest node already matching a prefix of s
            // and sets index to the number of characters matched.
            TrieNode match = PrefixQuery(s, ref index);
            // Create nodes for the unmatched suffix, chaining each new node
            // under the previous one (the final node holds the Mark).
            for (int i = index; i < s.Length; i++)
            {
                TrieNode t = new(s[i], match);
                match[s[i]] = t;
                match = t;
            }
        }
        /// <summary>
        /// Remove a text string from the trie.
        /// </summary>
        /// <param name="s">The text string to be removed from the trie.</param>
        public void Remove(string s)
        {
            s += Mark;
            int index = 0;
            // Deepest node matching s (the Mark leaf when s is actually stored).
            TrieNode match = PrefixQuery(s, ref index);
            // Prune upward: keep deleting while the current node has no children.
            // Stops as soon as an ancestor still has other descendants (shared
            // prefix of another word) or the root's parent (null) is reached.
            // NOTE(review): if s is not stored, match may be an interior node with
            // children, in which case the loop does nothing — verify against callers.
            while(match.IsLeaf())
            {
                char c = match.Value;
                if(match.Parent == null)
                {
                    break;
                }
                match = match.Parent;
                match.Children.Remove(c);
            }
        }
        /// <summary>
        /// Know if a text string is in the trie.
        /// </summary>
        /// <param name="s">The string s that you want to know if it is in the trie.</param>
        /// <returns>If the string is found, it returns true, otherwise false.</returns>
        public bool Find(string s)
        {
            int index = 0;
            // s is stored iff the search for s + Mark ends exactly on the Mark
            // node, which is always a leaf; any partial match ends on an
            // interior node and returns false.
            return PrefixQuery(s + Mark, ref index).IsLeaf();
        }
        /// <summary>
        /// This method analyzes which is the longest common prefix of a string s in the trie. If the string is in the trie then it is equivalent to doing Find(s).
        /// </summary>
        /// <param name="s">The string for which you want to know the longest common prefix.</param>
        /// <param name="index">The index to which the longest common prefix goes.</param>
        /// <returns>
        /// Returns the longest common prefix node found in the trie with the string s.
        /// </returns>
        private TrieNode PrefixQuery(string s, ref int index)
        {
            TrieNode current = root;
            for (int i = 0; i < s.Length && current != null; i++)
            {
                if (current[s[i]] != null)
                {
                    // current only advances to non-null children, so it can never
                    // actually become null here; the ?? throw is defensive.
                    current = current[s[i]] ?? throw new NullReferenceException();
                    index = i + 1;
                }
                else
                {
                    // No child for s[i]: the longest common prefix ends here.
                    break;
                }
            }
            // current starts at root (non-null) and never becomes null above,
            // so this throw is unreachable in practice.
            return current ?? throw new NullReferenceException();
        }
    }
}
| 125 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
namespace DataStructures.Tries
{
/// <summary>
/// This class represents the nodes of a trie.
/// </summary>
internal class TrieNode
{
/// <summary>
/// Initializes a new instance of the <see cref="TrieNode"/> class. This instance was created with a character from the alphabet, and its parent will be null.
/// </summary>
/// <param name="value">Character of the alphabet that represents the node.</param>
internal TrieNode(char value)
: this(value, null)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="TrieNode"/> class. This instance was created with a character from the alphabet, and its parent.
/// </summary>
/// <param name="value">Character of the alphabet that represents the node.</param>
/// <param name="parent">The parent or ancestor of the node in the trie structure.</param>
internal TrieNode(char value, TrieNode? parent)
{
Children = new SortedList<char, TrieNode>();
Parent = parent;
Value = value;
}
/// <summary>
/// Gets all the descendants of the current node.
/// </summary>
/// <value>A sorted set with all the descendants.</value>
internal SortedList<char, TrieNode> Children { get; private set; }
/// <summary>
/// Gets the parent or ancestor of the node in the trie structure.
/// </summary>
/// <value>A TrieNode that represent a parent.</value>
internal TrieNode? Parent { get; private set; }
/// <summary>
/// Gets the character of the alphabet that represents the node.
/// </summary>
/// <value>A character of the alphabet.</value>
internal char Value { get; private set; }
/// <summary>
/// Index the descendants of the current node given an alphabet character.
/// </summary>
/// <value>A TrieNode with the character c in Children.</value>
public TrieNode? this[char c]
{
get => Children.ContainsKey(c) ? Children[c] : null;
set => Children[c] = value ?? throw new NullReferenceException();
}
/// <summary>
/// Method that checks if the current node is a trie leaf.
/// </summary>
/// <returns>Returns true if the current node has no children, false otherwise.</returns>
public bool IsLeaf()
{
return Children.Count == 0;
}
}
}
| 70 |
C-Sharp | TheAlgorithms | C# | using System.Collections.Generic;
namespace DataStructures.UnrolledList
{
/// <summary>
/// Unrolled linked list is a linked list of small arrays,
/// all of the same size where each is so small that the insertion
/// or deletion is fast and quick, but large enough to fill the cache line.
/// </summary>
    public class UnrolledLinkedList
    {
        // Node array length; one larger than the requested chunk size.
        private readonly int sizeNode;
        // First node of the chain; remains null-initialized until the first Insert.
        private UnrolledLinkedListNode start = null!;
        // Last node of the chain; new values are appended here.
        private UnrolledLinkedListNode end = null!;
        /// <summary>
        /// Initializes a new instance of the <see cref="UnrolledLinkedList"/> class.
        /// Create an unrolled list with start chunk size.
        /// </summary>
        /// <param name="chunkSize">The size of a single chunk.</param>
        public UnrolledLinkedList(int chunkSize)
        {
            sizeNode = chunkSize + 1;
        }
        /// <summary>
        /// Add value to list [O(n)].
        /// NOTE: relies on UnrolledLinkedListNode.Set incrementing the node's
        /// Count as a side effect; do not reorder the Set/Count statements below.
        /// </summary>
        /// <param name="value">The entered value.</param>
        public void Insert(int value)
        {
            if (start == null)
            {
                // First insertion: create the initial node; Set(0, ...) also
                // bumps its Count to 1.
                start = new UnrolledLinkedListNode(sizeNode);
                start.Set(0, value);
                end = start;
                return;
            }
            if (end.Count + 1 < sizeNode)
            {
                // Room left in the last node: Set appends and increments end.Count.
                end.Set(end.Count, value);
            }
            else
            {
                // Last node is full: split it, moving its upper half plus the new
                // value into a fresh tail node.
                var pointer = new UnrolledLinkedListNode(sizeNode);
                var j = 0;
                for (var pos = end.Count / 2 + 1; pos < end.Count; pos++)
                {
                    pointer.Set(j++, end.Get(pos));
                }
                pointer.Set(j++, value);
                // Each Set above incremented pointer.Count; overwrite it with the
                // true element count j.
                pointer.Count = j;
                // Shrink the old tail to the lower half it still holds.
                end.Count = end.Count / 2 + 1;
                end.Next = pointer;
                end = pointer;
            }
        }
        /// <summary>
        /// Help method. Get all list inside to check the state.
        /// </summary>
        /// <returns>Items from all nodes.</returns>
        public IEnumerable<int> GetRolledItems()
        {
            // On an empty list start is null, the loop is skipped, and an empty
            // list is returned.
            UnrolledLinkedListNode pointer = start;
            List<int> result = new();
            while (pointer != null)
            {
                for (var i = 0; i < pointer.Count; i++)
                {
                    result.Add(pointer.Get(i));
                }
                pointer = pointer.Next;
            }
            return result;
        }
    }
}
| 87 |
C-Sharp | TheAlgorithms | C# | using System;
namespace DataStructures.UnrolledList
{
/// <summary>
/// Single node with array buffer for unrolled list.
/// </summary>
public class UnrolledLinkedListNode
{
private readonly int[] array;
public UnrolledLinkedListNode(int nodeSize)
{
Next = null!;
Count = 0;
array = new int[nodeSize];
}
public UnrolledLinkedListNode Next { get; set; }
public int Count { get; set; }
/// <summary>
/// Set new item in array buffer.
/// </summary>
/// <param name="pos">Index in array.</param>
/// <param name="val">The entered value.</param>
/// <exception cref="ArgumentException">Index is out of scope.</exception>
public void Set(int pos, int val)
{
if (pos < 0 || pos > array.Length - 1)
{
throw new ArgumentException("Position is out of size", nameof(pos));
}
array[pos] = val;
Count++;
}
/// <summary>
/// Get item from array buffer.
/// </summary>
/// <param name="pos">Index in array.</param>
/// <exception cref="ArgumentException">Index is out of scope.</exception>
public int Get(int pos)
{
if (pos < 0 || pos > array.Length - 1)
{
throw new ArgumentException("Position is out of size", nameof(pos));
}
return array[pos];
}
}
}
| 57 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using System.Linq;
using DataStructures.AATree;
using FluentAssertions;
using NUnit.Framework;
namespace DataStructures.Tests
{
internal class AaTreeTests
{
[Test]
public void Constructor_UseCustomComparer_FormsCorrectTree()
{
var tree = new AaTree<int>(Comparer<int>.Create((x, y) => y.CompareTo(x)));
tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
tree.GetMax().Should().Be(1);
tree.GetMin().Should().Be(10);
tree.GetKeysInOrder().SequenceEqual(new[] { 10, 9, 8, 7, 6, 5, 4, 3, 2, 1 }).Should().BeTrue();
Validate(tree.Root);
}
[Test]
public void Add_MultipleKeys_FormsCorrectTree()
{
var tree = new AaTree<int>();
foreach (var elem in new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 })
{
tree.Add(elem);
tree.Count.Should().Be(elem);
tree.Contains(elem).Should().BeTrue();
}
tree.GetKeysInOrder().SequenceEqual(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).Should().BeTrue();
tree.GetKeysPostOrder().SequenceEqual(new[] { 1, 3, 2, 5, 7, 10, 9, 8, 6, 4 }).Should().BeTrue();
Validate(tree.Root);
}
[Test]
public void Add_KeyAlreadyInTree_ThrowsException()
{
var tree = new AaTree<int>();
tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
Assert.Throws<ArgumentException>(() => tree.Add(1));
}
[Test]
public void AddRange_MultipleKeys_FormsCorrectTree()
{
var tree = new AaTree<int>();
tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
tree.Count.Should().Be(10);
tree.GetKeysInOrder().SequenceEqual(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).Should().BeTrue();
tree.GetKeysPostOrder().SequenceEqual(new[] { 1, 3, 2, 5, 7, 10, 9, 8, 6, 4 }).Should().BeTrue();
Validate(tree.Root);
}
[Test]
public void Remove_MultipleKeys_TreeStillValid()
{
var tree = new AaTree<int>();
tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
Remove(4).Should().NotThrow();
tree.Contains(4).Should().BeFalse();
tree.Count.Should().Be(9);
Remove(8).Should().NotThrow();
tree.Contains(8).Should().BeFalse();
tree.Count.Should().Be(8);
Remove(1).Should().NotThrow();
tree.Contains(1).Should().BeFalse();
tree.Count.Should().Be(7);
Validate(tree.Root);
Action Remove(int x) => () => tree.Remove(x);
}
[Test]
public void Remove_KeyNotInTree_Throws()
{
var tree = new AaTree<int>();
tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
Action act = () => tree.Remove(999);
act.Should().Throw<InvalidOperationException>();
}
[Test]
public void Remove_EmptyTree_Throws()
{
var tree = new AaTree<int>();
Action act = () => tree.Remove(999);
act.Should().Throw<InvalidOperationException>();
}
[Test]
public void Contains_NonEmptyTree_ReturnsCorrectAnswer()
{
var tree = new AaTree<int>();
tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
tree.Contains(6).Should().BeTrue();
tree.Contains(999).Should().BeFalse();
}
[Test]
public void Contains_EmptyTree_ReturnsFalse()
{
var tree = new AaTree<int>();
tree.Contains(999).Should().BeFalse();
}
[Test]
public void GetMax_NonEmptyTree_ReturnsCorrectAnswer()
{
var tree = new AaTree<int>();
tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
tree.GetMax().Should().Be(10);
}
[Test]
public void GetMax_EmptyTree_ThrowsCorrectException()
{
var tree = new AaTree<int>();
Assert.Throws<InvalidOperationException>(() => tree.GetMax());
}
[Test]
public void GetMin_NonEmptyTree_ReturnsCorrectAnswer()
{
var tree = new AaTree<int>();
tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
tree.GetMin().Should().Be(1);
}
[Test]
public void GetMin_EmptyTree_ThrowsCorrectException()
{
var tree = new AaTree<int>();
Assert.Throws<InvalidOperationException>(() => tree.GetMin());
}
[Test]
public void GetKeysInOrder_NonEmptyTree_ReturnsCorrectAnswer()
{
var tree = new AaTree<int>();
tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
tree.GetKeysInOrder().SequenceEqual(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).Should().BeTrue();
}
[Test]
public void GetKeysInOrder_EmptyTree_ReturnsCorrectAnswer()
{
var tree = new AaTree<int>();
tree.GetKeysInOrder().ToList().Count.Should().Be(0);
}
[Test]
public void GetKeysPreOrder_NonEmptyTree_ReturnsCorrectAnswer()
{
var tree = new AaTree<int>();
tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
tree.GetKeysPreOrder().SequenceEqual(new[] { 4, 2, 1, 3, 6, 5, 8, 7, 9, 10 })
.Should().BeTrue();
}
[Test]
public void GetKeysPreOrder_EmptyTree_ReturnsCorrectAnswer()
{
var tree = new AaTree<int>();
tree.GetKeysPreOrder().ToList().Count.Should().Be(0);
}
[Test]
public void GetKeysPostOrder_NonEmptyTree_ReturnsCorrectAnswer()
{
var tree = new AaTree<int>();
tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
tree.GetKeysPostOrder().SequenceEqual(new[] { 1, 3, 2, 5, 7, 10, 9, 8, 6, 4 })
.Should().BeTrue();
}
[Test]
public void GetKeysPostOrder_EmptyTree_ReturnsCorrectAnswer()
{
var tree = new AaTree<int>();
tree.GetKeysPostOrder().ToList().Count.Should().Be(0);
}
/// <summary>
/// Checks various properties to determine if the tree is a valid AA Tree.
/// Throws exceptions if properties are violated.
/// Useful for debugging.
/// </summary>
/// <remarks>
/// The properties that are checked are:
/// <list type="number">
/// <item>The level of every leaf node is one.</item>
/// <item>The level of every left child is exactly one less than that of its parent.</item>
/// <item>The level of every right child is equal to or one less than that of its parent.</item>
/// <item>The level of every right grandchild is strictly less than that of its grandparent.</item>
/// <item>Every node of level greater than one has two children.</item>
/// </list>
/// More information: https://en.wikipedia.org/wiki/AA_tree .
/// </remarks>
/// <param name="node">The node to check from.</param>
/// <returns>true if node passes all checks, false otherwise.</returns>
private static bool Validate<T>(AaTreeNode<T>? node)
{
if (node is null)
{
return true;
}
// Check level == 1 if node if a leaf node.
var leafNodeCheck = CheckLeafNode(node);
// Check level of left child is exactly one less than parent.
var leftCheck = CheckLeftSubtree(node);
// Check level of right child is equal or one less than parent.
var rightCheck = CheckRightSubtree(node);
// Check right grandchild level is less than node.
var grandchildCheck = CheckRightGrandChild(node);
// Check if node has two children if not leaf.
var nonLeafChildrenCheck = CheckNonLeafChildren(node);
var thisNodeResult = leafNodeCheck && leftCheck && rightCheck;
thisNodeResult = thisNodeResult && grandchildCheck && nonLeafChildrenCheck;
return thisNodeResult && Validate(node.Left) && Validate(node.Right);
}
/// <summary>
/// Checks if node is a leaf, and if so if its level is 1.
/// </summary>
/// <param name="node">The node to check.</param>
/// <returns>true if node passes check, false otherwise.</returns>
private static bool CheckLeafNode<T>(AaTreeNode<T> node)
{
var condition = node.Left is null && node.Right is null && node.Level != 1;
return !condition;
}
/// <summary>
/// Checks if left node's level is exactly one less than node's level.
/// </summary>
/// <param name="node">The node to check.</param>
/// <returns>true if node passes check, false otherwise.</returns>
private static bool CheckLeftSubtree<T>(AaTreeNode<T> node)
{
var condition = node.Left is not null && node.Level - node.Left.Level != 1;
return !condition;
}
/// <summary>
/// Checks if right node's level is either equal to or one less than node's level.
/// </summary>
/// <param name="node">The node to check.</param>
/// <returns>true if node passes check, false otherwise.</returns>
private static bool CheckRightSubtree<T>(AaTreeNode<T> node)
{
var condition = node.Right is not null &&
node.Level - node.Right.Level != 1 &&
node.Level != node.Right.Level;
return !condition;
}
/// <summary>
/// Checks if right grandchild's (right node's right node) level is less than node.
/// </summary>
/// <param name="node">The node to check.</param>
/// <returns>true if node passes check, false otherwise.</returns>
private static bool CheckRightGrandChild<T>(AaTreeNode<T> node)
{
var condition = node.Right?.Right is not null && node.Right.Level < node.Right.Right.Level;
return !condition;
}
/// <summary>
/// Checks if node is not a leaf, and if so if it has two children.
/// </summary>
/// <param name="node">The node to check.</param>
/// <returns>true if node passes check, false otherwise.</returns>
private static bool CheckNonLeafChildren<T>(AaTreeNode<T> node)
{
var condition = node.Level > 1 && (node.Left is null || node.Right is null);
return !condition;
}
}
}
| 298 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using System.Linq;
using DataStructures.AVLTree;
using FluentAssertions;
using NUnit.Framework;
using static FluentAssertions.FluentActions;
namespace DataStructures.Tests
{
    // NUnit test suite for AvlTree<T>: verifies ordering invariants via
    // in-order/pre-order/post-order traversals after adds and removals.
    internal class AvlTreeTests
    {
        // Shared fixture: keys inserted in ascending order, plus the expected
        // pre/post-order traversals of the resulting balanced tree.
        private static readonly int[] Data = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
        private static readonly int[] PreOrder = { 4, 2, 1, 3, 8, 6, 5, 7, 9, 10 };
        private static readonly int[] PostOrder = { 1, 3, 2, 5, 7, 6, 10, 9, 8, 4 };
        [Test]
        public void Constructor_UseCustomComparer_FormsCorrectTree()
        {
            // Reversed comparer flips min/max and the in-order sequence.
            var tree = new AvlTree<int>(Comparer<int>.Create((x, y) => y.CompareTo(x)));
            tree.AddRange(Data);
            tree.GetMin().Should().Be(10);
            tree.GetMax().Should().Be(1);
            tree.GetKeysInOrder()
                .Should()
                .BeEquivalentTo(
                    Data.Reverse(),
                    config => config.WithStrictOrdering());
        }
        [Test]
        public void Add_MultipleKeys_FormsCorrectTree()
        {
            var tree = new AvlTree<int>();
            for (var i = 0; i < Data.Length; ++i)
            {
                tree.Add(Data[i]);
                tree.Count.Should().Be(i + 1);
            }
            tree.GetKeysInOrder()
                .Should()
                .BeEquivalentTo(
                    Data,
                    config => config.WithStrictOrdering());
            tree.GetKeysPreOrder()
                .Should()
                .BeEquivalentTo(
                    PreOrder,
                    config => config.WithStrictOrdering());
            tree.GetKeysPostOrder()
                .Should()
                .BeEquivalentTo(
                    PostOrder,
                    config => config.WithStrictOrdering());
        }
        [Test]
        public void Add_KeyAlreadyInTree_ThrowsException()
        {
            var tree = new AvlTree<int>();
            tree.AddRange(new[] { 1, 2, 3, 4, 5 });
            Invoking(() => tree.Add(1)).Should().ThrowExactly<ArgumentException>();
        }
        [Test]
        public void AddRange_MultipleKeys_FormsCorrectTree()
        {
            var tree = new AvlTree<char>();
            tree.AddRange(new[] { 'a', 'b', 'c', 'd', 'e', 'f', 'g' });
            tree.Count.Should().Be(7);
            tree.GetKeysInOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 'a', 'b', 'c', 'd', 'e', 'f', 'g' },
                    config => config.WithStrictOrdering());
            tree.GetKeysPreOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 'd', 'b', 'a', 'c', 'f', 'e', 'g' },
                    config => config.WithStrictOrdering());
            tree.GetKeysPostOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 'a', 'c', 'b', 'e', 'g', 'f', 'd' },
                    config => config.WithStrictOrdering());
        }
        [Test]
        public void Remove_MultipleKeys_TreeStillValid()
        {
            // Removes keys one by one, checking all three traversals after each
            // step so rebalancing rotations are exercised and verified.
            var tree = new AvlTree<int>();
            tree.AddRange(Data);
            tree.Remove(7);
            tree.Count.Should().Be(9);
            tree.Contains(7).Should().BeFalse();
            tree.GetKeysInOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 1, 2, 3, 4, 5, 6, 8, 9, 10 },
                    config => config.WithStrictOrdering());
            tree.GetKeysPreOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 4, 2, 1, 3, 8, 6, 5, 9, 10 },
                    config => config.WithStrictOrdering());
            tree.GetKeysPostOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 1, 3, 2, 5, 6, 10, 9, 8, 4 },
                    config => config.WithStrictOrdering());
            tree.Remove(2);
            tree.Count.Should().Be(8);
            tree.Contains(2).Should().BeFalse();
            tree.Remove(1);
            tree.Count.Should().Be(7);
            tree.Contains(1).Should().BeFalse();
            tree.GetKeysInOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 3, 4, 5, 6, 8, 9, 10 },
                    config => config.WithStrictOrdering());
            tree.GetKeysPreOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 8, 4, 3, 6, 5, 9, 10 },
                    config => config.WithStrictOrdering());
            tree.GetKeysPostOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 3, 5, 6, 4, 10, 9, 8 },
                    config => config.WithStrictOrdering());
            tree.Remove(9);
            tree.Count.Should().Be(6);
            tree.Contains(9).Should().BeFalse();
            tree.GetKeysInOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 3, 4, 5, 6, 8, 10 },
                    config => config.WithStrictOrdering());
            tree.GetKeysPreOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 6, 4, 3, 5, 8, 10 },
                    config => config.WithStrictOrdering());
            tree.GetKeysPostOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 3, 5, 4, 10, 8, 6 },
                    config => config.WithStrictOrdering());
            // Draining the tree completely must leave it empty and usable.
            tree.Remove(3);
            tree.Remove(4);
            tree.Remove(5);
            tree.Remove(6);
            tree.Remove(8);
            tree.Remove(10);
            tree.Count.Should().Be(0);
            tree.GetKeysInOrder().Should().BeEmpty();
        }
        [Test]
        public void Remove_MultipleKeys_TreeStillValid_Variant2()
        {
            // Alternative removal order to hit different rotation cases.
            var tree = new AvlTree<int>();
            tree.AddRange(Data);
            tree.Remove(10);
            tree.Count.Should().Be(9);
            tree.Contains(10).Should().BeFalse();
            tree.Remove(5);
            tree.Count.Should().Be(8);
            tree.Contains(5).Should().BeFalse();
            tree.Remove(7);
            tree.Count.Should().Be(7);
            tree.Contains(7).Should().BeFalse();
            tree.Remove(9);
            tree.Count.Should().Be(6);
            tree.Contains(9).Should().BeFalse();
            tree.Remove(1);
            tree.Count.Should().Be(5);
            tree.Contains(1).Should().BeFalse();
            tree.Remove(3);
            tree.Count.Should().Be(4);
            tree.Contains(3).Should().BeFalse();
            tree.Remove(2);
            tree.Count.Should().Be(3);
            tree.Contains(2).Should().BeFalse();
            tree.GetKeysInOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 4,6,8 },
                    config => config.WithStrictOrdering());
            tree.GetKeysPreOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 6,4,8 },
                    config => config.WithStrictOrdering());
            tree.GetKeysPostOrder()
                .Should()
                .BeEquivalentTo(
                    new[] { 4,8,6 },
                    config => config.WithStrictOrdering());
        }
        [Test]
        public void Remove_EmptyTree_ThrowsException()
        {
            var tree = new AvlTree<int>();
            Invoking(() => tree.Remove(1)).Should().ThrowExactly<KeyNotFoundException>();
        }
        [Test]
        public void Remove_KeyNotInTree_ThrowsException()
        {
            var tree = new AvlTree<int>();
            tree.AddRange(Data);
            Invoking(() => tree.Remove(24)).Should().ThrowExactly<KeyNotFoundException>();
        }
        [Test]
        public void Contains_CorrectReturn()
        {
            var tree = new AvlTree<int>();
            tree.AddRange(Data);
            tree.Contains(3).Should().BeTrue();
            tree.Contains(7).Should().BeTrue();
            tree.Contains(24).Should().BeFalse();
            tree.Contains(-1).Should().BeFalse();
        }
        [Test]
        public void Contains_EmptyTree_ReturnsFalse()
        {
            var tree = new AvlTree<int>();
            tree.Contains(5).Should().BeFalse();
            tree.Contains(-12).Should().BeFalse();
        }
        [Test]
        public void GetMin_CorrectReturn()
        {
            var tree = new AvlTree<int>();
            tree.AddRange(Data);
            tree.GetMin().Should().Be(1);
        }
        [Test]
        public void GetMin_EmptyTree_ThrowsException()
        {
            var tree = new AvlTree<int>();
            Invoking(() => tree.GetMin()).Should().ThrowExactly<InvalidOperationException>();
        }
        [Test]
        public void GetMax_CorrectReturn()
        {
            var tree = new AvlTree<int>();
            tree.AddRange(Data);
            tree.GetMax().Should().Be(10);
        }
        [Test]
        public void GetMax_EmptyTree_ThrowsException()
        {
            var tree = new AvlTree<int>();
            Invoking(() => tree.GetMax()).Should().ThrowExactly<InvalidOperationException>();
        }
        [Test]
        public void GetKeysInOrder_CorrectReturn()
        {
            var tree = new AvlTree<int>();
            tree.AddRange(Data);
            tree.GetKeysInOrder()
                .Should()
                .BeEquivalentTo(
                    Data,
                    config => config.WithStrictOrdering());
        }
        [Test]
        public void GetKeysInOrder_EmptyTree_CorrectReturn()
        {
            var tree = new AvlTree<int>();
            tree.GetKeysInOrder().Should().BeEmpty();
        }
        [Test]
        public void GetKeysPreOrder_CorrectReturn()
        {
            var tree = new AvlTree<int>();
            tree.AddRange(Data);
            tree.GetKeysPreOrder()
                .Should()
                .BeEquivalentTo(
                    PreOrder,
                    config => config.WithStrictOrdering());
        }
        [Test]
        public void GetKeysPreOrder_EmptyTree_CorrectReturn()
        {
            var tree = new AvlTree<int>();
            tree.GetKeysPreOrder().Should().BeEmpty();
        }
        [Test]
        public void GetKeysPostOrder_CorrectReturn()
        {
            var tree = new AvlTree<int>();
            tree.AddRange(Data);
            tree.GetKeysPostOrder()
                .Should()
                .BeEquivalentTo(
                    PostOrder,
                    config => config.WithStrictOrdering());
        }
        [Test]
        public void GetKeysPostOrder_EmptyTree_CorrectReturn()
        {
            var tree = new AvlTree<int>();
            tree.GetKeysPostOrder().Should().BeEmpty();
        }
    }
}
| 390 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using System.Linq;
using DataStructures.BinarySearchTree;
using NUnit.Framework;
namespace DataStructures.Tests
{
public static class BinarySearchTreeTests
{
        [Test]
        public static void Constructor_UseCustomComparer_FormsCorrectTree()
        {
            // Compare strings by length only, so relative length decides placement.
            var cmpFunc = Comparer<string>.Create((x, y) => x.Length - y.Length);
            var tree = new BinarySearchTree<string>(cmpFunc);
            var elems = new[] { "z", "yy", "vvv", "bbbb", "fffff", "pppppp" };
            tree.AddRange(elems);
            // "bbbb" is longer than "vvv", so under this comparer it must be
            // reachable as the right child of the "vvv" node.
            Assert.IsNotNull(tree.Search("vvv"));
            Assert.AreEqual("bbbb", tree.Search("vvv")!.Right!.Key);
        }
        [Test]
        public static void Add_MultipleKeys_FormsCorrectBST()
        {
            var tree = new BinarySearchTree<int>();
            // Count must track each individual insertion.
            tree.Add(5);
            Assert.AreEqual(1, tree.Count);
            tree.Add(3);
            Assert.AreEqual(2, tree.Count);
            tree.Add(4);
            Assert.AreEqual(3, tree.Count);
            tree.Add(2);
            Assert.AreEqual(4, tree.Count);
            // Expected unbalanced shape: 5 at the root with left child 3,
            // and 3 holding leaves 2 (left) and 4 (right).
            var rootNode = tree.Search(5);
            Assert.AreEqual(5, rootNode!.Key);
            Assert.AreEqual(3, rootNode!.Left!.Key);
            Assert.IsNull(rootNode!.Right);
            var threeNode = tree.Search(3);
            Assert.AreEqual(3, threeNode!.Key);
            Assert.AreEqual(2, threeNode!.Left!.Key);
            Assert.AreEqual(4, threeNode!.Right!.Key);
            var twoNode = tree.Search(2);
            Assert.IsNull(twoNode!.Left);
            Assert.IsNull(twoNode!.Right);
            var fourNode = tree.Search(4);
            Assert.IsNull(fourNode!.Left);
            Assert.IsNull(fourNode!.Right);
        }
        [Test]
        public static void Add_KeyAlreadyInTree_ThrowsCorrectException()
        {
            var tree = new BinarySearchTree<int>();
            tree.AddRange(new List<int> { 5, 3, 4, 2 });
            // Duplicate keys are rejected with ArgumentException.
            _ = Assert.Throws<ArgumentException>(() => tree.Add(5));
        }
        [Test]
        public static void AddRange_MultipleKeys_FormsCorrectBST()
        {
            var tree = new BinarySearchTree<int>();
            tree.AddRange(new List<int> { 5, 3, 4, 2 });
            // Same expected shape as inserting the keys one at a time:
            // root 5, left child 3, and 3 holding leaves 2 and 4.
            var rootNode = tree.Search(5);
            Assert.AreEqual(5, rootNode!.Key);
            Assert.AreEqual(3, rootNode!.Left!.Key);
            Assert.IsNull(rootNode!.Right);
            var threeNode = tree.Search(3);
            Assert.AreEqual(3, threeNode!.Key);
            Assert.AreEqual(2, threeNode!.Left!.Key);
            Assert.AreEqual(4, threeNode!.Right!.Key);
            var twoNode = tree.Search(2);
            Assert.IsNull(twoNode!.Left);
            Assert.IsNull(twoNode!.Right);
            var fourNode = tree.Search(4);
            Assert.IsNull(fourNode!.Left);
            Assert.IsNull(fourNode!.Right);
        }
        [Test]
        public static void Search_MultipleKeys_FindsAllKeys()
        {
            var tree = new BinarySearchTree<int>();
            tree.AddRange(new List<int> { 5, 3, 4, 2, 7, 6, 8 });
            // Every inserted key must be retrievable via Search.
            Assert.AreEqual(2, tree.Search(2)!.Key);
            Assert.AreEqual(3, tree.Search(3)!.Key);
            Assert.AreEqual(4, tree.Search(4)!.Key);
            Assert.AreEqual(5, tree.Search(5)!.Key);
            Assert.AreEqual(6, tree.Search(6)!.Key);
            Assert.AreEqual(7, tree.Search(7)!.Key);
            Assert.AreEqual(8, tree.Search(8)!.Key);
        }
        [Test]
        public static void Contains_MultipleKeys_FindsAllKeys()
        {
            var tree = new BinarySearchTree<int>();
            tree.AddRange(new List<int> { 5, 3, 4, 2, 7, 6, 8 });
            // Contains must report true for every inserted key.
            Assert.IsTrue(tree.Contains(2));
            Assert.IsTrue(tree.Contains(3));
            Assert.IsTrue(tree.Contains(4));
            Assert.IsTrue(tree.Contains(5));
            Assert.IsTrue(tree.Contains(6));
            Assert.IsTrue(tree.Contains(7));
            Assert.IsTrue(tree.Contains(8));
        }
        [Test]
        public static void Remove_LeafNodes_CorrectlyRemovesNodes()
        {
            // Tree after insertion order 5, 3, 4, 2, 7, 6, 8:
            //   5 -> (3 -> (2, 4), 7 -> (6, 8)); 2 and 4 are leaves under 3.
            var tree = new BinarySearchTree<int>();
            tree.AddRange(new List<int> { 5, 3, 4, 2, 7, 6, 8 });

            // Removing leaf 2 detaches it from parent 3 and decrements Count.
            var twoRemoveResult = tree.Remove(2);
            Assert.IsTrue(twoRemoveResult);
            Assert.IsNull(tree.Search(2));
            Assert.IsNull(tree.Search(3)!.Left);
            Assert.IsNotNull(tree.Search(3)!.Right);
            Assert.AreEqual(6, tree.Count);

            // Removing leaf 4 leaves node 3 childless.
            var fourRemoveResult = tree.Remove(4);
            Assert.IsTrue(fourRemoveResult);
            Assert.IsNull(tree.Search(4));
            Assert.IsNull(tree.Search(3)!.Left);
            Assert.IsNull(tree.Search(3)!.Right);
            Assert.AreEqual(5, tree.Count);
        }
        [Test]
        public static void Remove_NodesWithOneChild_CorrectlyRemovesNodes()
        {
            // Tree: 5 -> (3 -> (2, 4), 7 -> (6, 8)).
            var tree = new BinarySearchTree<int>();
            tree.AddRange(new List<int> { 5, 3, 4, 2, 7, 6, 8 });

            // After deleting leaf 4, node 3 has a single child (2); removing 3
            // should splice 2 into its place, and 2 ends up as a leaf.
            tree.Remove(4);
            var threeRemoveResult = tree.Remove(3);
            Assert.IsTrue(threeRemoveResult);
            Assert.IsNull(tree.Search(3));
            Assert.IsNull(tree.Search(2)!.Left);
            Assert.IsNull(tree.Search(2)!.Right);
            Assert.AreEqual(5, tree.Count);

            // Mirror case on the right side: 7 keeps only child 8, then is removed.
            tree.Remove(6);
            var sevenRemoveResult = tree.Remove(7);
            Assert.IsTrue(sevenRemoveResult);
            Assert.IsNull(tree.Search(7));
            Assert.IsNull(tree.Search(8)!.Left);
            Assert.IsNull(tree.Search(8)!.Right);
            Assert.AreEqual(3, tree.Count);
        }
        [Test]
        public static void Remove_NodesWithTwoChildren_CorrectlyRemovesNodes()
        {
            // Tree: 5 -> (3 -> (2, 4), 7 -> (6, 8)); 7 has two children.
            var tree = new BinarySearchTree<int>();
            tree.AddRange(new List<int> { 5, 3, 4, 2, 7, 6, 8 });

            // After removing 7, the assertions pin the resulting shape: 7 is gone,
            // 6 has no left child but still has a right child (8 stays reachable).
            var sevenRemoveResult = tree.Remove(7);
            Assert.IsTrue(sevenRemoveResult);
            Assert.IsNull(tree.Search(7));
            Assert.IsNull(tree.Search(6)!.Left);
            Assert.IsNotNull(tree.Search(6)!.Right);
            Assert.AreEqual(6, tree.Count);
        }
[Test]
public static void Remove_NonExistentElement_ReturnsFalse()
{
var tree = new BinarySearchTree<int>();
tree.AddRange(new List<int> { 5, 3, 4, 2, 7, 6, 8 });
Assert.IsFalse(tree.Remove(999));
Assert.AreEqual(7, tree.Count);
}
[Test]
public static void Remove_EmptyTree_ReturnsFalse()
{
var tree = new BinarySearchTree<int>();
Assert.IsFalse(tree.Remove(8));
Assert.AreEqual(0, tree.Count);
}
        [Test]
        public static void Remove_RemoveRoot_CorrectlyRemovesRoot()
        {
            // Removing the only node must leave an empty tree.
            var tree = new BinarySearchTree<int>();
            tree.Add(5);
            tree.Remove(5);
            Assert.AreEqual(0, tree.Count);
            Assert.IsNull(tree.Search(5));

            // Removing a root with two children keeps both children reachable;
            // the assertions pin 6 as the right child of 4 afterwards.
            tree.AddRange(new List<int> { 5, 4, 6 });
            tree.Remove(5);
            Assert.AreEqual(2, tree.Count);
            Assert.IsNull(tree.Search(5));
            Assert.IsNotNull(tree.Search(4));
            Assert.IsNotNull(tree.Search(6));
            Assert.AreEqual(6, tree.Search(4)!.Right!.Key);
        }
[Test]
public static void GetMax_NonEmptyTree_ReturnsCorrectValue()
{
var tree = new BinarySearchTree<int>();
tree.AddRange(new List<int> { 5, 3, 4, 2, 7, 6, 8 });
Assert.AreEqual(8, tree.GetMax()!.Key);
}
[Test]
public static void GetMax_EmptyTree_ReturnsDefaultValue()
{
var tree = new BinarySearchTree<int>();
Assert.IsNull(tree.GetMax());
}
[Test]
public static void GetMin_NonEmptyTree_ReturnsCorrectValue()
{
var tree = new BinarySearchTree<int>();
tree.AddRange(new List<int> { 5, 3, 4, 2, 7, 6, 8 });
Assert.AreEqual(2, tree.GetMin()!.Key);
}
[Test]
public static void GetMin_EmptyTree_ReturnsDefaultValue()
{
var tree = new BinarySearchTree<int>();
Assert.IsNull(tree.GetMin());
}
[Test]
public static void GetKeysInOrder_MultipleKeys_ReturnsAllKeysInCorrectOrder()
{
var tree = new BinarySearchTree<int>();
tree.AddRange(new List<int> { 5, 3, 4, 2, 7, 6, 8 });
var keys = tree.GetKeysInOrder();
var expected = new List<int> { 2, 3, 4, 5, 6, 7, 8 };
Assert.IsTrue(keys.SequenceEqual(expected));
}
[Test]
public static void GetKeysPreOrder_MultipleKeys_ReturnsAllKeysInCorrectOrder()
{
var tree = new BinarySearchTree<int>();
tree.AddRange(new List<int> { 5, 3, 4, 2, 7, 6, 8 });
var keys = tree.GetKeysPreOrder();
var expected = new List<int> { 5, 3, 2, 4, 7, 6, 8 };
Assert.IsTrue(keys.SequenceEqual(expected));
}
[Test]
public static void GetKeysPostOrder_MultipleKeys_ReturnsAllKeysInCorrectOrder()
{
var tree = new BinarySearchTree<int>();
tree.AddRange(new List<int> { 5, 3, 4, 2, 7, 6, 8 });
var keys = tree.GetKeysPostOrder();
var expected = new List<int> { 2, 4, 3, 6, 8, 7, 5 };
Assert.IsTrue(keys.SequenceEqual(expected));
}
}
}
| 285 |
C-Sharp | TheAlgorithms | C# | using System;
using FluentAssertions;
using NUnit.Framework;
namespace DataStructures.Tests
{
/// <summary>
/// This class contains some tests for the class BitArray.
/// </summary>
public static class BitArrayTests
{
[Test]
public static void TestIndexer()
{
// Arrange
var testObj = new BitArray(5);
// Act
testObj.Compile(24);
// Assert
Assert.IsTrue(testObj[0]);
Assert.IsTrue(testObj[1]);
Assert.IsFalse(testObj[3]);
}
[TestCase(19, 3)]
public static void TestNumberOfOneBits(int number, int expected)
{
// Arrange
var testObj = new BitArray(5);
// Act
testObj.Compile(number);
// Assert
Assert.AreEqual(expected, testObj.NumberOfOneBits());
}
[TestCase(26, 2)]
public static void TestNumberOfZeroBits(int number, int expected)
{
// Arrange
var testObj = new BitArray(5);
// Act
testObj.Compile(number);
// Assert
Assert.AreEqual(expected, testObj.NumberOfZeroBits());
}
[TestCase(33, 33)]
public static void TestToInt64(int number, int expected)
{
// Arrange
var testObj = new BitArray(6);
// Act
testObj.Compile(number);
// Assert
Assert.AreEqual(expected, testObj.ToInt64());
}
[Test]
public static void TestToInt32MaxValue()
{
// Arrange
var testObj = new BitArray(33);
// Act
// Assert
_ = Assert.Throws<InvalidOperationException>(() => testObj.ToInt32());
}
[Test]
public static void TestToInt64MaxValue()
{
// Arrange
var testObj = new BitArray(65);
// Act
// Assert
_ = Assert.Throws<InvalidOperationException>(() => testObj.ToInt64());
}
[TestCase("110")]
public static void TestResetField(string sequence)
{
// Arrange
var testObj = new BitArray(sequence);
// Act
testObj.ResetField();
// Assert
Assert.AreEqual(0, testObj.ToInt64());
}
[TestCase("101001", 63)]
public static void TestSetAll(string sequence, int expected)
{
// Arrange
var testObj = new BitArray(sequence);
// Act
testObj.SetAll(true);
// Assert
Assert.AreEqual(expected, testObj.ToInt64());
}
[Test]
public static void TestCloneEquals()
{
// Arrange
var testObj1 = new BitArray("110");
// Act
var testObj2 = (BitArray)testObj1.Clone();
// Assert
Assert.IsTrue(testObj1.Equals(testObj2));
}
[Test]
public static void TestCloneNotEquals()
{
// Arrange
var testObj1 = new BitArray("101");
var testObj2 = new BitArray(15);
var testObj3 = new BitArray(3);
// Act
testObj3.Reset();
// Assert
testObj1.Equals(testObj2).Should().BeFalse();
testObj1.Equals(testObj3).Should().BeFalse();
}
[Test]
public static void TestHasCode()
{
// Arrange
const int num = 5;
var testObj = new BitArray(3);
// Act
testObj.Compile(num);
var result = testObj.GetHashCode();
// Assert
Assert.NotNull(result);
Assert.AreEqual(5, result);
}
[Test]
public static void TestMoveNextCurrent()
{
var testObj1 = new BitArray("1111010");
var counterOnes = 0;
var counterZeros = 0;
foreach (var bit in testObj1)
{
if (bit)
{
counterOnes++;
}
else
{
counterZeros++;
}
}
Assert.AreEqual(counterOnes, 5);
Assert.AreEqual(counterZeros, 2);
}
[Test]
public static void IEnumerable_IterationWorks()
{
var arr = new BitArray("010101010101010101");
var current = 0;
foreach (var b in arr)
{
b.Should().Be(arr[current]);
current++;
}
}
[Test]
public static void Equals_NullIsNotEqualToNotNull()
{
var arr1 = new BitArray("010101010101010101");
BitArray? arr2 = null;
arr1.Equals(arr2).Should().BeFalse();
}
#region COMPILE TESTS
[TestCase("00100", "00100")]
[TestCase("01101", "01101")]
[TestCase("100", "00100")]
public static void TestCompileToString(string sequence, string expectedSequence)
{
// Arrange
var testObj = new BitArray(5);
// Act
testObj.Compile(sequence);
// Assert
Assert.AreEqual(expectedSequence, testObj.ToString());
}
[TestCase("klgml", 5)]
[TestCase("klgml", 3)]
public static void TestCompileToStringThorwsException(string sequence, int arrLen)
{
// Arrange
var testObj = new BitArray(arrLen);
// Act
void Act() => testObj.Compile(sequence);
// Assert
Assert.Throws<ArgumentException>(Act);
}
[TestCase(15, "01111")]
[TestCase(17, "10001")]
[TestCase(4, "00100")]
public static void TestCompileLong(int number, string expected)
{
// Arrange
var testObj = new BitArray(5);
// Act
testObj.Compile((long)number);
// Assert
Assert.AreEqual(expected, testObj.ToString());
}
[TestCase(46, 3)]
[TestCase(-46, 5)]
public static void TestCompileLongThrowsException(int number, int arrLen)
{
// Arrange
var testObj = new BitArray(arrLen);
// Act
void Act() => testObj.Compile((long)number);
// Assert
Assert.Throws<ArgumentException>(Act);
}
[TestCase(17, "10001")]
[TestCase(25, "11001")]
[TestCase(4, "00100")]
public static void TestCompileInteger(int number, string expected)
{
// Arrange
var testObj = new BitArray(5);
// Act
testObj.Compile(number);
// Assert
Assert.AreEqual(expected, testObj.ToString());
}
[TestCase(-8, 5)]
[TestCase(18, 3)]
public static void TestCompileIntegerThrowsException(int number, int arrayLength)
{
// Arrange
var testObj = new BitArray(arrayLength);
// Act
void Act() => testObj.Compile(number);
// Assert
Assert.Throws<ArgumentException>(Act);
}
#endregion COMPILE TESTS
#region CONSTRUCTOR TESTS
[TestCase("00100", 4)]
public static void TestConstructor(string sequence, int expected)
{
// Arrange
var testObj1 = new BitArray(sequence);
// Act
// Assert
Assert.AreEqual(expected, testObj1.ToInt64());
}
[TestCase(new[] { true, false, true }, 5)]
public static void TestConstructorBoolArray(bool[] sequence, int expected)
{
// Arrange
var testObj3 = new BitArray(sequence);
// Act
// Assert
Assert.AreEqual(expected, testObj3.ToInt64());
}
[TestCase("000120")]
[TestCase("")]
public static void TestConstructorThrowsException(string sequence)
{
// Arrange
// Act
Action act = () => new BitArray(sequence);
// Assert
act.Should().Throw<ArgumentException>();
}
#endregion CONSTRUCTOR TESTS
#region OPERATOR TESTS
[TestCase(17, 17, "10001")]
[TestCase(25, 31, "11001")]
public static void TestOperatorAnd(int tObj1, int tObj2, string expected)
{
// Arrange
var testObj1 = new BitArray(5);
var testObj2 = new BitArray(5);
// Act
testObj1.Compile(tObj1);
testObj2.Compile(tObj2);
var result = testObj1 & testObj2;
// Assert
Assert.AreEqual(expected, result.ToString());
}
[TestCase(1, 1, 1, 1, "0")]
[TestCase(5, 3, 8, 4, "1101")]
[TestCase(9, 4, 4, 3, "1101")]
public static void TestOperatorXorAndDiffSizes(int t1, int s1, int t2, int s2, string expected)
{
// Arrange
var testObj1 = new BitArray(s1);
var testObj2 = new BitArray(s2);
// Act
testObj1.Compile(t1);
testObj2.Compile(t2);
var result = testObj1 ^ testObj2;
// Assert
Assert.AreEqual(expected, result.ToString());
}
[TestCase(9, 4, 4, 3, "1101")]
[TestCase(1, 1, 1, 1, "1")]
[TestCase(5, 3, 8, 4, "1101")]
public static void TestOperatorOrAndDiffSizes(int t1, int s1, int t2, int s2, string expected)
{
// Arrange
var testObj1 = new BitArray(s1);
var testObj2 = new BitArray(s2);
// Act
testObj1.Compile(t1);
testObj2.Compile(t2);
var result = testObj1 | testObj2;
// Assert
Assert.AreEqual(expected, result.ToString());
}
[TestCase(1, 1, 1, 1, "1")]
[TestCase(5, 3, 8, 4, "0000")]
[TestCase(9, 4, 4, 3, "0000")]
public static void TestOperatorAndAndDiffSizes(int t1, int s1, int t2, int s2, string expected)
{
// Arrange
var testObj1 = new BitArray(s1);
var testObj2 = new BitArray(s2);
// Act
testObj1.Compile(t1);
testObj2.Compile(t2);
var result = testObj1 & testObj2;
// Assert
Assert.AreEqual(expected, result.ToString());
}
[TestCase(25, 30, "11111")]
public static void TestOperatorOr(int tObj1, int tObj2, string expected)
{
// Arrange
var testObj1 = new BitArray(5);
var testObj2 = new BitArray(5);
// Act
testObj1.Compile(tObj1);
testObj2.Compile(tObj2);
var result = testObj1 | testObj2;
// Assert
Assert.AreEqual(expected, result.ToString());
}
[TestCase(16, "01111")]
public static void TestOperatorNot(int number, string expected)
{
// Arrange
var testObj = new BitArray(5);
// Act
testObj.Compile(number);
testObj = ~testObj;
// Assert
Assert.AreEqual(expected, testObj.ToString());
}
[TestCase(25, 30, 7)]
public static void TestOperatorXor(int testNum, int testNum2, int expected)
{
// Arrange
var testObj1 = new BitArray(5);
var testObj2 = new BitArray(5);
// Act
testObj1.Compile(testNum);
testObj2.Compile(testNum2);
var result = testObj1 ^ testObj2;
// Assert
Assert.AreEqual(expected, result.ToInt32());
}
[TestCase(16, "10000000")]
public static void TestOperatorShiftLeft(int number, string expected)
{
// Arrange
var testObj = new BitArray(5);
// Act
testObj.Compile(number);
testObj <<= 3;
// Assert
Assert.AreEqual(expected, testObj.ToString());
}
[TestCase(24, "110")]
public static void TestOperatorShiftRight(int number, string expected)
{
// Arrange
var testObj = new BitArray(5);
// Act
testObj.Compile(number);
testObj >>= 2;
// Assert
Assert.AreEqual(expected, testObj.ToString());
}
#endregion OPERATOR TESTS
#region COMPARE TESTS
[Test]
public static void TestParity()
{
// Arrange
var testObj = new BitArray(5);
// Act
testObj.Compile(26);
// Assert
Assert.IsFalse(testObj.EvenParity());
Assert.IsTrue(testObj.OddParity());
}
[Test]
public static void TestCompare()
{
// Arrange
var testObj1 = new BitArray("110");
var testObj2 = new BitArray("110");
var testObj3 = new BitArray("100");
// Act
// Assert
Assert.IsTrue(testObj1 == testObj2);
Assert.IsTrue(testObj1 != testObj3);
}
[Test]
public static void ArraysOfDifferentLengthsAreNotEqual()
{
// Arrange
var testObj1 = new BitArray("110");
var testObj2 = new BitArray("10101");
// Act
// Assert
Assert.False(testObj1 == testObj2);
}
#endregion COMPARE TESTS
}
}
| 536 |
C-Sharp | TheAlgorithms | C# | using System.Collections.Generic;
using FluentAssertions;
using NUnit.Framework;
namespace DataStructures.Tests
{
public class InvertedIndexTests
{
[Test]
public void Or_GetSourcesWithAtLeastOneFromList_ReturnAllSources()
{
var index = new InvertedIndex();
var source1 = "one star is sparkling bright";
var source2 = "two stars are sparkling even brighter";
index.AddToIndex(nameof(source1), source1);
index.AddToIndex(nameof(source2), source2);
var or = index.Or(new List<string> { "star", "sparkling" });
or.Should().BeEquivalentTo(nameof(source1), nameof(source2));
}
[Test]
public void And_GetSourcesWithAllInsideList_ReturnFirstSource()
{
var index = new InvertedIndex();
var source1 = "one star is sparkling bright";
var source2 = "two stars are sparkling even brighter";
index.AddToIndex(nameof(source1), source1);
index.AddToIndex(nameof(source2), source2);
var and = index.And(new List<string> { "star", "sparkling" });
and.Should().BeEquivalentTo(nameof(source1));
}
}
}
| 38 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using System.Linq;
using DataStructures.RedBlackTree;
using FluentAssertions;
using NUnit.Framework;
namespace DataStructures.Tests
{
    /// <summary>
    /// Tests for the red-black tree: insertion and removal rebalancing cases,
    /// ordered traversals, and min/max queries. Pre-order sequences are asserted
    /// to pin the exact tree shape after fix-ups, not just the key set.
    /// The "CaseN" names refer to the numbered insert/delete fix-up cases of the
    /// implementation under test.
    /// </summary>
    internal class RedBlackTreeTests
    {
        [Test]
        public void Constructor_UseCustomComparer_FormsCorrect_Tree()
        {
            // A reversed comparer must flip the ordering of the whole tree.
            var tree = new RedBlackTree<int>(Comparer<int>.Create((x, y) => y.CompareTo(x)));
            tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
            tree.GetMin().Should().Be(10);
            tree.GetMax().Should().Be(1);
            tree.GetKeysInOrder().SequenceEqual(new[] { 10, 9, 8, 7, 6, 5, 4, 3, 2, 1 }).Should().BeTrue();
        }

        [Test]
        public void Add_Case3_FormsCorrectTree()
        {
            // Insertion into an empty tree.
            var tree = new RedBlackTree<int>();
            tree.Add(5);
            tree.Count.Should().Be(1);
        }

        [Test]
        public void Add_Case24_FormsCorrectTree()
        {
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 5, 4, 6, 3 });
            tree.GetKeysPreOrder().SequenceEqual(new[] { 5, 4, 3, 6 }).Should().BeTrue();
        }

        [Test]
        public void Add_Case1_FormsCorrectTree()
        {
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 5, 4, 6, 3, 7 });
            tree.GetKeysPreOrder().SequenceEqual(new[] { 5, 4, 3, 6, 7 }).Should().BeTrue();
        }

        [Test]
        public void Add_Case6_FormsCorrectTree()
        {
            // Right rotation
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 5, 4, 6, 3, 2 });
            tree.GetKeysPreOrder().SequenceEqual(new[] { 5, 3, 2, 4, 6 }).Should().BeTrue();

            // Left rotation
            tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 5, 4, 6, 7, 8 });
            tree.GetKeysPreOrder().SequenceEqual(new[] { 5, 4, 7, 6, 8 }).Should().BeTrue();
        }

        [Test]
        public void Add_Case5_FormsCorrectTree()
        {
            // Left-right rotation
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 5, 4, 6, 2, 3 });
            tree.GetKeysPreOrder().SequenceEqual(new[] { 5, 3, 2, 4, 6 }).Should().BeTrue();

            // Right-left rotation
            tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 5, 4, 6, 8, 7 });
            tree.GetKeysPreOrder().SequenceEqual(new[] { 5, 4, 7, 6, 8 }).Should().BeTrue();
        }

        [Test]
        public void Add_MultipleKeys_FormsCorrectTree()
        {
            // Count must grow by one with every successful insertion.
            var tree = new RedBlackTree<int>();
            foreach (var value in new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 })
            {
                tree.Add(value);
                tree.Count.Should().Be(value);
            }

            tree.GetKeysInOrder().SequenceEqual(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 4, 2, 1, 3, 6, 5, 8, 7, 9, 10 }).Should().BeTrue();
        }

        [Test]
        public void Add_KeyAlreadyInTree_ThrowsException()
        {
            // Duplicate keys are rejected.
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 1, 2, 3, 4, 5 });
            Assert.Throws<ArgumentException>(() => tree.Add(1));
        }

        [Test]
        public void AddRange_MultipleKeys_FormsCorrectTree()
        {
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 9, 0, 1, 6, 7, 5, 2, 8, 4, 3 });
            tree.Count.Should().Be(10);
            tree.GetKeysInOrder().SequenceEqual(new[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 5, 1, 0, 3, 2, 4, 7, 6, 9, 8 }).Should().BeTrue();
        }

        [Test]
        public void Remove_SimpleCases_TreeStillValid()
        {
            // Each sub-case rebuilds the same tree and removes a different kind
            // of node, asserting the resulting shape via pre-order.
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 13, 8, 17, 1, 11, 15, 25, 6, 22, 27 });
            tree.Remove(6);
            tree.Count.Should().Be(9);
            tree.Contains(6).Should().BeFalse();
            tree.GetKeysInOrder().SequenceEqual(new[] { 1, 8, 11, 13, 15, 17, 22, 25, 27 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 13, 8, 1, 11, 17, 15, 25, 22, 27 }).Should().BeTrue();

            tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 13, 8, 17, 1, 11, 15, 25, 6, 22, 27 });
            tree.Remove(1);
            tree.Count.Should().Be(9);
            tree.Contains(1).Should().BeFalse();
            tree.GetKeysInOrder().SequenceEqual(new[] { 6, 8, 11, 13, 15, 17, 22, 25, 27 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 13, 8, 6, 11, 17, 15, 25, 22, 27 }).Should().BeTrue();

            tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 13, 8, 17, 1, 11, 15, 25, 6, 22, 27 });
            tree.Remove(17);
            tree.Count.Should().Be(9);
            tree.Contains(17).Should().BeFalse();
            tree.GetKeysInOrder().SequenceEqual(new[] { 1, 6, 8, 11, 13, 15, 22, 25, 27 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 13, 8, 1, 6, 11, 22, 15, 25, 27 }).Should().BeTrue();

            tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 13, 8, 17, 1, 11, 15, 25, 6, 22, 27 });
            tree.Remove(25);
            tree.Count.Should().Be(9);
            tree.Contains(25).Should().BeFalse();
            tree.GetKeysInOrder().SequenceEqual(new[] { 1, 6, 8, 11, 13, 15, 17, 22, 27 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 13, 8, 1, 6, 11, 17, 15, 27, 22 }).Should().BeTrue();

            tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 7, 3, 18, 10, 22, 8, 11, 26 });
            tree.Remove(18);
            tree.Count.Should().Be(7);
            tree.Contains(18).Should().BeFalse();
            tree.GetKeysInOrder().SequenceEqual(new[] { 3, 7, 8, 10, 11, 22, 26 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 7, 3, 22, 10, 8, 11, 26 }).Should().BeTrue();

            // Removing from a two-node tree leaves a single-node tree.
            tree = new RedBlackTree<int>();
            tree.Add(1);
            tree.Add(2);
            tree.Remove(1);
            tree.Count.Should().Be(1);
            tree.GetKeysInOrder().SequenceEqual(new[] { 2 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 2 }).Should().BeTrue();
        }

        [Test]
        public void Remove_Case1_TreeStillValid()
        {
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 5, 2, 8, 1 });
            tree.Remove(1);
            tree.Remove(2);
            tree.Contains(2).Should().BeFalse();
            tree.GetKeysInOrder().SequenceEqual(new[] { 5, 8 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 5, 8 }).Should().BeTrue();
        }

        [Test]
        public void Remove_Case3_TreeStillValid()
        {
            // Move to case 6
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 7, 3, 18, 1, 10, 22, 8, 11, 26 });
            tree.Remove(1);
            tree.Remove(3);
            tree.Count.Should().Be(7);
            tree.Contains(3).Should().BeFalse();
            tree.GetKeysInOrder().SequenceEqual(new[] { 7, 8, 10, 11, 18, 22, 26 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 18, 10, 7, 8, 11, 22, 26 }).Should().BeTrue();

            // Move to case 5
            tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 8, 3, 2, 0, 9, 4, 7, 6, 1, 5 });
            tree.Remove(8);
            tree.Remove(6);
            tree.Remove(9);
            tree.Count.Should().Be(7);
            tree.GetKeysInOrder().SequenceEqual(new[] { 0, 1, 2, 3, 4, 5, 7 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 3, 1, 0, 2, 5, 4, 7 }).Should().BeTrue();

            // Move to case 4
            tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 7, 5, 8, 4, 6, 3, 2, 9, 0, 1 });
            tree.Remove(9);
            tree.Remove(6);
            tree.Remove(5);
            tree.Remove(8);
            tree.Count.Should().Be(6);
            tree.GetKeysInOrder().SequenceEqual(new[] { 0, 1, 2, 3, 4, 7 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 3, 1, 0, 2, 7, 4 }).Should().BeTrue();
        }

        [Test]
        public void Remove_Case4_TreeStillValid()
        {
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 5, 2, 8, 1, 4, 7, 9, 0, 3 });
            tree.Remove(0);
            tree.Remove(3);
            tree.Remove(2);
            tree.Count.Should().Be(6);
            tree.Contains(2).Should().BeFalse();
            tree.GetKeysInOrder().SequenceEqual(new[] { 1, 4, 5, 7, 8, 9 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 5, 4, 1, 8, 7, 9 }).Should().BeTrue();
        }

        [Test]
        public void Remove_Case5_TreeStillValid()
        {
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 13, 8, 17, 1, 11, 15, 25, 6, 22, 27 });
            tree.Remove(8);
            tree.Count.Should().Be(9);
            tree.Contains(8).Should().BeFalse();
            tree.GetKeysInOrder().SequenceEqual(new[] { 1, 6, 11, 13, 15, 17, 22, 25, 27 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 13, 6, 1, 11, 17, 15, 25, 22, 27 }).Should().BeTrue();

            tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 13, 8, 17, 1, 11, 15, 25, 0, 6, 22 });
            tree.Remove(13);
            tree.Count.Should().Be(9);
            tree.Contains(13).Should().BeFalse();
            tree.GetKeysInOrder().SequenceEqual(new[] { 0, 1, 6, 8, 11, 15, 17, 22, 25 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 15, 8, 1, 0, 6, 11, 22, 17, 25 }).Should().BeTrue();

            tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 7, 0, 1, 4, 8, 2, 3, 6, 5, 9 });
            tree.Remove(7);
            tree.Remove(0);
            tree.Remove(1);
            tree.Remove(4);
            tree.Remove(8);
            tree.GetKeysInOrder().SequenceEqual(new[] { 2, 3, 5, 6, 9 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 3, 2, 6, 5, 9 }).Should().BeTrue();
        }

        [Test]
        public void Remove_Case6_TreeStillValid()
        {
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 13, 8, 17, 1, 11, 15, 25, 6, 22, 27 });
            tree.Remove(13);
            tree.Count.Should().Be(9);
            tree.Contains(13).Should().BeFalse();
            tree.GetKeysInOrder().SequenceEqual(new[] { 1, 6, 8, 11, 15, 17, 22, 25, 27 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 15, 8, 1, 6, 11, 25, 17, 22, 27 }).Should().BeTrue();

            tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 13, 8, 17, 1, 11, 15, 25, 0, 6, 22 });
            tree.Remove(8);
            tree.Count.Should().Be(9);
            tree.Contains(8).Should().BeFalse();
            tree.GetKeysInOrder().SequenceEqual(new[] { 0, 1, 6, 11, 13, 15, 17, 22, 25 }).Should().BeTrue();
            tree.GetKeysPreOrder().SequenceEqual(new[] { 13, 1, 0, 11, 6, 17, 15, 25, 22 }).Should().BeTrue();
        }

        [Test]
        public void Remove_EmptyTree_ThrowsException()
        {
            var tree = new RedBlackTree<int>();
            Assert.Throws<InvalidOperationException>(() => tree.Remove(1));
        }

        [Test]
        public void Remove_KeyNotInTree_ThrowsException()
        {
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
            Assert.Throws<KeyNotFoundException>(() => tree.Remove(24));
        }

        [Test]
        public void Contains_CorrectReturn()
        {
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
            tree.Contains(3).Should().BeTrue();
            tree.Contains(7).Should().BeTrue();
            tree.Contains(24).Should().BeFalse();
            tree.Contains(-1).Should().BeFalse();
        }

        [Test]
        public void Contains_EmptyTree_ReturnsFalse()
        {
            var tree = new RedBlackTree<int>();
            tree.Contains(5).Should().BeFalse();
            tree.Contains(-12).Should().BeFalse();
        }

        [Test]
        public void GetMin_CorrectReturn()
        {
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
            tree.GetMin().Should().Be(1);
        }

        [Test]
        public void GetMin_EmptyTree_ThrowsException()
        {
            var tree = new RedBlackTree<int>();
            Assert.Throws<InvalidOperationException>(() => tree.GetMin());
        }

        [Test]
        public void GetMax_CorrectReturn()
        {
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
            tree.GetMax().Should().Be(10);
        }

        [Test]
        public void GetMax_EmptyTree_ThrowsException()
        {
            var tree = new RedBlackTree<int>();
            Assert.Throws<InvalidOperationException>(() => tree.GetMax());
        }

        [Test]
        public void GetKeysInOrder_CorrectReturn()
        {
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
            tree.GetKeysInOrder().SequenceEqual(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).Should().BeTrue();
        }

        [Test]
        public void GetKeysInOrder_EmptyTree_CorrectReturn()
        {
            var tree = new RedBlackTree<int>();
            tree.GetKeysInOrder().SequenceEqual(Array.Empty<int>()).Should().BeTrue();
        }

        [Test]
        public void GetKeysPreOrder_CorrectReturn()
        {
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
            tree.GetKeysPreOrder().SequenceEqual(new[] { 4, 2, 1, 3, 6, 5, 8, 7, 9, 10 }).Should().BeTrue();
        }

        [Test]
        public void GetKeysPreOrder_EmptyTree_CorrectReturn()
        {
            var tree = new RedBlackTree<int>();
            tree.GetKeysPreOrder().SequenceEqual(Array.Empty<int>()).Should().BeTrue();
        }

        [Test]
        public void GetKeysPostOrder_CorrectReturn()
        {
            var tree = new RedBlackTree<int>();
            tree.AddRange(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
            tree.GetKeysPostOrder().SequenceEqual(new[] { 1, 3, 2, 5, 7, 10, 9, 8, 6, 4 }).Should().BeTrue();
        }

        [Test]
        public void GetKeysPostOrder_EmptyTree_CorrectReturn()
        {
            var tree = new RedBlackTree<int>();
            tree.GetKeysPostOrder().SequenceEqual(Array.Empty<int>()).Should().BeTrue();
        }
    }
}
| 380 |
C-Sharp | TheAlgorithms | C# | using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
namespace DataStructures.Tests
{
[TestFixture]
public class SortedListTests
{
[Test]
public void Add_AddMultipleValues_SortingCorrectly(
[Random(1, 1000, 100, Distinct = true)]
int count)
{
var values = GetValues(count);
var list = new SortedList<int>();
foreach (var value in values)
{
list.Add(value);
}
CollectionAssert.AreEqual(values.OrderBy(i => i), list);
}
[Test]
public void Contains_PositiveArrayAdded_NegativeNumberAsked_FalseReturned(
[Random(1, 200, 10, Distinct = true)] int count)
{
var values = GetValues(count);
const int value = -1;
var list = new SortedList<int>();
foreach (var i in values)
{
list.Add(i);
}
Assert.IsFalse(list.Contains(value));
}
[Test]
public void Contains_PositiveArrayAdded_ContainingValueAsked_TrueReturned(
[Random(1, 200, 10, Distinct = true)] int count)
{
var values = GetValues(count);
var value = values[TestContext.CurrentContext.Random.Next(count - 1)];
var list = new SortedList<int>();
foreach (var i in values)
{
list.Add(i);
}
Assert.IsTrue(list.Contains(value));
}
[Test]
public void Remove_PositiveArrayAdded_NegativeNumberAsked_FalseReturned(
[Random(1, 200, 10, Distinct = true)] int count)
{
var values = GetValues(count);
const int value = -1;
var list = new SortedList<int>();
foreach (var i in values)
{
list.Add(i);
}
Assert.IsFalse(list.TryRemove(value));
}
[Test]
public void Remove_PositiveArrayAdded_ContainingValueAsked_TrueReturned(
[Random(1, 200, 10, Distinct = true)] int count)
{
var values = GetValues(count);
var value = values[TestContext.CurrentContext.Random.Next(count - 1)];
var list = new SortedList<int>();
foreach (var i in values)
{
list.Add(i);
}
var expectingValues = values
.OrderBy(i => i)
.ToList();
expectingValues.Remove(value);
Assert.IsTrue(list.TryRemove(value));
CollectionAssert.AreEqual(expectingValues, list);
}
[Test]
public void Clear_ArrayAdded_ListCleaned_ListIsEmpty(
[Random(1, 20, 1, Distinct = true)] int count)
{
var values = GetValues(count);
var list = new SortedList<int>();
foreach (var i in values)
{
list.Add(i);
}
list.Clear();
CollectionAssert.IsEmpty(list);
}
private static List<int> GetValues(int count)
=> Enumerable
.Range(0, count)
.Select(_ => TestContext.CurrentContext.Random.Next(1_000_000))
.ToList();
}
}
| 127 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using System.Linq;
using FluentAssertions;
using FluentAssertions.Execution;
using NUnit.Framework;
namespace DataStructures.Tests
{
public static class TimelineTests
{
[Test]
public static void CountTest()
{
var timeline = new Timeline<string>
{
{ new DateTime(1995, 1, 1), "TestTime1" },
{ new DateTime(2000, 1, 1), "TestTime2" },
{ new DateTime(2005, 1, 1), "TestTime3" },
{ new DateTime(2010, 1, 1), "TestTime4" },
{ new DateTime(2015, 1, 1), "TestTime5" },
};
timeline.Count
.Should()
.Be(5);
}
[Test]
public static void TimesCountTest()
{
var timeline = new Timeline<string>
{
{ new DateTime(1995, 1, 1), "TestTime1" },
{ new DateTime(2000, 1, 1), "TestTime2" },
{ new DateTime(2005, 1, 1), "TestTime3" },
{ new DateTime(2010, 1, 1), "TestTime4" },
{ new DateTime(2015, 1, 1), "TestTime5" },
};
timeline.TimesCount
.Should()
.Be(timeline.GetAllTimes().Length);
}
[Test]
public static void ValuesCountTest()
{
var timeline = new Timeline<string>
{
{ new DateTime(1995, 1, 1), "TestTime1" },
{ new DateTime(2000, 1, 1), "TestTime2" },
{ new DateTime(2005, 1, 1), "TestTime3" },
{ new DateTime(2010, 1, 1), "TestTime4" },
{ new DateTime(2015, 1, 1), "TestTime5" },
};
timeline.ValuesCount
.Should()
.Be(timeline.GetAllValues().Length);
}
[Test]
public static void IndexerGetTest()
{
const string eventName = "TestTime2";
var eventDate = new DateTime(2000, 1, 1);
var timeline = new Timeline<string>
{
{ new DateTime(1995, 1, 1), "TestTime1" },
{ eventDate, eventName },
{ new DateTime(2005, 1, 1), "TestTime3" },
{ new DateTime(2010, 1, 1), "TestTime4" },
{ new DateTime(2015, 1, 1), "TestTime5" },
};
timeline[eventDate][0]
.Should()
.Be(eventName);
}
[Test]
public static void IndexerSetTest()
{
var eventDate = new DateTime(2000, 1, 1);
const string formerEventName = "TestTime2";
const string eventName = "TestTime2Modified";
var timeline = new Timeline<string>
{
{ new DateTime(1995, 1, 1), "TestTime1" },
{ eventDate, formerEventName },
{ new DateTime(2005, 1, 1), "TestTime3" },
{ new DateTime(2010, 1, 1), "TestTime4" },
{ new DateTime(2015, 1, 1), "TestTime5" },
};
timeline[new DateTime(2000, 1, 1)] = new[] { eventName };
timeline[new DateTime(2000, 1, 1)][0]
.Should()
.Be(eventName);
}
[Test]
public static void EqualsTest()
{
var timeline1 = new Timeline<string>
{
{ new DateTime(1995, 1, 1), "TestTime1" },
{ new DateTime(2000, 1, 1), "TestTime2" },
{ new DateTime(2005, 1, 1), "TestTime3" },
{ new DateTime(2010, 1, 1), "TestTime4" },
{ new DateTime(2015, 1, 1), "TestTime5" },
};
var timeline2 = new Timeline<string>
{
{ new DateTime(1995, 1, 1), "TestTime1" },
{ new DateTime(2000, 1, 1), "TestTime2" },
{ new DateTime(2005, 1, 1), "TestTime3" },
{ new DateTime(2010, 1, 1), "TestTime4" },
{ new DateTime(2015, 1, 1), "TestTime5" },
};
(timeline1 == timeline2)
.Should()
.BeTrue();
}
[Test]
public static void ClearTest()
{
    // Clear must leave the timeline empty.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    sut.Clear();

    sut.Count.Should().Be(0);
}
[Test]
public static void CopyToTest()
{
    // CopyTo must copy every (time, value) pair in enumeration order.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };
    var destination = new (DateTime Time, string Value)[sut.Count];

    sut.CopyTo(destination, 0);

    sut.Count.Should().Be(destination.Length);
    using (new AssertionScope())
    {
        var index = 0;
        foreach (var (time, value) in sut)
        {
            destination[index].Time.Should().Be(time);
            destination[index].Value.Should().Be(value);
            ++index;
        }
    }
}
[Test]
public static void GetAllTimesTest()
{
    // GetAllTimes must yield the times in the same order as enumeration.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    var allTimes = sut.GetAllTimes();

    using (new AssertionScope())
    {
        var index = 0;
        foreach (var (time, _) in sut)
        {
            allTimes[index++].Should().Be(time);
        }
    }
}
[Test]
public static void GetTimesByValueTest()
{
    // Looking up a value must return the time it was registered at.
    var when = new DateTime(2000, 1, 1);
    const string name = "TestTime2";
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { when, name },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    sut.GetTimesByValue(name)[0].Should().Be(when);
}
[Test]
public static void GetTimesBeforeTest()
{
    // Only the two events dated before the cut-off must be returned.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    var result = sut.GetTimesBefore(new DateTime(2003, 1, 1));

    using (new AssertionScope())
    {
        result.Length.Should().Be(2);
        result[0].Should().Be(new DateTime(1995, 1, 1));
        result[1].Should().Be(new DateTime(2000, 1, 1));
    }
}
[Test]
public static void GetTimesAfterTest()
{
    // Only the three events dated after the cut-off must be returned.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    var result = sut.GetTimesAfter(new DateTime(2003, 1, 1));

    using (new AssertionScope())
    {
        result.Length.Should().Be(3);
        result[0].Should().Be(new DateTime(2005, 1, 1));
        result[1].Should().Be(new DateTime(2010, 1, 1));
        result[2].Should().Be(new DateTime(2015, 1, 1));
    }
}
[Test]
public static void GetAllValuesTest()
{
    // GetAllValues must yield the values in the same order as enumeration.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    var allValues = sut.GetAllValues();

    using (new AssertionScope())
    {
        var index = 0;
        foreach (var (_, value) in sut)
        {
            allValues[index++].Should().Be(value);
        }
    }
}
[Test]
public static void GetValuesByTimeTest()
{
    // Looking up a time must return the value registered at that time.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    sut.GetValuesByTime(new DateTime(2000, 1, 1))[0].Should().Be("TestTime2");
}
[Test]
public static void GetValuesBeforeTest()
{
    // Only the two pairs dated before the cut-off must be returned.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    var result = sut.GetValuesBefore(new DateTime(2003, 1, 1)).ToArray();

    using (new AssertionScope())
    {
        result.Length.Should().Be(2);
        result[0].Time.Should().Be(new DateTime(1995, 1, 1));
        result[1].Time.Should().Be(new DateTime(2000, 1, 1));
    }
}
[Test]
public static void GetValuesAfterTest()
{
    // Only the three pairs dated after the cut-off must be returned.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    var result = sut.GetValuesAfter(new DateTime(2003, 1, 1)).ToArray();

    using (new AssertionScope())
    {
        result.Length.Should().Be(3);
        result[0].Time.Should().Be(new DateTime(2005, 1, 1));
        result[1].Time.Should().Be(new DateTime(2010, 1, 1));
        result[2].Time.Should().Be(new DateTime(2015, 1, 1));
    }
}
[Test]
public static void GetValuesByMillisecondTest()
{
    // Two events carry the 750 ms component.
    var sut = new Timeline<string>
    {
        { new DateTime(1985, 1, 1, 10, 0, 0, 250), "TestTime1" },
        { new DateTime(1990, 1, 1, 10, 0, 0, 250), "TestTime2" },
        { new DateTime(1995, 1, 1, 10, 0, 0, 250), "TestTime3" },
        { new DateTime(2005, 1, 1, 10, 0, 0, 750), "TestTime4" },
        { new DateTime(2015, 1, 1, 10, 0, 0, 750), "TestTime5" },
    };

    var matches = sut.GetValuesByMillisecond(750);

    matches.Count.Should().Be(2);
}
[Test]
public static void GetValuesBySecondTest()
{
    // Two events carry the 20 s component, and all matches belong to the timeline.
    var sut = new Timeline<string>
    {
        { new DateTime(1985, 1, 1, 10, 0, 5), "TestTime1" },
        { new DateTime(1990, 1, 1, 10, 0, 5), "TestTime2" },
        { new DateTime(1995, 1, 1, 10, 0, 5), "TestTime3" },
        { new DateTime(2005, 1, 1, 10, 0, 20), "TestTime4" },
        { new DateTime(2015, 1, 1, 10, 0, 20), "TestTime5" },
    };

    var matches = sut.GetValuesBySecond(20);

    using (new AssertionScope())
    {
        matches.Count.Should().Be(2);
        sut.Should().Contain(matches);
    }
}
[Test]
public static void GetValuesByMinuteTest()
{
    // Two events carry the 40 min component, and all matches belong to the timeline.
    var sut = new Timeline<string>
    {
        { new DateTime(1985, 1, 1, 10, 15, 0), "TestTime1" },
        { new DateTime(1990, 1, 1, 10, 15, 0), "TestTime2" },
        { new DateTime(1995, 1, 1, 10, 15, 0), "TestTime3" },
        { new DateTime(2005, 1, 1, 10, 40, 0), "TestTime4" },
        { new DateTime(2015, 1, 1, 10, 40, 0), "TestTime5" },
    };

    var matches = sut.GetValuesByMinute(40);

    using (new AssertionScope())
    {
        matches.Count.Should().Be(2);
        sut.Should().Contain(matches);
    }
}
[Test]
public static void GetValuesByHourTest()
{
    // Two events carry the 16 h component, and all matches belong to the timeline.
    var sut = new Timeline<string>
    {
        { new DateTime(1985, 1, 1, 7, 0, 0), "TestTime1" },
        { new DateTime(1990, 1, 1, 7, 0, 0), "TestTime2" },
        { new DateTime(1995, 1, 1, 7, 0, 0), "TestTime3" },
        { new DateTime(2005, 1, 1, 16, 0, 0), "TestTime4" },
        { new DateTime(2015, 1, 1, 16, 0, 0), "TestTime5" },
    };

    var matches = sut.GetValuesByHour(16);

    using (new AssertionScope())
    {
        matches.Count.Should().Be(2);
        sut.Should().Contain(matches);
    }
}
[Test]
public static void GetValuesByDayTest()
{
    // Two events fall on day-of-month 20, and all matches belong to the timeline.
    var sut = new Timeline<string>
    {
        { new DateTime(1985, 1, 10), "TestTime1" },
        { new DateTime(1990, 1, 10), "TestTime2" },
        { new DateTime(1995, 1, 10), "TestTime3" },
        { new DateTime(2005, 1, 20), "TestTime4" },
        { new DateTime(2015, 1, 20), "TestTime5" },
    };

    var matches = sut.GetValuesByDay(20);

    using (new AssertionScope())
    {
        matches.Count.Should().Be(2);
        sut.Should().Contain(matches);
    }
}
[Test]
public static void GetValuesByTimeOfDayTest()
{
    // Two events share the 21:15:40.600 time of day, regardless of the date.
    var sut = new Timeline<string>
    {
        { new DateTime(1985, 1, 1, 10, 30, 15, 500), "TestTime1" },
        { new DateTime(1990, 1, 1, 10, 30, 15, 500), "TestTime2" },
        { new DateTime(1995, 1, 1, 10, 30, 15, 500), "TestTime3" },
        { new DateTime(2005, 1, 1, 21, 15, 40, 600), "TestTime4" },
        { new DateTime(2015, 1, 1, 21, 15, 40, 600), "TestTime5" },
    };

    var matches = sut.GetValuesByTimeOfDay(new TimeSpan(0, 21, 15, 40, 600));

    using (new AssertionScope())
    {
        matches.Count.Should().Be(2);
        sut.Should().Contain(matches);
    }
}
[Test]
public static void GetValuesByDayOfWeekTest()
{
    // Exactly two of the fixture dates are Mondays.
    var sut = new Timeline<string>
    {
        { new DateTime(2015, 1, 5), "TestTime1" }, // Monday
        { new DateTime(2015, 2, 2), "TestTime2" }, // Monday
        { new DateTime(2015, 1, 6), "TestTime3" }, // Tuesday
        { new DateTime(2015, 1, 7), "TestTime4" }, // Wednesday
        { new DateTime(2015, 1, 8), "TestTime5" }, // Thursday
    };

    var matches = sut.GetValuesByDayOfWeek(DayOfWeek.Monday);

    using (new AssertionScope())
    {
        matches.Count.Should().Be(2);
        sut.Should().Contain(matches);
    }
}
[Test]
public static void GetValuesByDayOfYearTest()
{
    // Feb 1st is the 32nd day of the year; two fixture dates fall on it.
    var sut = new Timeline<string>
    {
        { new DateTime(1985, 1, 3), "TestTime1" },  // 3rd day of year
        { new DateTime(1990, 1, 7), "TestTime2" },  // 7th day of year
        { new DateTime(1995, 1, 22), "TestTime3" }, // 22nd day of year
        { new DateTime(2000, 2, 1), "TestTime4" },  // 32nd day of year
        { new DateTime(2005, 2, 1), "TestTime5" },  // 32nd day of year
    };

    var matches = sut.GetValuesByDayOfYear(32);

    using (new AssertionScope())
    {
        matches.Count.Should().Be(2);
        sut.Should().Contain(matches);
    }
}
[Test]
public static void GetValuesByMonthTest()
{
    // Two events fall in April, and all matches belong to the timeline.
    var sut = new Timeline<string>
    {
        { new DateTime(1985, 1, 1), "TestTime1" },
        { new DateTime(1990, 2, 1), "TestTime2" },
        { new DateTime(1995, 3, 1), "TestTime3" },
        { new DateTime(2005, 4, 1), "TestTime4" },
        { new DateTime(2015, 4, 1), "TestTime5" },
    };

    var matches = sut.GetValuesByMonth(4);

    using (new AssertionScope())
    {
        matches.Count.Should().Be(2);
        sut.Should().Contain(matches);
    }
}
[Test]
public static void GetValuesByYearTest()
{
    // Two events fall in 2005, and all matches belong to the timeline.
    var sut = new Timeline<string>
    {
        { new DateTime(1985, 1, 2), "TestTime1" },
        { new DateTime(1990, 2, 1), "TestTime2" },
        { new DateTime(1995, 1, 2), "TestTime3" },
        { new DateTime(2005, 2, 1), "TestTime4" },
        { new DateTime(2005, 1, 2), "TestTime5" },
    };

    var matches = sut.GetValuesByYear(2005);

    using (new AssertionScope())
    {
        matches.Count.Should().Be(2);
        sut.Should().Contain(matches);
    }
}
[Test]
public static void AddDateTimeAndTValueTest() // void Add(DateTime time, TValue value)
{
    // Adding a single (time, value) pair must make it retrievable.
    var when = new DateTime(2015, 1, 1);
    const string name = "TestTime";
    var sut = new Timeline<string>();

    sut.Add(when, name);

    sut.Count.Should().Be(1);
    sut[when][0].Should().Be(name);
}
[Test]
public static void AddDateTimeAndTValueArrayTest() // void Add(params (DateTime, TValue)[] timeline)
{
    // The params overload must register every supplied pair.
    var firstDate = new DateTime(2015, 1, 1);
    const string firstName = "TestTime1";
    var secondDate = new DateTime(1750, 1, 1);
    const string secondName = "TestTime2";
    var sut = new Timeline<string>();

    sut.Add(
        (firstDate, firstName),
        (secondDate, secondName));

    using (new AssertionScope())
    {
        sut.Count.Should().Be(2);
        sut[firstDate][0].Should().Be(firstName);
        sut[secondDate][0].Should().Be(secondName);
    }
}
[Test]
public static void AddTimelineTest() // void Add(Timeline<TValue> timeline)
{
    // Adding another timeline must merge its pairs into this one.
    var when = new DateTime(2015, 1, 1);
    const string name = "TestTime";
    var sut = new Timeline<string>();

    sut.Add(new Timeline<string>(when, name));

    using (new AssertionScope())
    {
        sut.Count.Should().Be(1);
        sut[when][0].Should().Be(name);
    }
}
[Test]
public static void AddNowTest()
{
    // AddNow must store the value under the current timestamp.
    var sut = new Timeline<string>();

    sut.AddNow("Now");

    using (new AssertionScope())
    {
        sut.Count.Should().Be(1);
        sut.ContainsValue("Now").Should().BeTrue();
    }
}
[Test]
public static void ContainsDateTimeAndTValueTest() // bool Contains(DateTime time, TValue value)
{
    // A pair that was added must be reported as contained.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    sut.Contains(new DateTime(2000, 1, 1), "TestTime2").Should().BeTrue();
}
[Test]
public static void ContainsDateTimeAndTValueArrayTest() // bool Contains(params (DateTime, TValue)[] timeline)
{
    // The params overload must confirm that all supplied pairs are present.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    sut.Contains(
            (new DateTime(1995, 1, 1), "TestTime1"),
            (new DateTime(2000, 1, 1), "TestTime2"))
        .Should()
        .BeTrue();
}
[Test]
public static void ContainsTimelineTest() // bool Contains(Timeline<TValue> timeline)
{
    // A sub-timeline whose pairs all exist must be reported as contained.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    sut.Contains(new Timeline<string>(new DateTime(2000, 1, 1), "TestTime2")).Should().BeTrue();
}
[Test]
public static void ContainsTimeTest()
{
    // A registered time must be reported as contained.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    sut.ContainsTime(new DateTime(2000, 1, 1)).Should().BeTrue();
}
[Test]
public static void ContainsValueTest()
{
    // A registered value must be reported as contained.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    sut.ContainsValue("TestTime1").Should().BeTrue();
}
[Test]
public static void RemoveDateTimeAndTValueTest() // bool Remove(DateTime time, TValue value)
{
    // Removing one pair must shrink the timeline and drop exactly that pair.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    sut.Remove(new DateTime(2000, 1, 1), "TestTime2");

    using (new AssertionScope())
    {
        sut.Count.Should().Be(4);
        sut.Contains(new DateTime(2000, 1, 1), "TestTime2").Should().BeFalse();
    }
}
[Test]
public static void RemoveDateTimeAndTValueArrayTest() // bool Remove(params (DateTime, TValue)[] timeline)
{
    // The params overload must remove every supplied pair.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    sut.Remove(
        (new DateTime(1995, 1, 1), "TestTime1"),
        (new DateTime(2000, 1, 1), "TestTime2"));

    using (new AssertionScope())
    {
        sut.Count.Should().Be(3);
        sut.Contains(
                (new DateTime(1995, 1, 1), "TestTime1"),
                (new DateTime(2000, 1, 1), "TestTime2"))
            .Should()
            .BeFalse();
    }
}
[Test]
public static void RemoveTimelineTest() // bool Remove(Timeline<TValue> timeline)
{
    // Removing a sub-timeline must drop its pairs from this timeline.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    sut.Remove(new Timeline<string>(new DateTime(2000, 1, 1), "TestTime2"));

    using (new AssertionScope())
    {
        sut.Count.Should().Be(4);
        sut.Contains(new DateTime(2000, 1, 1), "TestTime2").Should().BeFalse();
    }
}
[Test]
public static void RemoveTimeTest()
{
    // RemoveTimes must drop every pair registered at the given time.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    sut.RemoveTimes(new DateTime(2000, 1, 1));

    using (new AssertionScope())
    {
        sut.Count.Should().Be(4);
        sut.ContainsTime(new DateTime(2000, 1, 1)).Should().BeFalse();
    }
}
[Test]
public static void RemoveValueTest()
{
    // RemoveValues must drop every pair carrying the given value.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    sut.RemoveValues("TestTime1");

    using (new AssertionScope())
    {
        sut.Count.Should().Be(4);
        sut.ContainsValue("TestTime1").Should().BeFalse();
    }
}
[Test]
public static void ToArrayTest()
{
    // ToArray must preserve both the pair count and the enumeration order.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    var result = sut.ToArray();

    sut.Count.Should().Be(result.Length);
    using (new AssertionScope())
    {
        var index = 0;
        foreach (var (time, value) in sut)
        {
            time.Should().Be(result[index].Time);
            value.Should().Be(result[index].Value);
            ++index;
        }
    }
}
[Test]
public static void ToListTest()
{
    // ToList must preserve both the pair count and the enumeration order.
    var sut = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    var result = sut.ToList();

    sut.Count.Should().Be(result.Count);
    using (new AssertionScope())
    {
        var index = 0;
        foreach (var (time, value) in sut)
        {
            time.Should().Be(result[index].Time);
            value.Should().Be(result[index].Value);
            ++index;
        }
    }
}
[Test]
public static void ToDictionaryTest()
{
    // ToDictionary must contain exactly the same (time, value) pairs as the timeline.
    var timeline = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    var dictionary = timeline.ToDictionary();

    var timelineList = new List<(DateTime Time, string Value)>();
    foreach (var pair in timeline)
    {
        timelineList.Add(pair);
    }

    var dictionaryList = new List<(DateTime Time, string Value)>();
    foreach (var (key, value) in dictionary)
    {
        dictionaryList.Add((key, value));
    }

    // BUGFIX: the previous code called OrderBy and discarded the result
    // (LINQ is side-effect free), so the two lists were compared in
    // whatever order they happened to be in. Sort both in place so the
    // element-wise comparison below is actually order-independent.
    timelineList.Sort((x, y) => x.Time.CompareTo(y.Time));
    dictionaryList.Sort((x, y) => x.Time.CompareTo(y.Time));

    timelineList.Count
        .Should()
        .Be(dictionaryList.Count);
    using (new AssertionScope())
    {
        for (var i = 0; i < timelineList.Count; ++i)
        {
            timelineList[i].Time.Should().Be(dictionaryList[i].Time);
            timelineList[i].Value.Should().Be(dictionaryList[i].Value);
        }
    }
}
[Test]
public static void EqualityOperatorTest()
{
    // operator== must treat timelines with identical contents as equal.
    var left = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };
    var right = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };

    (left == right).Should().BeTrue();
}
[Test]
public static void InequalityOperatorTest()
{
    // operator== must treat timelines with different contents as unequal.
    var left = new Timeline<string>
    {
        { new DateTime(1995, 1, 1), "TestTime1" },
        { new DateTime(2000, 1, 1), "TestTime2" },
        { new DateTime(2005, 1, 1), "TestTime3" },
        { new DateTime(2010, 1, 1), "TestTime4" },
        { new DateTime(2015, 1, 1), "TestTime5" },
    };
    var right = new Timeline<string>
    {
        { new DateTime(1895, 1, 1), "TestTime6" },
        { new DateTime(1900, 1, 1), "TestTime7" },
        { new DateTime(1905, 1, 1), "TestTime8" },
        { new DateTime(1910, 1, 1), "TestTime9" },
        { new DateTime(1915, 1, 1), "TestTime10" },
    };

    (left == right).Should().BeFalse();
}
}
}
| 1,144 |
using System;
using DataStructures.Cache;
using NUnit.Framework;
using FluentAssertions;
namespace DataStructures.Tests.Cache
{
public static class LfuCacheTests
{
    [Test]
    public static void TestPutGet()
    {
        // A stored entry must be reported as present and returned on lookup.
        var sut = new LfuCache<int, string>();

        sut.Put(1, "one");

        sut.Contains(1).Should().BeTrue();
        sut.Get(1).Should().Be("one");
    }

    [Test]
    public static void TestCacheMiss()
    {
        // An unknown key neither matches Contains nor yields a value.
        var sut = new LfuCache<int, string>();

        sut.Put(1, "one");

        sut.Contains(5).Should().BeFalse();
        sut.Get(5).Should().BeNull();
    }

    [Test]
    public static void Evict_ItemWasNotUsed()
    {
        // With capacity 1, adding a second entry evicts the never-read first one.
        var sut = new LfuCache<int, string>(capacity: 1);
        sut.Put(1, "one");

        sut.Put(2, "two");

        sut.Get(1).Should().BeNull();
        sut.Get(2).Should().Be("two");
    }

    [Test]
    public static void Evict_OneItemWasUsed()
    {
        // Key 1 was written twice, so the less frequently used key 2 is evicted.
        var sut = new LfuCache<int, string>(capacity: 2);
        sut.Put(1, "one");
        sut.Put(2, "two");
        sut.Put(1, "ONE");

        sut.Put(3, "three");

        sut.Get(1).Should().Be("ONE");
        sut.Get(2).Should().BeNull();
        sut.Get(3).Should().Be("three");
    }

    [Test]
    public static void Evict_LruOrder()
    {
        // With equal frequencies, the least recently used entry (key 1) goes first.
        var sut = new LfuCache<int, string>(capacity: 2);
        sut.Put(1, "one");
        sut.Put(2, "two");
        sut.Put(1, "ONE");
        sut.Put(2, "TWO");

        sut.Put(3, "three");

        sut.Get(1).Should().BeNull();
        sut.Get(2).Should().Be("TWO");
        sut.Get(3).Should().Be("three");
    }
}
}
| 79 |
using System;
using DataStructures.Cache;
using NUnit.Framework;
using FluentAssertions;
namespace DataStructures.Tests.Cache
{
public static class LruCacheTests
{
    [Test]
    public static void TestPutGet()
    {
        // A stored entry must be reported as present and returned on lookup.
        var sut = new LruCache<int, string>();

        sut.Put(1, "one");

        sut.Contains(1).Should().BeTrue();
        sut.Get(1).Should().Be("one");
    }

    [Test]
    public static void TestCacheMiss()
    {
        // An unknown key neither matches Contains nor yields a value.
        var sut = new LruCache<int, string>();

        sut.Put(1, "one");

        sut.Contains(5).Should().BeFalse();
        sut.Get(5).Should().BeNull();
    }

    [Test]
    public static void TestCacheUpdate()
    {
        // Re-putting an existing key must overwrite its value.
        var sut = new LruCache<int, string>();

        sut.Put(1, "one");
        sut.Put(1, "ONE");

        sut.Get(1).Should().Be("ONE");
    }

    [Test]
    public static void RemoveOldestItem_ItemWasNotUsed()
    {
        // Filling the cache evicts the oldest, never-read entry (key 1).
        var sut = new LruCache<int, string>(capacity: 2);
        sut.Put(1, "one");
        sut.Put(2, "two");

        sut.Put(3, "three");

        sut.Get(1).Should().BeNull();
        sut.Get(2).Should().Be("two");
        sut.Get(3).Should().Be("three");
    }

    [Test]
    public static void RemoveOldestItem_ItemWasRecentlyUsed()
    {
        // Reading key 1 refreshes it, so key 2 becomes the eviction victim.
        var sut = new LruCache<int, string>(capacity: 2);
        sut.Put(1, "one");
        sut.Put(2, "two");
        sut.Get(1);

        sut.Put(3, "three");

        sut.Get(1).Should().Be("one");
        sut.Get(2).Should().BeNull();
        sut.Get(3).Should().Be("three");
    }
}
}
using DataStructures.DisjointSet;
using FluentAssertions;
using NUnit.Framework;
namespace DataStructures.Tests.DisjointSet
{
[TestFixture]
public class DisjointSetTests
{
    [Test]
    public static void MakeSetDataInitializationTest()
    {
        // MakeSet must store the supplied payload on the created node.
        DisjointSet<int> sut = new();

        var first = sut.MakeSet(1);
        var second = sut.MakeSet(2);

        first.Data.Should().Be(1);
        second.Data.Should().Be(2);
    }

    [Test]
    public static void UnionTest()
    {
        // After the unions all three nodes share a single representative,
        // and union-by-rank keeps the total rank at 1.
        DisjointSet<int> sut = new();
        var first = sut.MakeSet(1);
        var second = sut.MakeSet(2);
        var third = sut.MakeSet(3);

        sut.UnionSet(first, second);
        sut.FindSet(first).Should().Be(sut.FindSet(second));

        sut.UnionSet(first, third);
        sut.FindSet(second).Should().Be(sut.FindSet(third));
        (first.Rank + second.Rank + third.Rank).Should().Be(1);
    }
}
}
| 34 |
using DataStructures.Fenwick;
using NUnit.Framework;
using FluentAssertions;
using System;
namespace DataStructures.Tests.Fenwick
{
[TestFixture]
internal class BinaryIndexedTreeTests
{
    [Test]
    public void GetSum_CreateBITAndRequestSum_ReturnCorrect()
    {
        // Prefix sum over indices 0..5: 2 + 1 + 1 + 3 + 2 + 3 = 12.
        int[] source = { 2, 1, 1, 3, 2, 3, 4, 5, 6, 7, 8, 9 };
        var sut = new BinaryIndexedTree(source);

        var actual = sut.GetSum(5);

        actual.Should().Be(12);
    }

    [Test]
    public void UpdateTree_UpdateTreeAndRequestSum_GetSum()
    {
        // Adding 6 at index 3 raises the prefix sum from 12 to 18.
        int[] source = { 2, 1, 1, 3, 2, 3, 4, 5, 6, 7, 8, 9 };
        var sut = new BinaryIndexedTree(source);

        source[3] += 6;
        sut.UpdateTree(3, 6);
        var actual = sut.GetSum(5);

        actual.Should().Be(18);
    }
}
}
| 38 |
using System;
using System.Collections.Generic;
using System.Linq;
using DataStructures.Graph;
using FluentAssertions;
using NUnit.Framework;
namespace DataStructures.Tests.Graph
{
[TestFixture]
public class DirectedWeightedGraphTests
{
    [Test]
    [TestCase(-1)]
    [TestCase(-2)]
    [TestCase(-3)]
    public void GraphInitializationTest_ShouldThrowOverflow(int capacity)
    {
        // Negative capacities must be rejected by the constructor.
        Func<DirectedWeightedGraph<char>> act = () => new DirectedWeightedGraph<char>(capacity);

        act.Should().Throw<InvalidOperationException>()
            .WithMessage("Graph capacity should always be a non-negative integer.");
    }

    [Test]
    [TestCase(1)]
    [TestCase(10)]
    [TestCase(20)]
    [TestCase(30)]
    public void GraphInitializationTest_Success(int capacity)
    {
        // Any positive capacity must be accepted.
        Func<DirectedWeightedGraph<char>> act = () => new DirectedWeightedGraph<char>(capacity);

        act.Should().NotThrow();
    }

    [Test]
    public void GraphAddVertexTest_Success()
    {
        // Each added vertex must increase Count by one.
        var sut = new DirectedWeightedGraph<char>(10);

        sut.AddVertex('A');
        sut.AddVertex('B');
        sut.AddVertex('C');

        sut.Count.Should().Be(3);
    }

    [Test]
    public void GraphAddVertexTest_ShouldThrowOverflow()
    {
        // Fill the graph to capacity; one more vertex must overflow.
        var sut = new DirectedWeightedGraph<char>(10);
        for (var i = 0; i < 10; i++)
        {
            sut.AddVertex('A');
        }

        Action act = () => sut.AddVertex('A');

        sut.Count.Should().Be(10);
        sut.Vertices.Should().OnlyContain(x => x != null && x.Data == 'A');
        act.Should().Throw<InvalidOperationException>()
            .WithMessage("Graph overflow.");
    }

    [Test]
    public void GraphRemoveVertexTest_Success()
    {
        // Removing a vertex must also drop every edge that points at it.
        var sut = new DirectedWeightedGraph<char>(10);
        var a = sut.AddVertex('A');
        var b = sut.AddVertex('B');
        var c = sut.AddVertex('C');
        sut.AddEdge(b, a, 5);
        sut.AddEdge(c, a, 5);
        var neighborsOfB = sut.GetNeighbors(b).ToList();
        var neighborsOfC = sut.GetNeighbors(c).ToList();

        sut.RemoveVertex(a);

        // Snapshots taken before removal still contain A...
        neighborsOfB.Should().HaveCount(1);
        neighborsOfB[0].Should().Be(a);
        neighborsOfC.Should().HaveCount(1);
        neighborsOfC[0].Should().Be(a);
        // ...but the live adjacency lists are empty afterwards.
        sut.GetNeighbors(b).Should().HaveCount(0);
        sut.GetNeighbors(c).Should().HaveCount(0);
    }

    [Test]
    public void GraphRemoveVertexTest_ShouldThrowVertexNotInGraph()
    {
        // A vertex that was never added cannot be removed.
        var sut = new DirectedWeightedGraph<char>(10);
        var foreign = new Vertex<char>('A', 0);

        Action act = () => sut.RemoveVertex(foreign);

        act.Should().Throw<InvalidOperationException>()
            .WithMessage($"Vertex does not belong to graph: {foreign}.");
    }

    [Test]
    public void GraphAddEdgeTest_Success()
    {
        // An added edge makes its endpoints adjacent; no other pair is.
        var sut = new DirectedWeightedGraph<char>(10);
        var a = sut.AddVertex('A');
        var b = sut.AddVertex('B');
        var c = sut.AddVertex('C');

        sut.AddEdge(a, b, 5);

        sut.AreAdjacent(a, b).Should().BeTrue();
        sut.AreAdjacent(a, c).Should().BeFalse();
    }

    [Test]
    public void GraphAddEdgeTest_ShouldThrowZeroWeight()
    {
        // Zero is the sentinel for "no edge" and must be rejected as a weight.
        var sut = new DirectedWeightedGraph<char>(10);
        var a = sut.AddVertex('A');
        var b = sut.AddVertex('B');

        Action act = () => sut.AddEdge(a, b, 0);

        act.Should().Throw<InvalidOperationException>()
            .WithMessage("Edge weight cannot be zero.");
    }

    [Test]
    public void GraphAddEdgeTest_ShouldThrowVertexNotInGraph()
    {
        // Edges may only connect vertices that belong to this graph.
        var sut = new DirectedWeightedGraph<char>(10);
        var a = sut.AddVertex('A');
        var foreign = new Vertex<char>('B', 1);

        Action act = () => sut.AddEdge(a, foreign, 0);

        act.Should().Throw<InvalidOperationException>()
            .WithMessage($"Vertex does not belong to graph: {foreign}.");
    }

    [Test]
    public void GraphAddEdgeTest_ShouldThrowEdgeExists()
    {
        // Adding a second edge between the same pair must be rejected.
        var sut = new DirectedWeightedGraph<char>(10);
        var a = sut.AddVertex('A');
        var b = sut.AddVertex('B');
        const int currentEdgeWeight = 5;
        sut.AddEdge(a, b, currentEdgeWeight);

        Action act = () => sut.AddEdge(a, b, 10);

        act.Should().Throw<InvalidOperationException>()
            .WithMessage($"Vertex already exists: {currentEdgeWeight}");
    }

    [Test]
    public void GraphRemoveEdgeTest_Success()
    {
        // Removing an edge must make its endpoints non-adjacent again.
        var sut = new DirectedWeightedGraph<char>(10);
        var a = sut.AddVertex('A');
        var b = sut.AddVertex('B');
        sut.AddEdge(a, b, 5);

        sut.RemoveEdge(a, b);

        sut.AreAdjacent(a, b).Should().BeFalse();
    }

    [Test]
    public void GraphRemoveEdgeTest_ShouldThrowVertexNotInGraph()
    {
        // Edge removal must validate that both endpoints belong to the graph.
        var sut = new DirectedWeightedGraph<char>(10);
        var a = sut.AddVertex('A');
        var foreign = new Vertex<char>('B', 1);

        Action act = () => sut.RemoveEdge(a, foreign);

        act.Should().Throw<InvalidOperationException>()
            .WithMessage($"Vertex does not belong to graph: {foreign}.");
    }

    [Test]
    public void GraphGetNeighborsTest_Success()
    {
        // Neighbors must come back in the order their edges were added.
        var sut = new DirectedWeightedGraph<char>(10);
        var a = sut.AddVertex('A');
        var b = sut.AddVertex('B');
        var c = sut.AddVertex('C');
        var d = sut.AddVertex('D');
        sut.AddEdge(a, b, 5);
        sut.AddEdge(a, c, 5);
        sut.AddEdge(a, d, 5);

        var neighbors = sut.GetNeighbors(a).ToArray();

        neighbors.Should().HaveCount(3);
        neighbors[0].Should().Be(b);
        neighbors[1].Should().Be(c);
        neighbors[2].Should().Be(d);
    }

    [Test]
    public void GraphGetNeighborsTest_ShouldThrowVertexNotInGraph()
    {
        // Enumerating neighbors of a foreign vertex must fail on enumeration.
        var sut = new DirectedWeightedGraph<char>(10);
        var foreign = new Vertex<char>('A', 0);

        Func<List<Vertex<char>?>> act = () => sut.GetNeighbors(foreign).ToList();

        act.Should().Throw<InvalidOperationException>()
            .WithMessage($"Vertex does not belong to graph: {foreign}.");
    }
}
}
| 218 |
using System;
using System.Collections.Generic;
using DataStructures.Heap;
using NUnit.Framework;
namespace DataStructures.Tests.Heap
{
internal static class BinaryHeapTests
{
    /// <summary>Builds a max-heap containing the integers 1..10.</summary>
    private static BinaryHeap<int> BuildTestHeap()
    {
        var heap = new BinaryHeap<int>();
        var elems = new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
        foreach (var i in elems)
        {
            heap.Push(i);
        }

        return heap;
    }

    [Test]
    public static void Constructor_UseCustomComparer_BuildCorrectHeap()
    {
        // A reversed comparer turns the default max-heap into a min-heap.
        var revHeap = new BinaryHeap<int>(Comparer<int>.Create((x, y) => y.CompareTo(x)));
        foreach (var i in new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 })
        {
            revHeap.Push(i);
        }

        Assert.AreEqual(10, revHeap.Count);
        Assert.AreEqual(1, revHeap.Peek());
        Assert.AreEqual(1, revHeap.Pop());
        Assert.AreEqual(2, revHeap.Peek());
    }

    [Test]
    public static void Push_AddElements_BuildCorrectHeap()
    {
        var heap = BuildTestHeap();

        Assert.AreEqual(10, heap.Peek());
        Assert.AreEqual(10, heap.Count);
    }

    // BUGFIX: this test was missing its [Test] attribute, so NUnit never
    // discovered or ran it. The body is unchanged.
    [Test]
    public static void Pop_RemoveElements_HeapStillValid()
    {
        var heap = BuildTestHeap();

        Assert.AreEqual(10, heap.Peek());
        Assert.AreEqual(10, heap.Count);

        Assert.AreEqual(10, heap.Pop());
        Assert.AreEqual(9, heap.Count);
        Assert.IsFalse(heap.Contains(10));

        Assert.AreEqual(9, heap.Pop());
        Assert.AreEqual(8, heap.Count);
        Assert.IsFalse(heap.Contains(9));
    }

    [Test]
    public static void Pop_EmptyHeap_ThrowsCorrectException()
    {
        var heap = new BinaryHeap<int>();

        Assert.Throws<InvalidOperationException>(() => heap.Pop());
    }

    [Test]
    public static void Peek_NonEmptyHeap_ReturnsCorrectAnswer()
    {
        var heap = BuildTestHeap();

        Assert.AreEqual(10, heap.Peek());
    }

    [Test]
    public static void Peek_EmptyHeap_ThrowsCorrectException()
    {
        var heap = new BinaryHeap<int>();

        Assert.Throws<InvalidOperationException>(() => heap.Peek());
    }

    [Test]
    public static void PushPop_EmptyHeap_ReturnsCorrectAnswer()
    {
        // On an empty heap, PushPop returns the pushed element itself.
        var heap = new BinaryHeap<int>();

        Assert.AreEqual(10, heap.PushPop(10));
    }

    [Test]
    public static void PushPop_NonEmptyHeap_ReturnsCorrectAnswer()
    {
        // PushPop returns the larger of the pushed element and the current max.
        var heap = BuildTestHeap();

        Assert.AreEqual(20, heap.PushPop(20));
        Assert.AreEqual(10, heap.PushPop(-10));
    }

    [Test]
    public static void Contains_NonEmptyHeap_ReturnsCorrectAnswer()
    {
        var heap = BuildTestHeap();

        Assert.IsTrue(heap.Contains(1));
        Assert.IsTrue(heap.Contains(5));
        Assert.IsTrue(heap.Contains(10));
        Assert.IsFalse(heap.Contains(11));
    }

    [Test]
    public static void Contains_EmptyHeap_ReturnsCorrectAnswer()
    {
        var heap = new BinaryHeap<int>();

        Assert.IsFalse(heap.Contains(1));
        Assert.IsFalse(heap.Contains(5));
        Assert.IsFalse(heap.Contains(10));
        Assert.IsFalse(heap.Contains(11));
    }

    [Test]
    public static void Remove_NonEmptyHeap_HeapStillValid()
    {
        // Removing interior elements must preserve the heap invariant.
        var heap = BuildTestHeap();

        heap.Remove(2);
        Assert.IsFalse(heap.Contains(2));
        Assert.AreEqual(10, heap.Peek());
        Assert.AreEqual(9, heap.Count);

        heap.Remove(8);
        Assert.IsFalse(heap.Contains(8));
        Assert.AreEqual(10, heap.Peek());
        Assert.AreEqual(8, heap.Count);

        heap.Remove(5);
        Assert.IsFalse(heap.Contains(5));
        Assert.AreEqual(10, heap.Peek());
        Assert.AreEqual(7, heap.Count);

        Assert.Throws<ArgumentException>(() => heap.Remove(11));
    }

    [Test]
    public static void Remove_EmptyHeap_ThrowsCorrectException()
    {
        var heap = new BinaryHeap<int>();

        Assert.Throws<ArgumentException>(() => heap.Remove(1));
    }
}
}
| 157 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using System.Linq;
using DataStructures.Heap;
using NUnit.Framework;
namespace DataStructures.Tests.Heap
{
[TestFixture]
public static class MinMaxHeapTests
{
private static readonly object[] CollectionsSource =
{
new[] { 5, 10, -2, 0, 3, 13, 5, -8, 41, -5, -7, -60, -12 },
new[] { 'e', '4', 'x', 'D', '!', '$', '-', '_', '2', ')', 'Z', 'q' },
new[] { "abc", "abc", "xyz", "bcd", "klm", "opq", "ijk" },
};
[Test]
public static void CustomComparerTest()
{
var arr = new[] { "aaaa", "c", "dd", "bbb" };
var comparer = Comparer<string>.Create((a, b) => Comparer<int>.Default.Compare(a.Length, b.Length));
var mmh = new MinMaxHeap<string>(comparer: comparer);
foreach (var s in arr)
{
mmh.Add(s);
}
Assert.AreEqual(comparer, mmh.Comparer);
Assert.AreEqual("c", mmh.GetMin());
Assert.AreEqual("aaaa", mmh.GetMax());
}
[Test]
[TestCaseSource("CollectionsSource")]
public static void AddTest<T>(IEnumerable<T> collection)
{
var mmh = new MinMaxHeap<T>();
foreach (var item in collection)
{
mmh.Add(item);
}
var minValue = mmh.GetMin();
var maxValue = mmh.GetMax();
Assert.AreEqual(collection.Min(), minValue);
Assert.AreEqual(collection.Max(), maxValue);
Assert.AreEqual(collection.Count(), mmh.Count);
}
[Test]
[TestCaseSource("CollectionsSource")]
public static void ExtractMaxTest<T>(IEnumerable<T> collection)
{
var ordered = collection.OrderByDescending(x => x);
var mmh = new MinMaxHeap<T>(collection);
var emptyHeap = new MinMaxHeap<T>();
var first = mmh.ExtractMax();
var second = mmh.GetMax();
Assert.Throws<InvalidOperationException>(() => emptyHeap.ExtractMax());
Assert.AreEqual(ordered.ElementAt(0), first);
Assert.AreEqual(ordered.ElementAt(1), second);
Assert.AreEqual(collection.Count() - 1, mmh.Count);
}
[Test]
[TestCaseSource("CollectionsSource")]
public static void ExtractMinTest<T>(IEnumerable<T> collection)
{
var ordered = collection.OrderBy(x => x);
var mmh = new MinMaxHeap<T>(collection);
var emptyHeap = new MinMaxHeap<T>();
var first = mmh.ExtractMin();
var second = mmh.GetMin();
Assert.Throws<InvalidOperationException>(() => emptyHeap.ExtractMin());
Assert.AreEqual(ordered.ElementAt(0), first);
Assert.AreEqual(ordered.ElementAt(1), second);
Assert.AreEqual(collection.Count() - 1, mmh.Count);
}
[Test]
[TestCaseSource("CollectionsSource")]
public static void GetMaxTest<T>(IEnumerable<T> collection)
{
var emptyHeap = new MinMaxHeap<int>();
var mmh = new MinMaxHeap<T>(collection);
var maxValue = mmh.GetMax();
Assert.Throws<InvalidOperationException>(() => emptyHeap.GetMax());
Assert.AreEqual(collection.Max(), maxValue);
}
[Test]
[TestCaseSource("CollectionsSource")]
public static void GetMinTest<T>(IEnumerable<T> collection)
{
var emptyHeap = new MinMaxHeap<int>();
var mmh = new MinMaxHeap<T>(collection);
var minValue = mmh.GetMin();
Assert.Throws<InvalidOperationException>(() => emptyHeap.GetMin());
Assert.AreEqual(collection.Min(), minValue);
}
[Test]
public static void HeapSortUsingGet<T>(
[ValueSource("CollectionsSource")] IEnumerable<T> collection,
[Values] bool ascending)
{
var ordered = ascending ? collection.OrderBy(x => x) : collection.OrderByDescending(x => x);
var mmh = new MinMaxHeap<T>(collection);
var extracted = new List<T>();
while (mmh.Count > 0)
{
T value;
if (ascending)
{
value = mmh.GetMin();
_ = mmh.ExtractMin();
}
else
{
value = mmh.GetMax();
_ = mmh.ExtractMax();
}
extracted.Add(value);
}
Assert.IsTrue(ordered.SequenceEqual(extracted));
}
[Test]
public static void HeapSortUsingExtract<T>(
[ValueSource("CollectionsSource")] IEnumerable<T> collection,
[Values] bool ascending)
{
var ordered = ascending ? collection.OrderBy(x => x) : collection.OrderByDescending(x => x);
var mmh = new MinMaxHeap<T>(collection);
var extracted = new List<T>();
while (mmh.Count > 0)
{
var value = ascending ? mmh.ExtractMin() : mmh.ExtractMax();
extracted.Add(value);
}
Assert.IsTrue(ordered.SequenceEqual(extracted));
}
}
}
| 164 |
C-Sharp | TheAlgorithms | C# | using System;
using DataStructures.Heap.FibonacciHeap;
using NUnit.Framework;
namespace DataStructures.Tests.Heap.FibonacciHeaps
{
internal class TestFHeap : FibonacciHeap<int>
{
public void RawCut(FHeapNode<int> x, FHeapNode<int> y)
{
Cut(x, y);
}
public void RawCascadingCut(FHeapNode<int> y)
{
CascadingCut(y);
}
public void RawConsolidate()
{
Consolidate();
}
}
internal static class FibonacciHeapTests
{
private static FibonacciHeap<int> BuildTestHeap()
{
var heap = new FibonacciHeap<int>();
var elems = new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
foreach (var i in elems)
{
heap.Push(i);
}
return heap;
}
[Test]
public static void Push_AddElements_BuildCorrectHeap()
{
var heap = BuildTestHeap();
Assert.AreEqual(1, heap.Peek());
Assert.AreEqual(10, heap.Count);
}
public static void Pop_RemoveElements_HeapStillValid()
{
var heap = BuildTestHeap();
Assert.AreEqual(1, heap.Peek());
Assert.AreEqual(10, heap.Count);
Assert.AreEqual(1, heap.Pop());
Assert.AreEqual(9, heap.Count);
Assert.AreEqual(2, heap.Pop());
Assert.AreEqual(8, heap.Count);
}
[Test]
public static void Pop_EmptyHeap_ThrowsCorrectException()
{
var heap = new FibonacciHeap<int>();
Assert.Throws<InvalidOperationException>(() => heap.Pop());
}
[Test]
public static void Pop_NonEmptyHeap_ReturnsInSortedOrder()
{
var heap = new FibonacciHeap<int>();
var rand = new Random();
var heapSize = 100;
for (var i = 0; i < heapSize; i++)
{
heap.Push(rand.Next(1000));
}
var element = heap.Pop();
for (var i = 0; i < heapSize - 1; i++)
{
var newElement = heap.Pop();
Assert.LessOrEqual(element, newElement);
element = newElement;
}
Assert.Zero(heap.Count);
}
[Test]
public static void Peek_EmptyHeap_ThrowsCorrectException()
{
var heap = new FibonacciHeap<int>();
Assert.Throws<InvalidOperationException>(() => heap.Peek());
}
[Test]
public static void DecreaseKey_NonEmptyHeap_ReturnsCorrectAnswer()
{
var heap = BuildTestHeap();
var node = heap.Push(11);
heap.DecreaseKey(node, -1);
Assert.AreEqual(heap.Pop(), -1);
Assert.AreEqual(heap.Pop(), 1);
node = heap.Push(5);
heap.DecreaseKey(node, 1);
Assert.AreEqual(heap.Pop(), 1);
Assert.AreEqual(heap.Pop(), 2);
Assert.AreEqual(heap.Pop(), 3);
}
[Test]
public static void Union_NonEmptyHeap_ReturnsSortedOrder()
{
var oddHeap = new FibonacciHeap<int>();
for (var i = 1; i < 10; i += 2)
{
oddHeap.Push(i);
}
var evenHeap = new FibonacciHeap<int>();
for (var i = 0; i < 10; i += 2)
{
evenHeap.Push(i);
}
oddHeap.Union(evenHeap);
for (var i = 0; i < 10; i++)
{
Assert.AreEqual(i, oddHeap.Pop());
}
Assert.Zero(oddHeap.Count);
Assert.Zero(evenHeap.Count);
}
[Test]
public static void Union_EmptyHeap_BecomesOtherHeap()
{
var thisHeap = new FibonacciHeap<int>();
var otherHeap = BuildTestHeap();
var minNode = otherHeap.Peek();
var otherCount = otherHeap.Count;
Assert.Zero(thisHeap.Count);
thisHeap.Union(otherHeap);
Assert.Zero(otherHeap.Count);
Assert.AreEqual(thisHeap.Peek(), minNode);
Assert.Throws<InvalidOperationException>(() => otherHeap.Peek());
Assert.AreEqual(otherCount, thisHeap.Count);
}
[Test]
public static void Union_FullHeapWithEmptyHeap_Unchanged()
{
var thisHeap = BuildTestHeap();
var otherHeap = new FibonacciHeap<int>();
var previousCount = thisHeap.Count;
var previousMin = thisHeap.Peek();
thisHeap.Union(otherHeap);
Assert.AreEqual(thisHeap.Count, previousCount);
Assert.AreEqual(thisHeap.Peek(), previousMin);
}
[Test]
public static void DecreaseKey_EmptyHeap_ThrowsCorrectException()
{
var heap = new FibonacciHeap<int>();
var item = new FHeapNode<int>(1);
Assert.Throws<ArgumentException>(() => heap.DecreaseKey(item, 0));
}
[Test]
public static void DecreaseKey_TryIncreaseKey_ThrowsCorrectException()
{
var heap = new FibonacciHeap<int>();
var item = heap.Push(1);
Assert.Throws<InvalidOperationException>(() => heap.DecreaseKey(item, 2));
}
[Test]
public static void DecreaseKey_NonEmptyHeap_PreservesHeapStructure()
{
var heap = new FibonacciHeap<int>();
for (var i = 11; i < 20; i++)
{
heap.Push(i);
}
var item = heap.Push(10);
for (var i = 0; i < 10; i++)
{
heap.Push(i);
}
var bigItem = heap.Push(20);
heap.DecreaseKey(item, -1);
Assert.AreEqual(heap.Pop(), -1);
var currentVal = -1;
for (var i = 0; i < 10; i++)
{
var newVal = heap.Pop();
Assert.True(currentVal < newVal);
currentVal = newVal;
}
heap.DecreaseKey(bigItem, -1);
Assert.AreEqual(heap.Pop(), -1);
currentVal = -1;
for (var i = 0; i < 9; i++)
{
var newVal = heap.Pop();
Assert.True(currentVal < newVal);
currentVal = newVal;
}
}
[Test]
public static void Cut_EmptyHeap_ThrowsCorrectExcpetion()
{
var heap = new TestFHeap();
var item1 = new FHeapNode<int>(1);
var item2 = new FHeapNode<int>(2);
Assert.Throws<InvalidOperationException>(() => heap.RawCut(item1, item2));
}
[Test]
public static void Cut_FilledHeap_AlteredItem()
{
var heap = new TestFHeap();
var item1 = heap.Push(1);
var item2 = heap.Push(2);
item2.Degree = -1;
Assert.Throws<InvalidOperationException>(() => heap.RawCut(item1, item2));
}
[Test]
public static void Consolidate_EmptyHeap_DoesNothing()
{
var heap = new TestFHeap();
heap.RawConsolidate();
Assert.Throws<InvalidOperationException>(() => heap.Peek());
}
}
}
| 279 |
C-Sharp | TheAlgorithms | C# | using System.Collections.Generic;
using DataStructures.Heap.PairingHeap;
using FluentAssertions;
using NUnit.Framework;
namespace DataStructures.Tests.Heap.PairingHeap
{
internal class PairingHeapComparerTests
{
[Test]
public void Compare_CheckAscending_ReturnNegative()
{
var minHeap = new PairingNodeComparer<int>(Sorting.Ascending, Comparer<int>.Default);
var node1 = new PairingHeapNode<int>(10);
var node2 = new PairingHeapNode<int>(20);
var items = minHeap.Compare(node1.Value, node2.Value);
items.Should().Be(-1);
}
[Test]
public void Compare_CheckAscending_ReturnPositive()
{
var minHeap = new PairingNodeComparer<int>(Sorting.Descending, Comparer<int>.Default);
var node1 = new PairingHeapNode<int>(10);
var node2 = new PairingHeapNode<int>(20);
var items = minHeap.Compare(node1.Value, node2.Value);
items.Should().Be(1);
}
}
}
| 35 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections;
using System.Linq;
using DataStructures.Heap.PairingHeap;
using FluentAssertions;
using NUnit.Framework;
namespace DataStructures.Tests.Heap.PairingHeap
{
internal class PairingHeapTests
{
[Test]
public void BuildMinHeap_CheckEnumerator_NotThrowOnEnumerate()
{
var minHeap = new PairingHeap<int>();
minHeap.Insert(1);
var items = minHeap.ToList();
items.Should().HaveCount(1);
}
[Test]
public void BuildMinHeap_CheckEnumerable_NotThrowOnEnumerate()
{
var minHeap = new PairingHeap<int>();
minHeap.Insert(1);
foreach (var node in (IEnumerable)minHeap)
{
node.Should().NotBe(null);
}
}
[Test]
public void BuildMinHeap_UpdateNonExistingNode_ThrowException()
{
var minHeap = new PairingHeap<int>();
minHeap.Insert(1);
minHeap.Extract();
Action act = () => minHeap.UpdateKey(1, 10);
act.Should().Throw<ArgumentException>();
}
[Test]
public void BuildMinHeap_UpdateBadNode_ThrowException()
{
var minHeap = new PairingHeap<int>();
minHeap.Insert(10);
Action act = () => minHeap.UpdateKey(10, 11);
act.Should().Throw<ArgumentException>();
}
[Test]
public void BuildMinHeap_CreateHeap_HeapIsCheked()
{
var nodeCount = 1000 * 10;
var minHeap = new PairingHeap<int>();
for (var i = 0; i <= nodeCount; i++)
{
minHeap.Insert(i);
}
for (var i = 0; i <= nodeCount; i++)
{
minHeap.UpdateKey(i, i - 1);
}
var min = 0;
for (var i = 0; i <= nodeCount; i++)
{
min = minHeap.Extract();
Assert.AreEqual(min, i - 1);
}
Assert.AreEqual(minHeap.Count, minHeap.Count);
var rnd = new Random();
var testSeries = Enumerable.Range(0, nodeCount - 1).OrderBy(_ => rnd.Next()).ToList();
foreach (var item in testSeries)
{
minHeap.Insert(item);
}
for (var i = 0; i < testSeries.Count; i++)
{
var decremented = testSeries[i] - rnd.Next(0, 1000);
minHeap.UpdateKey(testSeries[i], decremented);
testSeries[i] = decremented;
}
testSeries.Sort();
for (var i = 0; i < nodeCount - 2; i++)
{
min = minHeap.Extract();
Assert.AreEqual(testSeries[i], min);
}
Assert.AreEqual(minHeap.Count, minHeap.Count);
}
[Test]
public void BuildMaxHeap_CreateHeap_HeapIsCheked()
{
var nodeCount = 1000 * 10;
var maxHeap = new PairingHeap<int>(Sorting.Descending);
for (var i = 0; i <= nodeCount; i++)
{
maxHeap.Insert(i);
}
for (var i = 0; i <= nodeCount; i++)
{
maxHeap.UpdateKey(i, i + 1);
}
Assert.AreEqual(maxHeap.Count, maxHeap.Count);
var max = 0;
for (var i = nodeCount; i >= 0; i--)
{
max = maxHeap.Extract();
Assert.AreEqual(max, i + 1);
}
var rnd = new Random();
var testSeries = Enumerable.Range(0, nodeCount - 1).OrderBy(_ => rnd.Next()).ToList();
foreach (var item in testSeries)
{
maxHeap.Insert(item);
}
for (var i = 0; i < testSeries.Count; i++)
{
var incremented = testSeries[i] + rnd.Next(0, 1000);
maxHeap.UpdateKey(testSeries[i], incremented);
testSeries[i] = incremented;
}
testSeries = testSeries.OrderByDescending(x => x).ToList();
for (var i = 0; i < nodeCount - 2; i++)
{
max = maxHeap.Extract();
Assert.AreEqual(testSeries[i], max);
}
Assert.AreEqual(maxHeap.Count, maxHeap.Count);
}
}
}
| 158 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Linq;
using DataStructures.LinkedList.DoublyLinkedList;
using NUnit.Framework;
namespace DataStructures.Tests.LinkedList
{
public static class DoublyLinkedListTests
{
[Test]
public static void TestGetData()
{
var dll = new DoublyLinkedList<int>(new[] { 0, 1, 2, 3, 4 });
var arr = dll.GetData().ToArray();
Assert.AreEqual(dll.Count, 5);
Assert.AreEqual(new[] { 0, 1, 2, 3, 4 }, arr);
}
[Test]
public static void TestGetAt()
{
var dll = new DoublyLinkedList<int>(new[] { 0, 1, 2, 3, 4 });
var one = dll.GetAt(1);
var three = dll.GetAt(3);
Assert.AreEqual(one.Data, 1);
Assert.AreEqual(three.Data, 3);
Assert.Throws<ArgumentOutOfRangeException>(
() => dll.GetAt(-1)
);
Assert.Throws<ArgumentOutOfRangeException>(
() => dll.GetAt(5)
);
}
[Test]
public static void TestAddtion()
{
var dll = new DoublyLinkedList<int>(0);
var one = dll.Add(1);
dll.Add(3);
dll.AddAfter(2, one);
dll.Add(4);
var arr = dll.GetData().ToArray();
var reversedArr = dll.GetDataReversed().ToArray();
Assert.AreEqual(dll.Count, 5);
Assert.AreEqual(new[] { 0, 1, 2, 3, 4 }, arr);
Assert.AreEqual(new[] { 4, 3, 2, 1, 0 }, reversedArr);
}
[Test]
public static void TestRemove()
{
var dll = new DoublyLinkedList<int>(new[] { 0, 1, 2, 3, 4 });
dll.RemoveNode(dll.Find(2));
dll.RemoveHead();
dll.Remove();
var arr = dll.GetData().ToArray();
var reversedArr = dll.GetDataReversed().ToArray();
Assert.AreEqual(dll.Count, 2);
Assert.AreEqual(new[] { 1, 3 }, arr);
Assert.AreEqual(new[] { 3, 1 }, reversedArr);
}
[Test]
public static void TestFind()
{
var dll = new DoublyLinkedList<int>(new[] { 0, 1, 2, 3, 4 });
var one = dll.Find(1);
var three = dll.Find(3);
Assert.AreEqual(one.Data, 1);
Assert.AreEqual(three.Data, 3);
}
[Test]
public static void TestIndexOf()
{
var dll = new DoublyLinkedList<int>(new[] { 0, 1, 2, 3, 4 });
var one = dll.IndexOf(1);
var three = dll.IndexOf(3);
Assert.AreEqual(one, 1);
Assert.AreEqual(three, 3);
}
[Test]
public static void TestContains()
{
var dll = new DoublyLinkedList<int>(new[] { 0, 1, 2, 3, 4 });
var one = dll.Contains(1);
var six = dll.Contains(6);
Assert.IsTrue(one);
Assert.IsFalse(six);
}
[Test]
public static void TestReverse()
{
var dll = new DoublyLinkedList<int>(new[] { 0, 1, 2, 3, 4 });
dll.Reverse();
var arr = dll.GetData().ToArray();
var empty = new DoublyLinkedList<int>(new int[] { });
empty.Reverse();
var emptyArr = empty.GetData().ToArray();
Assert.AreEqual(arr, new[] { 4, 3, 2, 1, 0 });
Assert.AreEqual(emptyArr, new int[] { });
}
[Test]
public static void TestGetDataReversed()
{
var dll = new DoublyLinkedList<int>(new[] { 0, 1, 2, 3, 4 });
var arr = dll.GetData().ToArray();
var reversedArr = dll.GetDataReversed().ToArray();
Assert.AreEqual(arr, new[] { 0, 1, 2, 3, 4 });
Assert.AreEqual(reversedArr, new[] { 4, 3, 2, 1, 0 });
}
}
}
| 136 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Linq;
using DataStructures.LinkedList.SinglyLinkedList;
using NUnit.Framework;
namespace DataStructures.Tests.LinkedList
{
public static class LinkedListTests
{
[Test]
public static void LengthWorksCorrectly([Random(0, 1000, 100)] int quantity)
{
// Arrange
var a = new SinglyLinkedList<int>();
// Act
var r = TestContext.CurrentContext.Random;
for (var i = 0; i < quantity; i++)
{
_ = a.AddFirst(r.Next());
}
// Assert
Assert.AreEqual(quantity, a.Length());
}
[Test]
public static void LengthOnEmptyListIsZero()
{
// Arrange
var a = new SinglyLinkedList<int>();
// Act
// Assert
Assert.AreEqual(0, a.Length());
}
[Test]
public static void GetItemsFromLinkedList()
{
// Arrange
var testObj = new SinglyLinkedList<string>();
_ = testObj.AddLast("H");
_ = testObj.AddLast("E");
_ = testObj.AddLast("L");
_ = testObj.AddLast("L");
_ = testObj.AddLast("O");
// Act
var items = testObj.GetListData();
// Assert
Assert.AreEqual(5, items.Count());
Assert.AreEqual("O", testObj.GetElementByIndex(4));
}
[Test]
public static void GetElementByIndex_IndexOutOfRange_ArgumentOutOfRangeExceptionThrown()
{
// Arrange
var list = new SinglyLinkedList<int>();
// Act
_ = list.AddFirst(1);
_ = list.AddFirst(2);
_ = list.AddFirst(3);
// Assert
_ = Assert.Throws<ArgumentOutOfRangeException>(() => list.GetElementByIndex(-1));
_ = Assert.Throws<ArgumentOutOfRangeException>(() => list.GetElementByIndex(3));
}
[Test]
public static void RemoveItemsFromList()
{
// Arrange
var testObj = new SinglyLinkedList<string>();
_ = testObj.AddLast("X");
_ = testObj.AddLast("H");
_ = testObj.AddLast("E");
_ = testObj.AddLast("L");
_ = testObj.AddLast("L");
_ = testObj.AddLast("I");
_ = testObj.AddLast("O");
// Act
var xRemoveSucess = testObj.DeleteElement("X");
var oRemoveSucess = testObj.DeleteElement("O");
var eRemoveSucess = testObj.DeleteElement("E");
var lRemoveSucess = testObj.DeleteElement("L");
var l2RemoveSucess = testObj.DeleteElement("L");
var l3RemoveSucess = testObj.DeleteElement("L");
var nonExistantRemoveSucess = testObj.DeleteElement("F");
var resultString = testObj.GetElementByIndex(0) + testObj.GetElementByIndex(1);
// Assert
Assert.AreEqual("HI", resultString);
Assert.IsTrue(xRemoveSucess);
Assert.IsTrue(oRemoveSucess);
Assert.IsTrue(eRemoveSucess);
Assert.IsTrue(lRemoveSucess);
Assert.IsTrue(l2RemoveSucess);
Assert.IsFalse(l3RemoveSucess);
Assert.IsFalse(nonExistantRemoveSucess);
}
}
}
| 111 |
C-Sharp | TheAlgorithms | C# | using System;
using DataStructures.LinkedList.SkipList;
using NUnit.Framework;
using FluentAssertions;
using System.Collections.Generic;
namespace DataStructures.Tests.LinkedList
{
public static class SkipListTests
{
[Test]
public static void TestAdd()
{
var list = new SkipList<int>();
list.AddOrUpdate(1, 1);
list[2] = 2;
list[3] = 3;
list.Count.Should().Be(3);
list.GetValues().Should().ContainInOrder(1, 2, 3);
}
[Test]
public static void TestUpdate()
{
var list = new SkipList<string>();
// Add some elements.
list[1] = "v1";
list[2] = "v2";
list[5] = "v5";
// Update
list.AddOrUpdate(1, "v1-updated");
list[2] = "v2-updated";
list.Count.Should().Be(3);
list.GetValues().Should().ContainInOrder("v1-updated", "v2-updated", "v5");
}
[Test]
public static void TestContains()
{
var list = new SkipList<int>();
list.AddOrUpdate(1, 1);
list.AddOrUpdate(3, 3);
list.AddOrUpdate(5, 5);
list.Contains(1).Should().BeTrue();
list.Contains(3).Should().BeTrue();
list.Contains(5).Should().BeTrue();
list.Contains(0).Should().BeFalse();
list.Contains(2).Should().BeFalse();
list.Contains(9).Should().BeFalse();
}
[Test]
public static void TestGetByKey_Success()
{
var list = new SkipList<string>();
list[1] = "value1";
list[1].Should().Be("value1");
}
[Test]
public static void TestGetByKey_KeyNotFoundException()
{
var list = new SkipList<string>();
list[1] = "value1";
string value;
Action act = () => value = list[2];
act.Should().Throw<KeyNotFoundException>();
}
[Test]
public static void TestRemove_ItemRemoved()
{
var list = new SkipList<int>();
list.AddOrUpdate(1, 1);
list.AddOrUpdate(2, 2);
list.AddOrUpdate(3, 3);
list.Count.Should().Be(3);
list.Contains(2).Should().BeTrue();
var isRemoved = list.Remove(2);
list.Count.Should().Be(2);
list.Contains(2).Should().BeFalse();
isRemoved.Should().BeTrue();
}
[Test]
public static void TestRemove_ItemNotFound()
{
var list = new SkipList<int>();
list.AddOrUpdate(1, 1);
list.AddOrUpdate(2, 2);
list.AddOrUpdate(3, 3);
var isRemoved = list.Remove(222);
list.Count.Should().Be(3);
isRemoved.Should().BeFalse();
}
[Test]
public static void TestGetValues()
{
var list = new SkipList<string>();
list[4] = "four";
list[2] = "two";
list[3] = "three";
list[1] = "one";
var valuesSortedByKey = list.GetValues();
valuesSortedByKey.Should().ContainInOrder("one", "two", "three", "four");
}
}
}
| 124 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using DataStructures.Probabilistic;
using NUnit.Framework;
namespace DataStructures.Tests.Probabilistic
{
    /// <summary>
    /// Tests for <see cref="BloomFilter{T}"/> membership queries.
    /// Bloom filters never report false negatives, so inserted items must
    /// always be found; a small false-positive rate is tolerated.
    /// </summary>
    public class BloomFilterTests
    {
        // Arbitrary name pool used to generate varied payload objects.
        static readonly string[] TestNames = { "kal;jsnfka", "alkjsdfn;lakm", "aljfopiawjf", "afowjeaofeij", "oajwsefoaiwje", "aoiwjfaoiejmf", "aoijfoawiejf" };

        // Uses the default reference-based GetHashCode/Equals, so two
        // value-identical instances hash differently.
        private class SimpleObject
        {
            public string Name { get; set; }

            public int Number { get; set; }

            public SimpleObject(string name, int number)
            {
                Name = name;
                Number = number;
            }
        }

        // Same payload as SimpleObject but with value-based equality and an
        // FNV-style hash over the UTF-8 name bytes plus the number bytes, so
        // value-identical instances collide deliberately.
        private class SimpleObjectOverridenHash
        {
            private const uint FnvPrime = 16777619;
            private const uint FnvOffsetBasis = 2166136261;

            public string Name { get; set; }

            public int Number { get; set; }

            public SimpleObjectOverridenHash(string name, int number)
            {
                Name = name;
                Number = number;
            }

            public override int GetHashCode()
            {
                var bytes = Encoding.UTF8.GetBytes(Name).Concat(BitConverter.GetBytes(Number));
                var hash = FnvOffsetBasis;
                foreach (var @byte in bytes)
                {
                    // Multiply-then-xor per byte (FNV-1 ordering).
                    hash = hash * FnvPrime;
                    hash ^= @byte;
                }

                return (int)hash;
            }

            public override bool Equals(object? obj)
            {
                return obj is SimpleObjectOverridenHash asSimpleObj && asSimpleObj.Name == Name && asSimpleObj.Number == Number;
            }
        }

        /// <summary>
        /// With an optimally sized filter the observed false-positive rate on
        /// 1000 seeded-random inserts stays under 5%. The fixed seed (124)
        /// keeps the run deterministic.
        /// </summary>
        [Test]
        public void TestBloomFilterInsertOptimalSize()
        {
            var filter = new BloomFilter<int>(1000);
            var set = new HashSet<int>();
            var rand = new Random(124);
            var falsePositives = 0;
            for (var i = 0; i < 1000; i++)
            {
                var k = rand.Next(0, 1000);
                // Count a false positive only when k was never inserted yet
                // the filter already claims membership.
                if (!set.Contains(k) && filter.Search(k))
                {
                    falsePositives++;
                }

                filter.Insert(k);
                set.Add(k);
                // No false negatives: an inserted key must always be found.
                Assert.IsTrue(filter.Search(k));
            }

            Assert.True(.05 > falsePositives / 1000.0); // be a bit generous in our fault tolerance here
        }

        /// <summary>Every inserted object is subsequently found.</summary>
        [Test]
        public void TestBloomFilterInsert()
        {
            var filter = new BloomFilter<SimpleObject>(100000, 3);
            var rand = new Random();
            for (var i = 0; i < 1000; i++)
            {
                var simpleObject = new SimpleObject(TestNames[rand.Next(TestNames.Length)], rand.Next(15));
                filter.Insert(simpleObject);
                Assert.IsTrue(filter.Search(simpleObject));
            }
        }

        /// <summary>
        /// With value-based hashing, a distinct but value-identical instance
        /// is found; a different value is not.
        /// </summary>
        [Test]
        public void TestBloomFilterSearchOverridenHash()
        {
            var filter = new BloomFilter<SimpleObjectOverridenHash>(100000, 3);
            var simpleObjectInserted = new SimpleObjectOverridenHash("foo", 1);
            var simpleObjectInserted2 = new SimpleObjectOverridenHash("foo", 1);
            var simpleObjectNotInserted = new SimpleObjectOverridenHash("bar", 2);
            filter.Insert(simpleObjectInserted);
            Assert.IsTrue(filter.Search(simpleObjectInserted));
            Assert.IsTrue(filter.Search(simpleObjectInserted2));
            Assert.IsFalse(filter.Search(simpleObjectNotInserted));
        }

        /// <summary>
        /// With default reference hashing, a value-identical but distinct
        /// instance is NOT found — only the inserted reference is.
        /// NOTE(review): this relies on the two objects' default hash codes
        /// never colliding in the filter; extremely unlikely but not impossible.
        /// </summary>
        [Test]
        public void TestBloomFilterSearch()
        {
            var filter = new BloomFilter<SimpleObject>(10000, 3);
            var simpleObjectInserted = new SimpleObject("foo", 1);
            var simpleObjectNotInserted = new SimpleObject("foo", 1);
            filter.Insert(simpleObjectInserted);
            Assert.False(filter.Search(simpleObjectNotInserted));
            Assert.True(filter.Search(simpleObjectInserted));
        }
    }
}
| 119 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using DataStructures.Probabilistic;
using NUnit.Framework;
using FluentAssertions;
namespace DataStructures.Tests.Probabilistic
{
    /// <summary>
    /// Tests for the <see cref="CountMinSketch{T}"/> frequency estimator.
    /// Count-min estimates can only overcount, never undercount, so queries
    /// are asserted as greater-than-or-equal to the true frequency.
    /// </summary>
    public class CountMinSketchTests
    {
        // Plain reference-type payload; hashing uses the default object hash.
        public class SimpleObject
        {
            public string Name { get; set; }

            public int Number { get; set; }

            public SimpleObject(string name, int number)
            {
                Name = name;
                Number = number;
            }
        }

        /// <summary>Objects inserted 5000 times each must query at least 5000.</summary>
        [Test]
        public void TestInsertAndCount()
        {
            var obj1 = new SimpleObject("foo", 5);
            var obj2 = new SimpleObject("bar", 6);
            // Explicit width/depth constructor.
            var sketch = new CountMinSketch<SimpleObject>(200, 5);
            for (var i = 0; i < 5000; i++)
            {
                sketch.Insert(obj1);
                sketch.Insert(obj2);
            }

            sketch.Query(obj1).Should().BeGreaterOrEqualTo(5000);
            sketch.Query(obj2).Should().BeGreaterOrEqualTo(5000);
        }

        /// <summary>Same as above, via the (epsilon, delta) sizing constructor.</summary>
        [Test]
        public void TestOptimalInitializer()
        {
            var obj1 = new SimpleObject("foo", 5);
            var obj2 = new SimpleObject("bar", 6);
            var sketch = new CountMinSketch<SimpleObject>(.001, .05);
            for (var i = 0; i < 5000; i++)
            {
                sketch.Insert(obj1);
                sketch.Insert(obj2);
            }

            sketch.Query(obj1).Should().BeGreaterOrEqualTo(5000);
            sketch.Query(obj2).Should().BeGreaterOrEqualTo(5000);
        }

        /// <summary>
        /// The fraction of items whose overcount exceeds the error bound must
        /// stay within the configured failure probability (delta = .05).
        /// </summary>
        [Test]
        public void TestProbabilities()
        {
            var sketch = new CountMinSketch<int>(.01, .05);
            var random = new Random();
            // Exact frequency table to compare the sketch against.
            var insertedItems = new Dictionary<int,int>();
            for (var i = 0; i < 10000; i++)
            {
                var item = random.Next(0, 1000000);
                sketch.Insert(item);
                if (insertedItems.ContainsKey(item))
                {
                    insertedItems[item]++;
                }
                else
                {
                    insertedItems.Add(item, 1);
                }
            }

            var numMisses = 0;
            foreach (var item in insertedItems)
            {
                // NOTE(review): 100000 looks inconsistent with the 10000
                // insertions above (epsilon * n would be .01 * 10000 = 100);
                // confirm whether the 10x looser bound is intentional.
                if (sketch.Query(item.Key) - item.Value > .01 * 100000)
                {
                    numMisses++;
                }
            }

            (numMisses / (double)insertedItems.Count).Should().BeLessOrEqualTo(.05);
        }
    }
}
| 92 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using DataStructures.Probabilistic;
using FluentAssertions;
using NUnit.Framework;
namespace DataStructures.Tests.Probabilistic
{
    /// <summary>
    /// Tests for the <see cref="HyperLogLog{T}"/> cardinality estimator.
    /// The estimate is probabilistic, so it is compared against the exact
    /// distinct count (tracked in a HashSet) within a 5% tolerance band.
    /// </summary>
    public class HyperLogLogTest
    {
        [Test]
        public void TestHyperLogLog()
        {
            var hll = new HyperLogLog<int>();
            // Exact distinct values, for ground truth.
            HashSet<int> actual = new ();

            var rand = new Random();
            var tolerance = .05;
            for (var i = 0; i < 10000; i++)
            {
                var k = rand.Next(20000);
                hll.Add(k);
                actual.Add(k);
            }

            hll.Cardinality().Should()
                .BeGreaterOrEqualTo((int)(actual.Count * (1 - tolerance)))
                .And
                .BeLessOrEqualTo((int)(actual.Count * (1 + tolerance)));
        }

        /// <summary>Merging two sketches estimates the cardinality of the union.</summary>
        [Test]
        public void TestHyperLogLogMerge()
        {
            var hll1 = new HyperLogLog<int>();
            var hll2 = new HyperLogLog<int>();
            var rand = new Random();
            var tolerance = .05;
            // Both halves draw from the same range, so the sketches overlap.
            HashSet<int> actual = new ();
            for (var i = 0; i < 5000; i++)
            {
                var k = rand.Next(20000);
                hll1.Add(k);
                actual.Add(k);
            }

            for (var i = 0; i < 5000; i++)
            {
                var k = rand.Next(20000);
                hll2.Add(k);
                actual.Add(k);
            }

            var hll = HyperLogLog<int>.Merge(hll1, hll2);
            hll.Cardinality().Should()
                .BeGreaterOrEqualTo((int)(actual.Count * (1 - tolerance)))
                .And
                .BeLessOrEqualTo((int)(actual.Count * (1 + tolerance)));
        }
    }
}
| 62 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Text;
using DataStructures.Queue;
using NUnit.Framework;
namespace DataStructures.Tests.Queue
{
public static class ArrayBasedQueueTests
{
[Test]
public static void DequeueWorksCorrectly()
{
// Arrange
var q = new ArrayBasedQueue<char>(3);
q.Enqueue('A');
q.Enqueue('B');
q.Enqueue('C');
var result = new StringBuilder();
// Act
for (var i = 0; i < 3; i++)
{
result.Append(q.Dequeue());
}
// Assert
Assert.AreEqual("ABC", result.ToString());
Assert.IsTrue(q.IsEmpty(), "Queue is empty");
Assert.IsFalse(q.IsFull(), "Queue is full");
}
[Test]
public static void PeekWorksCorrectly()
{
// Arrange
var q = new ArrayBasedQueue<int>(2);
q.Enqueue(1);
q.Enqueue(2);
var peeked = 0;
// Act
for (var i = 0; i < 3; i++)
{
peeked = q.Peek();
}
// Assert
Assert.AreEqual(1, peeked);
Assert.IsFalse(q.IsEmpty(), "Queue is empty");
Assert.IsTrue(q.IsFull(), "Queue is full");
}
[Test]
public static void DequeueEmptyQueueThrowsInvalidOperationException()
{
// Arrange
var q = new ArrayBasedQueue<int>(1);
Exception? exception = null;
// Act
try
{
q.Dequeue();
}
catch (Exception ex)
{
exception = ex;
}
// Assert
Assert.AreEqual(typeof(InvalidOperationException), exception?.GetType());
}
[Test]
public static void EnqueueFullQueueThrowsInvalidOperationException()
{
// Arrange
var q = new ArrayBasedQueue<int>(1);
q.Enqueue(0);
Exception? exception = null;
// Act
try
{
q.Enqueue(1);
}
catch (Exception ex)
{
exception = ex;
}
// Assert
Assert.AreEqual(typeof(InvalidOperationException), exception?.GetType());
}
[Test]
public static void PeekEmptyQueueThrowsInvalidOperationException()
{
// Arrange
var q = new ArrayBasedQueue<int>(1);
Exception? exception = null;
// Act
try
{
q.Peek();
}
catch (Exception ex)
{
exception = ex;
}
// Assert
Assert.AreEqual(typeof(InvalidOperationException), exception?.GetType());
}
[Test]
public static void ClearWorksCorrectly()
{
// Arrange
var q = new ArrayBasedQueue<int>(2);
q.Enqueue(1);
q.Enqueue(2);
// Act
q.Clear();
// Assert
Assert.IsTrue(q.IsEmpty(), "Queue is empty");
Assert.IsFalse(q.IsFull(), "Queue is full");
}
}
}
| 134 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Text;
using DataStructures.Queue;
using NUnit.Framework;
namespace DataStructures.Tests.Queue
{
public static class ListBasedQueueTests
{
[Test]
public static void DequeueWorksCorrectly()
{
// Arrange
var q = new ListBasedQueue<char>();
q.Enqueue('A');
q.Enqueue('B');
q.Enqueue('C');
var result = new StringBuilder();
// Act
for (var i = 0; i < 3; i++)
{
result.Append(q.Dequeue());
}
// Assert
Assert.AreEqual("ABC", result.ToString());
Assert.IsTrue(q.IsEmpty(), "Queue is empty");
Assert.IsFalse(q.IsFull(), "Queue is full");
}
[Test]
public static void PeekWorksCorrectly()
{
// Arrange
var q = new ListBasedQueue<int>();
q.Enqueue(1);
q.Enqueue(2);
var peeked = 0;
// Act
for (var i = 0; i < 3; i++)
{
peeked = q.Peek();
}
// Assert
Assert.AreEqual(1, peeked);
Assert.IsFalse(q.IsEmpty(), "Queue is empty");
Assert.IsFalse(q.IsFull(), "Queue is full");
}
[Test]
public static void DequeueEmptyQueueThrowsInvalidOperationException()
{
// Arrange
var q = new ListBasedQueue<int>();
Exception? exception = null;
// Act
try
{
q.Dequeue();
}
catch (Exception ex)
{
exception = ex;
}
// Assert
Assert.AreEqual(typeof(InvalidOperationException), exception?.GetType());
}
[Test]
public static void PeekEmptyQueueThrowsInvalidOperationException()
{
// Arrange
var q = new ListBasedQueue<int>();
Exception? exception = null;
// Act
try
{
q.Peek();
}
catch (Exception ex)
{
exception = ex;
}
// Assert
Assert.AreEqual(typeof(InvalidOperationException), exception?.GetType());
}
[Test]
public static void ClearWorksCorrectly()
{
// Arrange
var q = new ListBasedQueue<int>();
q.Enqueue(1);
q.Enqueue(2);
// Act
q.Clear();
// Assert
Assert.IsTrue(q.IsEmpty(), "Queue is empty");
Assert.IsFalse(q.IsFull(), "Queue is full");
}
}
}
| 112 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Text;
using DataStructures.Queue;
using NUnit.Framework;
namespace DataStructures.Tests.Queue
{
public static class StackBasedQueueTests
{
[Test]
public static void DequeueWorksCorrectly()
{
// Arrange
var q = new StackBasedQueue<char>();
q.Enqueue('A');
q.Enqueue('B');
q.Enqueue('C');
var result = new StringBuilder();
// Act
for (var i = 0; i < 3; i++)
{
result.Append(q.Dequeue());
}
// Assert
Assert.AreEqual("ABC", result.ToString());
Assert.IsTrue(q.IsEmpty(), "Queue is empty");
Assert.IsFalse(q.IsFull(), "Queue is full");
}
[Test]
public static void PeekWorksCorrectly()
{
// Arrange
var q = new StackBasedQueue<int>();
q.Enqueue(1);
q.Enqueue(2);
var peeked = 0;
// Act
for (var i = 0; i < 3; i++)
{
peeked = q.Peek();
}
// Assert
Assert.AreEqual(1, peeked);
Assert.IsFalse(q.IsEmpty(), "Queue is empty");
Assert.IsFalse(q.IsFull(), "Queue is full");
}
[Test]
public static void DequeueEmptyQueueThrowsInvalidOperationException()
{
// Arrange
var q = new StackBasedQueue<int>();
Exception? exception = null;
// Act
try
{
q.Dequeue();
}
catch (Exception ex)
{
exception = ex;
}
// Assert
Assert.AreEqual(typeof(InvalidOperationException), exception?.GetType());
}
[Test]
public static void PeekEmptyQueueThrowsInvalidOperationException()
{
// Arrange
var q = new StackBasedQueue<int>();
Exception? exception = null;
// Act
try
{
q.Peek();
}
catch (Exception ex)
{
exception = ex;
}
// Assert
Assert.AreEqual(typeof(InvalidOperationException), exception?.GetType());
}
[Test]
public static void ClearWorksCorrectly()
{
// Arrange
var q = new StackBasedQueue<int>();
q.Enqueue(1);
q.Enqueue(2);
// Act
q.Clear();
// Assert
Assert.IsTrue(q.IsEmpty(), "Queue is empty");
Assert.IsFalse(q.IsFull(), "Queue is full");
}
}
}
| 112 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using DataStructures.ScapegoatTree;
using NUnit.Framework;
namespace DataStructures.Tests.ScapegoatTree
{
public class ExtensionsTests
{
[Test]
public void RebuildFlatTree_ValidFlatTree_RebuildsTree()
{
var expected = new Node<int>(3)
{
Left = new Node<int>(1)
{
Left = new Node<int>(-1),
Right = new Node<int>(2),
},
Right = new Node<int>(6)
{
Left = new Node<int>(5),
},
};
var list = new List<Node<int>>
{
new(-1),
new(1),
new(2),
new(3),
new(5),
new(6),
};
var tree = Extensions.RebuildFromList(list, 0, list.Count - 1);
Assert.AreEqual(list.Count, tree.GetSize());
Assert.AreEqual(expected.Key, tree.Key);
Assert.IsNotNull(tree.Left);
Assert.IsNotNull(tree.Right);
Assert.AreEqual(expected.Left.Key, tree.Left!.Key);
Assert.AreEqual(expected.Right.Key, tree.Right!.Key);
Assert.IsNotNull(tree.Left.Left);
Assert.IsNotNull(tree.Left.Right);
Assert.AreEqual(expected.Left.Left.Key, tree.Left!.Left!.Key);
Assert.AreEqual(expected.Left.Right.Key, tree.Left!.Right!.Key);
Assert.IsNotNull(tree.Right.Left);
Assert.AreEqual(expected.Right.Left.Key, tree.Right!.Left!.Key);
}
[Test]
public void RebuildFromList_RangeIsInvalid_ThrowsException()
{
Assert.Throws<ArgumentException>(() => Extensions.RebuildFromList(new List<Node<int>>(), 1, 0));
}
}
}
| 60 |
C-Sharp | TheAlgorithms | C# | using System;
using DataStructures.ScapegoatTree;
using NUnit.Framework;
namespace DataStructures.Tests.ScapegoatTree
{
[TestFixture]
public class ScapegoatTreeNodeTests
{
[Test]
[TestCase(2,1)]
[TestCase("B", "A")]
public void RightSetter_OtherKeyPrecedesRightKey_ThrowsException<TKey>(TKey a, TKey b)
where TKey : IComparable
{
var instance = new Node<TKey>(a);
var other = new Node<TKey>(b);
Assert.Throws<ArgumentException>(() => instance.Right = other);
}
[Test]
[TestCase(1,2)]
[TestCase("A","B")]
public void RightSetter_OtherKeyFollowsRightKey_AddsChild<TKey>(TKey a, TKey b)
where TKey : IComparable
{
var instance = new Node<TKey>(a);
var other = new Node<TKey>(b);
Assert.DoesNotThrow(() => instance.Right = other);
}
[Test]
[TestCase(1,2)]
[TestCase("A","B")]
public void LeftSetter_OtherKeyFollowsLeftKey_ThrowsException<TKey>(TKey a, TKey b)
where TKey : IComparable
{
var instance = new Node<TKey>(a);
var other = new Node<TKey>(b);
Assert.Throws<ArgumentException>(() => instance.Left = other);
}
[Test]
[TestCase(2,1)]
[TestCase("B", "A")]
public void LeftSetter_OtherKeyPrecedesLeftKey_AddsChild<TKey>(TKey a, TKey b)
where TKey : IComparable
{
var instance = new Node<TKey>(a);
var other = new Node<TKey>(b);
Assert.DoesNotThrow(() => instance.Left = other);
}
[Test]
[TestCase(1,2)]
[TestCase("A","B")]
public void CompareTo_InstanceKeyPrecedesOtherKey_ReturnsMinusOne<TKey>(TKey a, TKey b)
where TKey : IComparable
{
var instance = new Node<TKey>(a);
var other = new Node<TKey>(b);
var result = instance.Key.CompareTo(other.Key);
Assert.AreEqual(result, -1);
}
[Test]
[TestCase(2, 1)]
[TestCase("B","A")]
public void CompareTo_InstanceKeyFollowsOtherKey_ReturnsOne<TKey>(TKey a, TKey b)
where TKey : IComparable
{
var instance = new Node<TKey>(a);
var other = new Node<TKey>(b);
var result = instance.Key.CompareTo(other.Key);
Assert.AreEqual(result, 1);
}
[Test]
[TestCase(1, 1)]
[TestCase("A","A")]
public void CompareTo_InstanceKeyEqualsOtherKey_ReturnsZero<TKey>(TKey a, TKey b)
where TKey : IComparable
{
var instance = new Node<TKey>(a);
var other = new Node<TKey>(b);
var result = instance.Key.CompareTo(other.Key);
Assert.AreEqual(result, 0);
}
[Test]
public void GetSize_NodeHasNoChildren_ReturnsOne()
{
var node = new Node<int>(1);
Assert.AreEqual(node.GetSize(), 1);
}
[Test]
public void GetSize_NodeHasChildren_ReturnsCorrectSize()
{
var node = new Node<int>(1, new Node<int>(2), new Node<int>(0));
Assert.AreEqual(node.GetSize(), 3);
}
[Test]
public void GetSmallestKeyNode_NodeHasNoLeftChildren_ReturnsNode()
{
var node = new Node<int>(1);
Assert.AreEqual(node.GetSmallestKeyNode(), node);
}
[Test]
public void GetSmallestKeyNode_NodeHasSmallestChild_ReturnsChild()
{
var node = new Node<int>(1);
var smaller = new Node<int>(0);
var smallest = new Node<int>(-1);
node.Left = smaller;
smaller.Left = smallest;
Assert.AreEqual(node.GetSmallestKeyNode(), smallest);
}
[Test]
public void GetLargestKeyNode_NodeHasNoRightChildren_ReturnsNode()
{
var node = new Node<int>(1);
Assert.AreEqual(node.GetLargestKeyNode(), node);
}
[Test]
public void GetLargestKeyNode_NodeHasLargestChild_ReturnsChild()
{
var node = new Node<int>(1);
var larger = new Node<int>(2);
var largest = new Node<int>(3);
node.Right = larger;
larger.Right = largest;
Assert.AreEqual(node.GetLargestKeyNode(), largest);
}
[Test]
public void IsAlphaWeightBalanced_TreeIsUnbalanced_ReturnsFalse()
{
var root = new Node<int>(0);
var a = new Node<int>(-1);
var b = new Node<int>(-2);
var c = new Node<int>(-3);
var d = new Node<int>(1);
root.Left = a;
a.Left = b;
b.Left = c;
root.Right = d;
Assert.IsFalse(root.IsAlphaWeightBalanced(0.5));
}
[Test]
public void IsAlphaWeightBalanced_TreeIsBalanced_ReturnsTrue()
{
var root = new Node<int>(0);
var a = new Node<int>(-1);
var b = new Node<int>(-2);
var d = new Node<int>(1);
root.Left = a;
a.Left = b;
root.Right = d;
Assert.IsTrue(root.IsAlphaWeightBalanced(0.5));
}
}
}
| 189 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using DataStructures.ScapegoatTree;
using NUnit.Framework;
namespace DataStructures.Tests.ScapegoatTree
{
    /// <summary>
    /// Tests for <see cref="ScapegoatTree{TKey}"/>: construction, search, insertion,
    /// deletion, the TreeIsUnbalanced event, alpha tuning, and scapegoat lookup.
    /// </summary>
    public class ScapegoatTreeTests
    {
        [Test]
        public void Constructor_NoParameters_InstanceIsValid()
        {
            var tree = new ScapegoatTree<int>();
            // A fresh tree is empty and uses the default alpha of 0.5.
            Assert.IsNull(tree.Root);
            Assert.IsTrue(tree.Size == 0);
            Assert.IsTrue(tree.MaxSize == 0);
            Assert.AreEqual(0.5, tree.Alpha);
        }

        [Test]
        public void Constructor_AlphaParameter_InstanceIsValid()
        {
            var expected = 0.6;
            var tree = new ScapegoatTree<int>(expected);
            Assert.IsNull(tree.Root);
            Assert.IsTrue(tree.Size == 0);
            Assert.IsTrue(tree.MaxSize == 0);
            Assert.AreEqual(expected, tree.Alpha);
        }

        // Alpha must lie in [0.5, 1.0]; both constructor overloads validate it.
        [Test]
        [TestCase(1.1)]
        [TestCase(0.4)]
        public void Constructor_AlphaParameterIsInvalid_ThrowsException(double alpha)
        {
            Assert.Throws<ArgumentException>(() => new ScapegoatTree<int>(alpha));
            Assert.Throws<ArgumentException>(() => new ScapegoatTree<int>(1, alpha));
        }

        [Test]
        public void Constructor_KeyParameter_InstanceIsValid()
        {
            var expected = 10;
            var tree = new ScapegoatTree<int>(expected);
            Assert.IsNotNull(tree.Root);
            Assert.IsTrue(tree.Root!.Key == expected);
            Assert.IsTrue(tree.Size == 1);
            Assert.IsTrue(tree.MaxSize == 1);
            Assert.AreEqual(0.5, tree.Alpha);
        }

        [Test]
        public void Constructor_KeyAndAlphaParameters_InstanceIsValid()
        {
            var key = 10;
            var alpha = 0.8;
            var tree = new ScapegoatTree<int>(key, alpha);
            Assert.IsNotNull(tree.Root);
            Assert.IsTrue(tree.Size == 1);
            Assert.IsTrue(tree.MaxSize == 1);
            Assert.AreEqual(alpha, tree.Alpha);
        }

        [Test]
        public void Constructor_NodeAndAlphaParameters_InstanceIsValid()
        {
            // A prebuilt three-node subtree becomes the root; size is derived from it.
            var node = new Node<int>(10, new Node<int>(11), new Node<int>(1));
            var alpha = 0.8;
            var tree = new ScapegoatTree<int>(node, alpha);
            Assert.IsNotNull(tree.Root);
            Assert.IsTrue(tree.Size == 3);
            Assert.IsTrue(tree.MaxSize == 3);
            Assert.AreEqual(alpha, tree.Alpha);
        }

        [Test]
        public void IsAlphaWeightBalanced_RootIsNull_ReturnsTrue()
        {
            // An empty tree is trivially balanced.
            var tree = new ScapegoatTree<int>();
            var result = tree.IsAlphaWeightBalanced();
            Assert.IsTrue(result);
        }

        [Test]
        public void Search_RootIsNull_ReturnsNull()
        {
            var tree = new ScapegoatTree<int>();
            var result = tree.Search(1);
            Assert.IsNull(result);
        }

        [Test]
        public void Search_KeyIsPresent_ReturnsKey()
        {
            var tree = new ScapegoatTree<int>(key: 1);
            var result = tree.Search(1);
            Assert.IsNotNull(result);
            Assert.AreEqual(1, result!.Key);
        }

        // Keys both below the minimum (-2) and above the maximum (3) are covered.
        [Test]
        [TestCase(-2)]
        [TestCase(3)]
        public void Search_KeyIsNotPresent_ReturnsNull(int key)
        {
            var root = new Node<int>(1, new Node<int>(2), new Node<int>(-1));
            var tree = new ScapegoatTree<int>(root, 0.5);
            var result = tree.Search(key);
            Assert.IsNull(result);
        }

        [Test]
        public void Insert_RootIsNull_InsertsRoot()
        {
            var tree = new ScapegoatTree<int>();
            var inserted = tree.Insert(1);
            Assert.IsTrue(inserted);
            Assert.IsNotNull(tree.Root);
            Assert.AreEqual(1, tree.Root!.Key);
            Assert.AreEqual(1, tree.Size);
            Assert.AreEqual(1, tree.MaxSize);
        }

        [Test]
        public void Delete_RootIsNull_ReturnsFalse()
        {
            var tree = new ScapegoatTree<int>();
            var deleted = tree.Delete(1);
            Assert.IsFalse(deleted);
        }

        [Test]
        public void Delete_KeyIsNotPresent_ReturnsFalse()
        {
            var tree = new ScapegoatTree<int>(1);
            var deleted = tree.Delete(2);
            Assert.IsFalse(deleted);
            Assert.AreEqual(1, tree.Size);
        }

        [Test]
        public void Insert_KeyIsPresent_ReturnsFalse()
        {
            // Duplicate keys are rejected and the size stays unchanged.
            var tree = new ScapegoatTree<int>(1);
            var inserted = tree.Insert(1);
            Assert.IsFalse(inserted);
            Assert.AreEqual(1, tree.Size);
            Assert.AreEqual(1, tree.MaxSize);
        }

        [Test]
        public void Remove_KeyIsPresent_RemovesKey()
        {
            var tree = new ScapegoatTree<int>(1);
            var inserted = tree.Insert(2);
            Assert.IsTrue(inserted);
            var deleted = tree.Delete(2);
            Assert.IsTrue(deleted);
            Assert.AreEqual(1, tree.Size);
        }

        [Test]
        public void Remove_KeyIsRootWithNoChildren_RemovesKey()
        {
            var tree = new ScapegoatTree<int>(1);
            var deleted = tree.Delete(1);
            Assert.IsTrue(deleted);
            Assert.IsNull(tree.Root);
            Assert.AreEqual(0, tree.Size);
        }

        [Test]
        public void Remove_KeyIsRootWithOneLeftChild_RemovesKey()
        {
            var tree = new ScapegoatTree<int>(1);
            var inserted = tree.Insert(-1);
            Assert.IsTrue(inserted);
            var deleted = tree.Delete(1);
            Assert.IsTrue(deleted);
            Assert.AreEqual(1, tree.Size);
        }

        [Test]
        public void Remove_KeyIsRootWithOneRightChild_RemovesKey()
        {
            var tree = new ScapegoatTree<int>(1);
            var inserted = tree.Insert(2);
            Assert.IsTrue(inserted);
            var deleted = tree.Delete(1);
            Assert.IsTrue(deleted);
            Assert.AreEqual(1, tree.Size);
        }

        [Test]
        public void Remove_KeyIsRootWithTwoChildren_RemovesKey()
        {
            var tree = new ScapegoatTree<int>(1);
            var inserted = tree.Insert(-1);
            Assert.IsTrue(inserted);
            inserted = tree.Insert(2);
            Assert.IsTrue(inserted);
            var deleted = tree.Delete(1);
            Assert.IsTrue(deleted);
            Assert.AreEqual(2, tree.Size);
        }

        [Test]
        public void Insert_KeyIsNotPresent_KeyIsInserted()
        {
            var tree = new ScapegoatTree<int>(1);
            var inserted = tree.Insert(2);
            Assert.IsTrue(inserted);
            Assert.AreEqual(2, tree.Size);
            Assert.AreEqual(2, tree.MaxSize);
        }

        [Test]
        [TestCase(3, new[]{2,5,1,6}, -1, 0.5)]
        public void Insert_TreeIsUnbalanced_RebuildsTree(int root, int[] keys, int candidate, double alpha)
        {
            var tree = new ScapegoatTree<int>(root, alpha);
            // While building the fixture no rebalancing may occur; the Fail handler
            // turns any unexpected TreeIsUnbalanced event into a test failure.
            tree.TreeIsUnbalanced += FailTreeIsUnbalanced;
            foreach (var item in keys)
            {
                Assert.DoesNotThrow(() => tree.Insert(item));
            }

            tree.TreeIsUnbalanced -= FailTreeIsUnbalanced;
            tree.TreeIsUnbalanced += PassTreeIsUnbalanced;
            // Assert.Pass (raised by the handler) throws SuccessException, so the
            // insert of the unbalancing key must surface that exception type.
            Assert.Throws<SuccessException>(() => tree.Insert(candidate));
        }

        [Test]
        [TestCase(20, new[]{10,30,5,11,29,40,50, 1, 12}, new[]{50,40,30,29}, 0.7)]
        public void Delete_TreeIsUnbalanced_BalancesTree(int root, int[] keys, int[] candidates, double alpha)
        {
            var tree = new ScapegoatTree<int>(root, alpha);
            tree.TreeIsUnbalanced += FailTreeIsUnbalanced;
            foreach (var item in keys)
            {
                Assert.DoesNotThrow(() => tree.Insert(item));
            }

            tree.TreeIsUnbalanced -= FailTreeIsUnbalanced;
            tree.TreeIsUnbalanced += PassTreeIsUnbalanced;
            // Deleting enough keys must eventually trigger the event (Assert.Pass).
            Assert.Throws<SuccessException>(() =>
            {
                foreach (var item in candidates)
                {
                    tree.Delete(item);
                }
            });
        }

        [Test]
        [TestCase(20, new[]{10,30,5,11,29,40,50}, 10, 1)]
        public void Delete_TreeIsUnbalanced_MaxSizeEqualsSize(int root, int[] keys, int candidate, double alpha)
        {
            // With alpha = 1 deletion rebalancing resets MaxSize to Size.
            var tree = new ScapegoatTree<int>(root, alpha);
            tree.TreeIsUnbalanced += FailTreeIsUnbalanced;
            foreach (var item in keys)
            {
                Assert.DoesNotThrow(() => tree.Insert(item));
            }

            tree.TreeIsUnbalanced -= FailTreeIsUnbalanced;
            tree.Delete(candidate);
            Assert.AreEqual(tree.Size, tree.MaxSize);
        }

        // Covers rebuilds triggered at both ends of the key range (-1 and 7).
        [Test]
        [TestCase(3, new[]{2,5,1,6}, -1, 0.5)]
        [TestCase(3, new[]{2,5,1,6}, 7, 0.5)]
        public void Insert_TreeIsUnbalanced_BalancesTree(int root, int[] keys, int candidate, double alpha)
        {
            var tree = new ScapegoatTree<int>(root, alpha);
            tree.TreeIsUnbalanced += FailTreeIsUnbalanced;
            foreach (var item in keys)
            {
                Assert.DoesNotThrow(() => tree.Insert(item));
            }

            tree.TreeIsUnbalanced -= FailTreeIsUnbalanced;
            var inserted = tree.Insert(candidate);
            Assert.True(inserted);
            Assert.True(tree.Size == 6);
            Assert.True(tree.IsAlphaWeightBalanced());
        }

        // NOTE: no [Test] attribute here — in NUnit [TestCase] alone marks the
        // method as a parameterized test.
        [TestCase(3, 5, 0.5)]
        public void Insert_TreeIsUnbalanced_BalancesTree2(int root, int candidate, double alpha)
        {
            var tree = new ScapegoatTree<int>(root, alpha);
            var inserted = tree.Insert(candidate);
            Assert.True(inserted);
            Assert.True(tree.Size == 2);
            Assert.True(tree.IsAlphaWeightBalanced());
        }

        [Test]
        public void Contains_RootIsNull_ReturnsFalse()
        {
            var tree = new ScapegoatTree<int>();
            Assert.IsFalse(tree.Contains(1));
        }

        [Test]
        public void Contains_RootHasKey_ReturnsTrue()
        {
            var tree = new ScapegoatTree<int>(1);
            Assert.IsTrue(tree.Contains(1));
        }

        [Test]
        public void Contains_TreeHasKey_ReturnsTrue()
        {
            var tree = new ScapegoatTree<int>(1);
            tree.Insert(2);
            Assert.IsTrue(tree.Contains(2));
        }

        [Test]
        public void Contains_TreeDoesNotContainKey_ReturnsFalse()
        {
            var tree = new ScapegoatTree<int>(1);
            tree.Insert(2);
            Assert.IsFalse(tree.Contains(-1));
        }

        [Test]
        public void Clear_TreeHasKeys_ClearsTree()
        {
            var tree = new ScapegoatTree<int>(1);
            tree.Clear();
            Assert.IsTrue(tree.Size == 0);
            Assert.IsTrue(tree.MaxSize == 0);
            Assert.IsNull(tree.Root);
        }

        [Test]
        public void Tune_AlphaIsValid_ChangesAlpha()
        {
            var expected = 0.7;
            var tree = new ScapegoatTree<int>();
            tree.Tune(expected);
            Assert.AreEqual(expected, tree.Alpha);
        }

        [Test]
        public void Tune_AlphaIsNotValid_ThrowsException()
        {
            var expected = 9.9;
            var tree = new ScapegoatTree<int>();
            Assert.Throws<ArgumentException>(() => tree.Tune(expected));
        }

        [Test]
        public void FindScapegoatInPath_PathIsEmpty_ThrowsAnException()
        {
            var tree = new ScapegoatTree<int>();
            Assert.Throws<ArgumentException>(() => tree.FindScapegoatInPath(new Stack<Node<int>>()));
        }

        [Test]
        public void FindScapegoatInPath_ScapegoatIsNotPresent_ThrowsAnException()
        {
            // With alpha = 1 a single-node path contains no valid scapegoat.
            var tree = new ScapegoatTree<int>(1, 1);
            var path = new Stack<Node<int>>();
            path.Push(tree.Root!);
            Assert.Throws<InvalidOperationException>(() => tree.FindScapegoatInPath(path));
        }

        // Event handler used while building fixtures: any rebalance event fails the test.
        private static void FailTreeIsUnbalanced(object? sender, EventArgs? e)
        {
            Assert.Fail();
        }

        // Event handler used during the act phase: a rebalance event passes the test
        // (Assert.Pass throws SuccessException, asserted by the callers above).
        private static void PassTreeIsUnbalanced(object? sender, EventArgs? e)
        {
            Assert.Pass();
        }
    }
}
| 461 |
C-Sharp | TheAlgorithms | C# | using DataStructures.SegmentTrees;
using NUnit.Framework;
namespace DataStructures.Tests.SegmentTrees
{
[TestFixture]
public class SegmentTreeApplyTests
{
private readonly SegmentTreeApply testTree = new(new[] { 8, 9, 1, 4, 8, 7, 2 });
[Test]
public void Apply_Query_Update_Query_Test()
{
Assert.AreEqual(22, testTree.Query(1, 4));
testTree.Apply(0, 3, 2);
Assert.AreEqual(new[] { 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }, testTree.Operand);
Assert.AreEqual(36, testTree.Query(1, 4));
}
}
}
| 21 |
C-Sharp | TheAlgorithms | C# | using DataStructures.SegmentTrees;
using NUnit.Framework;
namespace DataStructures.Tests.SegmentTrees
{
[TestFixture]
public class SegmentTreeTests
{
private readonly SegmentTree testTree = new(new[] { 8, 9, 1, 4, 8, 7, 2 });
[Test]
public void TreeArray_Test()
{
int[] expectedArray = { 0, 39, 22, 17, 17, 5, 15, 2, 8, 9, 1, 4, 8, 7, 2, 0 };
Assert.AreEqual(expectedArray, testTree.Tree);
}
[TestCase(1, 4, 22)]
[TestCase(2, 2, 1)]
public void Query_Test(int left, int right, int expectedValue)
{
Assert.AreEqual(expectedValue, testTree.Query(left, right));
}
}
}
| 26 |
C-Sharp | TheAlgorithms | C# | using DataStructures.SegmentTrees;
using NUnit.Framework;
namespace DataStructures.Tests.SegmentTrees
{
    /// <summary>
    /// Tests for the updatable sum segment tree.
    /// </summary>
    [TestFixture]
    public class SegmentTreeUpdateTests
    {
        // Reassigns the tree before every test so each TestCase starts from the
        // pristine sample array, since Update_Test mutates the tree.
        [SetUp]
        public void Init()
        {
            testTree = new SegmentTreeUpdate(new[] { 8, 9, 1, 4, 8, 7, 2 });
        }

        // The field initializer keeps the field non-null for nullable analysis;
        // the instance it creates is replaced by Init() before each test runs.
        private SegmentTreeUpdate testTree = new(new[] { 8, 9, 1, 4, 8, 7, 2 });

        // (node, value): leaf to overwrite; (left, right): query range; aftQuery:
        // expected range sum after the update.
        [TestCase(2, 3, 1, 4, 24)]
        [TestCase(0, 3, 1, 4, 22)]
        public void Update_Test(int node, int value, int left, int right, int aftQuery)
        {
            testTree.Update(node, value);
            Assert.AreEqual(aftQuery, testTree.Query(left, right));
        }
    }
}
| 26 |
C-Sharp | TheAlgorithms | C# | using DataStructures.Stack;
using FluentAssertions;
using NUnit.Framework;
using System;
using System.Linq;
namespace DataStructures.Tests.Stack
{
public static class ArrayBasedStackTests
{
private const string StackEmptyErrorMessage = "Stack is empty";
[Test]
public static void CountTest()
{
var stack = new ArrayBasedStack<int>(new[] { 0, 1, 2, 3, 4 });
stack.Top.Should().Be(4);
}
[Test]
public static void ClearTest()
{
var stack = new ArrayBasedStack<int>(new[] { 0, 1, 2, 3, 4 });
stack.Clear();
stack.Top.Should().Be(-1);
}
[Test]
public static void ContainsTest()
{
var stack = new ArrayBasedStack<int>(new[] { 0, 1, 2, 3, 4 });
Assert.Multiple(() =>
{
stack.Contains(0).Should().BeTrue();
stack.Contains(1).Should().BeTrue();
stack.Contains(2).Should().BeTrue();
stack.Contains(3).Should().BeTrue();
stack.Contains(4).Should().BeTrue();
});
}
[Test]
public static void PeekTest()
{
var stack = new ArrayBasedStack<int>(new[] { 0, 1, 2, 3, 4 });
Assert.Multiple(() =>
{
stack.Peek().Should().Be(4);
stack.Peek().Should().Be(4);
stack.Peek().Should().Be(4);
});
}
[Test]
public static void PopTest()
{
var stack = new ArrayBasedStack<int>(new[] { 0, 1, 2, 3, 4 });
Assert.Multiple(() =>
{
stack.Pop().Should().Be(4);
stack.Pop().Should().Be(3);
stack.Pop().Should().Be(2);
stack.Pop().Should().Be(1);
stack.Pop().Should().Be(0);
});
}
[Test]
public static void PushTest()
{
var stack = new ArrayBasedStack<int>();
Assert.Multiple(() =>
Enumerable.Range(0, 5)
.ToList()
.ForEach(number =>
{
stack.Push(number);
stack.Peek().Should().Be(number);
}));
}
[Test]
public static void AutomaticResizesTest()
{
const int initialCapacity = 2;
var stack = new ArrayBasedStack<int>
{
Capacity = initialCapacity,
};
stack.Push(0);
stack.Push(1);
stack.Push(2);
stack.Push(3);
stack.Push(4);
stack.Capacity.Should().BeGreaterThan(initialCapacity);
}
[Test]
public static void ShouldThrowStackEmptyExceptionOnEmptyPopTest()
{
var stack = new ArrayBasedStack<int>();
Action poppingAnEmptyStack = () => stack.Pop();
poppingAnEmptyStack.Should()
.Throw<InvalidOperationException>()
.WithMessage(StackEmptyErrorMessage);
}
[Test]
public static void ShouldThrowStackEmptyExceptionOnEmptyPeekTest()
{
var stack = new ArrayBasedStack<int>();
Action peekingAnEmptyStack = () => stack.Peek();
peekingAnEmptyStack.Should()
.Throw<InvalidOperationException>()
.WithMessage(StackEmptyErrorMessage);
}
}
}
| 135 |
C-Sharp | TheAlgorithms | C# | using DataStructures.Stack;
using FluentAssertions;
using NUnit.Framework;
using System.Linq;
namespace DataStructures.Tests.Stack
{
public static class ListBasedStackTests
{
[Test]
public static void CountTest()
{
var stack = new ListBasedStack<int>(new[] { 0, 1, 2, 3, 4 });
stack.Count.Should().Be(5);
}
[Test]
public static void ClearTest()
{
var stack = new ListBasedStack<int>(new[] { 0, 1, 2, 3, 4 });
stack.Clear();
stack.Count.Should().Be(0);
}
[Test]
public static void ContainsTest()
{
var stack = new ListBasedStack<int>(new[] { 0, 1, 2, 3, 4 });
Assert.Multiple(() =>
{
stack.Contains(0).Should().BeTrue();
stack.Contains(1).Should().BeTrue();
stack.Contains(2).Should().BeTrue();
stack.Contains(3).Should().BeTrue();
stack.Contains(4).Should().BeTrue();
});
}
[Test]
public static void PeekTest()
{
var stack = new ListBasedStack<int>(new[] { 0, 1, 2, 3, 4 });
Assert.Multiple(() =>
{
stack.Peek().Should().Be(4);
stack.Peek().Should().Be(4);
stack.Peek().Should().Be(4);
});
}
[Test]
public static void PopTest()
{
var stack = new ListBasedStack<int>(new[] { 0, 1, 2, 3, 4 });
Assert.Multiple(() =>
{
stack.Pop().Should().Be(4);
stack.Pop().Should().Be(3);
stack.Pop().Should().Be(2);
stack.Pop().Should().Be(1);
stack.Pop().Should().Be(0);
});
}
[Test]
public static void PushTest()
{
var stack = new ListBasedStack<int>();
Assert.Multiple(() =>
Enumerable.Range(0, 5)
.ToList()
.ForEach(number =>
{
stack.Push(number);
stack.Peek().Should().Be(number);
}));
}
}
}
| 87 |
C-Sharp | TheAlgorithms | C# | using System;
using DataStructures.Tries;
using NUnit.Framework;
namespace DataStructures.Tests.Tries
{
public static class TrieTests
{
[Test]
public static void FindWordInTrie(){
// Arrange
string[] words = {
"trie",
"node",
"none",
"treatment",
};
// Act
Trie trie = new(words);
// Assert
Assert.IsTrue(trie.Find("trie"), "The word 'trie' isn't in Trie structure");
Assert.IsTrue(trie.Find("node"), "The word 'node' isn't in Trie structure");
Assert.IsTrue(trie.Find("none"), "The word 'none' isn't in Trie structure");
Assert.IsTrue(trie.Find("treatment"), "The word 'treatment' isn't in Trie structure");
Assert.IsFalse(trie.Find("nodes"), "The word 'nodes' is in Trie sturcture");
Assert.IsFalse(trie.Find(""), "The word empty is in Trie structure");
Assert.IsFalse(trie.Find("tri"), "The word 'tri' is in Trie structure");
}
[Test]
public static void InsertInTrie(){
// Arrange
string[] words = {
"trie",
"node",
"none",
"treatment",
};
Trie trie = new();
// Act
foreach (var t in words)
{
trie.Insert(t);
}
// Assert
Assert.IsTrue(trie.Find("trie"), "The word 'trie' isn't in Trie structure");
Assert.IsTrue(trie.Find("node"), "The word 'node' isn't in Trie structure");
Assert.IsTrue(trie.Find("none"), "The word 'none' isn't in Trie structure");
Assert.IsTrue(trie.Find("treatment"), "The word 'treatment' isn't in Trie structure");
}
[Test]
public static void RemoveFromTrie(){
// Arrange
string[] words = {
"trie",
"node",
"none",
"treatment",
};
Trie trie = new();
// Act
foreach (var t in words)
{
trie.Insert(t);
}
trie.Remove("trie");
// Assert
Assert.IsFalse(trie.Find("trie"), "The word 'trie' is in Trie structure");
Assert.IsTrue(trie.Find("treatment"), "The word 'treament' isn't in Trie structure");
Assert.IsTrue(trie.Find("node"), "The word 'node' isn't in Trie structure");
Assert.IsTrue(trie.Find("none"), "The word 'none' isn't in Trie structure");
}
[Test]
public static void MultipleInsert()
{
// Arrange
string w = "trie";
Trie trie = new();
// Act
trie.Insert(w);
trie.Insert(w);
// Assert
Assert.IsTrue(trie.Find("trie"), "The word 'trie' isn't in Trie structure");
Assert.IsFalse(trie.Find("nodes"), "The word 'nodes' is in Trie sturcture");
}
[Test]
public static void RemoveAWordThatIsNtInTrie(){
// Arrange
const string w = "trie";
Trie trie = new();
// Act
trie.Insert(w);
trie.Remove("tri");
trie.Remove("none");
// Assert
Assert.IsTrue(trie.Find("trie"), "The word 'trie' isn't in Trie structure");
}
}
}
| 116 |
C-Sharp | TheAlgorithms | C# | using System;
using DataStructures.UnrolledList;
using FluentAssertions;
using NUnit.Framework;
namespace DataStructures.Tests.UnrolledList
{
public class UnrolledLinkedListNodeTests
{
[Test]
public void GetAndSet_SetItemNodeAndGetIt_ReturnExpectedItem()
{
var node = new UnrolledLinkedListNode(6);
node.Set(0, 1);
var result = node.Get(0);
result.Should().Be(1);
}
[Test]
public void Get_GetLowIndex_ThrowArgumentException()
{
var node = new UnrolledLinkedListNode(6);
Action action = () => node.Get(-1);
action.Should().Throw<ArgumentException>();
}
[Test]
public void Get_GetHighIndex_ThrowArgumentException()
{
var node = new UnrolledLinkedListNode(6);
Action action = () => node.Get(7);
action.Should().Throw<ArgumentException>();
}
[Test]
public void Set_SetLowIndex_ThrowArgumentException()
{
var node = new UnrolledLinkedListNode(6);
Action action = () => node.Set(-1, 0);
action.Should().Throw<ArgumentException>();
}
[Test]
public void Set_SetHighIndex_ThrowArgumentException()
{
var node = new UnrolledLinkedListNode(6);
Action action = () => node.Set(7, 0);
action.Should().Throw<ArgumentException>();
}
}
}
| 62 |
C-Sharp | TheAlgorithms | C# | using DataStructures.UnrolledList;
using FluentAssertions;
using NUnit.Framework;
namespace DataStructures.Tests.UnrolledList
{
public class UnrolledLinkedListTests
{
[Test]
public void Insert_LinkArrayToLinkedList_ReturnArrayHaveSameItems()
{
var linkedList = new UnrolledLinkedList(6);
var contest = new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 };
foreach (var number in contest)
{
linkedList.Insert(number);
}
var result = linkedList.GetRolledItems();
result.Should().BeEquivalentTo(contest);
}
}
}
| 25 |
C-Sharp | TheAlgorithms | C# | using System;
namespace Utilities.Exceptions
{
/// <summary>
/// Signs that sequence doesn't contain any items that one was looking for.
/// </summary>
public class ItemNotFoundException : Exception
{
}
}
| 12 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
namespace Utilities.Extensions
{
public static class DictionaryExtensions
{
/// <summary>
/// Adds the specified key value tuples to the dictionary.
/// </summary>
/// <param name="keys">The dictionary.</param>
/// <param name="enumerable">The collection of key value tuples to add.</param>
/// <typeparam name="TKey">The type of the keys in the dictionary.</typeparam>
/// <typeparam name="TValue">The type of the values in the dictionary.</typeparam>
/// <exception cref="ArgumentException">
/// A key from the <paramref name="enumerable"/> already exists in <paramref name="keys"/>.
/// </exception>
public static void AddMany<TKey, TValue>(
this Dictionary<TKey, TValue> keys,
IEnumerable<(TKey, TValue)> enumerable) where TKey : notnull
{
foreach (var (key, value) in enumerable)
{
keys.Add(key, value);
}
}
}
}
| 29 |
C-Sharp | TheAlgorithms | C# | using System;
namespace Utilities.Extensions
{
public static class MatrixExtensions
{
/// <summary>
/// Performs immutable dot product multiplication on source matrix to operand.
/// </summary>
/// <param name="source">Source left matrix.</param>
/// <param name="operand">Operand right matrix.</param>
/// <returns>Dot product result.</returns>
/// <exception cref="InvalidOperationException">The width of a first operand should match the height of a second.</exception>
public static double[,] Multiply(this double[,] source, double[,] operand)
{
if (source.GetLength(1) != operand.GetLength(0))
{
throw new InvalidOperationException(
"The width of a first operand should match the height of a second.");
}
var result = new double[source.GetLength(0), operand.GetLength(1)];
for (var i = 0; i < result.GetLength(0); i++)
{
for (var j = 0; j < result.GetLength(1); j++)
{
double elementProduct = 0;
for (var k = 0; k < source.GetLength(1); k++)
{
elementProduct += source[i, k] * operand[k, j];
}
result[i, j] = elementProduct;
}
}
return result;
}
/// <summary>
/// Makes a copy of a matrix. Changes to the copy should not affect the original.
/// </summary>
/// <param name="matrix">The matrix.</param>
/// <returns>A copy of the matrix.</returns>
public static double[,] Copy(this double[,] matrix)
{
var result = new double[matrix.GetLength(0), matrix.GetLength(1)];
for (var i = 0; i < matrix.GetLength(0); i++)
{
for (var j = 0; j < matrix.GetLength(1); j++)
{
result[i, j] = matrix[i, j];
}
}
return result;
}
/// <summary>
/// Transposes a matrix.
/// </summary>
/// <param name="matrix">The matrix.</param>
/// <returns>The transposed matrix.</returns>
public static double[,] Transpose(this double[,] matrix)
{
var result = new double[matrix.GetLength(1), matrix.GetLength(0)];
for (var i = 0; i < matrix.GetLength(0); i++)
{
for (var j = 0; j < matrix.GetLength(1); j++)
{
result[j, i] = matrix[i, j];
}
}
return result;
}
/// <summary>
/// Multiplies a matrix by a vector.
/// </summary>
/// <param name="matrix">The matrix.</param>
/// <param name="vector">The vector.</param>
/// <returns>The product of the matrix and the vector, which is a vector.</returns>
/// <exception cref="ArgumentException">Dimensions of matrix and vector do not match.</exception>
public static double[] MultiplyVector(this double[,] matrix, double[] vector)
{
var vectorReshaped = new double[vector.Length, 1];
for (var i = 0; i < vector.Length; i++)
{
vectorReshaped[i, 0] = vector[i];
}
var resultMatrix = matrix.Multiply(vectorReshaped);
var result = new double[resultMatrix.GetLength(0)];
for (var i = 0; i < result.Length; i++)
{
result[i] = resultMatrix[i, 0];
}
return result;
}
/// <summary>
/// Performs matrix subtraction.
/// </summary>
/// <param name="lhs">The LHS matrix.</param>
/// <param name="rhs">The RHS matrix.</param>
/// <returns>The difference of the two matrices.</returns>
/// <exception cref="ArgumentException">Dimensions of matrices do not match.</exception>
public static double[,] Subtract(this double[,] lhs, double[,] rhs)
{
if (lhs.GetLength(0) != rhs.GetLength(0) ||
lhs.GetLength(1) != rhs.GetLength(1))
{
throw new ArgumentException("Dimensions of matrices must be the same");
}
var result = new double[lhs.GetLength(0), lhs.GetLength(1)];
for (var i = 0; i < lhs.GetLength(0); i++)
{
for (var j = 0; j < lhs.GetLength(1); j++)
{
result[i, j] = lhs[i, j] - rhs[i, j];
}
}
return result;
}
/// <summary>
/// Performs an element by element comparison on both matrices.
/// </summary>
/// <param name="source">Source left matrix.</param>
/// <param name="operand">Openrand right matrix.</param>
/// <returns>true: if all elements are the same; false otherwise.</returns>
public static bool IsEqual(this double[,] source, double[,] operand)
{
if (source.Length != operand.Length ||
source.GetLength(0) != operand.GetLength(0) ||
source.GetLength(1) != operand.GetLength(1))
{
return false;
}
for (var i = 0; i < source.GetLength(0); i++)
{
for (var j = 0; j < source.GetLength(0); j++)
{
if (Math.Abs(source[i, j] - operand[i, j]) >= 0.0001)
{
return false;
}
}
}
return true;
}
/// <summary>
/// Performs a round operation on every element of the input matrix up to the neareast integer.
/// </summary>
/// <param name="source">Input matrix.</param>
/// <returns>Matrix with rounded elements.</returns>
public static double[,] RoundToNextInt(this double[,] source)
{
var rows = source.GetLength(0);
var cols = source.GetLength(1);
var result = new double[rows, cols];
for (var i = 0; i < rows; i++)
{
for (var j = 0; j < cols; j++)
{
result[i, j] = Math.Round(source[i, j]);
}
}
return result;
}
}
}
| 185 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Linq;
namespace Utilities.Extensions
{
public static class RandomExtensions
{
/// <summary>
/// Returns a random normalized vector of the specified size.
/// </summary>
/// <param name="rand">The random number generator.</param>
/// <param name="size">The size of the vector to return.</param>
/// <returns>A random normalized vector.</returns>
public static double[] NextVector(this Random rand, int size)
{
var vector = Enumerable.Range(0, size)
.Select(_ => rand.NextDouble()).ToArray();
var norm = vector.Magnitude();
return vector.Select(x => x / norm).ToArray();
}
}
}
| 23 |
C-Sharp | TheAlgorithms | C# | using System;
namespace Utilities.Extensions
{
public static class VectorExtensions
{
/// <summary>
/// Makes a copy of a vector. Changes to the copy should not affect the original.
/// </summary>
/// <param name="vector">The vector.</param>
/// <returns>The copy.</returns>
public static double[] Copy(this double[] vector)
{
var result = new double[vector.Length];
for (var i = 0; i < vector.Length; i++)
{
result[i] = vector[i];
}
return result;
}
/// <summary>
/// Computes the outer product of two vectors.
/// </summary>
/// <param name="lhs">The LHS vector.</param>
/// <param name="rhs">The RHS vector.</param>
/// <returns>The outer product of the two vector.</returns>
public static double[,] OuterProduct(this double[] lhs, double[] rhs)
{
var result = new double[lhs.Length, rhs.Length];
for (var i = 0; i < lhs.Length; i++)
{
for (var j = 0; j < rhs.Length; j++)
{
result[i, j] = lhs[i] * rhs[j];
}
}
return result;
}
/// <summary>
/// Computes the dot product of two vectors.
/// </summary>
/// <param name="lhs">The LHS vector.</param>
/// <param name="rhs">The RHS vector.</param>
/// <returns>The dot product of the two vector.</returns>
/// <exception cref="ArgumentException">Dimensions of vectors do not match.</exception>
public static double Dot(this double[] lhs, double[] rhs)
{
if (lhs.Length != rhs.Length)
{
throw new ArgumentException("Dot product arguments must have same dimension");
}
double result = 0;
for (var i = 0; i < lhs.Length; i++)
{
result += lhs[i] * rhs[i];
}
return result;
}
/// <summary>
/// Computes the magnitude of a vector.
/// </summary>
/// <param name="vector">The vector.</param>
/// <returns>The magnitude.</returns>
public static double Magnitude(this double[] vector)
{
var magnitude = Dot(vector, vector);
magnitude = Math.Sqrt(magnitude);
return magnitude;
}
/// <summary>
/// Returns the scaled vector.
/// </summary>
/// <param name="vector">The vector.</param>
/// <param name="factor">Scale factor.</param>
/// <returns>The unit vector.</returns>
public static double[] Scale(this double[] vector, double factor)
{
var result = new double[vector.Length];
for (var i = 0; i < vector.Length; i++)
{
result[i] = vector[i] * factor;
}
return result;
}
/// <summary>
/// Transpose 1d row vector to column vector.
/// </summary>
/// <param name="source">Input 1d vector.</param>
/// <returns>Column vector.</returns>
public static double[,] ToColumnVector(this double[] source)
{
var columnVector = new double[source.Length, 1];
for (var i = 0; i < source.Length; i++)
{
columnVector[i, 0] = source[i];
}
return columnVector;
}
/// <summary>
/// Transpose column vector to 1d row vector.
/// </summary>
/// <param name="source">Input column vector.</param>
/// <returns>Row vector.</returns>
/// <exception cref="InvalidOperationException">The column vector should have only 1 element in width.</exception>
public static double[] ToRowVector(this double[,] source)
{
if (source.GetLength(1) != 1)
{
throw new InvalidOperationException("The column vector should have only 1 element in width.");
}
var rowVector = new double[source.Length];
for (var i = 0; i < rowVector.Length; i++)
{
rowVector[i] = source[i, 0];
}
return rowVector;
}
/// <summary>
/// Generates a diagonal matrix from an specified vector.
/// </summary>
/// <param name="vector">The input vector.</param>
/// <returns>A Diagonal matrix.</returns>
public static double[,] ToDiagonalMatrix(this double[] vector)
{
var len = vector.Length;
var result = new double[len, len];
for (var i = 0; i < len; i++)
{
result[i, i] = vector[i];
}
return result;
}
}
}
| 154 |
C-Sharp | TheAlgorithms | C# | using System;
using System.Collections.Generic;
using FluentAssertions;
using NUnit.Framework;
using Utilities.Extensions;
namespace Utilities.Tests.Extensions
{
public class DictionaryExtensionsTests
{
[Test]
public void AddMany_ShouldThrowArgumentException_WhenKeyAlreadyExists()
{
var dictionary = new Dictionary<string, int> { ["one"] = 1 };
var enumerable = new[] { ("one", 1), ("two", 2) };
var action = () => dictionary.AddMany(enumerable);
action.Should().Throw<ArgumentException>();
}
[Test]
public void AddMany_ShouldAddAllKeyValuePairs()
{
var dictionary = new Dictionary<string, int> { ["one"] = 1 };
var enumerable = new[] { ("two", 2), ("three", 3) };
dictionary.AddMany(enumerable);
dictionary.Should().HaveCount(3);
dictionary.Should().ContainKey("one").WhichValue.Should().Be(1);
dictionary.Should().ContainKey("two").WhichValue.Should().Be(2);
dictionary.Should().ContainKey("three").WhichValue.Should().Be(3);
}
}
}
| 38 |
C-Sharp | TheAlgorithms | C# | using System;
using FluentAssertions;
using NUnit.Framework;
using Utilities.Extensions;
namespace Utilities.Tests.Extensions
{
public class MatrixExtensionsTests
{
private static readonly object[] MatrixMultiplyTestCases =
{
new object[]
{
new double[,] { { 2, 2, -1 }, { 0, -2, -1 }, { 0, 0, 5 } },
new double[,] { { 2 }, { 2 }, { 3 } },
new double[,] { { 5 }, { -7 }, { 15 } },
},
new object[]
{
new double[,] { { 5, 8, -4 }, { 6, 9, -5 }, { 4, 7, -3 } },
new double[,] { { 3, 2, 5 }, { 4, -1, 3 }, { 9, 6, 5 } },
new double[,] { { 11, -22, 29 }, { 9, -27, 32 }, { 13, -17, 26 } },
},
};
private static readonly object[] MatrixTransposeTestCases =
{
new object[]
{
new double[,] { { 2, 2, 3 } },
new double[,] { { 2 }, { 2 }, { 3 } },
},
new object[]
{
new double[,] { { 5, 8 }, { 6, 9 } },
new double[,] { { 5, 6 }, { 8, 9 } },
},
};
private static readonly object[] MatrixSubtractTestCases =
{
new object[]
{
new double[,] { { 0, 0 }, { 0, 0 } },
new double[,] { { 1, 1 }, { 1, 1 } },
new double[,] { { -1, -1 }, { -1, -1 } },
},
new object[]
{
new double[,] { { 1, 2 }, { 2, 3 }, { 3, 4 } },
new double[,] { { 1, 1 }, { 1, 1 }, { 1, 1 } },
new double[,] { { 0, 1 }, { 1, 2 }, { 2, 3 } },
},
new object[]
{
new double[,] { { -1, -2, 0 }, { 2, -3, 2 }, { 3, 4, 1 } },
new double[,] { { 2, 5, 12 }, { 0, 5, 1 }, { 1, 1, 4 } },
new double[,] { { -3, -7, -12 }, { 2, -8, 1 }, { 2, 3, -3 } },
},
};
[Test]
public void Multiply_ShouldThrowInvalidOperationException_WhenOperandsAreNotCompatible()
{
// Arrange
var source = new double[,] { { 1, 1, 1 }, { 1, 1, 1 }, { 1, 1, 1 } };
var operand = new double[,] { { 1 }, { 1 } };
// Act
Action action = () => source.Multiply(operand);
// Assert
action.Should().Throw<InvalidOperationException>()
.WithMessage("The width of a first operand should match the height of a second.");
}
[TestCaseSource(nameof(MatrixMultiplyTestCases))]
public void Multiply_ShouldCalculateDotProductMultiplicationResult(
double[,] source,
double[,] operand,
double[,] result) =>
source.Multiply(operand).Should().BeEquivalentTo(result);
[Test]
public void Copy_ShouldReturnImmutableCopyOfMatrix()
{
// Arrange
var sutMatrix = new double[,] { { 1, 1, 1 }, { 1, 1, 1 }, { 1, 1, 1 } };
// Act
var actualMatrix = sutMatrix.Copy();
// Assert
actualMatrix.Should().NotBeSameAs(sutMatrix);
actualMatrix.Should().BeEquivalentTo(sutMatrix);
}
[TestCaseSource(nameof(MatrixTransposeTestCases))]
public void Transpose_ShouldReturnTransposedMatrix(
double[,] source,
double[,] target) =>
source.Transpose().Should().BeEquivalentTo(target);
[Test]
public void MultiplyVector_ShouldCalculateDotProductMultiplicationResult()
{
// Arrange
var source = new double[,] { { 2, 2, -1 }, { 0, -2, -1 }, { 0, 0, 5 } };
var operand = new double[] { 2, 2, 3 };
var result = new double[] { 5, -7, 15 };
// Act
var actualMatrix = source.MultiplyVector(operand);
// Assert
actualMatrix.Should().BeEquivalentTo(result);
}
[Test]
public void Subtract_ShouldThrowArgumentException_WhenOperandsAreNotCompatible()
{
// Arrange
var source = new double[,] { { 1, 1, 1 }, { 1, 1, 1 }, { 1, 1, 1 } };
var operand = new double[,] { { 1 }, { 1 } };
// Act
Action action = () => source.Subtract(operand);
// Assert
action.Should().Throw<ArgumentException>()
.WithMessage("Dimensions of matrices must be the same");
}
[Test]
public static void EqualMatricesShouldReturnTrue()
{
// Arrange
var a = new double[,] { { 1, 2, 3 }, { 1, 2, 3 }, { 1, 2, 3 } };
var b = new double[,] { { 1, 2, 3 }, { 1, 2, 3 }, { 1, 2, 3 } };
// Act
var result = a.IsEqual(b);
// Assert
Assert.True(result);
}
[Test]
public static void NonEqualMatricesShouldReturnFalse()
{
// Arrange
var a = new double[,] { { 1, 2, 3 }, { 1, 2, 3 }, { 1, 2, 3 } };
var b = new double[,] { { 1, 2, 3 }, { 1, 2, 6 }, { 1, 2, 3 } };
// Act
var result = a.IsEqual(b);
// Assert
Assert.False(result);
}
[Test]
public static void DifferentSizeMatricesShouldReturnFalse()
{
// Arrange
var a = new double[,] { { 1, 2, 3 }, { 1, 2, 3 }, { 1, 2, 3 } };
var b = new double[,] { { 1, 2, 3 }, { 1, 2, 3 } };
// Act
var result = a.IsEqual(b);
// Assert
Assert.False(result);
}
[TestCaseSource(nameof(MatrixSubtractTestCases))]
public void Subtract_ShouldCalculateSubtractionResult(
double[,] source,
double[,] operand,
double[,] result) =>
source.Subtract(operand).Should().BeEquivalentTo(result);
[Test]
public void RoundToNextInt_ShouldReturnRoundedMatrix()
{
var source = new[,]
{
{ -1.9, 1.9 },
{ -1.5, 1.5 },
{ -1.1, 1.1 },
{ -0.9, 0.9 },
{ -0.5, 0.5 },
{ -0.1, 0.1 },
};
var result = new double[,]
{
{ -2, 2 },
{ -2, 2 },
{ -1, 1 },
{ -1, 1 },
{ 0, 0 },
{ 0, 0 },
};
var actualResult = source.RoundToNextInt();
actualResult.Should().BeEquivalentTo(result);
}
}
}
| 212 |
C-Sharp | TheAlgorithms | C# | using System;
using FluentAssertions;
using NUnit.Framework;
using Utilities.Extensions;
namespace Utilities.Tests.Extensions
{
public class RandomExtensionsTests
{
[Test]
public void NextVector_ShouldReturnNormalizedVector()
{
var random = new Random(0);
var result = random.NextVector(10);
result.Length.Should().Be(10);
result.Magnitude().Should().BeApproximately(1.0, 1e-6);
}
}
}
| 22 |
C-Sharp | TheAlgorithms | C# | using System;
using FluentAssertions;
using NUnit.Framework;
using Utilities.Extensions;
namespace Utilities.Tests.Extensions
{
public class VectorExtensionsTests
{
[Test]
public void Copy_ShouldReturnCopyOfVector()
{
var vector = new double[] { 0, 1, 2, 3 };
var vectorCopy = vector.Copy();
vectorCopy.Should().BeEquivalentTo(vector);
vectorCopy.Should().NotBeSameAs(vector);
}
[Test]
public void OuterProduct_ShouldCalculateOuterProduct()
{
var lhs = new double[] { -2, -1, 0, 1, 2 };
var rhs = new double[] { 1, 2, 3 };
var result = new double[,]
{
{ -2, -4, -6 },
{ -1, -2, -3 },
{ 0, 0, 0 },
{ 1, 2, 3 },
{ 2, 4, 6 },
};
var actualResult = lhs.OuterProduct(rhs);
actualResult.Should().BeEquivalentTo(result);
}
[Test]
public void Dot_ShouldThrowArgumentException_WhenDimensionsDoNotMatch()
{
var lhs = new double[] { 1, 2, 3 };
var rhs = new double[] { 1, 2, 3, 4 };
var func = () => lhs.Dot(rhs);
func.Should().Throw<ArgumentException>()
.WithMessage("Dot product arguments must have same dimension");
}
[Test]
public void Dot_ShouldCalculateDotProduct()
{
var lhs = new double[] { 1, 2, 3 };
var rhs = new double[] { 4, 5, 6 };
var actualResult = lhs.Dot(rhs);
actualResult.Should().Be(32);
}
[Test]
public void Magnitude_ShouldCalculateMagnitude()
{
var vector = new double[] { -3, 4 };
var actualResult = vector.Magnitude();
actualResult.Should().BeApproximately(5.0, 0.0001);
}
[Test]
public void Scale_ShouldCalculateScale()
{
var vector = new double[] { -1, 0, 1 };
var factor = 2;
var result = new double[] { -2, 0, 2 };
var actualResult = vector.Scale(factor);
actualResult.Should().BeEquivalentTo(result);
}
[Test]
public void ToColumnVector_ShouldReturnColumnVector()
{
var vector = new double[] { 1, 2, 3, 4 };
var result = new double[,] { { 1 }, { 2 }, { 3 }, { 4 } };
var actualResult = vector.ToColumnVector();
actualResult.Should().BeEquivalentTo(result);
}
[Test]
public void ToRowVector_ShouldThrowInvalidOperationException_WhenSourceIsNotAColumnVector()
{
var source = new double[,] { { 1, 2 }, { 3, 4 }, { 5, 6 } };
var func = () => source.ToRowVector();
func.Should().Throw<InvalidOperationException>()
.WithMessage("The column vector should have only 1 element in width.");
}
[Test]
public void ToRowVector_ShouldReturnRowVector()
{
var source = new double[,] { { 1 }, { 2 }, { 3 }, { 4 } };
var result = new double[] { 1, 2, 3, 4 };
var actualResult = source.ToRowVector();
actualResult.Should().BeEquivalentTo(result);
}
[Test]
public void ToDiagonalMatrix_ShouldReturnDiagonalMatrix()
{
var source = new double[] { 1, 2, 3, 4 };
var result = new double[,]
{
{ 1, 0, 0, 0 },
{ 0, 2, 0, 0 },
{ 0, 0, 3, 0 },
{ 0, 0, 0, 4 },
};
var actualResult = source.ToDiagonalMatrix();
actualResult.Should().BeEquivalentTo(result);
}
}
}
| 138 |
amazon-cognito-dotnet | aws | C# | using Amazon.Runtime.Internal.Util;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Amazon.Runtime.Internal.Util
{
    // Minimal stand-in for the SDK's logger interface; it exists only so the
    // FxCop-rule fixtures in this project can declare members of this type.
    public interface ILogger
    {
    }

    // Implements ILogger directly — exercises the rule's interface-list check
    // in PreventStaticLoggersRule.ImplementsILogger.
    public class LoggerInstance : ILogger
    {
    }

    // Derives from an ILogger implementation — exercises the rule's
    // base-class traversal.
    public class LoggerInstanceInstance : LoggerInstance
    {
    }
}
namespace CustomFxCopRules
{
    // Fixture for PreventStaticLoggersRule: every static field and static
    // settable property below is typed as ILogger (or a type whose base chain
    // implements ILogger) and should be flagged by the rule; the read-only
    // static properties (Logger13, Logger15) should NOT be flagged.
    // Do not "clean up" these members — CustomFxCopRules.Test counts the
    // issues they produce.
    public class ILoggerUser
    {
        // Static members typed directly as the interface.
        public static ILogger Logger1;
        public static ILogger Logger2 { get; set; }
        // Typed as a class implementing ILogger.
        public static LoggerInstance Logger3;
        public static LoggerInstance Logger4;
        // Typed as a class whose base class implements ILogger.
        public static LoggerInstanceInstance Logger5 { get; set; }
        public static LoggerInstanceInstance Logger6 { get; set; }
        // Private members are flagged too (the rule targets all visibilities).
        private static ILogger Logger7;
        private static ILogger Logger8 { get; set; }
        private static LoggerInstance Logger9;
        private static LoggerInstance Logger10;
        private static LoggerInstanceInstance Logger11 { get; set; }
        private static LoggerInstanceInstance Logger12 { get; set; }
        // Static read-only property: allowed (no setter).
        public static ILogger Logger13
        {
            get
            {
                return null;
            }
        }
        // Static property with a setter: flagged.
        public static ILogger Logger14
        {
            get
            {
                return null;
            }
            set
            {
                throw new NotImplementedException();
            }
        }
        // Private static read-only property: allowed (no setter).
        private static ILogger Logger15
        {
            get
            {
                return null;
            }
        }
        // Private static property with a setter: flagged.
        private static ILogger Logger16
        {
            get
            {
                return null;
            }
            set
            {
                throw new NotImplementedException();
            }
        }
    }
}
| 83 |
amazon-cognito-dotnet | aws | C# | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace CustomFxCopRules
{
    // Fixture for PreventMD5UseRule and PreventHashAlgorithmCreateRule: it
    // deliberately declares MD5-typed members and calls the banned
    // HashAlgorithm.Create / MD5.Create factories so those rules have
    // something to flag. Do not remove these usages — CustomFxCopRules.Test
    // counts the issues they produce.
    public class MD5User
    {
        // MD5-typed property and field: flagged by PreventMD5UseRule.
        System.Security.Cryptography.MD5 md5Prop { get; set; }
        System.Security.Cryptography.MD5 md5Field;
        // typeof(MD5) reference in an initializer.
        private static string md5Something = typeof(System.Security.Cryptography.MD5).Name;
        // HashAlgorithm.Create() call sites: flagged by PreventHashAlgorithmCreateRule.
        private static string md5SomethingElse = System.Security.Cryptography.HashAlgorithm.Create().GetType().FullName;
        private static string md5AnotherThing = System.Security.Cryptography.HashAlgorithm.Create(md5Something).GetType().FullName;
        public MD5User()
        {
            // MD5.Create() call site inside a constructor body.
            var hashedBytes = System.Security.Cryptography.MD5
                .Create()
                .ComputeHash(System.Text.Encoding.UTF8.GetBytes("foo"));
            var base64 = Convert.ToBase64String(hashedBytes);
            Console.WriteLine(base64);
        }
    }
}
| 27 |
amazon-cognito-dotnet | aws | C# | using Microsoft.FxCop.Sdk;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace CustomFxCopRules
{
internal sealed class PreventHashAlgorithmCreateRule : SdkCustomRule
{
public PreventHashAlgorithmCreateRule()
: base("PreventHashAlgorithmCreateRule")
{
}
public override TargetVisibilities TargetVisibility
{
get
{
return TargetVisibilities.All;
}
}
public override ProblemCollection Check(Member member)
{
var method = member as Method;
CheckMethod(method);
return Problems;
}
private void CheckMethod(Method method)
{
if (method == null || method.Instructions == null || method.Instructions.Count == 0)
return;
foreach (var instruction in method.Instructions)
{
var invokedMethod = instruction.Value as Method;
if (invokedMethod != null)
{
var fullMethodName = invokedMethod.FullName;
if (HashAlgorithmCreateMethods.Contains(fullMethodName))
{
var resolution = GetResolution(method.Name.Name, method.DeclaringType.FullName, fullMethodName);
Problems.Add(new Problem(resolution));
}
}
}
}
public static HashSet<string> HashAlgorithmCreateMethods = new HashSet<string>(StringComparer.Ordinal)
{
"System.Security.Cryptography.HashAlgorithm.Create",
"System.Security.Cryptography.HashAlgorithm.Create(System.String)"
};
}
}
| 60 |
amazon-cognito-dotnet | aws | C# | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Microsoft.FxCop.Sdk;
using System.Reflection;
namespace CustomFxCopRules
{
internal sealed class PreventMD5UseRule : SdkCustomRule
{
public PreventMD5UseRule()
: base("PreventMD5UseRule")
{ }
public override TargetVisibilities TargetVisibility
{
get
{
return TargetVisibilities.All;
}
}
public override ProblemCollection Check(Member member)
{
var method = member as Method;
CheckMethod(method);
var field = member as Field;
CheckField(field);
var prop = member as PropertyNode;
CheckProperty(prop);
return Problems;
}
private void CheckProperty(PropertyNode p)
{
if (p == null)
return;
CheckType(p.Type, p);
}
private void CheckField(Field f)
{
if (f == null)
return;
CheckType(f.Type, f);
}
private void CheckMethod(Method method)
{
if (method == null || method.Instructions == null || method.Instructions.Count == 0)
return;
foreach (var instruction in method.Instructions)
{
var invokedMethod = instruction.Value as Method;
if (invokedMethod != null)
{
var declaringType = invokedMethod.DeclaringType;
CheckType(declaringType, method);
}
var classNode = instruction.Value as ClassNode;
if (classNode != null)
{
CheckType(classNode, method);
}
}
}
public static string MD5Type = "System.Security.Cryptography.MD5";
private void CheckType(TypeNode type, Member member)
{
if (IsAssignableTo(type, MD5Type))
{
var resolution = GetResolution(type, member.FullName);
Problems.Add(new Problem(resolution));
}
}
private bool IsAssignableTo(TypeNode type, string assignableTo)
{
if (string.Equals(type.FullName, assignableTo))
return true;
var baseType = type.BaseType;
if (baseType == null)
return false;
return IsAssignableTo(baseType, assignableTo);
}
}
}
| 98 |
amazon-cognito-dotnet | aws | C# | using Microsoft.FxCop.Sdk;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace CustomFxCopRules
{
internal sealed class PreventStaticLoggersRule : SdkCustomRule
{
public PreventStaticLoggersRule()
: base("PreventStaticLoggersRule")
{ }
public override TargetVisibilities TargetVisibility
{
get
{
return TargetVisibilities.All;
}
}
public static string LoggerInterfaceFullName = "Amazon.Runtime.Internal.Util.ILogger";
public override ProblemCollection Check(Member member)
{
var field = member as Field;
if (field != null && field.IsStatic)
CheckStatic(field.DeclaringType, field.Type);
var property = member as PropertyNode;
// check only properties that are static AND have a setter
// static read-only properties (for instance, ones that call GetLogger)
// are allowed
if (property != null && property.IsStatic && property.Setter != null)
CheckStatic(property.DeclaringType, property.Type);
return Problems;
}
private void CheckStatic(TypeNode containerType, TypeNode typeOfStatic)
{
if (ImplementsILogger(typeOfStatic))
{
var resolution = GetResolution(containerType, typeOfStatic, LoggerInterfaceFullName);
Problems.Add(new Problem(resolution));
}
}
private bool ImplementsILogger(TypeNode tn)
{
if (tn == null)
return false;
// check if type is ILogger
if (IsILogger(tn.FullName))
return true;
// check if type implements ILogger
var interfaces = tn.Interfaces;
var implementsInterface = interfaces.Any(i => IsILogger(i.FullName));
if (implementsInterface)
return true;
// check base class
if (ImplementsILogger(tn.BaseType))
return true;
return false;
}
private bool IsILogger(string fullName)
{
return string.Equals(fullName, LoggerInterfaceFullName, StringComparison.Ordinal);
}
}
}
| 77 |
amazon-cognito-dotnet | aws | C# | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Microsoft.FxCop.Sdk;
using System.Reflection;
namespace CustomFxCopRules
{
    /// <summary>
    /// Base class for the SDK's custom FxCop rules; wires every rule to the
    /// shared CustomFxCopRulesMetadata resource embedded in this assembly.
    /// </summary>
    internal abstract class SdkCustomRule : BaseIntrospectionRule
    {
        // ruleName must match the rule's entry in the metadata resource.
        protected SdkCustomRule(string ruleName)
            : base(ruleName, "CustomFxCopRules.CustomFxCopRulesMetadata", typeof(SdkCustomRule).Assembly)
        { }
    }
}
| 18 |
amazon-cognito-dotnet | aws | C# | using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Xml;
namespace CustomFxCopRules.Test
{
class Program
{
static void Main(string[] args)
{
var expectedIssues = 6 + 14; // 6 for MD5, 12 for ILogger
var issuesXpath = "//Issue";
var reportPath = Path.GetFullPath("report.xml");
var customRulesPath = Path.GetFullPath(Environment.CurrentDirectory + @"..\..\..\..\bin\Debug\CustomFxCopRules.dll");
var arguments = string.Format(@"/file:""{0}"" /rule:""{0}"" /out:""{1}""",
customRulesPath, reportPath);
if (File.Exists(reportPath))
File.Delete(reportPath);
var process = Process.Start(new ProcessStartInfo
{
FileName = @"C:\Program Files (x86)\Microsoft Fxcop 10.0\FxCopCmd.exe",
Arguments = arguments,
WorkingDirectory = Environment.CurrentDirectory,
UseShellExecute = false
});
process.WaitForExit();
var report = new XmlDocument();
report.Load(reportPath);
var allIssues = report.SelectNodes(issuesXpath);
if (allIssues.Count != expectedIssues)
throw new InvalidDataException();
Console.WriteLine("Press enter to exit...");
Console.ReadLine();
}
}
}
| 46 |
amazon-cognito-dotnet | aws | C# | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("CustomFxCopRules.Test")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("CustomFxCopRules.Test")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("7412e4fa-94ff-45f6-8791-d9b02f4b8a36")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| 37 |
amazon-cognito-dotnet | aws | C# | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("CustomFxCopRules")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("CustomFxCopRules")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("11e51457-69fa-4589-9fa7-ca1e018979d7")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| 37 |
amazon-cognito-dotnet | aws | C# | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.Build.Utilities;
using System.IO;
using System.Xml;
using System.Reflection;
namespace CustomTasks
{
public class UpdateFxCopProject : Task
{
public string Assemblies { get; set; }
public string FxCopProject { get; set; }
public string BinSuffix { get; set; }
public override bool Execute()
{
if (string.IsNullOrEmpty(Assemblies))
throw new ArgumentNullException("Assemblies");
if (string.IsNullOrEmpty(FxCopProject))
throw new ArgumentNullException("FxCopProject");
if (string.IsNullOrEmpty(BinSuffix))
throw new ArgumentNullException("BinSuffix");
Assemblies = Path.GetFullPath(Assemblies);
Log.LogMessage("Assemblies = " + Assemblies);
FxCopProject = Path.GetFullPath(FxCopProject);
Log.LogMessage("FxCopProject = " + FxCopProject);
Log.LogMessage("Updating project...");
FxCop.UpdateFxCopProject(Assemblies, FxCopProject, BinSuffix);
Log.LogMessage("Project updated");
return true;
}
}
public static class FxCop
{
public static void UpdateFxCopProject(string assembliesFolder, string fxCopProjectPath, string binSuffix)
{
var allAssemblies = Directory.GetFiles(assembliesFolder, "*.dll").ToList();
var doc = new XmlDocument();
doc.Load(fxCopProjectPath);
var referenceDirectoriesNode = doc.SelectSingleNode(AssemblyReferenceDirectoriesXpath);
var targetsNode = doc.SelectSingleNode(TargetsXpath);
RemoveAllNodes(doc, targetsNode, TargetXpath);
ResetReferenceDirectories(doc, referenceDirectoriesNode, DirectoriesXpath);
foreach (var assembly in allAssemblies)
{
var assemblyName = Path.GetFileName(assembly).ToLower();
var assemblyFolderName = assemblyName.Split('.')[1];
var newTarget = AddChildNode(targetsNode, "Target");
AddAttribute(newTarget, "Name", MakeRelativePath(assembly));
AddAttribute(newTarget, "Analyze", "True");
var dirNode = AddChildNode(referenceDirectoriesNode, "Directory");
/*
<Target Name="$(ProjectDir)/Deployment/assemblies/net35/AWSSDK.SyncManager.dll" Analyze="True" AnalyzeAllChildren="True" />
*/
AddAttribute(newTarget, "AnalyzeAllChildren", "True");
// Add assembly reference directory for each service
// <Directory>$(ProjectDir)/src/bin/Release/net35/</Directory>
dirNode.InnerText = string.Format("$(ProjectDir)/src/bin/Release/{0}/", binSuffix);
}
doc.Save(fxCopProjectPath);
}
public static HashSet<string> NamespacePrefixesToSkip = new HashSet<string>(StringComparer.Ordinal)
{
"ThirdParty.BouncyCastle",
"ThirdParty.Ionic.Zlib",
"ThirdParty.Json",
};
public const string NamespacesXpath = "FxCopProject/Targets/Target/Modules/Module/Namespaces";
public const string TargetsXpath = "FxCopProject/Targets";
public const string AssemblyReferenceDirectoriesXpath = "FxCopProject/Targets/AssemblyReferenceDirectories";
public const string DirectoriesXpath = "FxCopProject/Targets/AssemblyReferenceDirectories/Directory";
public const string TargetXpath = "FxCopProject/Targets/Target";
public const string CoreAssemblyName = "AWSSDK.Core.dll";
public const string DeploymentPath = @"Deployment\assemblies";
public const string ProjectDirRelative = @"$(ProjectDir)\..\";
public static IEnumerable<string> GetNamespacesToExamine(Assembly assembly)
{
HashSet<string> namespaces = new HashSet<string>(StringComparer.Ordinal);
var allTypes = assembly.GetTypes().ToList();
foreach (var type in allTypes)
{
var ns = type.Namespace;
if (ShouldSkip(ns))
continue;
namespaces.Add(ns);
}
return namespaces;
}
private static bool ShouldSkip(string ns)
{
if (ns == null)
return false;
foreach (var toSkip in NamespacePrefixesToSkip)
if (ns.StartsWith(toSkip, StringComparison.Ordinal))
return true;
return false;
}
private static string MakeRelativePath(string assemblyPath)
{
var fullPath = Path.GetFullPath(assemblyPath);
var deploymentIndex = fullPath.IndexOf(DeploymentPath, StringComparison.OrdinalIgnoreCase);
var partialPath = fullPath.Substring(deploymentIndex);
var relativePath = string.Concat(ProjectDirRelative, partialPath);
return relativePath;
}
private static void AddAttribute(XmlNode node, string name, string value)
{
var doc = node.OwnerDocument;
var attribute = doc.CreateAttribute(name);
attribute.Value = value;
node.Attributes.Append(attribute);
}
private static XmlNode AddChildNode(XmlNode parent, string name)
{
var doc = parent.OwnerDocument;
var node = doc.CreateElement(name);
parent.AppendChild(node);
return node;
}
private static void RemoveAllNodes(XmlDocument doc, XmlNode targetsNode, string xpath)
{
var matchingNodes = doc.SelectNodes(xpath);
foreach (XmlNode node in matchingNodes)
targetsNode.RemoveChild(node);
}
private static void ResetReferenceDirectories(XmlDocument doc,
XmlNode referenceDirectoriesNode, string xpath)
{
RemoveAllNodes(doc, referenceDirectoriesNode, xpath);
}
}
}
| 162 |
amazon-cognito-dotnet | aws | C# | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("CustomTasks")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("CustomTasks")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("cf793704-9436-41b2-8a59-fc90e70e13da")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| 37 |
amazon-cognito-sync-manager-net | aws | C# | //
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
using System;
using System.Collections.Generic;
using Amazon.Runtime;
using Amazon.CognitoSync.SyncManager.Internal;
using Amazon.CognitoIdentity;
using Logger = Amazon.Runtime.Internal.Util.Logger;
using Amazon.Util.Internal;
#if BCL45
using System.Threading.Tasks;
#endif
using System.Threading;
namespace Amazon.CognitoSync.SyncManager
{
    /// <summary>
/// The Cognito Sync Manager allows your application to store data
/// in the cloud for your users and synchronize across other devices. The library
    /// uses SQLite for the local storage API and defaults to in-memory storage where SQLite
/// is not available to create a local cache for the data, similar to our SDK.
/// This allows your application to access stored data even when there is no connectivity.
/// <code>
/// CognitoAWSCredentials credentials = new CognitoAWSCredentials("identityPoolId","RegionEndpoint")
/// //using default region from your app.config or awsconfig.xml
/// CognitoSyncManager cognitoSyncManager = new CognitoSyncManager(credentials);
/// // creating a dataset
/// Dataset playerInfo = cognitoSyncManager.OpenOrCreateDataset("playerInfo");
/// // add some values into your dataset
/// playerInfo.Put("high_score", "90");
/// playerInfo.Put("name", "John");
/// // push changes to remote if needed
/// playerInfo.Synchronize();
/// </code>
/// Note: Some platforms may only expose async methods such as SynchronizeAsync().
/// </summary>
/// <seealso href="http://docs.aws.amazon.com/cognito/latest/developerguide/synchronizing-data.html">Amazon Cognito Sync Dev. Guide - Synchronizing Data</seealso>
public partial class CognitoSyncManager : IDisposable
{
private Logger _logger;
private bool _disposed;
private readonly ILocalStorage Local;
private readonly CognitoSyncStorage Remote;
private readonly CognitoAWSCredentials CognitoCredentials;
#region Constructor
/// <summary>
/// Creates an instance of CognitoSyncManager using Cognito Credentials, the region is picked up from the config if it available
/// <code>
/// CognitoSyncManager cognitoSyncManager = new CognitoSyncManager(credentials)
/// </code>
/// </summary>
/// <param name="cognitoCredentials"><see cref="Amazon.CognitoIdentity.CognitoAWSCredentials"/></param>
/// <seealso href="http://docs.aws.amazon.com/cognito/latest/developerguide/synchronizing-data.html#initializing-client">Amazon Cognito Sync Dev. Guide - Initializing Client</seealso>
public CognitoSyncManager(CognitoAWSCredentials cognitoCredentials) : this(cognitoCredentials, new AmazonCognitoSyncConfig()) { }
/// <summary>
/// Creates an instance of CognitoSyncManager using cognito credentials and a specific region
/// <code>
/// CognitoSyncManager cognitoSyncManager = new CognitoSyncManager(credentials, RegionEndpoint.USEAST1)
/// </code>
/// </summary>
/// <param name="cognitoCredentials"><see cref="Amazon.CognitoIdentity.CognitoAWSCredentials"/></param>
/// <param name="endpoint"><see cref="Amazon.RegionEndpoint"/></param>
/// <seealso href="http://docs.aws.amazon.com/cognito/latest/developerguide/synchronizing-data.html#initializing-client">Amazon Cognito Sync Dev. Guide - Initializing Client</seealso>
public CognitoSyncManager(CognitoAWSCredentials cognitoCredentials, RegionEndpoint endpoint)
: this(cognitoCredentials, new AmazonCognitoSyncConfig
{
RegionEndpoint = endpoint
})
{ }
/// <summary>
/// Creates an instance of CognitoSyncManager using cognito credentials and a configuration object
/// <code>
/// CognitoSyncManager cognitoSyncManager = new CognitoSyncManager(credentials,new AmazonCognitoSyncConfig { RegionEndpoint = RegionEndpoint.USEAST1})
/// </code>
/// </summary>
/// <param name="cognitoCredentials"><see cref="Amazon.CognitoIdentity.CognitoAWSCredentials"/></param>
/// <param name="config"><see cref="Amazon.CognitoSync.AmazonCognitoSyncConfig"/></param>
/// <seealso href="http://docs.aws.amazon.com/cognito/latest/developerguide/synchronizing-data.html#initializing-client">Amazon Cognito Sync Dev. Guide - Initializing Client</seealso>
public CognitoSyncManager(CognitoAWSCredentials cognitoCredentials, AmazonCognitoSyncConfig config)
{
if (cognitoCredentials == null)
{
throw new ArgumentNullException("cognitoCredentials");
}
#if BCL
ValidateParameters();
#endif
this.CognitoCredentials = cognitoCredentials;
Local = new SQLiteLocalStorage();
Remote = new CognitoSyncStorage(CognitoCredentials, config);
cognitoCredentials.IdentityChangedEvent += this.IdentityChanged;
_logger = Logger.GetLogger(this.GetType());
}
#endregion
#region Dispose Methods
/// <summary>
/// Releases the resources consumed by this object
/// </summary>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
/// <summary>
/// Releases the resources consumed by this object if disposing is true.
/// </summary>
protected virtual void Dispose(bool disposing)
{
if (_disposed)
return;
if (disposing)
{
Remote.Dispose();
Local.Dispose();
CognitoCredentials.IdentityChangedEvent -= this.IdentityChanged;
_disposed = true;
}
}
#endregion
#region Public Methods
/// <summary>
/// Opens or creates a dataset. If the dataset doesn't exist, an empty one
/// with the given name will be created. Otherwise, the dataset is loaded from
/// local storage. If a dataset is marked as deleted but hasn't been deleted
/// on remote via the RefreshDatasetMetadata operation, it will throw
/// <see cref="System.InvalidOperationException"/>.
/// <code>
/// Dataset dataset = cognitoSyncManager.OpenOrCreateDataset("myDatasetName");
/// </code>
/// </summary>
/// <returns>Dataset loaded from local storage</returns>
/// <param name="datasetName">DatasetName, must be [a-zA-Z0=9_.:-]+</param>
/// <seealso href="http://docs.aws.amazon.com/cognito/latest/developerguide/synchronizing-data.html#understanding-datasets">Amazon Cognito Sync Dev. Guide - Understanding Datasets</seealso>
public Dataset OpenOrCreateDataset(string datasetName)
{
DatasetUtils.ValidateDatasetName(datasetName);
Local.CreateDataset(IdentityId, datasetName);
return new Dataset(datasetName, CognitoCredentials, Local, Remote);
}
/// <summary>
/// Retrieves a list of datasets from local storage. It may not reflect the
/// latest dataset on the remote storage until the RefreshDatasetMetadata
/// operation is performed.
/// </summary>
/// <returns>List of datasets</returns>
public List<DatasetMetadata> ListDatasets()
{
return Local.GetDatasetMetadata(IdentityId);
}
/// <summary>
/// Wipes all user data cached locally, including dataset metadata, and all records,
/// and optionally identity id and session credentials. Any data that hasn't been
/// synced will be lost. This method is usually used when customer logs out.
/// <param name="wipeCredentialsAndID">Wipe Credentials and IdentityId.</param>
/// </summary>
public void WipeData(bool wipeCredentialsAndID)
{
Local.WipeData();
_logger.InfoFormat("All data has been wiped");
if (wipeCredentialsAndID)
{
CognitoCredentials.Clear();
_logger.InfoFormat("All datasets and records have been wiped");
}
else
{
_logger.InfoFormat("All data has been wiped");
}
}
/// <summary>
/// Wipes all user data cached locally, including identity id, session
/// credentials, dataset metadata, and all records. Any data that hasn't been
/// synced will be lost. This method is usually used when customer logs out.
/// </summary>
public void WipeData()
{
WipeData(true);
}
#endregion
#region Protected Methods
/// <summary>
/// This is triggered when an Identity Change event occurs.
/// The dataset are then remapped to the new identity id.
/// This may happend for example when a user is working with
/// unauthenticated id and later decides to authenticate
/// himself with a public login provider
/// </summary>
/// <param name="sender">The object which triggered this methos</param>
/// <param name="e">Event Arguments</param>
protected void IdentityChanged(object sender, EventArgs e)
{
Amazon.CognitoIdentity.CognitoAWSCredentials.IdentityChangedArgs identityChangedEvent = e as Amazon.CognitoIdentity.CognitoAWSCredentials.IdentityChangedArgs;
if (identityChangedEvent.NewIdentityId != null)
{
String oldIdentity = string.IsNullOrEmpty(identityChangedEvent.OldIdentityId) ? DatasetUtils.UNKNOWN_IDENTITY_ID : identityChangedEvent.OldIdentityId;
String newIdentity = identityChangedEvent.NewIdentityId;
_logger.InfoFormat("Identity changed from {0} to {1}", oldIdentity, newIdentity);
Local.ChangeIdentityId(oldIdentity, newIdentity);
}
}
/// <summary>
/// Returns the IdentityId, if the application is not online then an
/// Unknown Identity Will be returned
/// </summary>
/// <returns>Identity ID</returns>
protected string IdentityId
{
get
{
return DatasetUtils.GetIdentityId(CognitoCredentials);
}
}
#endregion
#region private methods
#if BCL
static void ValidateParameters()
{
if (string.IsNullOrEmpty(AWSConfigs.ApplicationName))
{
throw new ArgumentException("A valid application name needs to configured to use this API." +
"The application name can be configured through app.config or by setting the Amazon.AWSConfigs.ApplicationName property.");
}
}
#endif
#endregion
}
}
| 264 |
amazon-cognito-sync-manager-net | aws | C# | //
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
using Amazon.CognitoIdentity;
using Amazon.CognitoSync.SyncManager.Internal;
using Amazon.Runtime.Internal;
using Amazon.Runtime.Internal.Util;
using Logger = Amazon.Runtime.Internal.Util.Logger;
using Amazon.Util.Internal.PlatformServices;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Globalization;
#if BCL45
using System.Threading.Tasks;
using System.Runtime.ExceptionServices;
#endif
namespace Amazon.CognitoSync.SyncManager
{
/// <summary>
/// Dataset is the container of <see cref="Amazon.CognitoSync.SyncManager.Record"/>s. It can have up to 1k
/// <see cref="Amazon.CognitoSync.SyncManager.Record"/> or 1 MB in size. A typical use of Dataset is the
/// following.
///
/// <code>
/// //open or create dataset
/// Dataset dataset = cognitoSyncManager.OpenOrCreateDataset("new dataset");
/// // synchronize. It pulls down latest changes from remote storage
/// // and push local changes to remote storage
/// dataset.Synchronize();
/// // reads value
/// String highScore = dataset.Get("high_score");
/// String name = dataset.Get("name");
/// // sets value
/// dataset.Put("high_score", "90");
/// dataset.Put("name", "John");
/// // push changes to remote if needed
/// dataset.Synchronize();
/// </code>
/// </summary>
/// <seealso href="http://docs.aws.amazon.com/cognito/latest/developerguide/synchronizing-data.html#understanding-datasets">Amazon Cognito Sync Dev. Guide - Understanding Datasets</seealso>
public partial class Dataset : IDisposable
{
    /// <summary>
    /// Max number of retries during synchronize before it gives up.
    /// </summary>
    private const int MAX_RETRY = 3;

    /// <summary>
    /// The name of the dataset
    /// </summary>
    protected string DatasetName
    {
        get
        {
            return this._datasetName;
        }
    }

    private string _datasetName;

    /// <summary>
    /// Instance of <see cref="Amazon.CognitoSync.SyncManager.ILocalStorage"/>
    /// </summary>
    protected ILocalStorage Local
    {
        get
        {
            return this._local;
        }
    }

    private ILocalStorage _local;
    private CognitoSyncStorage _remote;

    /// <summary>
    /// Instance of <see cref="Amazon.CognitoSync.SyncManager.Internal.CognitoSyncStorage"/>
    /// </summary>
    protected CognitoSyncStorage Remote
    {
        get
        {
            return this._remote;
        }
    }

    /// <summary>
    /// Instance of <see cref="Amazon.CognitoIdentity.CognitoAWSCredentials"/>
    /// </summary>
    protected CognitoAWSCredentials CognitoCredentials
    {
        get
        {
            return this._cognitoCredentials;
        }
    }

    private CognitoAWSCredentials _cognitoCredentials;

    // NOTE(review): not read or written in this part of the partial class —
    // presumably used by a platform-specific partial; confirm before removing.
    private Boolean waitingForConnectivity = false;
    private bool _disposed;
    private Logger _logger;

    #region constructor
    /// <summary>
    /// Creates a new Dataset
    /// </summary>
    /// <param name="datasetName">The name of the dataset</param>
    /// <param name="cognitoCredentials">The Cognito Credentials associated with the dataset</param>
    /// <param name="local">Local storage, can be InMemoryStorage or SQLiteStorage or Some Custom Storage Class which implements <see cref="Amazon.CognitoSync.SyncManager.ILocalStorage"/></param>
    /// <param name="remote">Remote storage</param>
    internal Dataset(string datasetName, CognitoAWSCredentials cognitoCredentials, ILocalStorage local, CognitoSyncStorage remote)
    {
        this._datasetName = datasetName;
        this._cognitoCredentials = cognitoCredentials;
        this._local = local;
        this._remote = remote;
        _logger = Logger.GetLogger(this.GetType());
        // Additional setup implemented in another part of this partial class.
        DatasetSetupInternal();
    }
    #endregion

    #region dispose methods
    /// <summary>
    /// Releases the resources consumed by this object
    /// </summary>
    public void Dispose()
    {
        // Dispose(bool) is implemented in another part of this partial class.
        Dispose(true);
        GC.SuppressFinalize(this);
    }
    #endregion

    #region public methods
    /// <summary>
    /// Retrieves the associated <see cref="Amazon.CognitoSync.SyncManager.DatasetMetadata"/> from local storage.
    /// </summary>
    /// <value>The metadata for the Dataset.</value>
    public DatasetMetadata Metadata
    {
        get
        {
            return Local.GetDatasetMetadata(IdentityId, DatasetName);
        }
    }

    // Names of local datasets that were merged into this one during an identity
    // merge; they are stored as "<DatasetName>.<suffix>".
    internal List<string> LocalMergedDatasets
    {
        get
        {
            List<string> mergedDatasets = new List<string>();
            string prefix = DatasetName + ".";
            foreach (DatasetMetadata dataset in Local.GetDatasetMetadata(IdentityId))
            {
                if (dataset.DatasetName.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
                {
                    mergedDatasets.Add(dataset.DatasetName);
                }
            }
            return mergedDatasets;
        }
    }

    /// <summary>
    /// Delete this <see cref="Amazon.CognitoSync.SyncManager.Dataset"/>. You cannot do any more operations
    /// on this dataset.
    /// </summary>
    public void Delete()
    {
        Local.DeleteDataset(IdentityId, DatasetName);
    }

    /// <summary>
    /// Gets the value of a <see cref=
    /// "Record"/> with the given key. If the
    /// <see cref="Amazon.CognitoSync.SyncManager.Record"/> doesn't exist or is marked deleted, null will be returned.
    /// </summary>
    /// <param name="key">Key of the record in the dataset.</param>
    /// <seealso href="http://docs.aws.amazon.com/cognito/latest/developerguide/synchronizing-data.html#reading-and-writing-data">Amazon Cognito Sync Dev. Guide - Reading and Writing Data</seealso>
    public string Get(string key)
    {
        return Local.GetValue(IdentityId, DatasetName,
                              DatasetUtils.ValidateRecordKey(key));
    }

    /// <summary>
    /// Gets the <see cref="Amazon.CognitoSync.SyncManager.Record"/> with the given key. If the
    /// <see cref="Amazon.CognitoSync.SyncManager.Record"/> doesn't exist or is marked deleted, null will be returned.
    /// </summary>
    /// <param name="key">Key of the record in the dataset.</param>
    public Record GetRecord(string key)
    {
        return Local.GetRecord(IdentityId, DatasetName,
                               DatasetUtils.ValidateRecordKey(key));
    }

    /// <summary>
    /// Gets the Key/Value representation of all records of this dataset. Records
    /// marked as deleted are excluded.
    /// </summary>
    /// <returns>Key/Value representation of all records, excluding deleted ones</returns>
    public IDictionary<string, string> ActiveRecords
    {
        get
        {
            IDictionary<string, string> map = new Dictionary<string, string>();
            foreach (Record record in Local.GetRecords(IdentityId, DatasetName))
            {
                if (!record.IsDeleted)
                {
                    map.Add(record.Key, record.Value);
                }
            }
            return map;
        }
    }

    /// <summary>
    /// Retrieves all raw records, including those marked as deleted, from local storage.
    /// </summary>
    /// <returns>List of all raw records</returns>
    public IList<Record> Records
    {
        get
        {
            return Local.GetRecords(IdentityId, DatasetName);
        }
    }

    /// <summary>
    /// Gets the size of a record with the given key. If the key is deleted, -1
    /// will be returned.
    /// The size is calculated as sum of UTF-8 string length of record key and value
    /// </summary>
    /// <returns>The size in bytes.</returns>
    /// <param name="key">The key of a record</param>
    public long GetSizeInBytes(string key)
    {
        return DatasetUtils.ComputeRecordSize(Local.GetRecord(IdentityId,
            DatasetName, DatasetUtils.ValidateRecordKey(key)));
    }

    /// <summary>
    /// Gets the total size in bytes of this dataset. Records that are marked as
    /// deleted don't contribute to the total size.
    /// The size is calculated as sum of UTF-8 string length of key and value for all the records
    /// </summary>
    /// <returns>The total size in bytes</returns>
    public long TotalSizeInBytes
    {
        get
        {
            long size = 0;
            foreach (Record record in Local.GetRecords(IdentityId, DatasetName))
            {
                size += DatasetUtils.ComputeRecordSize(record);
            }
            return size;
        }
    }

    /// <summary>
    /// Retrieves the status of a record.
    /// </summary>
    /// <returns><c>true</c> if it is modified locally; otherwise, <c>false</c>.</returns>
    /// <param name="key">Key identifying a record</param>
    public bool IsModified(string key)
    {
        Record record = Local.GetRecord(IdentityId, DatasetName,
                                        DatasetUtils.ValidateRecordKey(key));
        return (record != null && record.IsModified);
    }

    /// <summary>
    /// Puts a <see cref="Amazon.CognitoSync.SyncManager.Record"/> with the given key and value into the
    /// Dataset. If a <see cref="Amazon.CognitoSync.SyncManager.Record"/> with the same key exists, its value
    /// will be overwritten. If a <see cref="Amazon.CognitoSync.SyncManager.Record"/> is marked as deleted previously,
    /// then it will be resurrected with new value while the sync count continues
    /// with previous value. No matter whether the value changes or not, the
    /// record is considered as updated, and it will be written to Cognito Sync
    /// service on next synchronize operation. If value is null, a
    /// ArgumentNullException will be thrown.
    /// </summary>
    /// <param name="key">Key of the record</param>
    /// <param name="value">String value of a <see cref="Amazon.CognitoSync.SyncManager.Record"/> to be put into the
    /// <see cref="Amazon.CognitoSync.SyncManager.Dataset"/></param>
    /// <seealso href="http://docs.aws.amazon.com/cognito/latest/developerguide/synchronizing-data.html#reading-and-writing-data">Amazon Cognito Sync Dev. Guide - Reading and Writing Data</seealso>
    public void Put(string key, string value)
    {
        Local.PutValue(IdentityId, DatasetName,
                       DatasetUtils.ValidateRecordKey(key), value);
    }

    /// <summary>
    /// Populates a dataset with a dictionary of key/value pairs
    /// </summary>
    /// <param name="values">An IDictionary of key/value pairs</param>
    public void PutAll(IDictionary<string, string> values)
    {
        // Validate every key up front so a bad key does not leave a partial write.
        foreach (string key in values.Keys)
        {
            DatasetUtils.ValidateRecordKey(key);
        }
        Local.PutAllValues(IdentityId, DatasetName, values);
    }

    /// <summary>
    /// Marks a <see cref="Amazon.CognitoSync.SyncManager.Record"/> with the given key as deleted. Nothing happens if
    /// the <see cref="Amazon.CognitoSync.SyncManager.Record"/> doesn't exist or is deleted already.
    /// </summary>
    /// <param name="key">Key identifying the Record</param>
    /// <seealso href="http://docs.aws.amazon.com/cognito/latest/developerguide/synchronizing-data.html#reading-and-writing-data">Amazon Cognito Sync Dev. Guide - Reading and Writing Data</seealso>
    public void Remove(string key)
    {
        // A null value marks the record as deleted in local storage.
        Local.PutValue(IdentityId, DatasetName, DatasetUtils.ValidateRecordKey(key), null);
    }

    /// <summary>
    /// Saves resolved conflicting <see cref="Amazon.CognitoSync.SyncManager.Record" /> into local storage. This is
    /// used inside <see cref="SyncConflictDelegate"/> after you
    /// resolve all conflicts.
    /// </summary>
    /// <param name="remoteRecords">A list of records to save into local storage</param>
    public void Resolve(List<Record> remoteRecords)
    {
        Local.PutRecords(IdentityId, DatasetName, remoteRecords);
    }
    #endregion

    #region helper methods

    // Simple re-entrancy guards for synchronization; the code that sets
    // 'locked'/'queuedSync' to true lives in another part of this partial class.
    bool locked = false;
    bool queuedSync = false;

    // Releases the sync lock and clears any queued follow-up sync request.
    private void EndSynchronizeAndCleanup()
    {
        locked = false;
        if (queuedSync)
        {
            queuedSync = false;
        }
    }

    // Core sync state machine: push local delete, pull remote updates, resolve
    // conflicts, then push local changes. 'retry' is decremented on each
    // recursive attempt; callers start with MAX_RETRY.
#if BCL35
    private void RunSyncOperation(int retry)
#else
    private async Task RunSyncOperationAsync(int retry, CancellationToken cancellationToken)
#endif
    {
        long lastSyncCount = Local.GetLastSyncCount(IdentityId, DatasetName);
#if !(BCL35)
        // Holds a DataConflictException so the retry can happen outside the
        // catch block ('await' is not allowed inside a catch in C# 5).
        ExceptionDispatchInfo capturedException = null;
#endif
        // if dataset is deleted locally, push it to remote
        if (lastSyncCount == -1)
        {
            try
            {
#if BCL35
                Remote.DeleteDataset(DatasetName);
#else
                await Remote.DeleteDatasetAsync(DatasetName, cancellationToken).ConfigureAwait(false);
#endif
            }
            catch (DatasetNotFoundException)
            {
                //Ignore the exception here, since the dataset was local only
            }
            catch (Exception e)
            {
                _logger.InfoFormat("{0} , dataset : {1}", e.Message, this.DatasetName);
                EndSynchronizeAndCleanup();
                FireSyncFailureEvent(e);
                return;
            }
            Local.PurgeDataset(IdentityId, DatasetName);
            _logger.InfoFormat("OnSyncSuccess: dataset delete is pushed to remote - {0}", this.DatasetName);
            EndSynchronizeAndCleanup();
            FireSyncSuccessEvent(new List<Record>());
            return;
        }
        // get latest modified records from remote
        _logger.InfoFormat("Get latest modified records since {0} for dataset {1}", lastSyncCount, this.DatasetName);
        DatasetUpdates datasetUpdates = null;
        try
        {
#if BCL35
            datasetUpdates = Remote.ListUpdates(DatasetName, lastSyncCount);
#else
            datasetUpdates = await Remote.ListUpdatesAsync(DatasetName, lastSyncCount, cancellationToken).ConfigureAwait(false);
#endif
        }
        catch (Exception listUpdatesException)
        {
            _logger.Error(listUpdatesException, string.Empty);
            EndSynchronizeAndCleanup();
            FireSyncFailureEvent(listUpdatesException);
            return;
        }
        // Remote reported merged datasets: let the app reconcile, then retry.
        if (datasetUpdates != null && datasetUpdates.MergedDatasetNameList.Count != 0 && this.OnDatasetMerged != null)
        {
            bool resume = this.OnDatasetMerged(this, datasetUpdates.MergedDatasetNameList);
            if (resume)
            {
                if (retry == 0)
                {
                    EndSynchronizeAndCleanup();
                    FireSyncFailureEvent(new SyncManagerException("Out of retries"));
                }
                else
                {
#if BCL35
                    this.RunSyncOperation(--retry);
#else
                    await this.RunSyncOperationAsync(--retry, cancellationToken).ConfigureAwait(false);
#endif
                }
                return;
            }
            else
            {
                _logger.InfoFormat("OnSyncFailure: Manual Cancel");
                EndSynchronizeAndCleanup();
                FireSyncFailureEvent(new SyncManagerException("Manual cancel"));
                return;
            }
        }
        // if the dataset doesn't exist or is deleted, trigger onDelete
        // NOTE(review): parses as (lastSyncCount != 0 && !Exists) || (Deleted && OnDatasetDeleted != null).
        // If the first disjunct is true while OnDatasetDeleted is null, the call
        // below throws NullReferenceException — confirm whether that is intended.
        if (lastSyncCount != 0 && !datasetUpdates.Exists
            || datasetUpdates.Deleted && this.OnDatasetDeleted != null)
        {
            bool resume = this.OnDatasetDeleted(this);
            if (resume)
            {
                // remove both records and metadata
                Local.DeleteDataset(IdentityId, DatasetName);
                Local.PurgeDataset(IdentityId, DatasetName);
                _logger.InfoFormat("OnSyncSuccess");
                EndSynchronizeAndCleanup();
                FireSyncSuccessEvent(new List<Record>());
                return;
            }
            else
            {
                _logger.InfoFormat("OnSyncFailure");
                EndSynchronizeAndCleanup();
                FireSyncFailureEvent(new SyncManagerException("Manual cancel"));
                return;
            }
        }
        lastSyncCount = datasetUpdates.SyncCount;
        List<Record> remoteRecords = datasetUpdates.Records;
        if (remoteRecords.Count != 0)
        {
            // if conflict, prompt developer/user with callback
            List<SyncConflict> conflicts = new List<SyncConflict>();
            List<Record> conflictRecords = new List<Record>();
            foreach (Record remoteRecord in remoteRecords)
            {
                Record localRecord = Local.GetRecord(IdentityId,
                                                     DatasetName,
                                                     remoteRecord.Key);
                // only when local is changed and its value is different
                if (localRecord != null && localRecord.IsModified
                    && !StringUtils.Equals(localRecord.Value, remoteRecord.Value))
                {
                    conflicts.Add(new SyncConflict(remoteRecord, localRecord));
                    conflictRecords.Add(remoteRecord);
                }
            }
            // retaining only non-conflict records
            remoteRecords.RemoveAll(t => conflictRecords.Contains(t));
            if (conflicts.Count > 0)
            {
                _logger.InfoFormat("{0} records in conflict!", conflicts.Count);
                bool syncConflictResult = false;
                if (this.OnSyncConflict == null)
                {
                    // delegate is not implemented so the default conflict resolution is applied
                    syncConflictResult = this.ResolveConflictsWithDefaultPolicy(conflicts);
                }
                else
                {
                    syncConflictResult = this.OnSyncConflict(this, conflicts);
                }
                if (!syncConflictResult)
                {
                    _logger.InfoFormat("User cancelled conflict resolution");
                    EndSynchronizeAndCleanup();
                    FireSyncFailureEvent(new OperationCanceledException("User cancelled conflict resolution"));
                    return;
                }
            }
            // save to local
            if (remoteRecords.Count > 0)
            {
                _logger.InfoFormat("Save {0} records to local", remoteRecords.Count);
                Local.PutRecords(IdentityId, DatasetName, remoteRecords);
            }
            // new last sync count
            _logger.InfoFormat("Updated sync count {0}", datasetUpdates.SyncCount);
            Local.UpdateLastSyncCount(IdentityId, DatasetName,
                                      datasetUpdates.SyncCount);
        }
        // push changes to remote
        List<Record> localChanges = this.ModifiedRecords;
        long minPatchSyncCount = lastSyncCount;
        foreach (Record r in localChanges)
        {
            // track the minimum sync count among dirty records
            // (the original comment said "max" but the code computes the min)
            if (r.SyncCount < minPatchSyncCount)
            {
                minPatchSyncCount = r.SyncCount;
            }
        }
        if (localChanges.Count != 0)
        {
            _logger.InfoFormat("Push {0} records to remote", localChanges.Count);
            try
            {
#if BCL35
                List<Record> result = Remote.PutRecords(DatasetName, localChanges, datasetUpdates.SyncSessionToken);
#else
                List<Record> result = await Remote.PutRecordsAsync(DatasetName, localChanges, datasetUpdates.SyncSessionToken, cancellationToken).ConfigureAwait(false);
#endif
                // update local meta data
                Local.ConditionallyPutRecords(IdentityId, DatasetName, result, localChanges);
                // verify the server sync count is increased exactly by one, aka no
                // other updates were made during this update.
                long newSyncCount = 0;
                foreach (Record record in result)
                {
                    newSyncCount = newSyncCount < record.SyncCount
                        ? record.SyncCount
                        : newSyncCount;
                }
                if (newSyncCount == lastSyncCount + 1)
                {
                    _logger.InfoFormat("Updated sync count {0}", newSyncCount);
                    Local.UpdateLastSyncCount(IdentityId, DatasetName,
                                              newSyncCount);
                }
                _logger.InfoFormat("OnSyncSuccess");
                EndSynchronizeAndCleanup();
                FireSyncSuccessEvent(remoteRecords);
                return;
            }
            // NOTE: the braces of the following catch blocks intentionally straddle
            // the #if BCL35/#else boundary — do not reformat across the directives.
            catch (DataConflictException e)
            {
                _logger.InfoFormat("Conflicts detected when pushing changes to remote: {0}", e.Message);
                if (retry == 0)
                {
                    EndSynchronizeAndCleanup();
                    FireSyncFailureEvent(e);
                }
                else
                {
                    //it's possible there is a local dirty record with a stale sync count this will fix it
                    if (lastSyncCount > minPatchSyncCount)
                    {
                        Local.UpdateLastSyncCount(IdentityId, DatasetName, minPatchSyncCount);
                    }
#if BCL35
                    RunSyncOperation(--retry);
                }
                return;
            }
#else
                    capturedException = ExceptionDispatchInfo.Capture(e);
                }
            }
#endif
            catch (Exception e)
            {
                _logger.InfoFormat("OnSyncFailure {0}", e.Message);
                EndSynchronizeAndCleanup();
                FireSyncFailureEvent(e);
                return;
            }
#if !(BCL35)
            // Retry outside the catch block (await was captured above).
            // NOTE(review): the 'if' is unbraced, so 'return' runs unconditionally;
            // harmless here because the method would return right after the retry anyway.
            if (capturedException != null)
                await RunSyncOperationAsync(--retry, cancellationToken).ConfigureAwait(false);
            return;
#endif
        }
        else
        {
            // Nothing to push; the pull (if any) already succeeded.
            _logger.InfoFormat("OnSyncSuccess");
            EndSynchronizeAndCleanup();
            FireSyncSuccessEvent(remoteRecords);
            return;
        }
    }

    // Entry point for a synchronize pass: refresh the identity id, let the app
    // reconcile any locally-merged datasets, then run the sync state machine.
    // NOTE(review): "Synchornize" is a long-standing misspelling; private, so
    // renaming is safe only if all partial-class call sites are updated together.
#if BCL35
    private void SynchornizeInternal()
#else
    private async Task SynchornizeInternalAsync(CancellationToken cancellationToken)
#endif
    {
        //make sure we have the latest identity id
        try
        {
#if BCL35
            CognitoCredentials.GetIdentityId();
#else
            await CognitoCredentials.GetIdentityIdAsync().ConfigureAwait(false);
#endif
            bool resume = true;
            List<string> mergedDatasets = LocalMergedDatasets;
            if (mergedDatasets.Count > 0)
            {
                _logger.InfoFormat("Detected merge datasets - {0}", DatasetName);
                if (this.OnDatasetMerged != null)
                {
                    resume = this.OnDatasetMerged(this, mergedDatasets);
                }
            }
            if (!resume)
            {
                EndSynchronizeAndCleanup();
                FireSyncFailureEvent(new OperationCanceledException(string.Format(CultureInfo.InvariantCulture, "Sync canceled on merge for dataset - {0}", this.DatasetName)));
                return;
            }
#if BCL35
            RunSyncOperation(MAX_RETRY);
#else
            await RunSyncOperationAsync(MAX_RETRY, cancellationToken).ConfigureAwait(false);
#endif
        }
        catch (Exception e)
        {
            EndSynchronizeAndCleanup();
            FireSyncFailureEvent(e);
            _logger.Error(e, "");
        }
    }

    // Identity id of the current credentials; DatasetUtils returns a sentinel
    // "unknown" id when no id has been resolved yet.
    internal String IdentityId
    {
        get
        {
            return DatasetUtils.GetIdentityId(CognitoCredentials);
        }
    }

    // Records with local modifications that have not yet been pushed to remote.
    internal List<Record> ModifiedRecords
    {
        get
        {
            return Local.GetModifiedRecords(IdentityId, DatasetName);
        }
    }
    #endregion

    #region SynchronizeEvents

    private EventHandler<SyncSuccessEventArgs> mOnSyncSuccess;

    /// <summary>
    /// This is called after remote changes are downloaded to local storage
    /// and local changes are uploaded to remote storage. Updated records
    /// from remote storage are passed in the callback. If conflicts occur
    /// during synchronize and are resolved in <see cref="SyncConflictDelegate"/> after
    /// several retries, then updatedRecords will be what are pulled down
    /// from remote in the last retry.
    /// </summary>
    // NOTE(review): lock(this) is discouraged (external code can lock the same
    // object); kept as-is to preserve behavior.
    public event EventHandler<SyncSuccessEventArgs> OnSyncSuccess
    {
        add
        {
            lock (this)
            {
                mOnSyncSuccess += value;
            }
        }
        remove
        {
            lock (this)
            {
                mOnSyncSuccess -= value;
            }
        }
    }

    private EventHandler<SyncFailureEventArgs> mOnSyncFailure;

    /// <summary>
    /// This is called when an exception occurs during sync
    /// </summary>
    public event EventHandler<SyncFailureEventArgs> OnSyncFailure
    {
        add
        {
            lock (this)
            {
                mOnSyncFailure += value;
            }
        }
        remove
        {
            lock (this)
            {
                mOnSyncFailure -= value;
            }
        }
    }

    /// <summary>
    /// Fires a Sync Success Event
    /// </summary>
    /// <param name="records">List of records after successful sync</param>
    protected void FireSyncSuccessEvent(List<Record> records)
    {
        if (mOnSyncSuccess != null)
        {
            mOnSyncSuccess(this, new SyncSuccessEventArgs(records));
        }
    }

    /// <summary>
    /// Fires a Sync Failure event.
    /// </summary>
    /// <param name="exception">Exception object which caused the sync Failure</param>
    protected void FireSyncFailureEvent(Exception exception)
    {
        if (mOnSyncFailure != null)
        {
            mOnSyncFailure(this, new SyncFailureEventArgs(exception));
        }
    }
    #endregion

    #region SynchronizeDelegates
    /// <summary>
    /// Delegate which is invoked when a sync conflict occurs
    /// </summary>
    /// <param name="dataset">The data set which resulted in conflict</param>
    /// <param name="conflicts">List of Objects which have conflicts</param>
    /// <returns>true if you want to retry synchronization, else false</returns>
    public delegate bool SyncConflictDelegate(Dataset dataset, List<SyncConflict> conflicts);

    /// <summary>
    /// Delegate which is invoked when a data set is deleted
    /// </summary>
    /// <param name="dataset">The dataset which was deleted</param>
    /// <returns>true if you want to remove local dataset, or false if you want to
    /// keep it</returns>
    public delegate bool DatasetDeletedDelegate(Dataset dataset);

    /// <summary>
    /// Delegate which is invoked when a dataset is merged due to an identity merge
    /// </summary>
    /// <param name="dataset">The dataset which was merged, due to an identity merge</param>
    /// <param name="datasetNames">identity id's on which merge occurred</param>
    /// <returns></returns>
    public delegate bool DatasetMergedDelegate(Dataset dataset, List<string> datasetNames);

    /// <summary>
    /// This can be triggered during two phases. One is when the remote
    /// changes are about to be written to local storage. The other is when
    /// local changes are uploaded to remote storage and got rejected. Here
    /// is an example:
    ///
    /// <code>
    /// playerInfo.OnSyncConflict = this.HandleSyncConflict;
    ///
    /// private bool HandleSyncConflict(Dataset dataset, List&lt;SyncConflict&gt; conflicts)
    /// {
    ///     List&lt;Record&gt; resolved = new List&lt;Record&gt;();
    ///     for (SyncConflict conflict in conflicts)
    ///     {
    ///         resolved.add(conflicts.resolveWithRemoteRecord());
    ///     }
    ///     dataset.Resolve(resolved);
    ///     return true;
    /// }
    /// </code>
    /// </summary>
    public SyncConflictDelegate OnSyncConflict;

    /// <summary>
    /// This is triggered when the given dataset is deleted remotely. Return
    /// true if you want to remove local dataset, or false if you want to
    /// keep it.
    /// </summary>
    public DatasetDeletedDelegate OnDatasetDeleted;

    /// <summary>
    /// If two or more datasets are merged as a result of identity merge,
    /// this will be triggered. A list of names of merged datasets' is passed
    /// in. The merged dataset name will be appended with its old identity
    /// id. One can open the merged dataset, synchronize the content,
    /// reconcile with the current dataset, and remove it. This callback will
    /// fire off until the merged dataset is removed.
    /// <return></return>
    /// </summary>
    public DatasetMergedDelegate OnDatasetMerged;

    /// <summary>
    /// Clears all the delegates
    /// </summary>
    public void ClearAllDelegates()
    {
        if (mOnSyncSuccess != null)
            foreach (Delegate d in mOnSyncSuccess.GetInvocationList())
                OnSyncSuccess -= (EventHandler<SyncSuccessEventArgs>)d;
        if (mOnSyncFailure != null)
            foreach (Delegate d in mOnSyncFailure.GetInvocationList())
                OnSyncFailure -= (EventHandler<SyncFailureEventArgs>)d;
        if (OnSyncConflict != null)
            foreach (Delegate d in OnSyncConflict.GetInvocationList())
                OnSyncConflict -= (SyncConflictDelegate)d;
        if (OnDatasetDeleted != null)
            foreach (Delegate d in OnDatasetDeleted.GetInvocationList())
                OnDatasetDeleted -= (DatasetDeletedDelegate)d;
        if (OnDatasetMerged != null)
            foreach (Delegate d in OnDatasetMerged.GetInvocationList())
                OnDatasetMerged -= (DatasetMergedDelegate)d;
    }
    #endregion

    #region Default conflict resolution

    // Default "last writer wins" policy: keep the record with the newer
    // LastModifiedDate; on a tie, or when comparison is not greater, the
    // remote record wins. A missing remote record resolves to local.
    internal bool ResolveConflictsWithDefaultPolicy(List<SyncConflict> conflicts)
    {
        List<Record> resolvedRecords = new List<Record>();
        foreach (SyncConflict conflict in conflicts)
        {
            if (conflict.RemoteRecord == null || conflict.LocalRecord.LastModifiedDate.Value.CompareTo(conflict.RemoteRecord.LastModifiedDate.Value) > 0)
            {
                resolvedRecords.Add(conflict.ResolveWithLocalRecord());
            }
            else
            {
                resolvedRecords.Add(conflict.ResolveWithRemoteRecord());
            }
        }
        this.Resolve(resolvedRecords);
        return true;
    }
    #endregion
}
/// <summary>
/// Event arguments describing a successful synchronization pass.
/// </summary>
public class SyncSuccessEventArgs : EventArgs
{
    private readonly List<Record> _updatedRecords;

    /// <summary>
    /// The records pulled down from remote storage during the sync.
    /// </summary>
    public List<Record> UpdatedRecords
    {
        get { return _updatedRecords; }
    }

    internal SyncSuccessEventArgs(List<Record> updatedRecords)
    {
        _updatedRecords = updatedRecords;
    }
}
/// <summary>
/// Event arguments describing a failed synchronization pass.
/// </summary>
public class SyncFailureEventArgs : EventArgs
{
    private readonly Exception _exception;

    /// <summary>
    /// The exception that caused the sync failure.
    /// </summary>
    public Exception Exception
    {
        get { return _exception; }
    }

    internal SyncFailureEventArgs(Exception exception)
    {
        _exception = exception;
    }
}
}
| 914 |
amazon-cognito-sync-manager-net | aws | C# | /*
 * Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

using System.Diagnostics.CodeAnalysis;
[module: SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures", Scope="member", Target="Amazon.CognitoSync.SyncManager.CognitoSyncManager.#RefreshDatasetMetadataAsync(System.Threading.CancellationToken)")]
[module: SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures", Scope="member", Target="Amazon.CognitoSync.SyncManager.IRemoteDataStorage.#GetDatasetMetadataAsync(System.Threading.CancellationToken)")]
[module: SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures", Scope="member", Target="Amazon.CognitoSync.SyncManager.IRemoteDataStorage.#PutRecordsAsync(System.String,System.Collections.Generic.List`1<Amazon.CognitoSync.SyncManager.Record>,System.String,System.Threading.CancellationToken)")]
[module: SuppressMessage("Microsoft.Design", "CA1051:DoNotDeclareVisibleInstanceFields", Scope = "member", Target = "Amazon.CognitoSync.SyncManager.Dataset.#OnDatasetMerged")]
[module: SuppressMessage("Microsoft.Design", "CA1051:DoNotDeclareVisibleInstanceFields", Scope = "member", Target = "Amazon.CognitoSync.SyncManager.Dataset.#OnDatasetDeleted")]
[module: SuppressMessage("Microsoft.Design", "CA1051:DoNotDeclareVisibleInstanceFields", Scope = "member", Target = "Amazon.CognitoSync.SyncManager.Dataset.#OnSyncConflict")]
[module: SuppressMessage("Microsoft.Design", "CA1034:NestedTypesShouldNotBeVisible", Scope = "type", Target = "Amazon.CognitoSync.SyncManager.Dataset+DatasetDeletedDelegate")]
[module: SuppressMessage("Microsoft.Design", "CA1034:NestedTypesShouldNotBeVisible", Scope = "type", Target = "Amazon.CognitoSync.SyncManager.Dataset+DatasetMergedDelegate")]
[module: SuppressMessage("Microsoft.Design", "CA1034:NestedTypesShouldNotBeVisible", Scope = "type", Target = "Amazon.CognitoSync.SyncManager.Dataset+SyncConflictDelegate")]
[module: SuppressMessage("Microsoft.Design", "CA1030:UseEventsWhereAppropriate", Scope = "member", Target = "Amazon.CognitoSync.SyncManager.Dataset.#FireSyncFailureEvent(System.Exception)")]
[module: SuppressMessage("Microsoft.Design", "CA1030:UseEventsWhereAppropriate", Scope = "member", Target = "Amazon.CognitoSync.SyncManager.Dataset.#FireSyncSuccessEvent(System.Collections.Generic.List`1<Amazon.CognitoSync.SyncManager.Record>)")]
[module: SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures", Scope = "member", Target = "Amazon.CognitoSync.SyncManager.Internal.CognitoSyncStorage.#PutRecordsAsync(System.String,System.Collections.Generic.List`1<Amazon.CognitoSync.SyncManager.Record>,System.String,System.Threading.CancellationToken)")]
[module: SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures", Scope = "member", Target = "Amazon.CognitoSync.SyncManager.Internal.CognitoSyncStorage.#ListDatasetMetadataAsync(System.Threading.CancellationToken)")] | 40 |
amazon-cognito-sync-manager-net | aws | C# | //
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
using System;
namespace Amazon.CognitoSync.SyncManager
{
/// <summary>
/// This exception is thrown when an update fails due to conflicting states
/// </summary>
#if !PCL
[Serializable]
#endif
public class DataConflictException : DataStorageException
{
/// <summary>
/// Constructs a new DataConflictException with the specified message and an existing exception object
/// </summary>
public DataConflictException(string detailMessage, Exception ex)
: base(detailMessage, ex)
{
}
/// <summary>
/// Constructs a new DataConflictException with the specified message
/// </summary>
public DataConflictException(string detailMessage)
: base(detailMessage)
{
}
/// <summary>
/// Constructs a new DataConflictException with an existing exception object
/// </summary>
public DataConflictException(Exception ex)
: base(ex.Message, ex)
{
}
#if !PCL
/// <summary>
/// Constructs a new instance of the DataConflictException class with serialized data.
/// </summary>
/// <param name="info">The <see cref="T:System.Runtime.Serialization.SerializationInfo" /> that holds the serialized object data about the exception being thrown.</param>
/// <param name="context">The <see cref="T:System.Runtime.Serialization.StreamingContext" /> that contains contextual information about the source or destination.</param>
/// <exception cref="T:System.ArgumentNullException">The <paramref name="info" /> parameter is null. </exception>
/// <exception cref="T:System.Runtime.Serialization.SerializationException">The class name is null or <see cref="P:System.Exception.HResult" /> is zero (0). </exception>
protected DataConflictException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
: base(info, context)
{
}
#endif
}
}
| 58 |
amazon-cognito-sync-manager-net | aws | C# | //
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
using System;
namespace Amazon.CognitoSync.SyncManager
{
/// <summary>
/// This exception is thrown when the dataset operation exceeds certain limit,
/// e.g. maximum of 20 datasets per identity, 1024 records per dataset, and 1mb
/// in size per dataset, etc.
/// </summary>
#if !PCL
[Serializable]
#endif
public class DataLimitExceededException : DataStorageException
{
/// <summary>
/// Constructs a new DataLimitExceedException with the specified message and an existing exception object
/// </summary>
public DataLimitExceededException(string detailMessage, Exception ex)
: base(detailMessage, ex)
{
}
/// <summary>
/// Constructs a new DataLimitExceedException with the specified message
/// </summary>
public DataLimitExceededException(string detailMessage)
: base(detailMessage)
{
}
/// <summary>
/// Constructs a new DataLimitExceedException using an existing exception object
/// </summary>
public DataLimitExceededException(Exception ex)
: base(ex.Message, ex)
{
}
#if !PCL
/// <summary>
/// Constructs a new instance of the DataLimitExceededException class with serialized data.
/// </summary>
/// <param name="info">The <see cref="T:System.Runtime.Serialization.SerializationInfo" /> that holds the serialized object data about the exception being thrown.</param>
/// <param name="context">The <see cref="T:System.Runtime.Serialization.StreamingContext" /> that contains contextual information about the source or destination.</param>
/// <exception cref="T:System.ArgumentNullException">The <paramref name="info" /> parameter is null. </exception>
/// <exception cref="T:System.Runtime.Serialization.SerializationException">The class name is null or <see cref="P:System.Exception.HResult" /> is zero (0). </exception>
protected DataLimitExceededException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
: base(info, context)
{
}
#endif
}
}
| 62 |
amazon-cognito-sync-manager-net | aws | C# | //
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
using System;
namespace Amazon.CognitoSync.SyncManager
{
/// <summary>
/// This exception is thrown when the dataset that is attempted to access does
/// not exist.
/// </summary>
#if !PCL
[Serializable]
#endif
public class DatasetNotFoundException : DataStorageException
{
/// <summary>
/// Constructs a new DatasetNotFoundException with the specified message and an existing exception object
/// </summary>
public DatasetNotFoundException(string detailMessage, Exception ex)
: base(detailMessage, ex)
{
}
/// <summary>
/// Constructs a new DatasetNotFoundException with the specified message
/// </summary>
public DatasetNotFoundException(string detailMessage)
: base(detailMessage)
{
}
/// <summary>
/// Constructs a new DatasetNotFoundException with an existing exception object
/// </summary>
public DatasetNotFoundException(Exception ex)
: base(ex.Message, ex)
{
}
#if !PCL
/// <summary>
/// Constructs a new instance of the DatasetNotFoundException class with serialized data.
/// </summary>
/// <param name="info">The <see cref="T:System.Runtime.Serialization.SerializationInfo" /> that holds the serialized object data about the exception being thrown.</param>
/// <param name="context">The <see cref="T:System.Runtime.Serialization.StreamingContext" /> that contains contextual information about the source or destination.</param>
/// <exception cref="T:System.ArgumentNullException">The <paramref name="info" /> parameter is null. </exception>
/// <exception cref="T:System.Runtime.Serialization.SerializationException">The class name is null or <see cref="P:System.Exception.HResult" /> is zero (0). </exception>
protected DatasetNotFoundException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
: base(info, context)
{
}
#endif
}
}
| 59 |
amazon-cognito-sync-manager-net | aws | C# | //
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
using System;
using Amazon.Runtime;
namespace Amazon.CognitoSync.SyncManager
{
/// <summary>
/// This exception is thrown when an error occurs during an data storage
/// operation.
/// </summary>
#if !PCL
[Serializable]
#endif
public class DataStorageException : SyncManagerException
{
/// <summary>
/// Constructs a new DataStorageException with the specified message and an existing exception object
/// </summary>
public DataStorageException(string detailMessage, Exception ex)
: base(detailMessage, ex)
{
}
/// <summary>
/// Constructs a new DataStorageException with the specified message
/// </summary>
public DataStorageException(string detailMessage)
: base(detailMessage)
{
}
/// <summary>
/// Constructs a new DataStorageException with an existing exception object
/// </summary>
public DataStorageException(Exception ex)
: base(ex.Message, ex)
{
}
#if !PCL
/// <summary>
/// Constructs a new instance of the DataStorageException class with serialized data.
/// </summary>
/// <param name="info">The <see cref="T:System.Runtime.Serialization.SerializationInfo" /> that holds the serialized object data about the exception being thrown.</param>
/// <param name="context">The <see cref="T:System.Runtime.Serialization.StreamingContext" /> that contains contextual information about the source or destination.</param>
/// <exception cref="T:System.ArgumentNullException">The <paramref name="info" /> parameter is null. </exception>
/// <exception cref="T:System.Runtime.Serialization.SerializationException">The class name is null or <see cref="P:System.Exception.HResult" /> is zero (0). </exception>
protected DataStorageException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
: base(info, context)
{
}
#endif
}
}
| 60 |
amazon-cognito-sync-manager-net | aws | C# | //
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
using System;
namespace Amazon.CognitoSync.SyncManager
{
/// <summary>
/// This exception is thrown when a service request failed due to network
/// connectivity problem.
/// </summary>
#if !PCL
[Serializable]
#endif
public class NetworkException : SyncManagerException
{
/// <summary>
/// Constructs a new NetworkException with the specified message and an existing exception object
/// </summary>
public NetworkException(string detailMessage, Exception ex)
: base(detailMessage, ex)
{
}
/// <summary>
/// Constructs a new NetworkException with the specified message
/// </summary>
public NetworkException(string detailMessage)
: base(detailMessage)
{
}
/// <summary>
/// Constructs a new NetworkException with an existing exception object
/// </summary>
public NetworkException(Exception ex)
: base(ex.Message, ex)
{
}
#if !PCL
/// <summary>
/// Constructs a new instance of the NetworkException class with serialized data.
/// </summary>
/// <param name="info">The <see cref="T:System.Runtime.Serialization.SerializationInfo" /> that holds the serialized object data about the exception being thrown.</param>
/// <param name="context">The <see cref="T:System.Runtime.Serialization.StreamingContext" /> that contains contextual information about the source or destination.</param>
/// <exception cref="T:System.ArgumentNullException">The <paramref name="info" /> parameter is null. </exception>
/// <exception cref="T:System.Runtime.Serialization.SerializationException">The class name is null or <see cref="P:System.Exception.HResult" /> is zero (0). </exception>
protected NetworkException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
: base(info, context)
{
}
#endif
}
}
| 59 |
amazon-cognito-sync-manager-net | aws | C# | //
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
using System;
using Amazon.Runtime;
namespace Amazon.CognitoSync.SyncManager
{
/// <summary>
/// This exception is thrown when an error occurs during an data storage
/// operation.
/// </summary>
#if !PCL
[Serializable]
#endif
public class SyncManagerException : AmazonServiceException
{
/// <summary>
/// Constructs a new SyncManagerException with the specified message and an existing exception object
/// </summary>
public SyncManagerException(string detailMessage, Exception ex)
: base(detailMessage, ex)
{
}
/// <summary>
/// Constructs a new SyncManagerException with the specified message
/// </summary>
public SyncManagerException(string detailMessage)
: base(detailMessage)
{
}
/// <summary>
/// Constructs a new SyncManagerException with an existing exception object
/// </summary>
public SyncManagerException(Exception ex)
: base(ex.Message, ex)
{
}
#if !PCL
/// <summary>
/// Constructs a new instance of the AmazonClientException class with serialized data.
/// </summary>
/// <param name="info">The <see cref="T:System.Runtime.Serialization.SerializationInfo" /> that holds the serialized object data about the exception being thrown.</param>
/// <param name="context">The <see cref="T:System.Runtime.Serialization.StreamingContext" /> that contains contextual information about the source or destination.</param>
/// <exception cref="T:System.ArgumentNullException">The <paramref name="info" /> parameter is null. </exception>
/// <exception cref="T:System.Runtime.Serialization.SerializationException">The class name is null or <see cref="P:System.Exception.HResult" /> is zero (0). </exception>
protected SyncManagerException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
: base(info, context)
{
}
#endif
}
}
| 61 |
amazon-cognito-sync-manager-net | aws | C# | //
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
using System;
using System.Globalization;
namespace Amazon.CognitoSync.SyncManager
{
/// <summary>
/// Metadata information representing a dataset
/// </summary>
public sealed class DatasetMetadata
{
private string _datasetName;
private DateTime? _creationDate;
private DateTime? _lastModifiedDate;
private string _lastModifiedBy;
private long _storageSizeBytes;
private long _recordCount;
/// <summary>
/// Non empty String name of the dataset
/// </summary>
/// <value>The name of the dataset.</value>
public string DatasetName
{
get { return this._datasetName; }
}
/// <summary>
/// Date when the dataset is created
/// </summary>
/// <value>The creation date.</value>
public DateTime? CreationDate
{
get { return this._creationDate; }
}
/// <summary>
/// Date when the dataset is last modified
/// </summary>
/// <value>The last modified date.</value>
public DateTime? LastModifiedDate
{
get { return this._lastModifiedDate; }
}
/// <summary>
/// The device that made the last modification
/// </summary>
/// <value>The last modified by.</value>
public string LastModifiedBy
{
get { return this._lastModifiedBy; }
}
/// <summary>
/// The storage size in bytes of the dataset
/// </summary>
/// <value>The storage size bytes.</value>
public long StorageSizeBytes
{
get { return this._storageSizeBytes; }
}
/// <summary>
/// Number of records in the dataset
/// </summary>
/// <value>The record count.</value>
public long RecordCount
{
get { return this._recordCount; }
}
/// <summary>
///
/// </summary>
/// <param name="datasetName"></param>
/// <param name="creationDate"></param>
/// <param name="lastModifiedDate"></param>
/// <param name="lastModifiedBy"></param>
/// <param name="storageSizeBytes"></param>
/// <param name="recordCount"></param>
public DatasetMetadata(string datasetName, DateTime? creationDate, DateTime? lastModifiedDate, string lastModifiedBy, long storageSizeBytes, long recordCount)
{
this._datasetName = datasetName;
this._creationDate = creationDate;
this._lastModifiedDate = lastModifiedDate;
this._lastModifiedBy = lastModifiedBy;
this._storageSizeBytes = storageSizeBytes;
this._recordCount = recordCount;
}
/// <summary>
/// A string representation of DatasetMetadata
/// </summary>
/// <returns></returns>
public override string ToString()
{
return string.Format(CultureInfo.InvariantCulture,
"DatasetName:[{0}], CreationDate:[{1}], LastModifiedDate:[{2}], LastModifiedBy:[{3}], StorageSizeBytes:[{4}], RecordCount:[{5}]",
DatasetName, CreationDate, LastModifiedDate, LastModifiedBy, StorageSizeBytes, RecordCount);
}
}
}
| 111 |
amazon-cognito-sync-manager-net | aws | C# | using System;
using System.Collections.Generic;
namespace Amazon.CognitoSync.SyncManager
{
/// <summary>
/// A model class which stores a list of updated dataset.
/// </summary>
public class DatasetUpdates
{
private string _datasetName;
private List<Record> _records;
private long _syncCount;
private string _syncSessionToken;
private bool _exists;
private bool _deleted;
private List<string> _mergedDatasetNameList;
/// <summary>
/// THe name representing the dataset.
/// </summary>
public string DatasetName
{
get
{
return this._datasetName;
}
}
/// <summary>
/// A flag which indicates if the dataset is deleted or not.
/// </summary>
public bool Deleted
{
get
{
return this._deleted;
}
}
/// <summary>
/// A flag indicating if the dataset exists.
/// </summary>
public bool Exists
{
get
{
return this._exists;
}
}
/// <summary>
/// A list of dataset names which are merged.
/// </summary>
public List<string> MergedDatasetNameList
{
get
{
return _mergedDatasetNameList;
}
}
/// <summary>
/// A list of records.
/// </summary>
public List<Record> Records
{
get
{
return this._records;
}
}
/// <summary>
/// The count of number of times the dataset is synchronized.
/// </summary>
public long SyncCount
{
get
{
return this._syncCount;
}
}
/// <summary>
///
/// </summary>
public string SyncSessionToken
{
get
{
return this._syncSessionToken;
}
}
/// <summary>
///
/// </summary>
/// <param name="datasetName"></param>
/// <param name="records"></param>
/// <param name="syncCount"></param>
/// <param name="syncSessionToken"></param>
/// <param name="exists"></param>
/// <param name="deleted"></param>
/// <param name="mergedDatasetNameList"></param>
public DatasetUpdates(string datasetName, List<Record> records, long syncCount, string syncSessionToken,
bool exists, bool deleted, List<string> mergedDatasetNameList)
{
this._datasetName = datasetName;
this._records = records;
this._syncCount = syncCount;
this._syncSessionToken = syncSessionToken;
this._exists = exists;
this._deleted = deleted;
this._mergedDatasetNameList = mergedDatasetNameList;
}
}
}
| 119 |
amazon-cognito-sync-manager-net | aws | C# | //
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
using System;
using Amazon.CognitoSync.SyncManager.Internal;
using System.Globalization;
namespace Amazon.CognitoSync.SyncManager
{
/// <summary>
/// A Record is the element stored in a <see cref="Dataset"/>.
/// There can be up to 1k records or up to 1 MB in size in a <see cref="Dataset"/>.
/// </summary>
public sealed class Record
{
private readonly string _key;
private readonly string _value;
private readonly long _syncCount;
private readonly DateTime? _lastModifiedDate;
private readonly string _lastModifiedBy;
private readonly DateTime? _deviceLastModifiedDate;
private readonly bool _modified;
/// <summary>
/// The key of this record. It's a non empty string.
/// </summary>
/// <value>The key associated with the Record</value>
public string Key
{
get { return this._key; }
}
/// <summary>
/// The value of this record. If it's null, then the record is considered as deleted.
/// </summary>
/// <value>The value.</value>
public string Value
{
get { return this._value; }
}
/// <summary>
/// Equivalent to the version of this record. It's a increasing non negative
/// long number. The value is returned from server. Do not set this value
/// arbitrarily, or else update may fail.
/// </summary>
/// <value>The sync count.</value>
public long SyncCount
{
get { return this._syncCount; }
}
/// <summary>
/// The server timestamp of this record when it was last modified.
/// </summary>
/// <value>The last modified date.</value>
public DateTime? LastModifiedDate
{
get { return this._lastModifiedDate; }
}
/// <summary>
/// An identifier of the user or device which modified this record last.
/// </summary>
/// <value>The last modified by.</value>
public string LastModifiedBy
{
get { return this._lastModifiedBy; }
}
/// <summary>
/// The local timestamp of this record when it was last modified.
/// </summary>
/// <value>The device last modified date.</value>
public DateTime? DeviceLastModifiedDate
{
get { return this._deviceLastModifiedDate; }
}
/// <summary>
/// A flag that indicates whether this record is modified locally but hasn't
/// been synced.
/// </summary>
/// <value><c>true</c> if modified; otherwise, <c>false</c>.</value>
public bool IsModified
{
get { return this._modified; }
}
/// <summary>
/// Creates an instance of the Record.
/// </summary>
/// <param name="key">The key representing the record</param>
/// <param name="value">The value for the record</param>
/// <param name="syncCount">THe number of times this record has been synchronized</param>
/// <param name="lastModifiedDate">The last time the record was modified in UTC</param>
/// <param name="lastModifiedBy"></param>
/// <param name="deviceLastModifiedDate"></param>
/// <param name="modified">Flag indicating the record was modified</param>
public Record(string key, string value, long syncCount, DateTime? lastModifiedDate, string lastModifiedBy, DateTime? deviceLastModifiedDate, bool modified)
{
if (string.IsNullOrEmpty(key))
{
throw new ArgumentNullException("key");
}
this._key = key;
this._value = value;
this._syncCount = syncCount;
this._lastModifiedDate = lastModifiedDate != null ? DatasetUtils.TruncateToSeconds(lastModifiedDate.Value.ToUniversalTime()) : lastModifiedDate;
this._lastModifiedBy = lastModifiedBy;
this._deviceLastModifiedDate = deviceLastModifiedDate != null ? DatasetUtils.TruncateToSeconds(deviceLastModifiedDate.Value.ToUniversalTime()) : deviceLastModifiedDate;
this._modified = modified;
}
/// <summary>
/// Gets whether the record is marked as deleted. True if value is null,
/// false otherwise.
/// </summary>
/// <returns><c>true</c> if the record is marked as deleted; otherwise, <c>false</c>.</returns>
public bool IsDeleted
{
get
{
return Value == null;
}
}
/// <summary>
/// A string representation of the record
/// </summary>
/// <returns></returns>
public override string ToString()
{
return string.Format(CultureInfo.InvariantCulture,
"Key:[{0}], Value:[{1}], SyncCount:[{2}], LastModifiedDate:[{3}], LastModifiedBy:[{4}], DeviceLastModifiedDate:[{5}], IsModified:[{6}]",
Key, Value, SyncCount, LastModifiedDate, LastModifiedBy, DeviceLastModifiedDate, IsModified);
}
}
}
| 146 |
amazon-cognito-sync-manager-net | aws | C# | //
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
using System;
namespace Amazon.CognitoSync.SyncManager
{
/// <summary>
/// A model which stores conflicting record from the remote storage and the local storage.
/// </summary>
public class SyncConflict
{
private readonly string _key;
private readonly Record _remoteRecord;
private readonly Record _localRecord;
/// <summary>
/// The record Key
/// </summary>
public string Key
{
get { return this._key; }
}
/// <summary>
/// Return the record which is present on the server
/// </summary>
public Record RemoteRecord
{
get { return this._remoteRecord; }
}
/// <summary>
/// Return the record which is present locally on the device
/// </summary>
public Record LocalRecord
{
get { return this._localRecord; }
}
/// <summary>
/// Constructs a SyncConflict object.
/// </summary>
/// <param name="remoteRecord">record from remote storage</param>
/// <param name="localRecord">record from local storage</param>
public SyncConflict(Record remoteRecord, Record localRecord)
{
if (remoteRecord == null || localRecord == null)
{
throw new ArgumentException("Record can't be null");
}
if (!remoteRecord.Key.Equals(localRecord.Key))
{
throw new ArgumentException("The keys of remote record and local record don't match");
}
this._key = remoteRecord.Key;
this._remoteRecord = remoteRecord;
this._localRecord = localRecord;
}
/// <summary>
/// Resolves conflict with remote record
/// </summary>
/// <returns>Resolved record</returns>
public Record ResolveWithRemoteRecord()
{
return new Record(
this.Key,
this.RemoteRecord.Value,
this.RemoteRecord.SyncCount,
this.RemoteRecord.LastModifiedDate,
this.RemoteRecord.LastModifiedBy,
this.RemoteRecord.DeviceLastModifiedDate,
false
);
}
/// <summary>
/// Resolves conflict with local record
/// </summary>
/// <returns>Resolved record</returns>
public Record ResolveWithLocalRecord()
{
return new Record(
this.Key,
this.LocalRecord.Value,
this.RemoteRecord.SyncCount,
this.LocalRecord.LastModifiedDate,
this.LocalRecord.LastModifiedBy,
this.LocalRecord.DeviceLastModifiedDate,
true
);
}
/// <summary>
/// Resolves conflict with a new value.
/// </summary>
/// <returns>Resolved record.</returns>
/// <param name="newValue">new value of the record</param>
public Record ResolveWithValue(string newValue)
{
return new Record(
this.Key,
newValue,
this.RemoteRecord.SyncCount,
this.LocalRecord.LastModifiedDate,
this.LocalRecord.LastModifiedBy,
this.LocalRecord.DeviceLastModifiedDate,
true
);
}
}
}
| 119 |
amazon-cognito-sync-manager-net | aws | C# | using System.Runtime.InteropServices;
[assembly: ComVisible(false)]
[assembly: System.CLSCompliant(true)]
[assembly: System.Security.AllowPartiallyTrustedCallers] | 5 |
amazon-cognito-sync-manager-net | aws | C# | //
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
using System;
using System.IO;
using Amazon.Runtime;
using Amazon.CognitoIdentity;
using Amazon.CognitoSync.Model;
namespace Amazon.CognitoSync.SyncManager.Internal
{
/// <summary>
/// Remote data storage using Cognito Sync service on which we can invoke
/// actions like creating a dataset or record.
/// </summary>
public partial class CognitoSyncStorage : IDisposable
{
private readonly string identityPoolId;
private readonly IAmazonCognitoSync client;
private readonly CognitoAWSCredentials cognitoCredentials;
#region Dispose
/// <summary>
/// Dispose this Object
/// </summary>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
/// <summary>
/// Dispose this Object
/// </summary>
/// <param name="disposing"></param>
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
if (client != null)
client.Dispose();
}
}
#endregion
#region Constructor
/// <summary>
/// Creates an insance of CognitoSyncStorage.
/// </summary>
/// <param name="cognitoCredentials"><see cref="Amazon.CognitoIdentity.CognitoAWSCredentials"/></param>
/// <param name="config"><see cref="Amazon.CognitoSync.AmazonCognitoSyncConfig"/></param>
public CognitoSyncStorage(CognitoAWSCredentials cognitoCredentials, AmazonCognitoSyncConfig config)
{
if (cognitoCredentials == null)
{
throw new ArgumentNullException("cognitoCredentials");
}
this.identityPoolId = cognitoCredentials.IdentityPoolId;
this.cognitoCredentials = cognitoCredentials;
this.client = new AmazonCognitoSyncClient(cognitoCredentials, config);
}
#endregion
#region Private Methods
private string GetCurrentIdentityId()
{
return cognitoCredentials.GetIdentityId();
}
private static RecordPatch RecordToPatch(Record record)
{
RecordPatch patch = new RecordPatch();
patch.DeviceLastModifiedDate = record.DeviceLastModifiedDate.Value;
patch.Key = record.Key;
patch.Value = record.Value;
patch.SyncCount = record.SyncCount;
patch.Op = (record.Value == null ? Operation.Remove : Operation.Replace);
return patch;
}
private static DatasetMetadata ModelToDatasetMetadata(Amazon.CognitoSync.Model.Dataset model)
{
return new DatasetMetadata(
model.DatasetName,
model.CreationDate,
model.LastModifiedDate,
model.LastModifiedBy,
model.DataStorage,
model.NumRecords
);
}
private static Record ModelToRecord(Amazon.CognitoSync.Model.Record model)
{
return new Record(
model.Key,
model.Value,
model.SyncCount,
model.LastModifiedDate,
model.LastModifiedBy,
model.DeviceLastModifiedDate,
false);
}
private static SyncManagerException HandleException(Exception e, string message)
{
var ase = e as AmazonServiceException;
if (ase == null) ase = new AmazonServiceException(e);
if (ase.GetType() == typeof(ResourceNotFoundException))
{
return new DatasetNotFoundException(message);
}
else if (ase.GetType() == typeof(ResourceConflictException)
|| ase.StatusCode == System.Net.HttpStatusCode.Conflict)
{
return new DataConflictException(message);
}
else if (ase.GetType() == typeof(LimitExceededException))
{
return new DataLimitExceededException(message);
}
else if (IsNetworkException(ase))
{
return new NetworkException(message);
}
else if (ase.GetType() == typeof(AmazonCognitoSyncException)
&& ase.Message != null
&& ase.Message.StartsWith("Current SyncCount for:", StringComparison.Ordinal))
{
return new DataConflictException(message);
}
else
{
return new DataStorageException(message, ase);
}
}
private static bool IsNetworkException(AmazonServiceException ase)
{
return ase.InnerException != null && ase.InnerException.GetType() == typeof(IOException);
}
#endregion
}
}
| 160 |
amazon-cognito-sync-manager-net | aws | C# | //
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
using System;
using System.Collections;
using System.Collections.Generic;
namespace Amazon.CognitoSync.SyncManager
{
/// <summary>
/// A local storage, such as a SQLite database, on which we can invoke actions like creating a dataset or a record.
/// </summary>
public partial interface ILocalStorage : IDisposable
{
    /// <summary>
    /// Create a dataset
    /// </summary>
    /// <param name="identityId">Identity Id</param>
    /// <param name="datasetName">Dataset name.</param>
    void CreateDataset(string identityId, string datasetName);
    /// <summary>
    /// Retrieves the string value of a key in dataset. The value can be null
    /// when the record doesn't exist or is marked as deleted.
    /// </summary>
    /// <returns>string value of the record, or null if not present or deleted.</returns>
    /// <param name="identityId">Identity identifier.</param>
    /// <param name="datasetName">Dataset name.</param>
    /// <param name="key">record key.</param>
    string GetValue(string identityId, string datasetName, string key);
    /// <summary>
    /// Puts the value of a key in dataset. If a new value is assigned to the
    /// key, the record is marked as dirty. If the value is null, then the record
    /// is marked as deleted. The changed record will be synced with remote
    /// storage.
    /// </summary>
    /// <param name="identityId">Identity identifier.</param>
    /// <param name="datasetName">Dataset name.</param>
    /// <param name="key">record key.</param>
    /// <param name="value">string value. If null, the record is marked as deleted.</param>
    void PutValue(string identityId, string datasetName, string key, string value);
    /// <summary>
    /// Retrieves a key-value map from dataset, excluding marked as deleted
    /// values.
    /// </summary>
    /// <returns>a key-value map of all but deleted values.</returns>
    /// <param name="identityId">Identity identifier.</param>
    /// <param name="datasetName">Dataset name.</param>
    Dictionary<string, string> GetValueMap(string identityId, string datasetName);
    /// <summary>
    /// Puts a key-value map into a dataset. This is optimized for batch
    /// operation. It's the preferred way to put a list of records into dataset.
    /// </summary>
    /// <param name="identityId">Identity identifier.</param>
    /// <param name="datasetName">Dataset name.</param>
    /// <param name="values">a key-value map.</param>
    void PutAllValues(string identityId, string datasetName, IDictionary<string, string> values);
    /// <summary>
    /// Gets a raw record from local store. If the dataset/key combo doesn't
    /// exist, null will be returned.
    /// </summary>
    /// <returns>a Record object if found, null otherwise.</returns>
    /// <param name="identityId">Identity identifier.</param>
    /// <param name="datasetName">Dataset name.</param>
    /// <param name="key">Key for the record.</param>
    Record GetRecord(string identityId, string datasetName, string key);
    /// <summary>
    /// Gets a list of all records.
    /// </summary>
    /// <returns>A list of all records in the dataset.</returns>
    /// <param name="identityId">Identity identifier.</param>
    /// <param name="datasetName">Dataset name.</param>
    List<Record> GetRecords(string identityId, string datasetName);
    /// <summary>
    /// Retrieves a list of locally modified records since last successful sync
    /// operation.
    /// </summary>
    /// <returns>a list of locally modified records</returns>
    /// <param name="identityId">Identity identifier.</param>
    /// <param name="datasetName">Dataset name.</param>
    List<Record> GetModifiedRecords(string identityId, string datasetName);
    /// <summary>
    /// Puts a list of raw records into dataset.
    /// </summary>
    /// <param name="identityId">Identity identifier.</param>
    /// <param name="datasetName">Dataset name.</param>
    /// <param name="records">A list of Records.</param>
    void PutRecords(string identityId, string datasetName, List<Record> records);
    /// <summary>
    /// Puts a list of raw records into that dataset if
    /// the local version hasn't changed (to be used in
    /// synchronizations).
    /// </summary>
    /// <param name="identityId">Identity id.</param>
    /// <param name="datasetName">Dataset name.</param>
    /// <param name="records">A list of remote records to compare with</param>
    /// <param name="localRecords">A list of records to check for changes.</param>
    void ConditionallyPutRecords(String identityId, String datasetName,List<Record> records, List<Record> localRecords);
    /// <summary>
    /// Gets a list of dataset's metadata information.
    /// </summary>
    /// <returns>a list of dataset metadata</returns>
    /// <param name="identityId">Identity identifier.</param>
    /// <exception cref="DataStorageException"></exception>
    List<DatasetMetadata> GetDatasetMetadata(string identityId);
    /// <summary>
    /// Deletes a dataset. It clears all records in this dataset and marks it as
    /// deleted for future sync.
    /// </summary>
    /// <param name="identityId">Identity identifier.</param>
    /// <param name="datasetName">Dataset name.</param>
    /// <exception cref="DatasetNotFoundException"></exception>
    void DeleteDataset(string identityId, string datasetName);
    /// <summary>
    /// This is different from <see cref="DeleteDataset(String,String)"/>. Not only does it
    /// clear all records in the dataset, it also removes it from the metadata table.
    /// It won't be visible in <see cref="GetDatasetMetadata(String)"/>.
    /// </summary>
    /// <param name="identityId">Identity identifier.</param>
    /// <param name="datasetName">Dataset name.</param>
    void PurgeDataset(string identityId, string datasetName);
    /// <summary>
    /// Retrieves the metadata of a dataset.
    /// </summary>
    /// <returns>The dataset metadata.</returns>
    /// <param name="identityId">Identity identifier.</param>
    /// <param name="datasetName">Dataset name.</param>
    /// <exception cref="DataStorageException"></exception>
    DatasetMetadata GetDatasetMetadata(string identityId, string datasetName);
    /// <summary>
    /// Retrieves the last sync count. This sync count is a counter that
    /// represents when the last sync happened. The counter should be updated on
    /// a successful sync.
    /// </summary>
    /// <returns>The last sync count.</returns>
    /// <param name="identityId">Identity identifier.</param>
    /// <param name="datasetName">Dataset name.</param>
    long GetLastSyncCount(string identityId, string datasetName);
    /// <summary>
    /// Updates the last sync count after successful sync with the remote data
    /// store.
    /// </summary>
    /// <param name="identityId">Identity identifier.</param>
    /// <param name="datasetName">Dataset name.</param>
    /// <param name="lastSyncCount">Last sync count.</param>
    void UpdateLastSyncCount(string identityId, string datasetName, long lastSyncCount);
    /// <summary>
    /// Wipes all locally cached data including dataset metadata and records. All
    /// opened dataset handler should not perform further operations to avoid
    /// inconsistent state.
    /// </summary>
    void WipeData();
    /// <summary>
    /// Reparents all datasets from old identity id to a new one.
    /// </summary>
    /// <param name="oldIdentityId">Old identity identifier.</param>
    /// <param name="newIdentityId">New identity identifier.</param>
    void ChangeIdentityId(string oldIdentityId, string newIdentityId);
    /// <summary>
    /// Updates local dataset metadata
    /// </summary>
    /// <param name="identityId">Identity identifier.</param>
    /// <param name="datasetMetadata">Dataset metadata.</param>
    void UpdateDatasetMetadata(string identityId, List<DatasetMetadata> datasetMetadata);
}
}
| 189 |
amazon-cognito-sync-manager-net | aws | C# | //
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using Amazon.Runtime;
using Amazon.CognitoSync.SyncManager;
using Amazon.CognitoSync.SyncManager.Internal;
using System.Text;
using Amazon.Runtime.Internal.Util;
using Logger = Amazon.Runtime.Internal.Util.Logger;
using Amazon.Util;
using System.Globalization;
namespace Amazon.CognitoSync.SyncManager.Internal
{
/// <summary>
/// An implementation for <see cref="Amazon.CognitoSync.SyncManager.ILocalStorage"/>
/// using <see href="http://sqlite.org">SQLite</see>
/// </summary>
public partial class SQLiteLocalStorage : ILocalStorage
{
internal Logger _logger;
private static object sqlite_lock = new object();
internal const string DB_FILE_NAME = "aws_cognito_sync.db";
#region constructor
/// <summary>
/// Creates a new instance of SQLiteLocalStorage
/// </summary>
public SQLiteLocalStorage()
{
    _logger = Logger.GetLogger(this.GetType());
    // SetupDatabase is implemented elsewhere in this partial class
    // (platform-specific part); it prepares the backing SQLite database.
    SetupDatabase();
}
#if PCL
// PCL builds only: initialize the SQLitePCL native library bundle once,
// before any instance touches the database.
static SQLiteLocalStorage()
{
    SQLitePCL.Batteries.Init();
}
#endif
#endregion
#region dispose methods
/// <summary>
/// Releases the resources consumed by this object
/// </summary>
public void Dispose()
{
    // Standard dispose pattern: Dispose(bool) — implemented elsewhere in this
    // partial class — performs the actual cleanup; finalization is suppressed
    // because cleanup is complete.
    Dispose(true);
    GC.SuppressFinalize(this);
}
#endregion
#region table datastructure
internal const string TABLE_DATASETS = "datasets";
internal const string TABLE_RECORDS = "records";
// Column names and SQL-builder helpers for the dataset metadata table
// ("datasets"). Centralizing the names keeps the hand-built SQL consistent.
static class DatasetColumns
{
    internal const string IDENTITY_ID = "identity_id";
    internal const string DATASET_NAME = "dataset_name";
    internal const string CREATION_TIMESTAMP = "creation_timestamp";
    internal const string LAST_MODIFIED_TIMESTAMP = "last_modified_timestamp";
    internal const string LAST_MODIFIED_BY = "last_modified_by";
    internal const string STORAGE_SIZE_BYTES = "storage_size_bytes";
    internal const string RECORD_COUNT = "record_count";
    internal const string LAST_SYNC_COUNT = "last_sync_count";
    internal const string LAST_SYNC_TIMESTAMP = "last_sync_timestamp";
    internal const string LAST_SYNC_RESULT = "last_sync_result";
    // Canonical column order used by BuildQuery; the *_IDX fields below are
    // ordinals into this array for reading result rows positionally.
    internal static readonly string[] ALL = new string[] {
        IDENTITY_ID, DATASET_NAME,
        CREATION_TIMESTAMP, LAST_MODIFIED_TIMESTAMP, LAST_MODIFIED_BY,
        STORAGE_SIZE_BYTES, RECORD_COUNT,
        LAST_SYNC_COUNT, LAST_SYNC_TIMESTAMP, LAST_SYNC_RESULT,
    };
    internal static readonly int IDENTITY_ID_IDX = Array.IndexOf(DatasetColumns.ALL, DatasetColumns.IDENTITY_ID);
    internal static readonly int DATASET_NAME_IDX = Array.IndexOf(DatasetColumns.ALL, DatasetColumns.DATASET_NAME);
    internal static readonly int CREATION_TIMESTAMP_IDX = Array.IndexOf(DatasetColumns.ALL, DatasetColumns.CREATION_TIMESTAMP);
    internal static readonly int LAST_MODIFIED_TIMESTAMP_IDX = Array.IndexOf(DatasetColumns.ALL, DatasetColumns.LAST_MODIFIED_TIMESTAMP);
    internal static readonly int LAST_MODIFIED_BY_IDX = Array.IndexOf(DatasetColumns.ALL, DatasetColumns.LAST_MODIFIED_BY);
    internal static readonly int STORAGE_SIZE_BYTES_IDX = Array.IndexOf(DatasetColumns.ALL, DatasetColumns.STORAGE_SIZE_BYTES);
    internal static readonly int RECORD_COUNT_IDX = Array.IndexOf(DatasetColumns.ALL, DatasetColumns.RECORD_COUNT);
    internal static readonly int LAST_SYNC_COUNT_IDX = Array.IndexOf(DatasetColumns.ALL, DatasetColumns.LAST_SYNC_COUNT);
    internal static readonly int LAST_SYNC_TIMESTAMP_IDX = Array.IndexOf(DatasetColumns.ALL, DatasetColumns.LAST_SYNC_TIMESTAMP);
    internal static readonly int LAST_SYNC_RESULT_IDX = Array.IndexOf(DatasetColumns.ALL, DatasetColumns.LAST_SYNC_RESULT);
    // Builds "SELECT <all columns> FROM datasets", appending a WHERE clause
    // when conditions is non-blank.
    public static string BuildQuery(string conditions)
    {
        string query = "SELECT " + string.Join(",", ALL) + " FROM " + SQLiteLocalStorage.TABLE_DATASETS;
        if (conditions != null && conditions.Trim().Length > 0)
        {
            query += " WHERE " + conditions;
        }
        return query;
    }
    // Builds an INSERT over every column.
    public static string BuildInsert()
    {
        return DatasetColumns.BuildInsert(ALL);
    }
    // Builds an INSERT over the given columns; values are "@column" parameter
    // placeholders in the same order as fieldList.
    public static string BuildInsert(string[] fieldList)
    {
        string insert = "INSERT INTO " + SQLiteLocalStorage.TABLE_DATASETS + " (" + string.Join(",", fieldList) + ") " +
            " VALUES ( ";
        for (int i = 0; i < fieldList.Length; i++)
        {
            insert += "@" + fieldList[i] + (i < fieldList.Length - 1 ? " , " : " ");
        }
        insert += " )";
        return insert;
    }
    // Builds "UPDATE datasets SET col = @col , ..." with an optional WHERE
    // clause; parameter placeholders follow the order of fieldList.
    public static string BuildUpdate(string[] fieldList, string conditions)
    {
        string update = "UPDATE " + SQLiteLocalStorage.TABLE_DATASETS + " SET ";
        for (int i = 0; i < fieldList.Length; i++)
        {
            update += fieldList[i] + " = @" + fieldList[i] + (i < fieldList.Length - 1 ? " , " : " ");
        }
        if (conditions != null && conditions.Trim().Length > 0)
        {
            update += " WHERE " + conditions;
        }
        return update;
    }
    // Builds "DELETE FROM datasets" with an optional WHERE clause.
    public static string BuildDelete(string conditions)
    {
        string delete = "DELETE FROM " + SQLiteLocalStorage.TABLE_DATASETS;
        if (conditions != null && conditions.Trim().Length > 0)
        {
            delete += " WHERE " + conditions;
        }
        return delete;
    }
}
// Column names and SQL-builder helpers for the record table ("records").
static class RecordColumns
{
    internal const string IDENTITY_ID = "identity_id";
    internal const string DATASET_NAME = "dataset_name";
    internal const string KEY = "key";
    internal const string VALUE = "value";
    internal const string SYNC_COUNT = "sync_count";
    internal const string LAST_MODIFIED_TIMESTAMP = "last_modified_timestamp";
    internal const string LAST_MODIFIED_BY = "last_modified_by";
    internal const string DEVICE_LAST_MODIFIED_TIMESTAMP = "device_last_modified_timestamp";
    internal const string MODIFIED = "modified";
    // Canonical column order used by BuildQuery; the *_IDX fields below are
    // ordinals into this array for reading result rows positionally.
    internal static readonly string[] ALL = new string[] {
        IDENTITY_ID, DATASET_NAME, KEY, VALUE, SYNC_COUNT, LAST_MODIFIED_TIMESTAMP,
        LAST_MODIFIED_BY, DEVICE_LAST_MODIFIED_TIMESTAMP, MODIFIED
    };
    internal static readonly int IDENTITY_ID_IDX = Array.IndexOf(RecordColumns.ALL, RecordColumns.IDENTITY_ID);
    internal static readonly int DATASET_NAME_IDX = Array.IndexOf(RecordColumns.ALL, RecordColumns.DATASET_NAME);
    internal static readonly int KEY_IDX = Array.IndexOf(RecordColumns.ALL, RecordColumns.KEY);
    internal static readonly int VALUE_IDX = Array.IndexOf(RecordColumns.ALL, RecordColumns.VALUE);
    internal static readonly int SYNC_COUNT_IDX = Array.IndexOf(RecordColumns.ALL, RecordColumns.SYNC_COUNT);
    internal static readonly int LAST_MODIFIED_TIMESTAMP_IDX = Array.IndexOf(RecordColumns.ALL, RecordColumns.LAST_MODIFIED_TIMESTAMP);
    internal static readonly int LAST_MODIFIED_BY_IDX = Array.IndexOf(RecordColumns.ALL, RecordColumns.LAST_MODIFIED_BY);
    internal static readonly int DEVICE_LAST_MODIFIED_TIMESTAMP_IDX = Array.IndexOf(RecordColumns.ALL, RecordColumns.DEVICE_LAST_MODIFIED_TIMESTAMP);
    internal static readonly int MODIFIED_IDX = Array.IndexOf(RecordColumns.ALL, RecordColumns.MODIFIED);
    // Builds "SELECT <all columns> FROM records", appending a WHERE clause
    // when conditions is non-blank.
    public static string BuildQuery(string conditions)
    {
        string query = "SELECT " + string.Join(",", ALL) + " FROM " + SQLiteLocalStorage.TABLE_RECORDS;
        if (conditions != null && conditions.Trim().Length > 0)
        {
            query += " WHERE " + conditions;
        }
        return query;
    }
    // Builds an INSERT over every column.
    public static string BuildInsert()
    {
        return RecordColumns.BuildInsert(ALL);
    }
    // Builds an INSERT over the given columns; values are "@column" parameter
    // placeholders in the same order as fieldList.
    public static string BuildInsert(string[] fieldList)
    {
        string insert = "INSERT INTO " + SQLiteLocalStorage.TABLE_RECORDS + " (" + string.Join(" ,", fieldList) + " ) " +
            " VALUES ( ";
        for (int i = 0; i < fieldList.Length; i++)
        {
            insert += "@" + fieldList[i] + (i < fieldList.Length - 1 ? " , " : " ");
        }
        insert += " )";
        return insert;
    }
    // Builds "UPDATE records SET col = @col , ..." with an optional WHERE
    // clause; parameter placeholders follow the order of fieldList.
    public static string BuildUpdate(string[] fieldList, string conditions)
    {
        string update = "UPDATE " + SQLiteLocalStorage.TABLE_RECORDS + " SET ";
        for (int i = 0; i < fieldList.Length; i++)
        {
            update += fieldList[i] + " = @" + fieldList[i] + (i < fieldList.Length - 1 ? " , " : " ");
        }
        if (conditions != null && conditions.Trim().Length > 0)
        {
            update += " WHERE " + conditions;
        }
        return update;
    }
    // Builds "DELETE FROM records" with an optional WHERE clause.
    public static string BuildDelete(string conditions)
    {
        string delete = "DELETE FROM " + SQLiteLocalStorage.TABLE_RECORDS;
        if (conditions != null && conditions.Trim().Length > 0)
        {
            delete += " WHERE " + conditions;
        }
        return delete;
    }
}
#endregion
#region helper class
// A parameterized SQL statement: the query text plus the positional
// parameter values supplied to the execution helpers.
internal class Statement
{
    // SQL text, typically produced by the DatasetColumns/RecordColumns builders.
    public string Query { get; set; }
    // Values bound to the query's parameter placeholders, in order.
    public object[] Parameters { get; set; }
}
#endregion
#region helper methods
/// <summary>
/// Encodes a string as UTF-8 with a single trailing NUL byte appended
/// (zero-terminated form).
/// </summary>
/// <param name="sText">The string to encode; must not be null.</param>
/// <returns>UTF-8 bytes of <paramref name="sText"/> followed by one 0 byte.</returns>
internal static byte[] ToUtf8(string sText)
{
    byte[] encoded = Encoding.UTF8.GetBytes(sText);
    byte[] result = new byte[encoded.Length + 1];
    Buffer.BlockCopy(encoded, 0, result, 0, encoded.Length);
    // result[encoded.Length] is already 0: .NET arrays are zero-initialized.
    return result;
}
#endregion
#region public api's
/// <summary>
/// Create a dataset
/// </summary>
/// <param name="identityId">Identity Id</param>
/// <param name="datasetName">Dataset name.</param>
public void CreateDataset(string identityId, string datasetName)
{
    lock (sqlite_lock)
    {
        // No-op when a metadata row already exists for this dataset.
        if (GetMetadataHelper(identityId, datasetName) != null)
        {
            return;
        }

        string insertQuery = DatasetColumns.BuildInsert(
            new string[]
            {
                DatasetColumns.IDENTITY_ID,
                DatasetColumns.DATASET_NAME,
                DatasetColumns.CREATION_TIMESTAMP,
                DatasetColumns.LAST_MODIFIED_TIMESTAMP
            });

        // Creation and last-modified timestamps both start at "now".
        CreateDatasetHelper(insertQuery, identityId, datasetName,
            AWSSDKUtils.CorrectedUtcNow.ToLocalTime(), AWSSDKUtils.CorrectedUtcNow.ToLocalTime());
    }
}
/// <summary>
/// Retrieves the string value of a key in dataset. The value can be null
/// when the record doesn't exist or is marked as deleted.
/// </summary>
/// <returns>string value of the record, or null if not present or deleted.</returns>
/// <param name="identityId">Identity identifier.</param>
/// <param name="datasetName">Dataset name.</param>
/// <param name="key">record key.</param>
public string GetValue(string identityId, string datasetName, string key)
{
    lock (sqlite_lock)
    {
        Record record = GetRecord(identityId, datasetName, key);
        return record == null ? null : record.Value;
    }
}
/// <summary>
/// Puts the value of a key in dataset. If a new value is assigned to the
/// key, the record is marked as dirty. If the value is null, then the record
/// is marked as deleted. The changed record will be synced with remote
/// storage.
/// </summary>
/// <param name="identityId">Identity identifier.</param>
/// <param name="datasetName">Dataset name.</param>
/// <param name="key">record key.</param>
/// <param name="value">string value. If null, the record is marked as deleted.</param>
public void PutValue(string identityId, string datasetName, string key, string value)
{
    lock (sqlite_lock)
    {
        if (PutValueHelper(identityId, datasetName, key, value))
        {
            // The record changed, so advance the dataset's modification time.
            UpdateLastModifiedTimestamp(identityId, datasetName);
        }
        else
        {
            _logger.DebugFormat("{0}", @"Cognito Sync - SQLiteStorage - Put Value Failed");
        }
    }
}
/// <summary>
/// Retrieves a key-value map from dataset, excluding marked as deleted
/// values.
/// </summary>
/// <returns>a key-value map of all but deleted values.</returns>
/// <param name="identityId">Identity identifier.</param>
/// <param name="datasetName">Dataset name.</param>
public Dictionary<string, string> GetValueMap(string identityId, string datasetName)
{
    lock (sqlite_lock)
    {
        Dictionary<string, string> valueMap = new Dictionary<string, string>();
        foreach (Record record in GetRecords(identityId, datasetName))
        {
            // Tombstoned records stay in the store for sync but are not exposed.
            if (record.IsDeleted)
            {
                continue;
            }
            valueMap.Add(record.Key, record.Value);
        }
        return valueMap;
    }
}
/// <summary>
/// Puts a key-value map into a dataset. This is optimized for batch
/// operation. It's the preferred way to put a list of records into dataset.
/// </summary>
/// <param name="identityId">Identity identifier.</param>
/// <param name="datasetName">Dataset name.</param>
/// <param name="values">a key-value map.</param>
public void PutAllValues(string identityId, string datasetName, IDictionary<string, string> values)
{
    lock (sqlite_lock)
    {
        foreach (KeyValuePair<string, string> pair in values)
        {
            PutValueHelper(identityId, datasetName, pair.Key, pair.Value);
        }
        // One timestamp update covers the whole batch.
        UpdateLastModifiedTimestamp(identityId, datasetName);
    }
}
/// <summary>
/// Gets a list of dataset's metadata information.
/// </summary>
/// <returns>a list of dataset metadata</returns>
/// <param name="identityId">Identity identifier.</param>
/// <exception cref="DataStorageException"></exception>
public List<DatasetMetadata> GetDatasetMetadata(string identityId)
{
    lock (sqlite_lock)
    {
        // Select every dataset row owned by this identity.
        string whereClause = DatasetColumns.IDENTITY_ID + " = @whereIdentityId ";
        return GetDatasetMetadataHelper(DatasetColumns.BuildQuery(whereClause), identityId);
    }
}
/// <summary>
/// Retrieves the metadata of a dataset.
/// </summary>
/// <returns>The dataset metadata.</returns>
/// <param name="identityId">Identity identifier.</param>
/// <param name="datasetName">Dataset name.</param>
/// <exception cref="DataStorageException"></exception>
#if BCL
// Marked SecuritySafeCritical on BCL builds.
[System.Security.SecuritySafeCritical]
#endif
public DatasetMetadata GetDatasetMetadata(string identityId, string datasetName)
{
    lock (sqlite_lock)
    {
        // Returns null when the dataset does not exist (see CreateDataset's null check).
        return GetMetadataHelper(identityId, datasetName);
    }
}
/// <summary>
/// Gets a raw record from local store. If the dataset/key combo doesn't
/// exist, null will be returned.
/// </summary>
/// <returns>a Record object if found, null otherwise.</returns>
/// <param name="identityId">Identity identifier.</param>
/// <param name="datasetName">Dataset name.</param>
/// <param name="key">Key for the record.</param>
public Record GetRecord(string identityId, string datasetName, string key)
{
    lock (sqlite_lock)
    {
        string whereClause =
            RecordColumns.IDENTITY_ID + " = @identityId AND " +
            RecordColumns.DATASET_NAME + " = @datasetName AND " +
            RecordColumns.KEY + " = @key ";
        return GetRecordHelper(RecordColumns.BuildQuery(whereClause), identityId, datasetName, key);
    }
}
/// <summary>
/// Gets a list of all records.
/// </summary>
/// <returns>All records stored for the dataset.</returns>
/// <param name="identityId">Identity identifier.</param>
/// <param name="datasetName">Dataset name.</param>
public List<Record> GetRecords(string identityId, string datasetName)
{
    lock (sqlite_lock)
    {
        string whereClause =
            RecordColumns.IDENTITY_ID + " = @whereIdentityId AND " +
            RecordColumns.DATASET_NAME + " = @whereDatasetName ";
        return GetRecordsHelper(RecordColumns.BuildQuery(whereClause), identityId, datasetName);
    }
}
/// <summary>
/// Puts a list of raw records into dataset.
/// </summary>
/// <param name="identityId">Identity identifier.</param>
/// <param name="datasetName">Dataset name.</param>
/// <param name="records">A list of Records.</param>
public void PutRecords(string identityId, string datasetName, List<Record> records)
{
    lock (sqlite_lock)
    {
        // Insert-or-update each record individually under a single lock scope.
        foreach (Record record in records)
        {
            UpdateOrInsertRecord(identityId, datasetName, record);
        }
    }
}
/// <summary>
/// Puts a list of raw records into that dataset if
/// the local version hasn't changed (to be used in
/// synchronizations).
/// </summary>
/// <param name="identityId">Identity id.</param>
/// <param name="datasetName">Dataset name.</param>
/// <param name="records">A list of remote records to compare with</param>
/// <param name="localRecords">A list of records to check for changes.</param>
public void ConditionallyPutRecords(String identityId, String datasetName, List<Record> records, List<Record> localRecords)
{
    /*
     * Grab an instance of the record from the local store with the remote change's
     * key and the snapshot version.
     * 1) If both are null the remote change is new and we should save.
     * 2) If both exist but the value has changed locally we shouldn't overwrite with the remote changes,
     * which will still exist in remote, but should update the sync count to avoid a false-conflict later.
     * 3) If both exist and the values have not changed, we should save the remote change.
     * 4) If the current check exists but it wasn't in the snapshot, we should save.
     */
    // Index the snapshot of local records by key for constant-time lookups below.
    Dictionary<string, Record> localRecordMap = new Dictionary<string, Record>();
    foreach (Record record in localRecords)
    {
        localRecordMap[record.Key] = record;
    }
    foreach (Record record in records)
    {
        Record oldDatabaseRecord;
        localRecordMap.TryGetValue(record.Key, out oldDatabaseRecord);
        // locking to ensure that database is not changed between GetRecord and UpdateOrInsertRecord
        lock (sqlite_lock)
        {
            Record databaseRecord = this.GetRecord(identityId, datasetName, record.Key);
            if (databaseRecord != null && oldDatabaseRecord != null)
            {
                // Sync metadata diverged since the snapshot: the local store
                // changed underneath us, so skip this remote change.
                if (databaseRecord.SyncCount != oldDatabaseRecord.SyncCount
                    || !string.Equals(databaseRecord.LastModifiedBy, oldDatabaseRecord.LastModifiedBy))
                {
                    continue;
                }
                if (!string.Equals(databaseRecord.Value, oldDatabaseRecord.Value))
                {
                    if (string.Equals(record.Value, oldDatabaseRecord.Value))
                    {
                        // The value has changed, so this is a local change during the push record operation.
                        // Avoid a future conflict by updating the metadata so that it looks like the modifications that
                        // occurred during the put record operation happened after the put operation completed.
                        Record resolvedRecord =
                            new Record(
                                record.Key,
                                databaseRecord.Value,
                                record.SyncCount,
                                record.LastModifiedDate,
                                record.LastModifiedBy,
                                databaseRecord.DeviceLastModifiedDate,
                                true
                            );
                        UpdateOrInsertRecord(identityId, datasetName, resolvedRecord);
                    }
                    else
                    {
                        // Case 2: local and remote diverged to different values;
                        // keep the local value and let conflict handling deal with it.
                        continue;
                    }
                }
                else
                {
                    // Case 3: value unchanged locally; safe to apply the remote change.
                    UpdateOrInsertRecord(identityId, datasetName, record);
                }
            }
            else
            {
                // Cases 1 and 4: the record is new locally, or was not part of
                // the snapshot; save the remote change.
                UpdateOrInsertRecord(identityId, datasetName, record);
            }
        }
    }
}
/// <summary>
/// Deletes a dataset. All the records associated with dataset are cleared and
/// dataset is marked as deleted for future sync.
/// </summary>
/// <param name="identityId">Identity identifier.</param>
/// <param name="datasetName">Dataset name.</param>
/// <exception cref="DatasetNotFoundException"></exception>
public void DeleteDataset(string identityId, string datasetName)
{
    // Delegates to the private overload with no additional statements.
    DeleteDataset(identityId, datasetName, null);
}
// Clears every record row of the dataset and updates its metadata row so the
// deletion can be pushed to remote on a later sync. Any additionalStatements
// (e.g. the metadata purge from PurgeDataset) run in the same batch.
private void DeleteDataset(string identityId, string datasetName, List<Statement> additionalStatements)
{
    lock (sqlite_lock)
    {
        // Statement 1: delete all records belonging to the dataset.
        string deleteRecordsQuery =
            RecordColumns.BuildDelete(
                RecordColumns.IDENTITY_ID + " = @whereIdentityId AND " +
                RecordColumns.DATASET_NAME + " = @whereDatasetName "
            );
        Statement s1 = new Statement
        {
            Query = deleteRecordsQuery,
            Parameters = new string[] { identityId, datasetName }
        };
        // Statement 2: bump the dataset's last-modified time and set its last
        // sync count to -1, marking the dataset as deleted for future sync.
        string deleteDatasetQuery =
            DatasetColumns.BuildUpdate(
                new string[] {
                    DatasetColumns.LAST_MODIFIED_TIMESTAMP,
                    DatasetColumns.LAST_SYNC_COUNT
                },
                DatasetColumns.IDENTITY_ID + " = @whereIdentityId AND " +
                DatasetColumns.DATASET_NAME + " = @whereDatasetName "
            );
        Statement s2 = new Statement
        {
            Query = deleteDatasetQuery,
            // Parameter order matches BuildUpdate: SET values first, then WHERE values.
            Parameters = new object[] { AWSSDKUtils.CorrectedUtcNow.ToLocalTime(), -1, identityId, datasetName }
        };
        List<Statement> statementsToExecute = new List<Statement>() { s1, s2 };
        if (additionalStatements != null)
        {
            statementsToExecute.AddRange(additionalStatements);
        }
        ExecuteMultipleHelper(statementsToExecute);
    }
}
/// <summary>
/// This is different from <see cref="DeleteDataset(String,String)"/>: in addition to
/// clearing all records in the dataset, it also removes the dataset's row from the
/// metadata table, so it will no longer appear in <see cref="GetDatasetMetadata(String)"/>.
/// </summary>
/// <param name="identityId">Identity identifier.</param>
/// <param name="datasetName">Dataset name.</param>
public void PurgeDataset(string identityId, string datasetName)
{
    lock (sqlite_lock)
    {
        // Extra statement executed together with the regular delete: drop the
        // metadata row entirely instead of just marking the dataset deleted.
        string purgeMetadataQuery = DatasetColumns.BuildDelete(
            DatasetColumns.IDENTITY_ID + " = @whereIdentityId AND " +
            DatasetColumns.DATASET_NAME + " = @whereDatasetName "
        );
        Statement purgeStatement = new Statement
        {
            Query = purgeMetadataQuery,
            Parameters = new string[] { identityId, datasetName }
        };
        DeleteDataset(identityId, datasetName, new List<Statement>() { purgeStatement });
    }
}
/// <summary>
/// Retrieves the last sync count. This sync count is a counter that
/// represents when the last sync happened. The counter should be updated on
/// a successful sync.
/// </summary>
/// <returns>The last sync count.</returns>
/// <param name="identityId">Identity identifier.</param>
/// <param name="datasetName">Dataset name.</param>
public long GetLastSyncCount(string identityId, string datasetName)
{
    lock (sqlite_lock)
    {
        string whereClause =
            DatasetColumns.IDENTITY_ID + " = @whereIdentityId AND " +
            DatasetColumns.DATASET_NAME + " = @whereDatasetName ";
        return GetLastSyncCountHelper(DatasetColumns.BuildQuery(whereClause), identityId, datasetName);
    }
}
/// <summary>
/// Retrieves a list of locally modified records since last successful sync
/// operation.
/// </summary>
/// <returns>a list of locally modified records</returns>
/// <param name="identityId">Identity identifier.</param>
/// <param name="datasetName">Dataset name.</param>
public List<Record> GetModifiedRecords(string identityId, string datasetName)
{
    lock (sqlite_lock)
    {
        string query =
            RecordColumns.BuildQuery(
                RecordColumns.IDENTITY_ID + " = @whereIdentityId AND " +
                RecordColumns.DATASET_NAME + " = @whereDatasetName AND " +
                RecordColumns.MODIFIED + " = @whereModified "
            );
        // modified == 1 selects records changed locally since the last sync.
        // (Fix: removed a stray empty statement after the return.)
        return GetModifiedRecordsHelper(query, identityId, datasetName, 1);
    }
}
/// <summary>
/// Updates the last sync count after successful sync with the remote data
/// store.
/// </summary>
/// <param name="identityId">Identity identifier.</param>
/// <param name="datasetName">Dataset name.</param>
/// <param name="lastSyncCount">Last sync count.</param>
public void UpdateLastSyncCount(string identityId, string datasetName, long lastSyncCount)
{
    lock (sqlite_lock)
    {
        // Fix: the WHERE clause previously used RecordColumns constants even
        // though this statement targets the datasets table. The generated SQL
        // is identical (both tables use the same identity_id/dataset_name
        // column names), but using DatasetColumns removes the accidental
        // cross-table coupling.
        string query = DatasetColumns.BuildUpdate(
            new string[] {
                DatasetColumns.LAST_SYNC_COUNT,
                DatasetColumns.LAST_SYNC_TIMESTAMP
            },
            DatasetColumns.IDENTITY_ID + " = @whereIdentityId AND " +
            DatasetColumns.DATASET_NAME + " = @whereDatasetName "
        );
        UpdateLastSyncCountHelper(query, lastSyncCount, AWSSDKUtils.CorrectedUtcNow.ToLocalTime(), identityId, datasetName);
    }
}
/// <summary>
/// Wipes all locally cached data including dataset metadata and records. All
/// opened dataset handler should not perform further operations to avoid
/// inconsistent state.
/// </summary>
public void WipeData()
{
    lock (sqlite_lock)
    {
        // Unconditional deletes (no WHERE clause): dataset metadata first,
        // then all records, executed as one batch.
        List<Statement> statements = new List<Statement>()
        {
            new Statement { Query = DatasetColumns.BuildDelete(null) },
            new Statement { Query = RecordColumns.BuildDelete(null) }
        };
        ExecuteMultipleHelper(statements);
    }
}
/// <summary>
/// Reparents all datasets from old identity id to a new one.
/// </summary>
/// <param name="oldIdentityId">Old identity identifier.</param>
/// <param name="newIdentityId">New identity identifier.</param>
public void ChangeIdentityId(string oldIdentityId, string newIdentityId)
{
_logger.DebugFormat("Reparenting datasets from {0} to {1}", oldIdentityId, newIdentityId);
lock (sqlite_lock)
{
List<Statement> statements = new List<Statement>();
// if oldIdentityId is unknown, aka the dataset is created prior to
// having a cognito id, just reparent datasets from unknown to
// newIdentityId
if (DatasetUtils.UNKNOWN_IDENTITY_ID == oldIdentityId)
{
HashSet<string> commonDatasetNames = GetCommonDatasetNames(oldIdentityId, newIdentityId);
// append UNKNOWN to the name of all non unique datasets
foreach (String oldDatasetName in commonDatasetNames)
{
string updateDatasetQuery = "UPDATE " + TABLE_DATASETS
+ " SET " + DatasetColumns.DATASET_NAME + " = @" + DatasetColumns.DATASET_NAME
+ " WHERE " + DatasetColumns.IDENTITY_ID + " = @" + DatasetColumns.IDENTITY_ID
+ " AND " + DatasetColumns.DATASET_NAME + " = @old" + DatasetColumns.DATASET_NAME + " ";
string timestamp = AWSSDKUtils.ConvertToUnixEpochMilliSeconds(AWSSDKUtils.CorrectedUtcNow).ToString(CultureInfo.InvariantCulture);
Statement updateDatasetStatement = new Statement()
{
Query = updateDatasetQuery,
Parameters = new string[] { oldDatasetName + "." + oldIdentityId + "-" + timestamp, oldIdentityId, oldDatasetName }
};
statements.Add(updateDatasetStatement);
string updateRecordsQuery = "UPDATE " + TABLE_RECORDS
+ " SET " + RecordColumns.DATASET_NAME + " = @" + RecordColumns.DATASET_NAME
+ " WHERE " + RecordColumns.IDENTITY_ID + " = @" + RecordColumns.IDENTITY_ID
+ " AND " + RecordColumns.DATASET_NAME + " = @old" + RecordColumns.DATASET_NAME + " ";
Statement updateRecordsStatement = new Statement()
{
Query = updateRecordsQuery,
Parameters = new string[] { oldDatasetName + "." + oldIdentityId + "-" + timestamp, oldIdentityId, oldDatasetName }
};
statements.Add(updateRecordsStatement);
}
string updateIdentityDatasetQuery = DatasetColumns.BuildUpdate(
new string[] { DatasetColumns.IDENTITY_ID },
DatasetColumns.IDENTITY_ID + " = @oldIdentityId "
);
Statement UpdateIdentityDatasetStatement = new Statement()
{
Query = updateIdentityDatasetQuery,
Parameters = new string[] { newIdentityId, oldIdentityId }
};
statements.Add(UpdateIdentityDatasetStatement);
string updateRecordsIdentityQuery = RecordColumns.BuildUpdate(
new string[] { RecordColumns.IDENTITY_ID },
RecordColumns.IDENTITY_ID + " = @oldIdentityId "
);
Statement UpdateIdentityRecordsStatement = new Statement()
{
Query = updateRecordsIdentityQuery,
Parameters = new string[] { newIdentityId, oldIdentityId }
};
statements.Add(UpdateIdentityRecordsStatement);
}
else
{
// 1. copy oldIdentityId/dataset to newIdentityId/dataset
// datasets table
string copyDatasetToNewIdentity = "INSERT INTO " + TABLE_DATASETS + "("
+ DatasetColumns.IDENTITY_ID + ","
+ DatasetColumns.DATASET_NAME + ","
+ DatasetColumns.CREATION_TIMESTAMP + ","
+ DatasetColumns.STORAGE_SIZE_BYTES + ","
+ DatasetColumns.RECORD_COUNT
// last sync count is reset to default 0
+ ")"
+ " SELECT "
+ "'" + newIdentityId + "'," // assign new owner
+ DatasetColumns.DATASET_NAME + ","
+ DatasetColumns.CREATION_TIMESTAMP + ","
+ DatasetColumns.STORAGE_SIZE_BYTES + ","
+ DatasetColumns.RECORD_COUNT
+ " FROM " + TABLE_DATASETS
+ " WHERE " + DatasetColumns.IDENTITY_ID + " = @" + DatasetColumns.IDENTITY_ID + " ";
statements.Add(new Statement
{
Query = copyDatasetToNewIdentity,
Parameters = new string[] { oldIdentityId }
});
// records table
string copyRecordsToNewIdentity = "INSERT INTO " + TABLE_RECORDS + "("
+ RecordColumns.IDENTITY_ID + ","
+ RecordColumns.DATASET_NAME + ","
+ RecordColumns.KEY + ","
+ RecordColumns.VALUE + ","
// sync count is resset to default 0
+ RecordColumns.LAST_MODIFIED_TIMESTAMP + ","
+ RecordColumns.LAST_MODIFIED_BY + ","
+ RecordColumns.DEVICE_LAST_MODIFIED_TIMESTAMP
// modified is reset to default 1 (dirty)
+ ")"
+ " SELECT "
+ "'" + newIdentityId + "'," // assign new owner
+ RecordColumns.DATASET_NAME + ","
+ RecordColumns.KEY + ","
+ RecordColumns.VALUE + ","
+ RecordColumns.LAST_MODIFIED_TIMESTAMP + ","
+ RecordColumns.LAST_MODIFIED_BY + ","
+ RecordColumns.DEVICE_LAST_MODIFIED_TIMESTAMP
+ " FROM " + TABLE_RECORDS
+ " WHERE " + RecordColumns.IDENTITY_ID + " = @" + RecordColumns.IDENTITY_ID + " ";
statements.Add(new Statement
{
Query = copyRecordsToNewIdentity,
Parameters = new string[] { oldIdentityId }
});
// 2. rename oldIdentityId/dataset to
// newIdentityId/dataset.oldIdentityId
// datasets table
string updateDatasetToNewIdentityQuery = "UPDATE " + TABLE_DATASETS
+ " SET "
+ DatasetColumns.IDENTITY_ID + " = '" + newIdentityId + "', "
+ DatasetColumns.DATASET_NAME + " = "
+ DatasetColumns.DATASET_NAME + " || '." + oldIdentityId + "', "
+ DatasetColumns.LAST_SYNC_COUNT + " = 1" // set the sync count to one, because that is what the server did
+ " WHERE " + DatasetColumns.IDENTITY_ID + " = @" + DatasetColumns.IDENTITY_ID + " ";
statements.Add(new Statement
{
Query = updateDatasetToNewIdentityQuery,
Parameters = new string[] { oldIdentityId }
});
// records table
string updateRecordsToNewIdentityQuery = "UPDATE " + TABLE_RECORDS
+ " SET "
+ RecordColumns.IDENTITY_ID + " = '" + newIdentityId + "', "
+ RecordColumns.DATASET_NAME + " = "
+ RecordColumns.DATASET_NAME + " || '." + oldIdentityId + "'"
+ " WHERE " + RecordColumns.IDENTITY_ID + " = @" + RecordColumns.IDENTITY_ID + " ";
statements.Add(new Statement
{
Query = updateRecordsToNewIdentityQuery,
Parameters = new string[] { oldIdentityId }
});
}
//execute all of them
ExecuteMultipleHelper(statements);
}
}
/// <summary>
/// Updates local dataset metadata
/// </summary>
/// <param name="identityId">Identity identifier.</param>
/// <param name="datasetMetadata">Dataset metadata.</param>
public void UpdateDatasetMetadata(string identityId, List<DatasetMetadata> datasetMetadata)
{
lock (sqlite_lock)
{
foreach (DatasetMetadata metadata in datasetMetadata)
{
if (!UpdateDatasetMetadataInternal(identityId, metadata))
{
string message = string.Format(CultureInfo.InvariantCulture, "Failure to update dataset metadata with Identity Id {0}", identityId);
_logger.Error(new AmazonClientException(message), message);
}
}
}
}
/// <summary>
/// Updates the last modified timestamp
/// </summary>
/// <param name="identityId">Identity Identifier.</param>
/// <param name="datasetName">Dataset name.</param>
public void UpdateLastModifiedTimestamp(string identityId, string datasetName)
{
lock (sqlite_lock)
{
string query =
DatasetColumns.BuildUpdate(
new string[] { DatasetColumns.LAST_MODIFIED_TIMESTAMP },
DatasetColumns.IDENTITY_ID + " = @whereIdentityId AND " +
DatasetColumns.DATASET_NAME + " = @whereDatasetName "
);
UpdateLastModifiedTimestampHelper(query, AWSSDKUtils.CorrectedUtcNow.ToLocalTime(), identityId, datasetName);
}
}
#endregion
#region private methods
#if BCL
[System.Security.SecuritySafeCritical]
#endif
private bool UpdateDatasetMetadataInternal(string identityId, DatasetMetadata metadata)
{
lock (sqlite_lock)
{
DatasetMetadata local = GetMetadataHelper(identityId, metadata.DatasetName);
if (local == null)
{
string updateDatasetMetadataQuery = DatasetColumns.BuildInsert();
ExecuteMultipleHelper(new List<Statement>(){
new Statement{
Query = updateDatasetMetadataQuery,
Parameters = new object[]{identityId,metadata.DatasetName,metadata.CreationDate,metadata.LastModifiedDate,metadata.RecordCount,metadata.StorageSizeBytes,0,0,null}
}
});
}
else
{
string updateDatasetMetadataQuery = DatasetColumns.BuildUpdate(
new string[] {
DatasetColumns.DATASET_NAME,
DatasetColumns.CREATION_TIMESTAMP,
DatasetColumns.LAST_MODIFIED_TIMESTAMP,
DatasetColumns.LAST_MODIFIED_BY,
DatasetColumns.RECORD_COUNT,
DatasetColumns.STORAGE_SIZE_BYTES
},
DatasetColumns.IDENTITY_ID + " = @whereIdentityId AND " +
DatasetColumns.DATASET_NAME + " = @whereDatasetName "
);
ExecuteMultipleHelper(new List<Statement>(){
new Statement{
Query = updateDatasetMetadataQuery,
Parameters = new object[]{metadata.DatasetName,metadata.CreationDate,metadata.LastModifiedDate,metadata.LastModifiedBy,metadata.RecordCount,metadata.StorageSizeBytes,identityId,metadata.DatasetName}
}
});
}
return true;
}
}
private bool PutValueHelper(string identityId, string datasetName, string key, string value)
{
lock (sqlite_lock)
{
Record record = GetRecord(identityId, datasetName, key);
if (record != null && string.Equals(record.Value, value))
{
return true;
}
if (record == null)
{
string insertRecord = RecordColumns.BuildInsert();
ExecuteMultipleHelper(new List<Statement>{new Statement{
Query = insertRecord,
Parameters = new object[]{identityId,datasetName,key,value,0,AWSSDKUtils.CorrectedUtcNow.ToLocalTime(),string.Empty,AWSSDKUtils.CorrectedUtcNow.ToLocalTime(),1}
}});
return true;
}
else
{
string insertRecord =
RecordColumns.BuildUpdate(
new string[] {
RecordColumns.IDENTITY_ID, RecordColumns.DATASET_NAME, RecordColumns.KEY,
RecordColumns.VALUE, RecordColumns.MODIFIED, RecordColumns.SYNC_COUNT,
RecordColumns.DEVICE_LAST_MODIFIED_TIMESTAMP
},
RecordColumns.IDENTITY_ID + " = @whereIdentityId AND " +
RecordColumns.DATASET_NAME + " = @whereDatasetName AND " +
RecordColumns.KEY + " = @whereKey "
);
ExecuteMultipleHelper(new List<Statement>{new Statement{
Query = insertRecord,
Parameters = new object[]{identityId,datasetName,key,value,1,record.SyncCount,AWSSDKUtils.CorrectedUtcNow.ToLocalTime(),identityId,datasetName,key}
}});
return true;
}
}
}
private HashSet<string> GetCommonDatasetNames(string oldIdentityId, string newIdentityId)
{
HashSet<string> newNameSet = new HashSet<string>();
HashSet<string> oldNameSet = new HashSet<string>();
if (oldIdentityId != null && newIdentityId != null)
{
List<DatasetMetadata> newDatasets = GetDatasetMetadata(newIdentityId);
List<DatasetMetadata> oldDatasets = GetDatasetMetadata(oldIdentityId);
foreach (DatasetMetadata oldMetaData in oldDatasets)
{
oldNameSet.Add(oldMetaData.DatasetName);
}
foreach (DatasetMetadata newMetaData in newDatasets)
{
newNameSet.Add(newMetaData.DatasetName);
}
oldNameSet.IntersectWith(newNameSet);
}
return oldNameSet;
}
#endregion
}
}
| 1,069 |
amazon-cognito-sync-manager-net | aws | C# | #if AWS_ASYNC_API
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using System.Threading;
using Amazon.CognitoSync.Model;
namespace Amazon.CognitoSync.SyncManager.Internal
{
/// <summary>
/// Remote data storage using Cognito Sync service on which we can invoke
/// actions like creating a dataset or record.
/// </summary>
public partial class CognitoSyncStorage : IDisposable
{
#region ListDataset
/// <summary>
/// Gets a list of <see cref="DatasetMetadata"/>
/// </summary>
/// <param name="cancellationToken">
/// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
/// </param>
/// <exception cref="Amazon.CognitoSync.SyncManager.DataStorageException"></exception>
public async Task<List<DatasetMetadata>> ListDatasetMetadataAsync(CancellationToken cancellationToken)
{
return await PopulateListDatasetMetadata(null, new List<DatasetMetadata>(), cancellationToken).ConfigureAwait(false);
}
private async Task<List<DatasetMetadata>> PopulateListDatasetMetadata(string nextToken, List<DatasetMetadata> datasets, CancellationToken cancellationToken)
{
ListDatasetsRequest request = new ListDatasetsRequest();
// a large enough number to reduce # of requests
request.MaxResults = 64;
request.NextToken = nextToken;
ListDatasetsResponse response = await client.ListDatasetsAsync(request, cancellationToken).ConfigureAwait(false);
foreach (Amazon.CognitoSync.Model.Dataset dataset in response.Datasets)
{
datasets.Add(ModelToDatasetMetadata(dataset));
}
nextToken = response.NextToken;
if (nextToken != null)
{
await PopulateListDatasetMetadata(nextToken, datasets, cancellationToken).ConfigureAwait(false);
}
return datasets;
}
#endregion
#region ListUpdates
/// <summary>
/// Gets a list of records which have been updated since lastSyncCount
/// (inclusive). If the value of a record equals null, then the record is
/// deleted. If you pass 0 as lastSyncCount, the full list of records will be
/// returned.
/// </summary>
/// <returns>A list of records which have been updated since lastSyncCount.</returns>
/// <param name="datasetName">Dataset name.</param>
/// <param name="lastSyncCount">Last sync count.</param>
/// <param name="cancellationToken">
/// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
/// </param>
/// <exception cref="Amazon.CognitoSync.SyncManager.DataStorageException"></exception>
public async Task<DatasetUpdates> ListUpdatesAsync(string datasetName, long lastSyncCount, CancellationToken cancellationToken)
{
return await PopulateListUpdatesAsync(datasetName, lastSyncCount, new List<Record>(), null, cancellationToken).ConfigureAwait(false);
}
private async Task<DatasetUpdates> PopulateListUpdatesAsync(string datasetName, long lastSyncCount, List<Record> records, string nextToken, CancellationToken cancellationToken)
{
ListRecordsRequest request = new ListRecordsRequest();
request.IdentityPoolId = identityPoolId;
request.IdentityId = this.GetCurrentIdentityId();
request.DatasetName = datasetName;
request.LastSyncCount = lastSyncCount;
// mark it large enough to reduce # of requests
request.MaxResults = 1024;
request.NextToken = nextToken;
ListRecordsResponse listRecordsResponse = await client.ListRecordsAsync(request, cancellationToken).ConfigureAwait(false);
foreach (Amazon.CognitoSync.Model.Record remoteRecord in listRecordsResponse.Records)
{
records.Add(ModelToRecord(remoteRecord));
}
// update last evaluated key
nextToken = listRecordsResponse.NextToken;
if (nextToken != null)
await PopulateListUpdatesAsync(datasetName, lastSyncCount, records, nextToken, cancellationToken).ConfigureAwait(false);
DatasetUpdates updates = new DatasetUpdates(
datasetName,
records,
listRecordsResponse.DatasetSyncCount,
listRecordsResponse.SyncSessionToken,
listRecordsResponse.DatasetExists,
listRecordsResponse.DatasetDeletedAfterRequestedSyncCount,
listRecordsResponse.MergedDatasetNames
);
return updates;
}
#endregion
#region PutRecords
/// <summary>
/// Post updates to remote storage. Each record has a sync count. If the sync
/// count doesn't match what's on the remote storage, i.e. the record is
/// modified by a different device, this operation throws ConflictException.
/// Otherwise it returns a list of records that are updated successfully.
/// </summary>
/// <returns>The records.</returns>
/// <param name="datasetName">Dataset name.</param>
/// <param name="records">Records.</param>
/// <param name="syncSessionToken">Sync session token.</param>
/// <param name="cancellationToken">
/// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
/// </param>
/// <exception cref="Amazon.CognitoSync.SyncManager.DatasetNotFoundException"></exception>
/// <exception cref="Amazon.CognitoSync.SyncManager.DataConflictException"></exception>
public async Task<List<Record>> PutRecordsAsync(string datasetName, List<Record> records, string syncSessionToken, CancellationToken cancellationToken)
{
UpdateRecordsRequest request = new UpdateRecordsRequest();
request.DatasetName = datasetName;
request.IdentityPoolId = identityPoolId;
request.IdentityId = this.GetCurrentIdentityId();
request.SyncSessionToken = syncSessionToken;
// create patches
List<RecordPatch> patches = new List<RecordPatch>();
foreach (Record record in records)
{
patches.Add(RecordToPatch(record));
}
request.RecordPatches = patches;
List<Record> updatedRecords = new List<Record>();
try
{
UpdateRecordsResponse updateRecordsResponse = await client.UpdateRecordsAsync(request, cancellationToken).ConfigureAwait(false);
foreach (Amazon.CognitoSync.Model.Record remoteRecord in updateRecordsResponse.Records)
{
updatedRecords.Add(ModelToRecord(remoteRecord));
}
return updatedRecords;
}
catch (Exception ex)
{
throw HandleException(ex, "Failed to update records in dataset: " + datasetName);
}
}
#endregion
#region DeleteDataset
/// <summary>
/// Deletes a dataset.
/// </summary>
/// <param name="datasetName">Dataset name.</param>
/// <param name="cancellationToken">
/// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
/// </param>
/// <exception cref="Amazon.CognitoSync.SyncManager.DatasetNotFoundException"></exception>
public async Task DeleteDatasetAsync(string datasetName, CancellationToken cancellationToken)
{
DeleteDatasetRequest request = new DeleteDatasetRequest();
request.IdentityPoolId = identityPoolId;
request.IdentityId = this.GetCurrentIdentityId();
request.DatasetName = datasetName;
try
{
await client.DeleteDatasetAsync(request, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
throw HandleException(ex, "Failed to delete dataset: " + datasetName);
}
}
#endregion
#region GetDatasetMetadata
/// <summary>
/// Retrieves the metadata of a dataset.
/// </summary>
/// <param name="datasetName">Dataset name.</param>
/// <param name="cancellationToken">
/// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
/// </param>
/// <exception cref="Amazon.CognitoSync.SyncManager.DataStorageException"></exception>
public async Task<DatasetMetadata> GetDatasetMetadataAsync(string datasetName, CancellationToken cancellationToken)
{
DescribeDatasetRequest request = new DescribeDatasetRequest();
request.IdentityPoolId = identityPoolId;
request.IdentityId = this.GetCurrentIdentityId();
request.DatasetName = datasetName;
try
{
DescribeDatasetResponse describeDatasetResponse = await client.DescribeDatasetAsync(request, cancellationToken).ConfigureAwait(false);
return ModelToDatasetMetadata(describeDatasetResponse.Dataset);
}
catch (Exception ex)
{
throw new DataStorageException("Failed to get metadata of dataset: "
+ datasetName, ex);
}
}
#endregion
}
}
#endif | 221 |
Subsets and Splits