Mirror of https://github.com/zrwusa/data-structure-typed.git (synced 2024-11-23 12:54:04 +00:00)

[core] HashTable, HashMap, SkipList implemented

This commit is contained in:
parent 17e0fcb061
commit 1687f3cde8

.github/workflows/ci.yml (vendored, 1 change)
@ -9,6 +9,7 @@ on:
  paths-ignore:
    - 'lib/**'
    - 'dist/**'
    - 'umd/**'
    - 'docs/**'
    - 'coverage/**'
    - '**/*.md'
README.md (30 changes)
@ -441,24 +441,17 @@ Array.from(dijkstraResult?.seen ?? []).map(vertex => vertex.id) // ['A', 'B', 'D
  <tr>
    <td>Set</td>
    <td>std::set<T></td>
    <td>Set</td>
    <td>Set<E></td>
    <td>HashSet<E></td>
    <td>set</td>
  </tr>
  <tr>
    <td>Map</td>
    <td>std::map<K, V></td>
    <td>Map</td>
    <td>Map<K, V></td>
    <td>HashMap<K, V></td>
    <td>dict</td>
  </tr>
  <tr>
    <td>Unordered Map</td>
    <td>std::unordered_map<K, V></td>
    <td>N/A</td>
    <td>HashMap<K, V></td>
    <td>defaultdict</td>
  </tr>
  <tr>
    <td>Unordered Set</td>
    <td>std::unordered_set<T></td>
@ -466,24 +459,31 @@ Array.from(dijkstraResult?.seen ?? []).map(vertex => vertex.id) // ['A', 'B', 'D
    <td>HashSet<E></td>
    <td>N/A</td>
  </tr>
  <tr>
    <td>Unordered Map</td>
    <td>std::unordered_map<K, V></td>
    <td>HashTable<K, V></td>
    <td>HashMap<K, V></td>
    <td>defaultdict</td>
  </tr>
  <tr>
    <td>Queue</td>
    <td>std::queue<T></td>
    <td>Queue</td>
    <td>Queue<E></td>
    <td>Queue<E></td>
    <td>N/A</td>
  </tr>
  <tr>
    <td>Priority Queue</td>
    <td>std::priority_queue<T></td>
    <td>PriorityQueue</td>
    <td>PriorityQueue<E></td>
    <td>PriorityQueue<E></td>
    <td>N/A</td>
  </tr>
  <tr>
    <td>Stack</td>
    <td>std::stack<T></td>
    <td>Stack</td>
    <td>Stack<E></td>
    <td>Stack<E></td>
    <td>N/A</td>
  </tr>
@ -497,7 +497,7 @@ Array.from(dijkstraResult?.seen ?? []).map(vertex => vertex.id) // ['A', 'B', 'D
  <tr>
    <td>Deque</td>
    <td>std::deque<T></td>
    <td>Deque</td>
    <td>Deque<E></td>
    <td>N/A</td>
    <td>N/A</td>
  </tr>
@ -525,14 +525,14 @@ Array.from(dijkstraResult?.seen ?? []).map(vertex => vertex.id) // ['A', 'B', 'D
  <tr>
    <td>Ordered Dictionary</td>
    <td>N/A</td>
    <td>Map</td>
    <td>Map<K, V></td>
    <td>N/A</td>
    <td>OrderedDict</td>
  </tr>
  <tr>
    <td>Double-Ended Queue (Deque)</td>
    <td>std::deque<T></td>
    <td>Deque</td>
    <td>Deque<E></td>
    <td>N/A</td>
    <td>N/A</td>
  </tr>
|
|
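To make the correspondence above concrete, here is a minimal sketch of the three containers this commit adds, assuming they are re-exported from the package root (the unit tests below import them from the `src` barrel):

```ts
import {HashMap, HashTable, SkipList} from 'data-structure-typed';

// HashMap<K, V> plays the role of std::unordered_map<K, V> / Java's HashMap<K, V>
const ages = new HashMap<string, number>();
ages.set('alice', 30);
ages.get('alice'); // 30

// HashTable<K, V> is the separate-chaining table listed in the "Unordered Map" row
const table = new HashTable<string, number>();
table.set('answer', 42);
table.get('answer'); // 42

// SkipList<K, V> keeps keys in sorted order with probabilistic balancing
const ranks = new SkipList<number, string>();
ranks.add(2, 'silver');
ranks.add(1, 'gold');
ranks.get(1); // 'gold'
```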
@ -87,12 +87,21 @@ export class SegmentTree {
|
|||
* included in the range. If not provided, it defaults to the index of the last element in the "values" array.
|
||||
*/
|
||||
constructor(values: number[], start?: number, end?: number) {
|
||||
console.log('values.length:', values.length);
|
||||
start = start || 0;
|
||||
end = end || values.length - 1;
|
||||
this._values = values;
|
||||
this._start = start;
|
||||
this._end = end;
|
||||
this._root = this.build(start, end);
|
||||
|
||||
if (values.length > 0) {
|
||||
console.log('Initializing with non-empty array');
|
||||
this._root = this.build(start, end);
|
||||
} else {
|
||||
console.log('Initializing with empty array');
|
||||
this._root = null;
|
||||
this._values = [];
|
||||
}
|
||||
}
|
||||
|
||||
private _values: number[] = [];
|
||||
|
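A small sketch of how the updated constructor behaves, mirroring the new unit tests at the end of this commit (the import path assumes the published package name):

```ts
import {SegmentTree} from 'data-structure-typed';

const st = new SegmentTree([1, 2, 3, 4, 5]);
st.root?.sum;                // 15, the sum of the whole range is stored at the root

const empty = new SegmentTree([]);
empty.root;                  // null, no build is attempted for an empty input
empty.querySumByRange(0, 2); // 0, queries against an empty tree short-circuit to 0
```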
@ -119,15 +128,18 @@ export class SegmentTree {
|
|||
}
|
||||
|
||||
/**
|
||||
* The function builds a segment tree by recursively dividing the given range into smaller segments and creating nodes
|
||||
* for each segment.
|
||||
* The build function creates a segment tree by recursively dividing the given range into smaller segments and assigning
|
||||
* the sum of values to each segment.
|
||||
* @param {number} start - The `start` parameter represents the starting index of the segment or range for which we are
|
||||
* building the segment tree.
|
||||
* @param {number} end - The `end` parameter represents the ending index of the segment or range for which we are
|
||||
* building the segment tree.
|
||||
* @param {number} end - The "end" parameter represents the ending index of the segment or range for which we want to
|
||||
* build a segment tree.
|
||||
* @returns a SegmentTreeNode object.
|
||||
*/
|
||||
build(start: number, end: number): SegmentTreeNode {
|
||||
if (start > end) {
|
||||
return new SegmentTreeNode(start, end, 0);
|
||||
}
|
||||
if (start === end) return new SegmentTreeNode(start, end, this._values[start]);
|
||||
|
||||
const mid = start + Math.floor((end - start) / 2);
|
||||
|
@ -192,33 +204,39 @@ export class SegmentTree {
|
|||
return 0;
|
||||
}
|
||||
|
||||
if (indexA < 0 || indexB >= this.values.length || indexA > indexB) {
|
||||
return NaN;
|
||||
}
|
||||
|
||||
const dfs = (cur: SegmentTreeNode, i: number, j: number): number => {
|
||||
if (cur.start === i && cur.end === j) {
|
||||
if (i <= cur.start && j >= cur.end) {
|
||||
// The range [i, j] completely covers the current node's range [cur.start, cur.end]
|
||||
return cur.sum;
|
||||
}
|
||||
const mid = cur.start + Math.floor((cur.end - cur.start) / 2);
|
||||
if (j <= mid) {
|
||||
// TODO after no-non-null-assertion not ensure the logic
|
||||
if (cur.left) {
|
||||
return dfs(cur.left, i, j);
|
||||
} else {
|
||||
return NaN;
|
||||
}
|
||||
} else if (i > mid) {
|
||||
// TODO after no-non-null-assertion not ensure the logic
|
||||
if (cur.right) {
|
||||
// TODO after no-non-null-assertion not ensure the logic
|
||||
return dfs(cur.right, i, j);
|
||||
} else {
|
||||
return NaN;
|
||||
}
|
||||
} else {
|
||||
// TODO after no-non-null-assertion not ensure the logic
|
||||
if (cur.left && cur.right) {
|
||||
return dfs(cur.left, i, mid) + dfs(cur.right, mid + 1, j);
|
||||
} else {
|
||||
return NaN;
|
||||
// Query both left and right subtrees
|
||||
let leftSum = 0;
|
||||
let rightSum = 0;
|
||||
if (cur.left) {
|
||||
leftSum = dfs(cur.left, i, mid);
|
||||
}
|
||||
if (cur.right) {
|
||||
rightSum = dfs(cur.right, mid + 1, j);
|
||||
}
|
||||
return leftSum + rightSum;
|
||||
}
|
||||
};
|
||||
return dfs(root, indexA, indexB);
|
||||
|
|
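Continuing the sketch above, a range that straddles a node's midpoint is now answered by querying both children and adding the partial sums, instead of requiring an exact match on the node's range:

```ts
const st = new SegmentTree([1, 2, 3, 4, 5]);

// Indices 1..3 cover the values 2, 3 and 4. The root spans [0, 4] with mid = 2,
// so the query splits into the left part (2 + 3 = 5) and the right part (4).
st.querySumByRange(1, 3);  // 9

// Inverted or out-of-bounds ranges still return NaN.
st.querySumByRange(3, 2);  // NaN
st.querySumByRange(0, 10); // NaN
```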
src/data-structures/hash/hash-map.ts (new file, 203 lines)
|
@ -0,0 +1,203 @@
|
|||
import {HashFunction} from '../../types';
|
||||
|
||||
/**
|
||||
* data-structure-typed
|
||||
*
|
||||
* @author Tyler Zeng
|
||||
* @copyright Copyright (c) 2022 Tyler Zeng <zrwusa@gmail.com>
|
||||
* @license MIT License
|
||||
*/
|
||||
export class HashMap<K, V> {
|
||||
get hashFn(): HashFunction<K> {
|
||||
return this._hashFn;
|
||||
}
|
||||
|
||||
set hashFn(value: HashFunction<K>) {
|
||||
this._hashFn = value;
|
||||
}
|
||||
get table(): Array<Array<[K, V]>> {
|
||||
return this._table;
|
||||
}
|
||||
|
||||
set table(value: Array<Array<[K, V]>>) {
|
||||
this._table = value;
|
||||
}
|
||||
|
||||
get capacityMultiplier(): number {
|
||||
return this._capacityMultiplier;
|
||||
}
|
||||
|
||||
set capacityMultiplier(value: number) {
|
||||
this._capacityMultiplier = value;
|
||||
}
|
||||
|
||||
get loadFactor(): number {
|
||||
return this._loadFactor;
|
||||
}
|
||||
|
||||
set loadFactor(value: number) {
|
||||
this._loadFactor = value;
|
||||
}
|
||||
|
||||
get initialCapacity(): number {
|
||||
return this._initialCapacity;
|
||||
}
|
||||
|
||||
set initialCapacity(value: number) {
|
||||
this._initialCapacity = value;
|
||||
}
|
||||
|
||||
get size(): number {
|
||||
return this._size;
|
||||
}
|
||||
|
||||
set size(value: number) {
|
||||
this._size = value;
|
||||
}
|
||||
|
||||
private _initialCapacity: number;
|
||||
private _loadFactor: number;
|
||||
private _capacityMultiplier: number;
|
||||
private _size: number;
|
||||
private _table: Array<Array<[K, V]>>;
|
||||
private _hashFn: HashFunction<K>;
|
||||
|
||||
/**
|
||||
* The constructor initializes the properties of a hash table, including the initial capacity, load factor, capacity
|
||||
* multiplier, size, table array, and hash function.
|
||||
* @param [initialCapacity=16] - The initial capacity is the initial size of the hash table. It determines the number of
|
||||
* buckets or slots available for storing key-value pairs. The default value is 16.
|
||||
* @param [loadFactor=0.75] - The load factor is a measure of how full the hash table can be before it is resized. It is
|
||||
* a value between 0 and 1, where 1 means the hash table is completely full and 0 means it is completely empty. When the
|
||||
* load factor is reached, the hash table will
|
||||
* @param [hashFn] - The `hashFn` parameter is an optional parameter that represents the hash function used to calculate
|
||||
* the index of a key in the hash table. If a custom hash function is not provided, a default hash function is used. The
|
||||
* default hash function converts the key to a string, calculates the sum of the
|
||||
*/
|
||||
constructor(initialCapacity = 16, loadFactor = 0.75, hashFn?: HashFunction<K>) {
|
||||
this._initialCapacity = initialCapacity;
|
||||
this._loadFactor = loadFactor;
|
||||
this._capacityMultiplier = 2;
|
||||
this._size = 0;
|
||||
this._table = new Array(initialCapacity);
|
||||
this._hashFn =
|
||||
hashFn ||
|
||||
((key: K) => {
|
||||
const strKey = String(key);
|
||||
let hash = 0;
|
||||
for (let i = 0; i < strKey.length; i++) {
|
||||
hash += strKey.charCodeAt(i);
|
||||
}
|
||||
return hash % this.table.length;
|
||||
});
|
||||
}
|
||||
|
||||
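As a worked example of the default hash function above: it sums the character codes of the stringified key and takes the result modulo the current table length, so the key 'ab' maps to bucket (97 + 98) % 16 = 3 in a fresh 16-slot table.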
private _hash(key: K): number {
|
||||
return this._hashFn(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* The `resizeTable` function resizes the table used in a hash map by creating a new table with a specified capacity and
|
||||
* rehashing the key-value pairs from the old table into the new table.
|
||||
* @param {number} newCapacity - The newCapacity parameter is the desired capacity for the resized table. It represents
|
||||
* the number of buckets that the new table should have.
|
||||
*/
|
||||
private resizeTable(newCapacity: number): void {
|
||||
const newTable = new Array(newCapacity);
|
||||
for (const bucket of this._table) {
|
||||
// Note that this is this._table
|
||||
if (bucket) {
|
||||
for (const [key, value] of bucket) {
|
||||
const newIndex = this._hash(key) % newCapacity;
|
||||
if (!newTable[newIndex]) {
|
||||
newTable[newIndex] = [];
|
||||
}
|
||||
newTable[newIndex].push([key, value]);
|
||||
}
|
||||
}
|
||||
}
|
||||
this._table = newTable; // Again, here is this._table
|
||||
}
|
||||
|
||||
set(key: K, value: V): void {
|
||||
const loadFactor = this.size / this.table.length;
|
||||
if (loadFactor >= this.loadFactor) {
|
||||
this.resizeTable(this.table.length * this.capacityMultiplier);
|
||||
}
|
||||
|
||||
const index = this._hash(key);
|
||||
if (!this.table[index]) {
|
||||
this.table[index] = [];
|
||||
}
|
||||
|
||||
// Check if the key already exists in the bucket
|
||||
for (let i = 0; i < this.table[index].length; i++) {
|
||||
if (this.table[index][i][0] === key) {
|
||||
this.table[index][i][1] = value;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
this.table[index].push([key, value]);
|
||||
this.size++;
|
||||
}
|
||||
|
||||
get(key: K): V | undefined {
|
||||
const index = this._hash(key);
|
||||
if (!this.table[index]) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
for (const [k, v] of this.table[index]) {
|
||||
if (k === key) {
|
||||
return v;
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
remove(key: K): void {
|
||||
const index = this._hash(key);
|
||||
if (!this.table[index]) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (let i = 0; i < this.table[index].length; i++) {
|
||||
if (this.table[index][i][0] === key) {
|
||||
this.table[index].splice(i, 1);
|
||||
this.size--;
|
||||
|
||||
// Check if the table needs to be resized down
|
||||
const loadFactor = this.size / this.table.length;
|
||||
if (loadFactor < this.loadFactor / this.capacityMultiplier) {
|
||||
this.resizeTable(this.table.length / this.capacityMultiplier);
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
*entries(): IterableIterator<[K, V]> {
|
||||
for (const bucket of this.table) {
|
||||
if (bucket) {
|
||||
for (const [key, value] of bucket) {
|
||||
yield [key, value];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Symbol.iterator](): IterableIterator<[K, V]> {
|
||||
return this.entries();
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
this.size = 0;
|
||||
this.table = new Array(this.initialCapacity);
|
||||
}
|
||||
|
||||
isEmpty(): boolean {
|
||||
return this.size === 0;
|
||||
}
|
||||
}
|
|
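A usage sketch of the HashMap above, based on the behaviour exercised by the unit tests later in this commit; the import path assumes the published package name. Resizing is checked before each insert, once size / table.length reaches the load factor:

```ts
import {HashMap} from 'data-structure-typed';

const map = new HashMap<string, number>(4, 0.5); // small capacity and load factor, for illustration

map.set('one', 1);
map.set('two', 2);
map.set('three', 3); // before this insert 2 / 4 >= 0.5, so the table doubles to 8 slots
map.set('four', 4);

map.get('three');    // 3
map.table.length;    // 8
map.remove('one');   // the table only shrinks if the load factor drops below 0.5 / 2
map.size;            // 3

for (const [key, value] of map) {
  console.log(key, value); // iteration walks the buckets via the entries() generator
}

map.clear();
map.isEmpty();       // true
```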
@ -5,10 +5,11 @@
|
|||
* @copyright Copyright (c) 2022 Tyler Zeng <zrwusa@gmail.com>
|
||||
* @license MIT License
|
||||
*/
|
||||
export class HashNode<K, V> {
|
||||
|
||||
export class HashTableNode<K, V> {
|
||||
key: K;
|
||||
val: V;
|
||||
next: HashNode<K, V> | null;
|
||||
next: HashTableNode<K, V> | null;
|
||||
|
||||
constructor(key: K, val: V) {
|
||||
this.key = key;
|
||||
|
@ -17,23 +18,25 @@ export class HashNode<K, V> {
|
|||
}
|
||||
}
|
||||
|
||||
import {HashFunction} from '../../types';
|
||||
|
||||
export class HashTable<K, V> {
|
||||
get buckets(): Array<HashNode<K, V> | null> {
|
||||
get hashFn(): HashFunction<K> {
|
||||
return this._hashFn;
|
||||
}
|
||||
|
||||
set hashFn(value: HashFunction<K>) {
|
||||
this._hashFn = value;
|
||||
}
|
||||
|
||||
get buckets(): Array<HashTableNode<K, V> | null> {
|
||||
return this._buckets;
|
||||
}
|
||||
|
||||
set buckets(value: Array<HashNode<K, V> | null>) {
|
||||
set buckets(value: Array<HashTableNode<K, V> | null>) {
|
||||
this._buckets = value;
|
||||
}
|
||||
|
||||
get size(): number {
|
||||
return this._size;
|
||||
}
|
||||
|
||||
set size(value: number) {
|
||||
this._size = value;
|
||||
}
|
||||
|
||||
get capacity(): number {
|
||||
return this._capacity;
|
||||
}
|
||||
|
@ -42,82 +45,163 @@ export class HashTable<K, V> {
|
|||
this._capacity = value;
|
||||
}
|
||||
|
||||
private static readonly DEFAULT_CAPACITY = 16;
|
||||
private static readonly LOAD_FACTOR = 0.75;
|
||||
|
||||
private _capacity: number;
|
||||
private _size: number;
|
||||
private _buckets: Array<HashNode<K, V> | null>;
|
||||
private _buckets: Array<HashTableNode<K, V> | null>;
|
||||
private _hashFn: HashFunction<K>;
|
||||
|
||||
/**
|
||||
* The constructor initializes the capacity, size, and buckets of an object.
|
||||
* @param [capacity=1000] - The `capacity` parameter represents the maximum number of elements that the data structure
|
||||
* can hold. It is an optional parameter with a default value of 1000.
|
||||
*/
|
||||
constructor(capacity = 1000) {
|
||||
this._capacity = capacity;
|
||||
constructor(capacity: number = HashTable.DEFAULT_CAPACITY, hashFn?: HashFunction<K>) {
|
||||
this._hashFn = hashFn || this._defaultHashFn;
|
||||
this._capacity = Math.max(capacity, HashTable.DEFAULT_CAPACITY);
|
||||
this._size = 0;
|
||||
this._buckets = new Array(this.capacity).fill(null);
|
||||
this._buckets = new Array<HashTableNode<K, V> | null>(this._capacity).fill(null);
|
||||
}
|
||||
|
||||
/**
|
||||
* The hash function takes a key, converts it to a string, calculates the sum of the ASCII values of its characters, and
|
||||
* returns the remainder when divided by the capacity of the data structure.
|
||||
* @param {K} key - The `key` parameter represents the key that needs to be hashed. It is of type `K`, which means it can
|
||||
* be any data type that can be converted to a string.
|
||||
* @returns The hash value of the key modulo the capacity of the data structure.
|
||||
* The function `_defaultHashFn` calculates the hash value of a given key and returns the remainder when divided by the
|
||||
* capacity of the data structure.
|
||||
* @param {K} key - The `key` parameter is the input value that needs to be hashed. It can be of any type, but in this
|
||||
* code snippet, it is checked whether the key is a string or an object. If it is a string, the `_murmurStringHashFn`
|
||||
* function is used to
|
||||
* @returns the hash value of the key modulo the capacity of the data structure.
|
||||
*/
|
||||
private hash(key: K): number {
|
||||
protected _defaultHashFn(key: K): number {
|
||||
// Can be replaced with other hash functions as needed
|
||||
const hashValue = typeof key === 'string' ? this._murmurStringHashFn(key) : this._objectHash(key);
|
||||
return hashValue % this._capacity;
|
||||
}
|
||||
|
||||
/**
|
||||
* The `_multiplicativeStringHashFn` function calculates a hash value for a given string key using the multiplicative
|
||||
* string hash function.
|
||||
* @param {K} key - The `key` parameter is the input value for which we want to calculate the hash. It can be of any
|
||||
* type, as it is generic (`K`). The function converts the `key` to a string using the `String()` function.
|
||||
* @returns a number, which is the result of the multiplicative string hash function applied to the input key.
|
||||
*/
|
||||
protected _multiplicativeStringHashFn<K>(key: K): number {
|
||||
const keyString = String(key);
|
||||
let hash = 0;
|
||||
for (let i = 0; i < keyString.length; i++) {
|
||||
hash += keyString.charCodeAt(i);
|
||||
const charCode = keyString.charCodeAt(i);
|
||||
// Some constants for adjusting the hash function
|
||||
const A = 0.618033988749895;
|
||||
const M = 1 << 30; // 2^30
|
||||
hash = (hash * A + charCode) % M;
|
||||
}
|
||||
return hash % this.capacity;
|
||||
return Math.abs(hash); // Take absolute value to ensure non-negative numbers
|
||||
}
|
||||
|
||||
/**
|
||||
* The put function adds a key-value pair to a hash table, handling collisions by chaining.
|
||||
* @param {K} key - The key parameter represents the key of the key-value pair that you want to insert into the hash
|
||||
* table. It is of type K, which can be any data type that can be used as a key, such as a string, number, or object.
|
||||
* @param {V} val - The `val` parameter represents the value associated with the key in the hash table.
|
||||
* @returns Nothing is being returned. The return type of the function is void, which means it does not return any value.
|
||||
* The function `_murmurStringHashFn` calculates a hash value for a given string key using the MurmurHash algorithm.
|
||||
* @param {K} key - The `key` parameter is the input value for which you want to calculate the hash. It can be of any
|
||||
* type, but it will be converted to a string using the `String()` function before calculating the hash.
|
||||
* @returns a number, which is the hash value calculated for the given key.
|
||||
*/
|
||||
put(key: K, val: V): void {
|
||||
const index = this.hash(key);
|
||||
const newNode = new HashNode(key, val);
|
||||
protected _murmurStringHashFn<K>(key: K): number {
|
||||
const keyString = String(key);
|
||||
const seed = 0;
|
||||
let hash = seed;
|
||||
|
||||
if (!this.buckets[index]) {
|
||||
this.buckets[index] = newNode;
|
||||
for (let i = 0; i < keyString.length; i++) {
|
||||
const char = keyString.charCodeAt(i);
|
||||
hash = (hash ^ char) * 0x5bd1e995;
|
||||
hash = (hash ^ (hash >>> 15)) * 0x27d4eb2d;
|
||||
hash = hash ^ (hash >>> 15);
|
||||
}
|
||||
|
||||
return Math.abs(hash);
|
||||
}
|
||||
|
||||
/**
|
||||
* The _hash function takes a key and returns a number.
|
||||
* @param {K} key - The parameter "key" is of type K, which represents the type of the key that will be hashed.
|
||||
* @returns The hash function is returning a number.
|
||||
*/
|
||||
protected _hash(key: K): number {
|
||||
return this.hashFn(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* The function calculates a hash value for a given string using a multiply-by-31 polynomial rolling hash (the same scheme as Java's String.hashCode).
|
||||
* @param {string} key - The `key` parameter in the `stringHash` function is a string value that represents the input for
|
||||
* which we want to calculate the hash value.
|
||||
* @returns a number, which is the hash value of the input string.
|
||||
*/
|
||||
protected _stringHash(key: string): number {
|
||||
let hash = 0;
|
||||
for (let i = 0; i < key.length; i++) {
|
||||
hash = (hash * 31 + key.charCodeAt(i)) & 0xffffffff;
|
||||
}
|
||||
return hash;
|
||||
}
|
||||
|
||||
/**
|
||||
* The function `_objectHash` takes a key and returns a hash value, using a custom hash function for objects.
|
||||
* @param {K} key - The parameter "key" is of type "K", which means it can be any type. It could be a string, number,
|
||||
* boolean, object, or any other type of value. The purpose of the objectHash function is to generate a hash value for
|
||||
* the key, which can be used for
|
||||
* @returns a number, which is the hash value of the key.
|
||||
*/
|
||||
protected _objectHash(key: K): number {
|
||||
// If the key is an object, you can write a custom hash function
|
||||
// For example, convert the object's properties to a string and use string hashing
|
||||
// This is just an example; you should write a specific object hash function as needed
|
||||
return this._stringHash(JSON.stringify(key));
|
||||
}
|
||||
|
||||
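One consequence of the default object hashing above is worth spelling out: structurally equal objects hash to the same bucket (via JSON.stringify), but lookups compare keys with ===, so only the exact same object reference is found. A minimal sketch:

```ts
const ht = new HashTable<{id: number}, string>();
const key = {id: 1};

ht.set(key, 'found');
ht.get(key);     // 'found', same reference
ht.get({id: 1}); // undefined: same structure and same bucket, but a different reference
```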
/**
|
||||
* The set function adds a key-value pair to the hash table, handling collisions and resizing if necessary.
|
||||
* @param {K} key - The key parameter represents the key of the key-value pair that you want to insert into the hash
|
||||
* table. It is of type K, which is a generic type representing the key's data type.
|
||||
* @param {V} val - The parameter `val` represents the value that you want to associate with the given key in the hash
|
||||
* table.
|
||||
* @returns Nothing is being returned. The return type of the `put` method is `void`, which means it does not return any
|
||||
* value.
|
||||
*/
|
||||
set(key: K, val: V): void {
|
||||
const index = this._hash(key);
|
||||
const newNode = new HashTableNode<K, V>(key, val);
|
||||
|
||||
if (!this._buckets[index]) {
|
||||
this._buckets[index] = newNode;
|
||||
} else {
|
||||
// Handle collision by chaining
|
||||
let currentNode = this.buckets[index]!;
|
||||
while (currentNode.next) {
|
||||
// Handle collisions, consider using open addressing, etc.
|
||||
let currentNode = this._buckets[index]!;
|
||||
while (currentNode) {
|
||||
if (currentNode.key === key) {
|
||||
// Update the val if the key already exists
|
||||
// If the key already exists, update the value
|
||||
currentNode.val = val;
|
||||
return;
|
||||
}
|
||||
if (!currentNode.next) {
|
||||
break;
|
||||
}
|
||||
currentNode = currentNode.next;
|
||||
}
|
||||
if (currentNode.key === key) {
|
||||
// Update the val if the key already exists (last node)
|
||||
currentNode.val = val;
|
||||
} else {
|
||||
// Add the new node to the end of the chain
|
||||
currentNode.next = newNode;
|
||||
}
|
||||
// Add to the end of the linked list
|
||||
currentNode.next = newNode;
|
||||
}
|
||||
this._size++;
|
||||
|
||||
// If the load factor is too high, resize the hash table
|
||||
if (this._size / this._capacity >= HashTable.LOAD_FACTOR) {
|
||||
this._expand();
|
||||
}
|
||||
this.size++;
|
||||
}
|
||||
|
||||
/**
|
||||
* The `get` function retrieves the value associated with a given key from a hash table.
|
||||
* @param {K} key - The parameter "key" represents the key of the element that we want to retrieve from the data
|
||||
* @param {K} key - The `key` parameter represents the key of the element that we want to retrieve from the data
|
||||
* structure.
|
||||
* @returns The method is returning the value associated with the given key if it exists in the hash table. If the key is
|
||||
* not found, it returns `undefined`.
|
||||
*/
|
||||
get(key: K): V | undefined {
|
||||
const index = this.hash(key);
|
||||
let currentNode = this.buckets[index];
|
||||
const index = this._hash(key);
|
||||
let currentNode = this._buckets[index];
|
||||
|
||||
while (currentNode) {
|
||||
if (currentNode.key === key) {
|
||||
|
@ -129,29 +213,65 @@ export class HashTable<K, V> {
|
|||
}
|
||||
|
||||
/**
|
||||
* The `remove` function removes a key-value pair from a hash table.
|
||||
* The remove function removes a key-value pair from a hash table.
|
||||
* @param {K} key - The `key` parameter represents the key of the key-value pair that needs to be removed from the hash
|
||||
* table.
|
||||
* @returns Nothing is being returned. The `remove` method has a return type of `void`, which means it does not return
|
||||
* any value.
|
||||
*/
|
||||
remove(key: K): void {
|
||||
const index = this.hash(key);
|
||||
let currentNode = this.buckets[index];
|
||||
let prevNode: HashNode<K, V> | null = null;
|
||||
const index = this._hash(key);
|
||||
let currentNode = this._buckets[index];
|
||||
let prevNode: HashTableNode<K, V> | null = null;
|
||||
|
||||
while (currentNode) {
|
||||
if (currentNode.key === key) {
|
||||
if (prevNode) {
|
||||
prevNode.next = currentNode.next;
|
||||
} else {
|
||||
this.buckets[index] = currentNode.next;
|
||||
this._buckets[index] = currentNode.next;
|
||||
}
|
||||
this.size--;
|
||||
this._size--;
|
||||
currentNode.next = null; // Release memory
|
||||
return;
|
||||
}
|
||||
prevNode = currentNode;
|
||||
currentNode = currentNode.next;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The `expand` function increases the capacity of a hash table by creating a new array of buckets with double the
|
||||
* capacity and rehashing all the existing key-value pairs into the new buckets.
|
||||
*/
|
||||
protected _expand(): void {
|
||||
const newCapacity = this._capacity * 2;
|
||||
const newBuckets = new Array<HashTableNode<K, V> | null>(newCapacity).fill(null);
|
||||
|
||||
for (const bucket of this._buckets) {
|
||||
let currentNode = bucket;
|
||||
while (currentNode) {
|
||||
const newIndex = this._hash(currentNode.key);
|
||||
const newNode = new HashTableNode<K, V>(currentNode.key, currentNode.val);
|
||||
|
||||
if (!newBuckets[newIndex]) {
|
||||
newBuckets[newIndex] = newNode;
|
||||
} else {
|
||||
let currentNewNode = newBuckets[newIndex]!;
|
||||
while (currentNewNode.next) {
|
||||
currentNewNode = currentNewNode.next;
|
||||
}
|
||||
currentNewNode.next = newNode;
|
||||
}
|
||||
currentNode = currentNode.next;
|
||||
}
|
||||
}
|
||||
|
||||
this._buckets = newBuckets;
|
||||
this._capacity = newCapacity;
|
||||
}
|
||||
|
||||
get size(): number {
|
||||
return this._size;
|
||||
}
|
||||
}
|
||||
|
|
|
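A short usage sketch of the reworked HashTable, assuming it is imported from the package root as the tests do from `src`. It shows the custom hash function hook added in this commit; note that the returned value is used directly as a bucket index, so a custom function should stay within [0, capacity):

```ts
import {HashTable} from 'data-structure-typed';

// Toy hash function for illustration only: short keys map to small bucket indices.
const ht = new HashTable<string, number>(16, key => key.length);

ht.set('one', 1);
ht.set('two', 2);   // 'one' and 'two' both have length 3, so they are chained in one bucket
ht.set('one', 100); // updating an existing key does not change the size

ht.get('one');      // 100
ht.get('two');      // 2
ht.remove('two');
ht.size;            // 1

// Once size / capacity reaches LOAD_FACTOR (0.75) the table doubles and every node is rehashed.
```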
@ -4,3 +4,4 @@ export * from './coordinate-set';
export * from './pair';
export * from './tree-map';
export * from './tree-set';
export * from './hash-map';
|
|
|
@ -1 +1,166 @@
|
|||
export class SkipLinkedList {}
|
||||
/**
|
||||
* data-structure-typed
|
||||
*
|
||||
* @author Tyler Zeng
|
||||
* @copyright Copyright (c) 2022 Tyler Zeng <zrwusa@gmail.com>
|
||||
* @license MIT License
|
||||
*/
|
||||
|
||||
export class SkipListNode<K, V> {
|
||||
key: K;
|
||||
value: V;
|
||||
forward: SkipListNode<K, V>[];
|
||||
|
||||
constructor(key: K, value: V, level: number) {
|
||||
this.key = key;
|
||||
this.value = value;
|
||||
this.forward = new Array(level);
|
||||
}
|
||||
}
|
||||
|
||||
export class SkipList<K, V> {
|
||||
get probability(): number {
|
||||
return this._probability;
|
||||
}
|
||||
|
||||
set probability(value: number) {
|
||||
this._probability = value;
|
||||
}
|
||||
get maxLevel(): number {
|
||||
return this._maxLevel;
|
||||
}
|
||||
|
||||
set maxLevel(value: number) {
|
||||
this._maxLevel = value;
|
||||
}
|
||||
get level(): number {
|
||||
return this._level;
|
||||
}
|
||||
|
||||
set level(value: number) {
|
||||
this._level = value;
|
||||
}
|
||||
get head(): SkipListNode<K, V> {
|
||||
return this._head;
|
||||
}
|
||||
|
||||
set head(value: SkipListNode<K, V>) {
|
||||
this._head = value;
|
||||
}
|
||||
private _head: SkipListNode<K, V>;
|
||||
private _level: number;
|
||||
private _maxLevel: number;
|
||||
private _probability: number;
|
||||
|
||||
/**
|
||||
* The constructor initializes a SkipList with a specified maximum level and probability.
|
||||
* @param [maxLevel=16] - The `maxLevel` parameter represents the maximum level that a skip list can have. It determines
|
||||
* the maximum number of levels that can be created in the skip list.
|
||||
* @param [probability=0.5] - The probability parameter represents the probability of a node being promoted to a higher
|
||||
* level in the skip list. It is used to determine the height of each node in the skip list.
|
||||
*/
|
||||
constructor(maxLevel = 16, probability = 0.5) {
|
||||
this._head = new SkipListNode<K, V>(null as any, null as any, maxLevel);
|
||||
this._level = 0;
|
||||
this._maxLevel = maxLevel;
|
||||
this._probability = probability;
|
||||
}
|
||||
|
||||
/**
|
||||
* The function "randomLevel" generates a random level based on a given probability and maximum level.
|
||||
* @returns the level, which is a number.
|
||||
*/
|
||||
private randomLevel(): number {
|
||||
let level = 1;
|
||||
while (Math.random() < this.probability && level < this.maxLevel) {
|
||||
level++;
|
||||
}
|
||||
return level;
|
||||
}
|
||||
|
||||
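With the default probability of 0.5, randomLevel returns level L with probability about (1/2)^L (truncated at maxLevel), so the expected node height is 1 / (1 - 0.5) = 2 levels and a list of n keys stays O(log n) tall with high probability, which is what makes add, get and remove logarithmic on average.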
/**
|
||||
* The add function adds a new node with a given key and value to a Skip List data structure.
|
||||
* @param {K} key - The key parameter represents the key of the node that needs to be added to the skip list.
|
||||
* @param {V} value - The "value" parameter represents the value associated with the key that is being added to the Skip
|
||||
* List.
|
||||
*/
|
||||
add(key: K, value: V): void {
|
||||
const newNode = new SkipListNode(key, value, this.randomLevel());
|
||||
const update: SkipListNode<K, V>[] = new Array(this.maxLevel).fill(this.head);
|
||||
let current = this.head;
|
||||
|
||||
for (let i = this.level - 1; i >= 0; i--) {
|
||||
while (current.forward[i] && current.forward[i].key < key) {
|
||||
current = current.forward[i];
|
||||
}
|
||||
update[i] = current;
|
||||
}
|
||||
|
||||
for (let i = 0; i < newNode.forward.length; i++) {
|
||||
newNode.forward[i] = update[i].forward[i];
|
||||
update[i].forward[i] = newNode;
|
||||
}
|
||||
|
||||
if (newNode.forward[0] !== null) {
|
||||
this.level = Math.max(this.level, newNode.forward.length);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The function `get` retrieves the value associated with a given key from a skip list data structure.
|
||||
* @param {K} key - The `key` parameter is the key of the element that we want to retrieve from the data structure.
|
||||
* @returns The method `get(key: K)` returns the value associated with the given key if it exists in the data structure,
|
||||
* otherwise it returns `undefined`.
|
||||
*/
|
||||
get(key: K): V | undefined {
|
||||
let current = this.head;
|
||||
for (let i = this.level - 1; i >= 0; i--) {
|
||||
while (current.forward[i] && current.forward[i].key < key) {
|
||||
current = current.forward[i];
|
||||
}
|
||||
}
|
||||
|
||||
current = current.forward[0];
|
||||
|
||||
if (current && current.key === key) {
|
||||
return current.value;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* The `remove` function removes a node with a specific key from a Skip List data structure.
|
||||
* @param {K} key - The key parameter represents the key of the node that needs to be removed from the skip list.
|
||||
* @returns The `remove` method returns a boolean value. It returns `true` if the key was successfully removed from the
|
||||
* skip list, and `false` if the key was not found in the skip list.
|
||||
*/
|
||||
remove(key: K): boolean {
|
||||
const update: SkipListNode<K, V>[] = new Array(this.maxLevel).fill(this.head);
|
||||
let current = this.head;
|
||||
|
||||
for (let i = this.level - 1; i >= 0; i--) {
|
||||
while (current.forward[i] && current.forward[i].key < key) {
|
||||
current = current.forward[i];
|
||||
}
|
||||
update[i] = current;
|
||||
}
|
||||
|
||||
current = current.forward[0];
|
||||
|
||||
if (current && current.key === key) {
|
||||
for (let i = 0; i < this.level; i++) {
|
||||
if (update[i].forward[i] !== current) {
|
||||
break;
|
||||
}
|
||||
update[i].forward[i] = current.forward[i];
|
||||
}
|
||||
while (this.level > 0 && this.head.forward[this.level - 1] === null) {
|
||||
this.level--;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
|
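A usage sketch of the SkipList, mirroring the unit tests added at the end of this commit (import path assumed to be the package root):

```ts
import {SkipList} from 'data-structure-typed';

const list = new SkipList<number, string>();
list.add(3, 'Three');
list.add(1, 'One');
list.add(2, 'Two');   // keys are kept in sorted order regardless of insertion order

list.get(2);    // 'Two'
list.get(9);    // undefined, key not present

list.remove(2); // true, the node is unlinked on every level it appeared on
list.get(2);    // undefined
list.remove(9); // false, nothing to remove
```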
src/types/data-structures/hash.ts (new file, 1 line)
|
@ -0,0 +1 @@
export type HashFunction<K> = (key: K) => number;
|
|
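Both hash containers accept any function matching this signature. As a purely hypothetical example (not part of this commit), an FNV-1a style hash for string keys could look like the sketch below; note that HashMap and HashTable use the returned number directly as a bucket index, so the result should be reduced into the table's range:

```ts
const fnv1a: HashFunction<string> = key => {
  let hash = 0x811c9dc5;
  for (let i = 0; i < key.length; i++) {
    hash ^= key.charCodeAt(i);
    hash = Math.imul(hash, 0x01000193) >>> 0; // 32-bit FNV-1a prime multiply
  }
  return hash % 16; // assumes the default capacity of 16
};
```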
@ -13,3 +13,4 @@ export * from './heap';
export * from './singly-linked-list';
export * from './doubly-linked-list';
export * from './navigator';
export * from './hash';
|
|
test/unit/data-structures/binary-tree/segment-tree.test.ts (new file, 50 lines)
|
@ -0,0 +1,50 @@
|
|||
import {SegmentTree} from '../../../../src';
|
||||
|
||||
describe('SegmentTree', () => {
|
||||
let segmentTree: SegmentTree;
|
||||
|
||||
beforeEach(() => {
|
||||
// Create an example SegmentTree for testing
|
||||
const values = [1, 2, 3, 4, 5];
|
||||
segmentTree = new SegmentTree(values);
|
||||
});
|
||||
|
||||
it('should build a valid segment tree', () => {
|
||||
// Check if the root node's sum is correct
|
||||
expect(segmentTree.root?.sum).toBe(15);
|
||||
});
|
||||
|
||||
it('should update a node in the segment tree', () => {
|
||||
// Update a node value
|
||||
segmentTree.updateNode(2, 10);
|
||||
|
||||
// Check if the sum of the root node is correct after the update
|
||||
expect(segmentTree.root?.sum).toBe(22);
|
||||
});
|
||||
|
||||
it('should query sum by range correctly', () => {
|
||||
// Check if the sum within a specific range is correct
|
||||
expect(segmentTree.querySumByRange(1, 3)).toBe(9); // 2 + 3 + 4 = 9
|
||||
});
|
||||
|
||||
it('should handle edge cases for querySumByRange', () => {
|
||||
// Check behavior when the range goes beyond boundaries
|
||||
expect(segmentTree.querySumByRange(0, 4)).toBe(15); // Valid range, should return sum of the specified range
|
||||
expect(segmentTree.querySumByRange(3, 2)).toBe(NaN); // End index is less than start index, should return NaN
|
||||
expect(segmentTree.querySumByRange(0, 10)).toBe(NaN); // Beyond upper bound, should return NaN
|
||||
});
|
||||
|
||||
it('should handle an empty input array', () => {
|
||||
// Check behavior when dealing with an empty input array
|
||||
const emptySegmentTree = new SegmentTree([]);
|
||||
expect(emptySegmentTree.root).toBe(null);
|
||||
expect(emptySegmentTree.querySumByRange(0, 2)).toBe(0); // Sum of an empty array should be 0
|
||||
});
|
||||
|
||||
it('should handle a single-element input array', () => {
|
||||
// Check behavior when the input array contains a single element
|
||||
const singleElementSegmentTree = new SegmentTree([42]);
|
||||
expect(singleElementSegmentTree.root?.sum).toBe(42);
|
||||
expect(singleElementSegmentTree.querySumByRange(0, 0)).toBe(42); // Range covering the only element should return that element's value
|
||||
});
|
||||
});
|
test/unit/data-structures/hash/hash-map.test.ts (new file, 104 lines)
|
@ -0,0 +1,104 @@
|
|||
import {HashMap} from '../../../../src';
|
||||
|
||||
describe('HashMap', () => {
|
||||
let hashMap: HashMap<string, number>;
|
||||
|
||||
beforeEach(() => {
|
||||
hashMap = new HashMap<string, number>();
|
||||
});
|
||||
|
||||
it('should initialize correctly', () => {
|
||||
expect(hashMap.size).toBe(0);
|
||||
expect(hashMap.table.length).toBe(16);
|
||||
expect(hashMap.loadFactor).toBe(0.75);
|
||||
expect(hashMap.capacityMultiplier).toBe(2);
|
||||
expect(hashMap.initialCapacity).toBe(16);
|
||||
expect(hashMap.isEmpty()).toBe(true);
|
||||
});
|
||||
|
||||
it('should put and get values', () => {
|
||||
hashMap.set('one', 1);
|
||||
hashMap.set('two', 2);
|
||||
hashMap.set('three', 3);
|
||||
|
||||
expect(hashMap.get('one')).toBe(1);
|
||||
expect(hashMap.get('two')).toBe(2);
|
||||
expect(hashMap.get('three')).toBe(3);
|
||||
});
|
||||
|
||||
it('should handle key collisions', () => {
|
||||
// Force a collision by setting two different keys to the same bucket
|
||||
hashMap.hashFn = () => 0; // Override hash function to return the same index
|
||||
hashMap.set('key1', 1);
|
||||
hashMap.set('key2', 2);
|
||||
|
||||
expect(hashMap.get('key1')).toBe(1);
|
||||
expect(hashMap.get('key2')).toBe(2);
|
||||
});
|
||||
|
||||
it('should remove values', () => {
|
||||
hashMap.set('one', 1);
|
||||
hashMap.set('two', 2);
|
||||
|
||||
hashMap.remove('one');
|
||||
expect(hashMap.get('one')).toBeUndefined();
|
||||
expect(hashMap.size).toBe(1);
|
||||
});
|
||||
|
||||
it('should clear the HashMap', () => {
|
||||
hashMap.set('one', 1);
|
||||
hashMap.set('two', 2);
|
||||
|
||||
hashMap.clear();
|
||||
expect(hashMap.size).toBe(0);
|
||||
expect(hashMap.isEmpty()).toBe(true);
|
||||
});
|
||||
|
||||
it('should iterate over entries', () => {
|
||||
hashMap.set('one', 1);
|
||||
hashMap.set('two', 2);
|
||||
hashMap.set('three', 3);
|
||||
|
||||
const entries = Array.from(hashMap.entries());
|
||||
expect(entries).toEqual(
|
||||
expect.arrayContaining([
|
||||
['one', 1],
|
||||
['two', 2],
|
||||
['three', 3]
|
||||
])
|
||||
);
|
||||
});
|
||||
|
||||
it('should resize the table when load factor is exceeded', () => {
|
||||
// Set a small initial capacity for testing resizing
|
||||
hashMap = new HashMap<string, number>(4, 0.5);
|
||||
|
||||
hashMap.set('one', 1);
|
||||
hashMap.set('two', 2);
|
||||
hashMap.set('three', 3);
|
||||
hashMap.set('four', 4); // This should trigger a resize
|
||||
|
||||
expect(hashMap.table.length).toBe(8);
|
||||
expect(hashMap.get('one')).toBe(1);
|
||||
expect(hashMap.get('two')).toBe(2);
|
||||
expect(hashMap.get('three')).toBe(3);
|
||||
expect(hashMap.get('four')).toBe(4);
|
||||
});
|
||||
|
||||
it('should allow using a custom hash function', () => {
|
||||
const customHashFn = () => {
|
||||
// A simple custom hash function that always returns 0
|
||||
return 0;
|
||||
};
|
||||
hashMap = new HashMap<string, number>(16, 0.75, customHashFn);
|
||||
|
||||
hashMap.set('one', 1);
|
||||
hashMap.set('two', 2);
|
||||
|
||||
expect(hashMap.get('one')).toBe(1);
|
||||
expect(hashMap.get('two')).toBe(2);
|
||||
// Since the custom hash function always returns 0, these keys will collide.
|
||||
// Make sure they are stored separately.
|
||||
expect(hashMap.table[0].length).toBe(2);
|
||||
});
|
||||
});
|
|
@ -1,10 +1,10 @@
|
|||
import {HashNode, HashTable} from '../../../../src';
|
||||
import {HashTableNode, HashTable} from '../../../../src';
|
||||
|
||||
describe('HashNode', () => {
|
||||
it('should create a HashNode with key and value', () => {
|
||||
const key = 'testKey';
|
||||
const value = 'testValue';
|
||||
const hashNode = new HashNode(key, value);
|
||||
const hashNode = new HashTableNode(key, value);
|
||||
|
||||
expect(hashNode.key).toBe(key);
|
||||
expect(hashNode.val).toBe(value);
|
||||
|
@ -16,9 +16,9 @@ describe('HashTable', () => {
|
|||
it('should initialize with default capacity', () => {
|
||||
const hashTable = new HashTable<string, string>();
|
||||
|
||||
expect(hashTable.capacity).toBe(1000);
|
||||
expect(hashTable.capacity).toBe(16);
|
||||
expect(hashTable.size).toBe(0);
|
||||
expect(hashTable.buckets.length).toBe(1000);
|
||||
expect(hashTable.buckets.length).toBe(16);
|
||||
});
|
||||
|
||||
it('should initialize with custom capacity', () => {
|
||||
|
@ -35,7 +35,7 @@ describe('HashTable', () => {
|
|||
const key = 'testKey';
|
||||
const value = 'testValue';
|
||||
|
||||
hashTable.put(key, value);
|
||||
hashTable.set(key, value);
|
||||
const retrievedValue = hashTable.get(key);
|
||||
|
||||
expect(retrievedValue).toBe(value);
|
||||
|
@ -48,8 +48,8 @@ describe('HashTable', () => {
|
|||
const key2 = 'testKey2';
|
||||
const value2 = 'testValue2';
|
||||
|
||||
hashTable.put(key1, value1);
|
||||
hashTable.put(key2, value2);
|
||||
hashTable.set(key1, value1);
|
||||
hashTable.set(key2, value2);
|
||||
|
||||
const retrievedValue1 = hashTable.get(key1);
|
||||
const retrievedValue2 = hashTable.get(key2);
|
||||
|
@ -64,8 +64,8 @@ describe('HashTable', () => {
|
|||
const initialValue = 'testValue1';
|
||||
const updatedValue = 'testValue2';
|
||||
|
||||
hashTable.put(key, initialValue);
|
||||
hashTable.put(key, updatedValue);
|
||||
hashTable.set(key, initialValue);
|
||||
hashTable.set(key, updatedValue);
|
||||
|
||||
const retrievedValue = hashTable.get(key);
|
||||
|
||||
|
@ -86,7 +86,7 @@ describe('HashTable', () => {
|
|||
const key = 'testKey';
|
||||
const value = 'testValue';
|
||||
|
||||
hashTable.put(key, value);
|
||||
hashTable.set(key, value);
|
||||
hashTable.remove(key);
|
||||
|
||||
const retrievedValue = hashTable.get(key);
|
||||
|
@ -95,3 +95,90 @@ describe('HashTable', () => {
|
|||
expect(hashTable.size).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('HashTable', () => {
|
||||
let hashTable: HashTable<string, number>;
|
||||
|
||||
beforeEach(() => {
|
||||
hashTable = new HashTable<string, number>();
|
||||
});
|
||||
|
||||
it('should insert and retrieve values correctly', () => {
|
||||
hashTable.set('one', 1);
|
||||
hashTable.set('two', 2);
|
||||
|
||||
expect(hashTable.get('one')).toBe(1);
|
||||
expect(hashTable.get('two')).toBe(2);
|
||||
});
|
||||
|
||||
it('should update values correctly', () => {
|
||||
hashTable.set('one', 1);
|
||||
expect(hashTable.get('one')).toBe(1);
|
||||
|
||||
hashTable.set('one', 100); // Update the value
|
||||
expect(hashTable.get('one')).toBe(100);
|
||||
});
|
||||
|
||||
it('should handle collisions correctly', () => {
|
||||
hashTable = new HashTable<string, number>(1); // Note: the constructor clamps capacity up to DEFAULT_CAPACITY (16), so collisions are not actually forced here
|
||||
hashTable.set('one', 1);
|
||||
hashTable.set('two', 2);
|
||||
|
||||
expect(hashTable.get('one')).toBe(1);
|
||||
expect(hashTable.get('two')).toBe(2);
|
||||
});
|
||||
|
||||
it('should remove values correctly', () => {
|
||||
hashTable.set('one', 1);
|
||||
hashTable.set('two', 2);
|
||||
hashTable.remove('one');
|
||||
|
||||
expect(hashTable.get('one')).toBeUndefined();
|
||||
expect(hashTable.get('two')).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle non-existent keys correctly', () => {
|
||||
expect(hashTable.get('non-existent')).toBeUndefined();
|
||||
hashTable.remove('non-existent'); // Removing a non-existent key should not cause errors
|
||||
});
|
||||
|
||||
it('should handle custom hash function correctly', () => {
|
||||
// const customHashFn = () => {
|
||||
// // Custom hash function that returns a fixed value for all keys
|
||||
// return 42;
|
||||
// };
|
||||
|
||||
hashTable = new HashTable<string, number>(16);
|
||||
hashTable.set('one', 1);
|
||||
expect(hashTable.get('one')).toBe(1);
|
||||
expect(hashTable.get('two')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should expand when load factor exceeds threshold', () => {
|
||||
hashTable = new HashTable<string, number>(2); // The requested capacity is clamped up to DEFAULT_CAPACITY (16)
|
||||
hashTable.set('one', 1);
|
||||
hashTable.set('two', 2);
|
||||
hashTable.set('three', 3); // This should trigger an expansion
|
||||
|
||||
expect(hashTable.capacity).toBe(16);
|
||||
expect(hashTable.get('one')).toBe(1);
|
||||
expect(hashTable.get('two')).toBe(2);
|
||||
expect(hashTable.get('three')).toBe(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('HashTable performance', function () {
|
||||
it('Items set performance', function () {
|
||||
const mag = 100000;
|
||||
const ht = new HashTable();
|
||||
// const s = performance.now();
|
||||
for (let i = 0; i < mag; i++) {
|
||||
ht.set(i, i);
|
||||
}
|
||||
// const s1 = performance.now();
|
||||
const map = new Map();
|
||||
for (let i = 0; i < mag; i++) {
|
||||
map.set(i, i);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
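The performance test above leaves its timing calls commented out; a hedged sketch of how it could report actual numbers, using Node's perf_hooks and console output rather than assertions (package import path assumed):

```ts
import {performance} from 'perf_hooks';
import {HashTable} from 'data-structure-typed';

const mag = 100000;

const t0 = performance.now();
const ht = new HashTable<number, number>();
for (let i = 0; i < mag; i++) ht.set(i, i);
const t1 = performance.now();

const map = new Map<number, number>();
for (let i = 0; i < mag; i++) map.set(i, i);
const t2 = performance.now();

console.log(`HashTable.set x${mag}: ${(t1 - t0).toFixed(1)} ms`);
console.log(`Map.set x${mag}: ${(t2 - t1).toFixed(1)} ms`);
```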
test/unit/data-structures/linked-list/skip-list.test.ts (new file, 55 lines)
|
@ -0,0 +1,55 @@
|
|||
import {SkipList} from '../../../../src';
|
||||
|
||||
describe('SkipList', () => {
|
||||
let skipList: SkipList<number, string>;
|
||||
|
||||
beforeEach(() => {
|
||||
skipList = new SkipList<number, string>();
|
||||
});
|
||||
|
||||
it('should insert and retrieve elements correctly', () => {
|
||||
skipList.add(1, 'One');
|
||||
skipList.add(2, 'Two');
|
||||
skipList.add(3, 'Three');
|
||||
|
||||
expect(skipList.get(1)).toBe('One');
|
||||
expect(skipList.get(2)).toBe('Two');
|
||||
expect(skipList.get(3)).toBe('Three');
|
||||
});
|
||||
|
||||
it('should return undefined for non-existent keys', () => {
|
||||
skipList.add(1, 'One');
|
||||
skipList.add(2, 'Two');
|
||||
|
||||
expect(skipList.get(3)).toBeUndefined();
|
||||
expect(skipList.get(0)).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should remove elements correctly', () => {
|
||||
skipList.add(1, 'One');
|
||||
skipList.add(2, 'Two');
|
||||
skipList.add(3, 'Three');
|
||||
|
||||
skipList.remove(2);
|
||||
|
||||
expect(skipList.get(2)).toBeUndefined(); // the removed key should no longer be found
|
||||
});
|
||||
|
||||
it('should handle random data correctly', () => {
|
||||
const randomData: Array<[number, string]> = [
|
||||
[5, 'Five'],
|
||||
[1, 'One'],
|
||||
[3, 'Three'],
|
||||
[2, 'Two'],
|
||||
[4, 'Four']
|
||||
];
|
||||
|
||||
for (const [key, value] of randomData) {
|
||||
skipList.add(key, value);
|
||||
}
|
||||
|
||||
expect(skipList.get(3)).toBe('Three');
|
||||
expect(skipList.get(5)).toBe('Five');
|
||||
expect(skipList.get(4)).toBe('Four');
|
||||
});
|
||||
});
|