diff --git a/package.json b/package.json
index 5a2d572e..daf4fb01 100644
--- a/package.json
+++ b/package.json
@@ -179,7 +179,8 @@
"@storm-stack/web-docs": "workspace:*",
"@storm-stack/server-cache": "workspace:*",
"@storm-stack/string-fns": "workspace:*",
- "@storm-stack/types": "workspace:*"
+ "@storm-stack/types": "workspace:*",
+ "@storm-stack/hashing": "workspace:*"
}
},
"prettier": "@storm-software/prettier/config.json",
diff --git a/packages/hashing/README.md b/packages/hashing/README.md
new file mode 100644
index 00000000..55a943f2
--- /dev/null
+++ b/packages/hashing/README.md
@@ -0,0 +1,62 @@
+# hashing
+
+A package that includes utility functions for hashing objects and other JavaScript values
+
+## Installing
+
+Using [pnpm](https://pnpm.io):
+
+```bash
+pnpm add -D @storm-stack/hashing
+```
+
+Using [npm](https://www.npmjs.com):
+
+```bash
+npm install -D @storm-stack/hashing
+```
+
+Using [yarn](https://yarnpkg.com):
+
+```bash
+yarn add -D @storm-stack/hashing
+```
+
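+## Example Usage
+
+The package entry point re-exports the `hash` helper from `src/hash.ts`. Below is a minimal usage sketch (the values shown are illustrative only):
+
+```typescript
+import { hash } from "@storm-stack/hashing";
+
+// Objects with the same keys and values hash to the same short string,
+// regardless of key order (`unorderedObjects` defaults to true)
+const first = hash({ name: "storm", tags: ["web", "docs"] });
+const second = hash({ tags: ["web", "docs"], name: "storm" });
+// first === second
+
+// Strings are hashed directly with SHA-256 and truncated to 10 characters
+const cacheKey = hash("storm-stack");
+```
+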
+## Reduced Package Size
+
+This project uses [tsup](https://tsup.egoist.dev/) to package the source code
+due to its ability to remove unused code and ship smaller JavaScript files
+thanks to code splitting. This greatly reduces the package size and makes the
+package easier to use in other projects.
+
+## Development
+
+This project is built using [Nx](https://nx.dev). As a result, many of the usual
+commands are available to assist in development.
+
+### Building
+
+Run `nx build hashing` to build the library.
+
+### Running unit tests
+
+Run `nx test hashing` to execute the unit tests via [Jest](https://jestjs.io).
+
+### Linting
+
+Run `nx lint hashing` to run [ESLint](https://eslint.org/) on the package.
+
+
+
diff --git a/packages/hashing/jest.config.ts b/packages/hashing/jest.config.ts
new file mode 100644
index 00000000..23b558c8
--- /dev/null
+++ b/packages/hashing/jest.config.ts
@@ -0,0 +1,3 @@
+import { getJestConfig } from "@storm-software/testing-tools";
+
+export default getJestConfig("packages/hashing", true, "hashing");
diff --git a/packages/hashing/package.json b/packages/hashing/package.json
new file mode 100644
index 00000000..197c82dc
--- /dev/null
+++ b/packages/hashing/package.json
@@ -0,0 +1,15 @@
+{
+ "name": "@storm-stack/hashing",
+ "version": "0.0.1",
+ "type": "module",
+ "description": "⚡ The storm-stack monorepo contains utility applications, tools, and various libraries to create modern and scalable web applications.",
+ "repository": {
+ "type": "github",
+ "url": "https://github.com/storm-software/storm-stack.git",
+ "directory": "packages/hashing"
+ },
+ "private": false,
+ "publishConfig": {
+ "access": "public"
+ }
+}
diff --git a/packages/hashing/project.json b/packages/hashing/project.json
new file mode 100644
index 00000000..02932e0f
--- /dev/null
+++ b/packages/hashing/project.json
@@ -0,0 +1,52 @@
+{
+ "name": "hashing",
+ "$schema": "../../node_modules/nx/schemas/project-schema.json",
+ "projectType": "library",
+ "sourceRoot": "packages/hashing/src",
+ "targets": {
+ "build": {
+ "executor": "@storm-software/workspace-tools:tsup-neutral",
+ "outputs": ["{options.outputPath}"],
+ "options": {
+ "entry": "packages/hashing/src/index.ts",
+ "outputPath": "dist/packages/hashing",
+ "tsConfig": "packages/hashing/tsconfig.json",
+ "project": "packages/hashing/package.json",
+ "defaultConfiguration": "production",
+ "assets": [
+ {
+ "input": "packages/hashing",
+ "glob": "*.md",
+ "output": "/"
+ },
+ {
+ "input": "",
+ "glob": "LICENSE",
+ "output": "/"
+ }
+ ],
+ "platform": "neutral"
+ },
+ "configurations": {
+ "production": {
+ "debug": false,
+ "verbose": false
+ },
+ "development": {
+ "debug": true,
+ "verbose": true
+ }
+ }
+ },
+ "nx-release-publish": {
+ "cache": true,
+ "inputs": ["default", "^production"],
+ "dependsOn": ["^build"],
+ "executor": "@storm-software/workspace-tools:npm-publish",
+ "options": {
+ "packageRoot": "dist/{projectRoot}",
+ "registry": "https://registry.npmjs.org/"
+ }
+ }
+ }
+}
diff --git a/packages/hashing/src/hash-object.ts b/packages/hashing/src/hash-object.ts
new file mode 100644
index 00000000..52c7e517
--- /dev/null
+++ b/packages/hashing/src/hash-object.ts
@@ -0,0 +1,457 @@
+// Based on https://github.com/puleos/object-hash v3.0.0 (MIT)
+
+import { isFunction } from "@storm-stack/types";
+
+export interface HashOptions {
+ /**
+ * A list of keys to exclude from hashing, or a function that returns `true` when a key should be excluded
+ */
+ excludeKeys?: ((key: string) => boolean) | string[] | undefined;
+
+ /**
+ * Hash only the object's keys; values are ignored when hashing
+ */
+ excludeValues?: boolean | undefined;
+
+ /**
+ * Ignore unknown object types instead of hashing or throwing on them
+ */
+ ignoreUnknown?: boolean | undefined;
+
+ /**
+ * Optional function that replaces values before hashing
+ */
+ replacer?: ((value: any) => any) | undefined;
+
+ /**
+ * Include the `name` property of functions when hashing
+ */
+ respectFunctionNames?: boolean | undefined;
+
+ /**
+ * Include function properties when hashing
+ */
+ respectFunctionProperties?: boolean | undefined;
+
+ /**
+ * Respect special properties (prototype, constructor) when hashing to distinguish between types
+ */
+ respectType?: boolean | undefined;
+
+ /**
+ * Sort all arrays before hashing
+ */
+ unorderedArrays?: boolean | undefined;
+
+ /**
+ * Sort `Set` and `Map` instances before hashing
+ */
+ unorderedObjects?: boolean | undefined;
+
+ /**
+ * Sort `Set` and `Map` instances before hashing
+ */
+ unorderedSets?: boolean | undefined;
+}
+
+type CreateHasherOptions = Omit<HashOptions, "excludeKeys"> & {
+ excludeKeys?: (key: string) => boolean;
+};
+
+// Defaults
+export const HASH_OBJECT_DEFAULT_OPTIONS: HashOptions = Object.freeze({
+ ignoreUnknown: false,
+ respectType: false,
+ respectFunctionNames: false,
+ respectFunctionProperties: false,
+ unorderedObjects: true,
+ unorderedArrays: false,
+ unorderedSets: false,
+ excludeKeys: undefined,
+ excludeValues: undefined,
+ replacer: undefined
+});
+
+export const HASH_OBJECT_EXCLUDED_KEYS = Object.freeze(["__id__"]);
+
+/**
+ * Serialize any JS value into a stable, hashable string
+ *
+ * @param object - The value to hash
+ * @param options - The hashing options
+ * @returns The object hashed into a string
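+ *
+ * @example
+ * A usage sketch (the `user` value is illustrative):
+ * ```typescript
+ * // Hash only the object's keys and skip the `password` field entirely
+ * hashObject(user, { excludeKeys: ["password"], excludeValues: true });
+ * ```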
+ */
+export function hashObject(object: any, options?: HashOptions): string {
+ const opts = options
+ ? { ...HASH_OBJECT_DEFAULT_OPTIONS, ...options }
+ : HASH_OBJECT_DEFAULT_OPTIONS;
+ opts.excludeKeys = (key: string) => {
+ let exclude = false;
+ if (options?.excludeKeys) {
+ if (isFunction(options.excludeKeys)) {
+ exclude = Boolean(options.excludeKeys(key));
+ } else if (Array.isArray(options?.excludeKeys)) {
+ exclude = Boolean(options.excludeKeys.includes(key));
+ }
+ }
+
+ return exclude || HASH_OBJECT_EXCLUDED_KEYS.includes(key);
+ };
+
+ const hasher = createHasher(opts as CreateHasherOptions);
+ hasher.dispatch(object);
+
+ return hasher.toString();
+}
+
+const defaultPrototypesKeys = Object.freeze([
+ "prototype",
+ "__proto__",
+ "constructor"
+]);
+
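+/**
+ * Create the stateful serializer used by `hashObject` — it writes a type-tagged
+ * representation of each dispatched value into an internal string buffer
+ */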
+function createHasher(options: CreateHasherOptions) {
+ let buff = "";
+ let context = new Map();
+ const write = (str: string) => {
+ buff += str;
+ };
+
+ return {
+ toString() {
+ return buff;
+ },
+ getContext() {
+ return context;
+ },
+ dispatch(value: any): string | void {
+ if (options.replacer) {
+ value = options.replacer(value);
+ }
+ const type = value === null ? "null" : typeof value;
+ return this[type](value);
+ },
+ object(object: any): string | void {
+ if (object && typeof object.toJSON === "function") {
+ return this.object(object.toJSON());
+ }
+
+ const objString = Object.prototype.toString.call(object);
+
+ let objType = "";
+ const objectLength = objString.length;
+
+ // '[object a]'.length === 10, the minimum
+ objectLength < 10
+ ? (objType = "unknown:[" + objString + "]")
+ : (objType = objString.slice(8, objectLength - 1));
+ objType = objType.toLowerCase();
+
+ let objectNumber = null;
+ if ((objectNumber = context.get(object)) === undefined) {
+ context.set(object, context.size);
+ } else {
+ return this.dispatch("[CIRCULAR:" + objectNumber + "]");
+ }
+
+ if (
+ typeof Buffer !== "undefined" &&
+ Buffer.isBuffer &&
+ Buffer.isBuffer(object)
+ ) {
+ write("buffer:");
+ return write(object.toString("utf8"));
+ }
+
+ if (
+ objType !== "object" &&
+ objType !== "function" &&
+ objType !== "asyncfunction"
+ ) {
+ // @ts-ignore
+ if (this[objType]) {
+ // @ts-ignore
+ this[objType](object);
+ } else if (!options.ignoreUnknown) {
+ this.unknown(object, objType);
+ }
+ } else {
+ let keys = Object.keys(object);
+ if (options.unorderedObjects) {
+ keys = keys.sort();
+ }
+ let extraKeys = [] as readonly string[];
+ // Make sure to incorporate special properties, so Types with different prototypes will produce
+ // a different hash and objects derived from different functions (`new Foo`, `new Bar`) will
+ // produce different hashes. We never do this for native functions since some seem to break because of that.
+ if (options.respectType !== false && !isNativeFunction(object)) {
+ extraKeys = defaultPrototypesKeys;
+ }
+
+ if (options.excludeKeys) {
+ keys = keys.filter(key => {
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+ return !options.excludeKeys!(key);
+ });
+ extraKeys = extraKeys.filter(key => {
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+ return !options.excludeKeys!(key);
+ });
+ }
+
+ write("object:" + (keys.length + extraKeys.length) + ":");
+ const dispatchForKey = (key: string) => {
+ this.dispatch(key);
+ write(":");
+ if (!options.excludeValues) {
+ this.dispatch(object[key]);
+ }
+ write(",");
+ };
+ for (const key of keys) {
+ dispatchForKey(key);
+ }
+ for (const key of extraKeys) {
+ dispatchForKey(key);
+ }
+ }
+ },
+ array(arr: any, unordered?: boolean): string | void {
+ // Default to options.unorderedArrays when not explicitly specified
+ unordered =
+ unordered === undefined ? options.unorderedArrays !== false : unordered;
+
+ write("array:" + arr.length + ":");
+ if (!unordered || arr.length <= 1) {
+ for (const entry of arr) {
+ this.dispatch(entry);
+ }
+ return;
+ }
+
+ // The unordered case is a little more complicated: since there is no canonical ordering on objects,
+ // i.e. {a:1} < {a:2} and {a:1} > {a:2} are both false,
+ // we first serialize each entry with its own child hasher before sorting.
+ // Also: we can't use the same context for all entries since the order of hashing should *not* matter. Instead,
+ // we collect each child hasher's context additions and adopt them as this hasher's context once we're done.
+ const contextAdditions = new Map();
+ const entries = arr.map((entry: any) => {
+ const hasher = createHasher(options);
+ hasher.dispatch(entry);
+ for (const [key, value] of hasher.getContext()) {
+ contextAdditions.set(key, value);
+ }
+ return hasher.toString();
+ });
+ context = contextAdditions;
+ entries.sort();
+ return this.array(entries, false);
+ },
+ date(date: any) {
+ return write("date:" + date.toJSON());
+ },
+ symbol(sym: any) {
+ return write("symbol:" + sym.toString());
+ },
+ unknown(value: any, type: string) {
+ write(type);
+ if (!value) {
+ return;
+ }
+ write(":");
+ if (value && typeof value.entries === "function") {
+ return this.array([...value.entries()], true /* unordered */);
+ }
+ },
+ error(err: any) {
+ return write("error:" + err.toString());
+ },
+ boolean(bool: any) {
+ return write("bool:" + bool);
+ },
+ string(string: any) {
+ write("string:" + string.length + ":");
+ write(string);
+ },
+ function(fn: any) {
+ write("fn:");
+ if (isNativeFunction(fn)) {
+ this.dispatch("[native]");
+ } else {
+ this.dispatch(fn.toString());
+ }
+
+ if (options.respectFunctionNames !== false) {
+ // Make sure we can still distinguish native functions
+ // by their name, otherwise String and Function will
+ // have the same hash
+ this.dispatch("function-name:" + String(fn.name));
+ }
+
+ if (options.respectFunctionProperties) {
+ this.object(fn);
+ }
+ },
+ number(number: any) {
+ return write("number:" + number);
+ },
+ xml(xml: any) {
+ return write("xml:" + xml.toString());
+ },
+ null() {
+ return write("Null");
+ },
+ undefined() {
+ return write("Undefined");
+ },
+ regexp(regex: any) {
+ return write("regex:" + regex.toString());
+ },
+ uint8array(arr: any) {
+ write("uint8array:");
+ return this.dispatch(Array.prototype.slice.call(arr));
+ },
+ uint8clampedarray(arr: any) {
+ write("uint8clampedarray:");
+ return this.dispatch(Array.prototype.slice.call(arr));
+ },
+ int8array(arr: any) {
+ write("int8array:");
+ return this.dispatch(Array.prototype.slice.call(arr));
+ },
+ uint16array(arr: any) {
+ write("uint16array:");
+ return this.dispatch(Array.prototype.slice.call(arr));
+ },
+ int16array(arr: any) {
+ write("int16array:");
+ return this.dispatch(Array.prototype.slice.call(arr));
+ },
+ uint32array(arr: any) {
+ write("uint32array:");
+ return this.dispatch(Array.prototype.slice.call(arr));
+ },
+ int32array(arr: any) {
+ write("int32array:");
+ return this.dispatch(Array.prototype.slice.call(arr));
+ },
+ float32array(arr: any) {
+ write("float32array:");
+ return this.dispatch(Array.prototype.slice.call(arr));
+ },
+ float64array(arr: any) {
+ write("float64array:");
+ return this.dispatch(Array.prototype.slice.call(arr));
+ },
+ arraybuffer(arr: any) {
+ write("arraybuffer:");
+ return this.dispatch(new Uint8Array(arr));
+ },
+ url(url: any) {
+ return write("url:" + url.toString());
+ },
+ map(map: any) {
+ write("map:");
+ const arr = [...map];
+ return this.array(arr, options.unorderedSets !== false);
+ },
+ set(set: any) {
+ write("set:");
+ const arr = [...set];
+ return this.array(arr, options.unorderedSets !== false);
+ },
+ file(file: any) {
+ write("file:");
+ return this.dispatch([file.name, file.size, file.type, file.lastModified]);
+ },
+ blob() {
+ if (options.ignoreUnknown) {
+ return write("[blob]");
+ }
+ throw new Error(
+ "Hashing Blob objects is currently not supported\n" +
+ 'Use "options.replacer" or "options.ignoreUnknown"\n'
+ );
+ },
+ domwindow() {
+ return write("domwindow");
+ },
+ bigint(value: bigint) {
+ return write("bigint:" + value.toString());
+ },
+ /* Node.js standard native objects */
+ process() {
+ return write("process");
+ },
+ timer() {
+ return write("timer");
+ },
+ pipe() {
+ return write("pipe");
+ },
+ tcp() {
+ return write("tcp");
+ },
+ udp() {
+ return write("udp");
+ },
+ tty() {
+ return write("tty");
+ },
+ statwatcher() {
+ return write("statwatcher");
+ },
+ securecontext() {
+ return write("securecontext");
+ },
+ connection() {
+ return write("connection");
+ },
+ zlib() {
+ return write("zlib");
+ },
+ context() {
+ return write("context");
+ },
+ nodescript() {
+ return write("nodescript");
+ },
+ httpparser() {
+ return write("httpparser");
+ },
+ dataview() {
+ return write("dataview");
+ },
+ signal() {
+ return write("signal");
+ },
+ fsevent() {
+ return write("fsevent");
+ },
+ tlswrap() {
+ return write("tlswrap");
+ }
+ };
+}
+
+const nativeFunc = "[native code] }";
+const nativeFuncLength = nativeFunc.length;
+
+/** Check if the given function is a native function */
+function isNativeFunction(f: any) {
+ if (typeof f !== "function") {
+ return false;
+ }
+ return (
+ Function.prototype.toString.call(f).slice(-nativeFuncLength) === nativeFunc
+ );
+}
diff --git a/packages/hashing/src/hash.ts b/packages/hashing/src/hash.ts
new file mode 100644
index 00000000..3f66626d
--- /dev/null
+++ b/packages/hashing/src/hash.ts
@@ -0,0 +1,16 @@
+import { isString } from "@storm-stack/types";
+import { HashOptions, hashObject } from "./hash-object";
+import { sha256base64 } from "./sha-256";
+
+/**
+ * Hash any JS value into a string
+ *
+ * @param object - The value to hash
+ * @param options - Hashing options
+ * @returns A hashed string value
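+ *
+ * @example
+ * An illustrative sketch (the request-shaped object is an assumption):
+ * ```typescript
+ * // Derive a short, stable cache key from request parameters
+ * const cacheKey = hash({ url: "/users", params: { page: 2 } });
+ * ```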
+ */
+export function hash(object: any, options?: HashOptions): string {
+ return sha256base64(
+ isString(object) ? object : hashObject(object, options)
+ ).slice(0, 10);
+}
diff --git a/packages/hashing/src/index.ts b/packages/hashing/src/index.ts
new file mode 100644
index 00000000..3309dfa2
--- /dev/null
+++ b/packages/hashing/src/index.ts
@@ -0,0 +1,10 @@
+/**
+ * The hashing library used by Storm Software for building TypeScript applications.
+ *
+ * @remarks
+ * A package that includes hashing utility functions
+ *
+ * @packageDocumentation
+ */
+
+export * from "./hash";
diff --git a/packages/hashing/src/sha-256.ts b/packages/hashing/src/sha-256.ts
new file mode 100644
index 00000000..1bb9c669
--- /dev/null
+++ b/packages/hashing/src/sha-256.ts
@@ -0,0 +1,328 @@
+// Based on https://github.com/brix/crypto-js 4.1.1 (MIT)
+
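+/**
+ * An array of 32-bit words plus a count of significant bytes
+ * (a minimal port of the CryptoJS `WordArray`)
+ */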
+export class WordArray {
+ words: number[];
+ sigBytes: number;
+
+ constructor(words?: number[], sigBytes?: number) {
+ words = this.words = words || [];
+
+ this.sigBytes = sigBytes === undefined ? words.length * 4 : sigBytes;
+ }
+
+ toString(encoder?: typeof Hex): string {
+ return (encoder || Hex).stringify(this);
+ }
+
+ concat(wordArray: WordArray) {
+ // Clamp excess bits
+ this.clamp();
+
+ // Concat
+ if (this.sigBytes % 4) {
+ // Copy one byte at a time
+ for (let i = 0; i < wordArray.sigBytes; i++) {
+ const thatByte =
+ (wordArray.words[i >>> 2]! >>> (24 - (i % 4) * 8)) & 0xff;
+ this.words[(this.sigBytes + i) >>> 2]! |=
+ thatByte << (24 - ((this.sigBytes + i) % 4) * 8);
+ }
+ } else {
+ // Copy one word at a time
+ for (let j = 0; j < wordArray.sigBytes; j += 4) {
+ this.words[(this.sigBytes + j) >>> 2] = wordArray.words[j >>> 2]!;
+ }
+ }
+ this.sigBytes += wordArray.sigBytes;
+
+ // Chainable
+ return this;
+ }
+
+ clamp() {
+ // Clamp
+ this.words[this.sigBytes >>> 2]! &=
+ 0xff_ff_ff_ff << (32 - (this.sigBytes % 4) * 8);
+ this.words.length = Math.ceil(this.sigBytes / 4);
+ }
+
+ clone() {
+ return new WordArray([...this.words]);
+ }
+}
+
+export const Hex = {
+ stringify(wordArray: WordArray) {
+ // Convert
+ const hexChars: string[] = [];
+ for (let i = 0; i < wordArray.sigBytes; i++) {
+ const bite = (wordArray.words[i >>> 2]! >>> (24 - (i % 4) * 8)) & 0xff;
+ hexChars.push((bite >>> 4).toString(16), (bite & 0x0f).toString(16));
+ }
+
+ return hexChars.join("");
+ }
+};
+
+export const Base64 = {
+ stringify(wordArray: WordArray) {
+ const keyStr =
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
+ const base64Chars: string[] = [];
+ for (let i = 0; i < wordArray.sigBytes; i += 3) {
+ const byte1 = (wordArray.words[i >>> 2]! >>> (24 - (i % 4) * 8)) & 0xff;
+ const byte2 =
+ (wordArray.words[(i + 1) >>> 2]! >>> (24 - ((i + 1) % 4) * 8)) & 0xff;
+ const byte3 =
+ (wordArray.words[(i + 2) >>> 2]! >>> (24 - ((i + 2) % 4) * 8)) & 0xff;
+
+ const triplet = (byte1 << 16) | (byte2 << 8) | byte3;
+ for (let j = 0; j < 4 && i * 8 + j * 6 < wordArray.sigBytes * 8; j++) {
+ base64Chars.push(keyStr.charAt((triplet >>> (6 * (3 - j))) & 0x3f));
+ }
+ }
+ return base64Chars.join("");
+ }
+};
+
+export const Latin1 = {
+ parse(latin1Str: string) {
+ // Shortcut
+ const latin1StrLength = latin1Str.length;
+
+ // Convert
+ const words: number[] = [];
+ for (let i = 0; i < latin1StrLength; i++) {
+ if (latin1Str.codePointAt(i)) {
+ words[i >>> 2]! |=
+ (latin1Str.codePointAt(i)! & 0xff) << (24 - (i % 4) * 8);
+ }
+ }
+
+ return new WordArray(words, latin1StrLength);
+ }
+};
+
+export const Utf8 = {
+ parse(utf8Str: string) {
+ return Latin1.parse(unescape(encodeURIComponent(utf8Str)));
+ }
+};
+
+export class BufferedBlockAlgorithm {
+ _data = new WordArray();
+ _nDataBytes = 0;
+ _minBufferSize = 0;
+ blockSize = 512 / 32;
+
+ reset() {
+ this._data = new WordArray();
+ this._nDataBytes = 0;
+ }
+
+ _append(data: string | WordArray) {
+ // Convert string to WordArray, else assume WordArray already
+ if (typeof data === "string") {
+ data = Utf8.parse(data);
+ }
+
+ // Append
+ // eslint-disable-next-line unicorn/prefer-spread
+ this._data.concat(data);
+ this._nDataBytes += data.sigBytes;
+ }
+
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
+ _doProcessBlock(_dataWords: any, _offset: any) {}
+
+ _process(doFlush?: boolean) {
+ let processedWords;
+
+ // Count blocks ready
+ let nBlocksReady = this._data.sigBytes / (this.blockSize * 4); /* bytes */
+ doFlush
+ ? (nBlocksReady = Math.ceil(nBlocksReady))
+ : (nBlocksReady = Math.max(
+ Math.trunc(nBlocksReady) - this._minBufferSize,
+ 0
+ ));
+
+ // Count words ready
+ const nWordsReady = nBlocksReady * this.blockSize;
+
+ // Count bytes ready
+ const nBytesReady = Math.min(nWordsReady * 4, this._data.sigBytes);
+
+ // Process blocks
+ if (nWordsReady) {
+ for (let offset = 0; offset < nWordsReady; offset += this.blockSize) {
+ // Perform concrete-algorithm logic
+ this._doProcessBlock(this._data.words, offset);
+ }
+
+ // Remove processed words
+ processedWords = this._data.words.splice(0, nWordsReady);
+ this._data.sigBytes -= nBytesReady;
+ }
+
+ // Return processed words
+ return new WordArray(processedWords, nBytesReady);
+ }
+}
+
+export class Hasher extends BufferedBlockAlgorithm {
+ update(messageUpdate: string) {
+ // Append
+ this._append(messageUpdate);
+
+ // Update the hash
+ this._process();
+
+ // Chainable
+ return this;
+ }
+
+ finalize(messageUpdate: string) {
+ // Final message update
+ if (messageUpdate) {
+ this._append(messageUpdate);
+ }
+ }
+}
+
+// Initialization and round constants tables
+const H = [
+ 1_779_033_703, -1_150_833_019, 1_013_904_242, -1_521_486_534, 1_359_893_119,
+ -1_694_144_372, 528_734_635, 1_541_459_225
+];
+const K = [
+ 1_116_352_408, 1_899_447_441, -1_245_643_825, -373_957_723, 961_987_163,
+ 1_508_970_993, -1_841_331_548, -1_424_204_075, -670_586_216, 310_598_401,
+ 607_225_278, 1_426_881_987, 1_925_078_388, -2_132_889_090, -1_680_079_193,
+ -1_046_744_716, -459_576_895, -272_742_522, 264_347_078, 604_807_628,
+ 770_255_983, 1_249_150_122, 1_555_081_692, 1_996_064_986, -1_740_746_414,
+ -1_473_132_947, -1_341_970_488, -1_084_653_625, -958_395_405, -710_438_585,
+ 113_926_993, 338_241_895, 666_307_205, 773_529_912, 1_294_757_372,
+ 1_396_182_291, 1_695_183_700, 1_986_661_051, -2_117_940_946, -1_838_011_259,
+ -1_564_481_375, -1_474_664_885, -1_035_236_496, -949_202_525, -778_901_479,
+ -694_614_492, -200_395_387, 275_423_344, 430_227_734, 506_948_616,
+ 659_060_556, 883_997_877, 958_139_571, 1_322_822_218, 1_537_002_063,
+ 1_747_873_779, 1_955_562_222, 2_024_104_815, -2_067_236_844, -1_933_114_872,
+ -1_866_530_822, -1_538_233_109, -1_090_935_817, -965_641_998
+];
+
+// Reusable object
+const W: number[] = [];
+
+/**
+ * SHA-256 hash algorithm.
+ */
+export class SHA256 extends Hasher {
+ _hash = new WordArray([...H]);
+
+ override reset() {
+ super.reset();
+ this._hash = new WordArray([...H]);
+ }
+
+ override _doProcessBlock(M: number[], offset: number) {
+ // Shortcut
+ const H = this._hash.words;
+
+ // Working variables
+ let a = H[0];
+ let b = H[1];
+ let c = H[2];
+ let d = H[3];
+ let e = H[4];
+ let f = H[5];
+ let g = H[6];
+ let h = H[7];
+
+ // Computation
+ for (let i = 0; i < 64; i++) {
+ if (i < 16) {
+ W[i] = Math.trunc(M[offset + i]!);
+ } else {
+ const gamma0x = W[i - 15];
+ const gamma0 =
+ ((gamma0x! << 25) | (gamma0x! >>> 7)) ^
+ ((gamma0x! << 14) | (gamma0x! >>> 18)) ^
+ (gamma0x! >>> 3);
+
+ const gamma1x = W[i - 2];
+ const gamma1 =
+ ((gamma1x! << 15) | (gamma1x! >>> 17)) ^
+ ((gamma1x! << 13) | (gamma1x! >>> 19)) ^
+ (gamma1x! >>> 10);
+
+ W[i] = gamma0 + W[i - 7]! + gamma1 + W[i - 16]!;
+ }
+
+ const ch = (e! & f!) ^ (~e! & g!);
+ const maj = (a! & b!) ^ (a! & c!) ^ (b! & c!);
+
+ const sigma0 =
+ ((a! << 30) | (a! >>> 2)) ^
+ ((a! << 19) | (a! >>> 13)) ^
+ ((a! << 10) | (a! >>> 22));
+ const sigma1 =
+ ((e! << 26) | (e! >>> 6)) ^
+ ((e! << 21) | (e! >>> 11)) ^
+ ((e! << 7) | (e! >>> 25));
+
+ const t1 = h! + sigma1 + ch + K[i]! + W[i]!;
+ const t2 = sigma0 + maj;
+
+ h = g;
+ g = f;
+ f = e;
+ e = Math.trunc(d! + t1);
+ d = c;
+ c = b;
+ b = a;
+ a = Math.trunc(t1 + t2);
+ }
+
+ // Intermediate hash value
+ H[0] = Math.trunc(H[0]! + a!);
+ H[1] = Math.trunc(H[1]! + b!);
+ H[2] = Math.trunc(H[2]! + c!);
+ H[3] = Math.trunc(H[3]! + d!);
+ H[4] = Math.trunc(H[4]! + e!);
+ H[5] = Math.trunc(H[5]! + f!);
+ H[6] = Math.trunc(H[6]! + g!);
+ H[7] = Math.trunc(H[7]! + h!);
+ }
+
+ override finalize(messageUpdate: string): WordArray {
+ super.finalize(messageUpdate);
+
+ const nBitsTotal = this._nDataBytes * 8;
+ const nBitsLeft = this._data.sigBytes * 8;
+
+ // Add padding
+ this._data.words[nBitsLeft >>> 5]! |= 0x80 << (24 - (nBitsLeft % 32));
+ this._data.words[(((nBitsLeft + 64) >>> 9) << 4) + 14] = Math.floor(
+ nBitsTotal / 0x1_00_00_00_00
+ );
+ this._data.words[(((nBitsLeft + 64) >>> 9) << 4) + 15] = nBitsTotal;
+ this._data.sigBytes = this._data.words.length * 4;
+
+ // Hash final blocks
+ this._process();
+
+ // Return final computed hash
+ return this._hash;
+ }
+}
+
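+/**
+ * Compute the SHA-256 hash of the given message
+ *
+ * @param message - The message to hash
+ * @returns The digest as a hex-encoded string
+ */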
+export function sha256(message: string) {
+ return new SHA256().finalize(message).toString();
+}
+
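+/**
+ * Compute the SHA-256 hash of the given message
+ *
+ * @param message - The message to hash
+ * @returns The digest encoded with the `Base64` helper above
+ */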
+export function sha256base64(message: string) {
+ return new SHA256().finalize(message).toString(Base64);
+}
diff --git a/packages/hashing/tsconfig.json b/packages/hashing/tsconfig.json
new file mode 100644
index 00000000..d4a4f78c
--- /dev/null
+++ b/packages/hashing/tsconfig.json
@@ -0,0 +1,10 @@
+{
+ "extends": "../../tsconfig.base.json",
+ "compilerOptions": {
+ "outDir": "../../dist/out-tsc",
+ "noEmit": true
+ },
+ "files": [],
+ "include": ["src/**/*.ts", "src/**/*.js", "bin/**/*"],
+ "exclude": ["jest.config.ts", "src/**/*.spec.ts", "src/**/*.test.ts"]
+}
diff --git a/packages/hashing/tsconfig.spec.json b/packages/hashing/tsconfig.spec.json
new file mode 100644
index 00000000..d41aea47
--- /dev/null
+++ b/packages/hashing/tsconfig.spec.json
@@ -0,0 +1,13 @@
+{
+ "extends": "./tsconfig.json",
+ "compilerOptions": {
+ "outDir": "../../dist/out-tsc",
+ "types": ["jest", "node"]
+ },
+ "include": [
+ "jest.config.ts",
+ "src/**/*.test.ts",
+ "src/**/*.spec.ts",
+ "src/**/*.d.ts"
+ ]
+}
diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts
index eb6ef275..46936969 100644
--- a/packages/types/src/index.ts
+++ b/packages/types/src/index.ts
@@ -7,5 +7,6 @@
* @packageDocumentation
*/
+export * from "./convert";
export * from "./type-checks";
export * from "./utility-types";
diff --git a/packages/unique-identifier/src/cuid.ts b/packages/unique-identifier/src/cuid.ts
index 89883be5..e10a9059 100644
--- a/packages/unique-identifier/src/cuid.ts
+++ b/packages/unique-identifier/src/cuid.ts
@@ -1,4 +1,4 @@
-import { hash } from "./hash";
+import { hash } from "@storm-stack/hashing";
import { randomLetter } from "./random";
/**
@@ -49,19 +49,15 @@ function createEntropy(length = 4, random = Math.random) {
* @param options - Options
* @returns The environment's Fingerprint
*/
-function fingerprint(
- options: {
- globalObj?: any;
- } = {
- globalObj:
- typeof global === "undefined"
- ? typeof window === "undefined"
- ? {}
- : window
- : global
- }
-) {
- const globals = Object.keys(options.globalObj).toString();
+function fingerprint(options?: { globalObj?: any }) {
+ const globalObj =
+ options?.globalObj ??
+ (typeof global === "undefined"
+ ? typeof window === "undefined"
+ ? {}
+ : window
+ : global);
+
+ const globals = Object.keys(globalObj).toString();
const sourceString =
globals.length > 0
? globals + createEntropy(CUID_LARGE_LENGTH, Math.random)
@@ -91,7 +87,11 @@ export function cuid(): string {
// The salt should be long enough to be globally unique across the full
// length of the hash. For simplicity, we use the same length as the
// intended id output.
- const salt = createEntropy(length, Math.random);
- return `${randomLetter() + hash(`${time + salt + count + fingerprint()}`).substring(1, length)}`;
+ const salt = createEntropy(CUID_LARGE_LENGTH, Math.random);
+ const hashed = hash(`${time + salt + count + fingerprint()}`);
+ return `${
+ randomLetter() +
+ hashed.slice(1, Math.min(hashed.length - 1, CUID_LARGE_LENGTH))
+ }`;
}
diff --git a/packages/unique-identifier/src/hash.ts b/packages/unique-identifier/src/hash.ts
deleted file mode 100644
index c32f6d30..00000000
--- a/packages/unique-identifier/src/hash.ts
+++ /dev/null
@@ -1,119 +0,0 @@
-import { sha3_512 } from "@noble/hashes/sha3";
-import { isSet, isString } from "@storm-stack/types";
-
-/**
- * Default radix for the BigInt.toString() method.
- */
-const DEFAULT_RADIX = 36;
-
-/**
- * Transform a Uint8Array into a BigInt.
- *
- * @remarks
- * Adapted from https://github.com/juanelas/bigint-conversion
- * MIT License Copyright (c) 2018 Juan Hernández Serrano
- *
- * @param buf - Buffer to transform
- * @returns A BigInt value
- */
-function bufToBigInt(buf: Uint8Array): bigint {
- const bits = 8n;
-
- let value = 0n;
- for (const i of buf.values()) {
- const bi = BigInt(i);
- value = (value << bits) + bi;
- }
-
- return value;
-}
-
-/**
- * Create a hash from a string.
- *
- * @param input - String to hash
- * @returns The hashed string
- */
-export function hash(input: string | object): string {
- return isString(input) ? hashString(input) : hashObject(input);
-}
-
-/**
- * Create a hash from a string.
- *
- * @param input - String to hash
- * @returns The hashed string
- */
-function hashString(inputStr = ""): string {
- // Drop the first character because it will bias the histogram
- // to the left.
- return bufToBigInt(sha3_512(inputStr)).toString(DEFAULT_RADIX).slice(1);
-}
-
-const HASH_TABLE = new WeakMap