--- /dev/null
+// SPDX-FileCopyrightText: 2024 Chris Duncan <chris@zoso.dev>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+/** A unit of work submitted to a Pool, paired with its pending promise callbacks. */
+type Job = {
+ id: number
+ // Settles the promise returned by Pool.assign() on failure
+ reject: (value: any) => void
+ // Settles the promise returned by Pool.assign() with the gathered results
+ resolve: (value: any) => void
+ // Remaining input: an ArrayBuffer or an array of tasks not yet dispatched
+ data: any
+ // Results accumulated from worker threads as chunks complete
+ results: any[]
+}
+
+/** A Web Worker paired with the job it is currently processing, if any. */
+type Thread = {
+ worker: Worker
+ // Null while idle; set to the active job while a message is in flight
+ job: Job | null
+}
+
+/**
+* Processes an array of tasks using Web Workers.
+*/
+export class Pool {
+ static #cores: number = Math.max(1, navigator.hardwareConcurrency - 1)
+ #queue: Job[] = []
+ #threads: Thread[] = []
+ #url: string
+
+ get threadsBusy (): number {
+ let n = 0
+ for (const thread of this.#threads) {
+ n += +(thread.job != null)
+ }
+ return n
+ }
+ get threadsIdle (): number {
+ let n = 0
+ for (const thread of this.#threads) {
+ n += +(thread.job == null)
+ }
+ return n
+ }
+
+ async assign (data: any): Promise<any> {
+ if (!(data instanceof ArrayBuffer || Array.isArray(data))) data = [data]
+ return new Promise((resolve, reject) => {
+ const job: Job = {
+ id: performance.now(),
+ results: [],
+ data,
+ resolve,
+ reject
+ }
+ if (this.#queue.length > 0) {
+ this.#queue.push(job)
+ } else {
+ for (const thread of this.#threads) this.#assign(thread, job)
+ }
+ })
+ }
+
+ /**
+ *
+ * @param {string} worker - Stringified worker class
+ * @param {number} [count=1] - Integer between 1 and CPU thread count shared among all Pools
+ */
+ constructor (worker: string, count: number = 1) {
+ count = Math.min(Pool.#cores, Math.max(1, Math.floor(Math.abs(count))))
+ this.#url = URL.createObjectURL(new Blob([worker], { type: 'text/javascript' }))
+ for (let i = 0; i < count; i++) {
+ const thread = {
+ worker: new Worker(this.#url, { type: 'module' }),
+ job: null
+ }
+ thread.worker.addEventListener('message', message => {
+ let result = JSON.parse(new TextDecoder().decode(message.data) || "[]")
+ if (!Array.isArray(result)) result = [result]
+ this.#report(thread, result)
+ })
+ this.#threads.push(thread)
+ Pool.#cores = Math.max(1, Pool.#cores - this.#threads.length)
+ }
+ }
+
+ #assign (thread: Thread, job: Job): void {
+ if (job.data instanceof ArrayBuffer) {
+ if (job.data.byteLength > 0) {
+ thread.job = job
+ thread.worker.postMessage({ buffer: job.data }, [job.data])
+ }
+ } else {
+ const chunk: number = 1 + (job.data.length / this.threadsIdle)
+ const next = job.data.slice(0, chunk)
+ job.data = job.data.slice(chunk)
+ if (job.data.length === 0) this.#queue.shift()
+ if (next?.length > 0) {
+ const buffer = new TextEncoder().encode(JSON.stringify(next)).buffer
+ thread.job = job
+ thread.worker.postMessage({ buffer }, [buffer])
+ }
+ }
+ }
+
+ #isJobDone (jobId: number): boolean {
+ for (const thread of this.#threads) {
+ if (thread.job?.id === jobId) return false
+ }
+ return true
+ }
+
+ #report (thread: Thread, results: any[]): void {
+ if (thread.job == null) {
+ throw new Error('Thread returned results but had nowhere to report it.')
+ }
+ const job = thread.job
+ if (this.#queue.length > 0) {
+ this.#assign(thread, this.#queue[0])
+ } else {
+ thread.job = null
+ }
+ if (results.length > 0) {
+ job.results.push(...results)
+ }
+ if (this.#isJobDone(job.id)) {
+ job.resolve(job.results)
+ }
+ }
+}
+
+/**
+* Provides basic worker event messaging to extending classes.
+*
+* In order to be properly bundled in a format that can be used to create an
+* inline Web Worker, the extending classes must export WorkerInterface and
+* themselves as a string:
+*```
+* export default `
+* const WorkerInterface = ${WorkerInterface}
+* const Pow = ${Pow}
+* `
+* ```
+* They must also initialize the event listener by calling their inherited
+* `listen()` function. Finally, they must override the implementation of the
+* `work()` function. See the documentation of those functions for details.
+*/
+export class WorkerInterface {
+  /**
+  * Processes data through a worker.
+  *
+  * Extending classes must override this template by implementing the same
+  * function signature and providing their own processing call in the try-catch
+  * block.
+  *
+  * @param {any[]} data - Array of data to process
+  * @returns Promise for that data after being processed
+  */
+  static async work (data: any[]): Promise<any[]> {
+    // Await each entry in place. The previous `for (let d of data) d = await d`
+    // assigned to the loop variable only, discarding every awaited value and
+    // returning the original, unprocessed entries. It also wrapped an async
+    // executor in `new Promise` and could call resolve() after reject().
+    for (let i = 0; i < data.length; i++) {
+      try {
+        data[i] = await data[i]
+      } catch (err) {
+        // Reject the returned promise on the first failure instead of
+        // continuing and then resolving anyway.
+        throw err
+      }
+    }
+    return data
+  }
+
+  /**
+  * Encodes worker results as an ArrayBuffer so it can be transferred back to
+  * the main thread.
+  *
+  * @param {any[]} results - Array of processed data
+  */
+  static report (results: any[]): void {
+    const buffer = new TextEncoder().encode(JSON.stringify(results)).buffer
+    //@ts-expect-error
+    postMessage(buffer, [buffer])
+  }
+
+  /**
+  * Listens for messages from the main thread.
+  *
+  * Extending classes must call this in a static initialization block:
+  * ```
+  * static {
+  *   Pow.listen()
+  * }
+  * ```
+  */
+  static listen (): void {
+    addEventListener('message', (message: any): void => {
+      const { name, buffer } = message.data
+      if (name === 'STOP') {
+        close()
+        // Transfer an empty buffer so Pool#report still fires and can
+        // settle any bookkeeping for this thread.
+        const empty = new ArrayBuffer(0)
+        //@ts-expect-error
+        postMessage(empty, [empty])
+      } else {
+        const data = JSON.parse(new TextDecoder().decode(buffer))
+        // Surface worker-side failures instead of leaving the main thread's
+        // job hanging on an unhandled rejection.
+        this.work(data).then(this.report).catch((err: unknown): void => {
+          console.error(err)
+        })
+      }
+    })
+  }
+}
--- /dev/null
+// Stringified Blake2b core for inlining into a Web Worker.
+export const NanoPowCpuHasher = `
+const blake2b_sigma = [
+  [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
+  [14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3],
+  [11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4],
+  [7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8],
+  [9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13],
+  [2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9],
+  [12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11],
+  [13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10],
+  [6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5],
+  [10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0],
+  [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
+  [14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3]
+]
+
+// Blake2b arithmetic is modulo 2**64, but BigInts are arbitrary precision:
+// every addition and left shift must be masked back to 64 bits or the lanes
+// grow without bound and the rotations (>> n | << 64-n) return wrong values.
+const MASK_64 = 0xffffffffffffffffn
+
+// Blake2b mixing function G operating on 64-bit BigInt lanes of v.
+function G (v, a, b, c, d, m, x, y) {
+  v[a] = (v[a] + v[b] + m[x]) & MASK_64
+  v[d] ^= v[a]
+  v[d] = ((v[d] >> 32n) | (v[d] << 32n)) & MASK_64
+  v[c] = (v[c] + v[d]) & MASK_64
+  v[b] ^= v[c]
+  v[b] = ((v[b] >> 24n) | (v[b] << 40n)) & MASK_64
+  v[a] = (v[a] + v[b] + m[y]) & MASK_64
+  v[d] ^= v[a]
+  v[d] = ((v[d] >> 16n) | (v[d] << 48n)) & MASK_64
+  v[c] = (v[c] + v[d]) & MASK_64
+  v[b] ^= v[c]
+  v[b] = ((v[b] >> 63n) | (v[b] << 1n)) & MASK_64
+}
+
+// Runs the 12 Blake2b rounds and folds the state into the first output word.
+function hash (m, v) {
+  for (let r = 0; r < 12; r++) {
+    const s = blake2b_sigma[r]
+    G(v, 0, 4, 8, 12, m, s[0], s[1])
+    G(v, 1, 5, 9, 13, m, s[2], s[3])
+    G(v, 2, 6, 10, 14, m, s[4], s[5])
+    G(v, 3, 7, 11, 15, m, s[6], s[7])
+    G(v, 0, 5, 10, 15, m, s[8], s[9])
+    G(v, 1, 6, 11, 12, m, s[10], s[11])
+    G(v, 2, 7, 8, 13, m, s[12], s[13])
+    G(v, 3, 4, 9, 14, m, s[14], s[15])
+  }
+  // IV0 must be a BigInt literal: the former Number literal
+  // 0x6a09e667f3bcc908 exceeds Number.MAX_SAFE_INTEGER, and mixing a
+  // Number with BigInts in ^ throws a TypeError at runtime.
+  return (0x6a09e667f3bcc908n ^ v[0] ^ v[8])
+}
+`