diff --git a/ADOOR_ACE/Figures/Teaser.png b/ADOOR_ACE/Figures/Teaser.png
new file mode 100644
index 0000000000000000000000000000000000000000..227768f7cf8eb645264932342493e0d3585b0e2d
--- /dev/null
+++ b/ADOOR_ACE/Figures/Teaser.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dbc8667fec9f056aeba6eff7c5f061bfd9e68a4a2d4c5b2e9563e086b65eab0d
+size 1231507
diff --git a/ADOOR_ACE/Figures/gradio.png b/ADOOR_ACE/Figures/gradio.png
new file mode 100644
index 0000000000000000000000000000000000000000..61fc64b32cbda2b2e85509ddbe1bd25373274a6a
--- /dev/null
+++ b/ADOOR_ACE/Figures/gradio.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:71d8ef7ade951bae9b707c30795f7aa35aa744b3789f478cfe1058a432ee7e1f
+size 989956
diff --git a/ADOOR_ACE/Figures/tryon.png b/ADOOR_ACE/Figures/tryon.png
new file mode 100644
index 0000000000000000000000000000000000000000..4d73338cb797cbac095ba4e5755ec8ce8884cc2c
--- /dev/null
+++ b/ADOOR_ACE/Figures/tryon.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4cb49a677fc59a86acafcb62f1ba899dffd03b2bfb82864d3ca5e2e49e2d056d
+size 566630
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/40000305_0839.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/40000305_0839.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f5b41208515b0276be9ce4c450adc6470dc5ac4f
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/40000305_0839.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/40015705_0900.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/40015705_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..aa852cede7e2187e20ffb23108ccf816f4bca7ae
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/40015705_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/94816372_0474.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/94816372_0474.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c1aa8f18495886ee5208d02fd992a03235e64833
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/94816372_0474.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/96457221_0104.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/96457221_0104.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..cb164beb2ed89928e96386b6e5bf7bded1a96906
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/96457221_0104.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/_t_40000305_0839.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/_t_40000305_0839.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..3bbc711b94311beaeafb1c652eae888fcc89d861
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/_t_40000305_0839.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/_t_40015705_0900.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/_t_40015705_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..356fcc4ae7d92b9bafb21b2c74333decaff69b7b
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/_t_40015705_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/_t_94816372_0474.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/_t_94816372_0474.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..92bc8d970f868c0f7f9191846bb06d266b5d5f47
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/_t_94816372_0474.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/_t_96457221_0104.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/_t_96457221_0104.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..8e1dec863d57908c859be5100628e8efc3c73c67
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN/_t_96457221_0104.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40000305_0839.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40000305_0839.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..b1b8ef399b2d0a47d0f7940d9ef0e7c126eb70ed
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40000305_0839.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40015705_0900.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40015705_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..1af0f2fd9f02802ca2c07e2a7c339c29a200322f
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40015705_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40015804_0799.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40015804_0799.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..bfd04f35e5e16684067f7569474156574d5a560a
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40015804_0799.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40015903_0416.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40015903_0416.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..d0ac1119e07e45bd4cddf466866cf1195f23908d
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40015903_0416.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40016009_0888.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40016009_0888.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..d901925d5fa385a8d2d52ed73dcec1dd8626f4c1
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40016009_0888.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40016108_0104.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40016108_0104.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f89ce2324ad27b1dfee9889c40bfdeacdff76c6f
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40016108_0104.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40016108_0691.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40016108_0691.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..48f6292a3bad004c8a84c458c3865ce580abe5e7
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40016108_0691.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40071282_0900.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40071282_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..af3919d47852f61d8a8af9cbdbdec9eac8159392
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40071282_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40071367_0430.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40071367_0430.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..9fecb429de0cdec7641eeee4a65887344fd9f6e2
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/40071367_0430.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43952724_0060.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43952724_0060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..19b0a2a68b4ee50752b733f6f02b2a7cd0534e88
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43952724_0060.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43955534_0305.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43955534_0305.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..43a10acee818d29c50b7fbd30953ae062d4f0f8b
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43955534_0305.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43955534_0447.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43955534_0447.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..caab76e7d284f0995cc63abda033daa0b8cbe91a
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43955534_0447.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43969197_0294.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43969197_0294.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..e70c51005c7e473d6ff9926ef604278e0ebd37c2
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43969197_0294.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43979646_0060.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43979646_0060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..24b3c1bd8bdfb87c9b5513ab512a7457b7d3347d
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43979646_0060.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43979646_0608.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43979646_0608.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c6ad7cee574a1a26c28c2ef4d292937769d4aebb
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43979646_0608.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980338_0131.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980338_0131.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..928a1fa6347e3dbda13a698a479b534af7d99c72
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980338_0131.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980338_0484.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980338_0484.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0a91ad1e8b8615c94b2e77b9cfdd0502bb1b2726
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980338_0484.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980338_0613.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980338_0613.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..7fa2197c0864a2460547e228cabf5d10102dd7cd
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980338_0613.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980611_0060.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980611_0060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..7350173664d630e063d631e87721399990ebaf3f
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980611_0060.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980611_0365.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980611_0365.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c47e4637beec32e048a22886e125dce5c6a0003b
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43980611_0365.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43986941_0858.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43986941_0858.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..8b46deacfae7012f714df5d524d928818c0bef7a
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/43986941_0858.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/79683142_0060.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/79683142_0060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c88c0015d3387ceae82523f9f0e92d9da7b19090
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/79683142_0060.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/79683142_0100.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/79683142_0100.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6395a3389ece702faa3dfe4f483531f8ff6cf44e
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/79683142_0100.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/79683142_0304.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/79683142_0304.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..660b7b90befcfca5d566f967cb17bf4e12a16f20
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/79683142_0304.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40000305_0839.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40000305_0839.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..91951863f9d25faf0dea7e22cf82e32d8d51b784
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40000305_0839.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40015705_0900.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40015705_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6bafab48c2e29f307d89f090bb270d149e6d5184
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40015705_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40015804_0799.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40015804_0799.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..2ea739b5bb99ab318e4cfcc52d372d0301e474ae
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40015804_0799.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40015903_0416.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40015903_0416.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..dfbd3aa4f046599fddac37a13a0ba671000a5076
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40015903_0416.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40016009_0888.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40016009_0888.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..04b8076eee68d5fb0b4b0a65ee2369d903a00050
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40016009_0888.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40016108_0104.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40016108_0104.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..469675eecc101d2d6d5a580e4afd8c7359e1f954
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40016108_0104.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40016108_0691.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40016108_0691.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..20fa3797241ec79ad387045ba97e9f1d780f824a
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40016108_0691.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40071282_0900.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40071282_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..7379959f39f1723d4ff35b4386701e0c54f91375
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40071282_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40071367_0430.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40071367_0430.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..595d9b7cee9e307cdc320a155552919e25b2a661
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_40071367_0430.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43952724_0060.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43952724_0060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..9f2dc13a9436de9b68b9def9c2972d80cfdc8b1a
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43952724_0060.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43955534_0305.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43955534_0305.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f5b7b25c6a18f951565fee91a7012c6c5604e249
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43955534_0305.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43955534_0447.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43955534_0447.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..9826245b7fc65335d9c62605fe47426bcfe9b293
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43955534_0447.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43969197_0294.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43969197_0294.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f24b8d1b3ff3df7240c40591af1bf7be61a220cb
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43969197_0294.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43979646_0060.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43979646_0060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f5009ee5dab92de3e9c9f352ed887403d68e8903
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43979646_0060.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43979646_0608.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43979646_0608.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..099f9b54a40fcf231c75abc8c216f33614ebdb2f
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43979646_0608.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980338_0131.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980338_0131.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..a6f3944c858a7de84f35c0bdf948a4ff477e77fb
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980338_0131.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980338_0484.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980338_0484.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..99425fcf756742783b874908cbc27c41e06538cb
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980338_0484.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980338_0613.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980338_0613.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f2c2744e787ab2a4ad0d1a3eb012fe347e71e849
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980338_0613.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980611_0060.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980611_0060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..758f1da34864876b4012e177f6e26cd36c47e814
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980611_0060.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980611_0365.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980611_0365.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..689070f2605269ce382bc0926b22f0e09fdc6548
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43980611_0365.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43986941_0858.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43986941_0858.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..57fe35a6c2f476c038afb9af6aa9819aa068dd4e
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_43986941_0858.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_79683142_0060.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_79683142_0060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..d3558a19a66856cdfb755f1705354dc9393f8182
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_79683142_0060.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_79683142_0100.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_79683142_0100.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..cea0dad9cf3037ad1159091e7e81f006cb84e7c1
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_79683142_0100.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_79683142_0304.jpg b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_79683142_0304.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..3a93af2d7c891b83718f728a13d08637757e0d04
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/INFERRED_TRAIN_SHUFFLED/_t_79683142_0304.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40000305_0839.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40000305_0839.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..3efddcab1ad02a00215715a294dba14f8e1cd1af
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40000305_0839.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40015705_0900.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40015705_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..264efbc63289daba8c426358dd46a20e20a2b5f6
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40015705_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40015804_0799.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40015804_0799.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..3226164ded4f1966888ed468f221cf17d19438f1
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40015804_0799.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40015903_0416.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40015903_0416.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..3e1a87ea7dda5867b17420c32d860ca027865e51
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40015903_0416.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40016009_0888.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40016009_0888.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..4501020d86c37586cc8f7870eaed7bde17c042c9
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40016009_0888.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40016108_0104.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40016108_0104.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..3116966684af779d7fc29c032d769d47b4faf2c0
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40016108_0104.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40016108_0691.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40016108_0691.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..8a7fbe2a8fdf4546ecb17891c242f0224661eb16
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40016108_0691.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40071282_0900.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40071282_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..548bba7fb4889b95157bbfb08e77faa11659cafd
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40071282_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40071367_0430.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40071367_0430.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..16fdd3c47039665125ad191a81c75285403f4c44
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40071367_0430.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40071367_0888.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40071367_0888.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..53754f040be383e9f466eb417b16a3ac0560d1fa
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40071367_0888.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40071558_0061.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40071558_0061.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..59a93d5a64d4520b1e64cacdf3e01e5a99a50888
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40071558_0061.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40071558_0734.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40071558_0734.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..b48e5126e84d46b7cd41a3197cd7e55bbeb7e2d8
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40071558_0734.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40071732_0839.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40071732_0839.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..a006b642e913c85f0b3ad1e99c9a1ac44058af5c
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40071732_0839.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40071923_0101.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40071923_0101.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..350fedf8cb50ade31b7d4ccfbee461b01c7c5390
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40071923_0101.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40071923_0900.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40071923_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..2ea6a830ad76df59f92b5fa2b360721db69196f0
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40071923_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0034.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0034.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..8b1e606650046990b5de144ad1027ef2ae5042f5
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0034.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0060.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..a3625ceb36f3680e8a6d6a6f98711b9545ea6e13
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0060.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0109.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0109.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..3edf0e6011fa0633ba19bb6d04d84fe87b7477f8
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0109.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0839.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0839.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..7ad5b7fa34769b449581fad2beddf63dffac5ac5
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0839.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0900.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..25346de510b5b419deedd3d5da7d1046ea12b167
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40072203_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40072661_0060.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40072661_0060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..b795ef499bf0ea17a88be0e4dbba0fb7f65809a0
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40072661_0060.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40072661_0839.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40072661_0839.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..d2d049750ca5451a539aa47d4e40329db02d270e
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40072661_0839.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40076362_0109.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40076362_0109.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..4f5c859c3ddeb7f4d75e21517d289df779948838
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40076362_0109.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40076362_0799.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40076362_0799.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..9d67794ec0631f5c9b59d4a2b81544da0310a92c
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40076362_0799.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40099668_0888.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40099668_0888.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..3a8376b94b7ee2daed36767913ae7d926d10d955
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40099668_0888.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40099958_0060.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40099958_0060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..79d0c6a7c134c994d659af9899f5a9f4dfb2169d
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40099958_0060.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40099958_0104.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40099958_0104.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..5d60e88ddf93b4f54ef53c145bdc9f9c7fe9c1f2
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40099958_0104.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40123578_0900.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40123578_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..5cf03fc24da882a0a4468138a30cf7082489bc5b
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40123578_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40123677_0034.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40123677_0034.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..cd4577cfdc490a7218d1a99ed862c0ddab69763c
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40123677_0034.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40123677_0416.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40123677_0416.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6a44083e7edd648f979c602aa862d6515b03a133
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40123677_0416.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40123677_0734.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40123677_0734.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..186f239be56bf3850c4977891148179a480d9d4f
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40123677_0734.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40123943_0900.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40123943_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..3fe5396a46c0a7e1e60c189c2fce857b77373552
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40123943_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40124025_0734.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40124025_0734.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..299ae8ebc43c5f8c69bd4ed64d39eac0df8c7d05
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40124025_0734.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40195438_0129.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40195438_0129.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f77fec98ffdf66afdfe00430a2b83458e5a53557
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40195438_0129.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40257051_0888.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40257051_0888.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..33dd9ca4cb60f9e2a92ef6f0055f7114f7f36cc4
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40257051_0888.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40271224_0372.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40271224_0372.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..8c14a856bb3bb182797557603c33ec84866fc7f2
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40271224_0372.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40273594_0104.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40273594_0104.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..dd3d7829c54f82288f2b88354303aafa7f7c995e
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40273594_0104.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40273693_0390.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40273693_0390.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..558dd82718b9ef456251aa414138cfb1d225a75b
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40273693_0390.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40273693_0416.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40273693_0416.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..d5e8171a2b2ab05e7a901f3b571975f018fe924e
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40273693_0416.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40274003_0185.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40274003_0185.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..88b991b7f56a6598523f2638e0cf88d9727bd020
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40274003_0185.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40274003_0416.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40274003_0416.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..a272c4c1e8008bcc6b4d180ca0cd4ef181c8c111
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40274003_0416.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40274003_0888.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40274003_0888.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..d8179fcb15ce6751a6a189ba88eeee790ce38b65
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40274003_0888.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40275901_0803.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40275901_0803.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..5a020f3bdb5627b9615757d27fa56627572f3bcd
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40275901_0803.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40297033_0201.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40297033_0201.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f09ed0c44201f5dc60eb31e1019955826ecad3ed
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40297033_0201.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40297033_0803.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40297033_0803.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..fc17f6624a399441c3abcea6f3646fbf4f691275
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40297033_0803.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40307282_0803.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40307282_0803.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..be46fe6fe6eca286a323d9d442e9f37a1442f058
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40307282_0803.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40307398_0416.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40307398_0416.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..31921ab5203d3858a952658c582ce4e8be69751b
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40307398_0416.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40307503_0416.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40307503_0416.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0f04aff6cbec62076fb1efdfd7d35a76815f1390
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40307503_0416.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40307619_0060.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40307619_0060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c943726196f99cd16e78f3e68a9ff50a071a41e8
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40307619_0060.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40307619_0390.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40307619_0390.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..3a96aa2266eb307e61ce007476ce8ace8a30c142
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40307619_0390.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40326313_0060.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40326313_0060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..b3707540b70a3d5c1efe1ccc216d48ff4c4aa341
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40326313_0060.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40326313_0390.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40326313_0390.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..7f1566aec4772276229201b3331e504fcbd9ccdf
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40326313_0390.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40326313_0839.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40326313_0839.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..dd7b6dfcd9a17101c52104cead73a3dd7c4e7d16
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40326313_0839.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/VITONGEN/40326627_0061.jpg b/ADOOR_ACE/Generated-Images/VITONGEN/40326627_0061.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..36ba5e2b334b1e959cd0a91e0658360c2003214b
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/VITONGEN/40326627_0061.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40000305_0839.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40000305_0839.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6884c4d72f1289fc29b0e985b485ee1497f7246c
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40000305_0839.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40015705_0900.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40015705_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..4b426bcbc594791333d8140347ade08977bb40e2
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40015705_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40015804_0799.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40015804_0799.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..ff8c67271bd5985f15c2c8d1c4c191174d6c8b78
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40015804_0799.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40015903_0416.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40015903_0416.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..746be7acfa67892e591bb407c9b06ff40e23f626
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40015903_0416.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40016009_0888.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40016009_0888.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..9ff74f5855653a506f58fcfecf8d4054db7c79e7
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40016009_0888.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40016108_0104.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40016108_0104.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..26720e6f578a68661a18ab462bd3defd6cb022d3
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40016108_0104.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40016108_0691.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40016108_0691.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..66244659ea0753aae6e91bc8b53bd22cfe4741ce
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40016108_0691.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40071282_0900.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40071282_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f698a9d8a595db5a7172f307668f8670a7680902
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40071282_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40071367_0430.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40071367_0430.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..7ae8c0d0035b8c72afcb5a5c5b190afcb2d32549
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40071367_0430.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40071367_0888.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40071367_0888.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6ab68c933fc5dcd8a4cc8336b110270df054449e
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40071367_0888.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40071558_0061.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40071558_0061.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6590930d55da1bc2ad53b71867b93b2a5249e5f4
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40071558_0061.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40071558_0734.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40071558_0734.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..10b467a060a3b2f60e19cd5be1a1ea4cc9804261
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40071558_0734.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40071732_0839.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40071732_0839.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c32295012781e78f112d52f594c74917db66adfb
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40071732_0839.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40071923_0101.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40071923_0101.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f675fe2a4db477c755343c6e3a93b6fc0276b62c
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40071923_0101.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40071923_0900.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40071923_0900.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..4b198f78353ecf0fbc50e4870d10c96cdeafd766
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40071923_0900.jpg differ
diff --git a/ADOOR_ACE/Generated-Images/WEFAGEN/40072203_0034.jpg b/ADOOR_ACE/Generated-Images/WEFAGEN/40072203_0034.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..76aa41eff0d457cbbc2a06c85f6227fe9a6d6c48
Binary files /dev/null and b/ADOOR_ACE/Generated-Images/WEFAGEN/40072203_0034.jpg differ
diff --git a/ADOOR_ACE/Interaction-Server/Various-scripts/anydoor.yaml b/ADOOR_ACE/Interaction-Server/Various-scripts/anydoor.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..6e40ed1630aff16d9112ad5f2cebadcd07dcbc64
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/Various-scripts/anydoor.yaml
@@ -0,0 +1,85 @@
+model:
+ target: cldm.cldm.ControlLDM
+ params:
+ linear_start: 0.00085
+ linear_end: 0.0120
+ num_timesteps_cond: 1
+ log_every_t: 200
+ timesteps: 1000
+ first_stage_key: "jpg"
+ cond_stage_key: "ref"
+ control_key: "hint"
+ image_size: 64
+ channels: 4
+ cond_stage_trainable: false
+ conditioning_key: crossattn
+ monitor: val/loss_simple_ema
+ scale_factor: 0.18215
+ use_ema: False
+ only_mid_control: False
+
+ control_stage_config:
+ target: cldm.cldm.ControlNet
+ params:
+ use_checkpoint: True
+ image_size: 32 # unused
+ in_channels: 4
+ hint_channels: 4 #3
+ model_channels: 320
+ attention_resolutions: [ 4, 2, 1 ]
+ num_res_blocks: 2
+ channel_mult: [ 1, 2, 4, 4 ]
+ num_head_channels: 64 # need to fix for flash-attn
+ use_spatial_transformer: True
+ use_linear_in_transformer: True
+ transformer_depth: 1
+ context_dim: 1024
+ legacy: False
+
+ unet_config:
+ target: cldm.cldm.ControlledUnetModel
+ params:
+ use_checkpoint: True
+ image_size: 32 # unused
+ in_channels: 4
+ out_channels: 4
+ model_channels: 320
+ attention_resolutions: [ 4, 2, 1 ]
+ num_res_blocks: 2
+ channel_mult: [ 1, 2, 4, 4 ]
+ num_head_channels: 64 # need to fix for flash-attn
+ use_spatial_transformer: True
+ use_linear_in_transformer: True
+ transformer_depth: 1
+ context_dim: 1024
+ legacy: False
+
+ first_stage_config:
+ target: ldm.models.autoencoder.AutoencoderKL
+ params:
+ embed_dim: 4
+ monitor: val/rec_loss
+ ddconfig:
+ #attn_type: "vanilla-xformers"
+ double_z: true
+ z_channels: 4
+ resolution: 256
+ in_channels: 3
+ out_ch: 3
+ ch: 128
+ ch_mult:
+ - 1
+ - 2
+ - 4
+ - 4
+ num_res_blocks: 2
+ attn_resolutions: []
+ dropout: 0.0
+ lossconfig:
+ target: torch.nn.Identity
+
+ cond_stage_config:
+ target: ldm.modules.encoders.modules.FrozenDinoV2Encoder
+ weight: /work/dinov2_vitg14_pretrain.pth
+
+
diff --git a/ADOOR_ACE/Interaction-Server/Various-scripts/datasets.yaml b/ADOOR_ACE/Interaction-Server/Various-scripts/datasets.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..2431f91ce5f2f6300b8f5887f95086585f4f01ff
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/Various-scripts/datasets.yaml
@@ -0,0 +1,68 @@
+Train:
+ YoutubeVOS:
+ image_dir: path/YTBVOS/train/JPEGImages/
+ anno: path/YTBVOS/train/Annotations
+ meta: path/YTBVOS/train/meta.json
+
+ YoutubeVIS:
+ image_dir: path/youtubevis/train/JPEGImages/
+ anno: path/youtubevis/train/Annotations/
+ meta: path/youtubevis/train/meta.json
+
+ VIPSeg:
+ image_dir: path/VIPSeg/VIPSeg_720P/images/
+ anno: path/VIPSeg/VIPSeg_720P/panomasksRGB/
+
+ UVO:
+ train:
+ image_dir: path/UVO/uvo_frames_sparse
+ video_json: path/UVO/UVO_sparse_train_video_with_interpolation.json
+ image_json: path/UVO/UVO_sparse_train_video_with_interpolation_reorg.json
+ val:
+ image_dir: path/UVO/uvo_frames_sparse
+ video_json: path/UVO/VideoSparseSet/UVO_sparse_val_video_with_interpolation.json
+ image_json: path/UVO/VideoSparseSet/UVO_sparse_val_video_interpolation_reorg.json
+
+ Mose:
+ image_dir: path/MOSE/train/JPEGImages/
+ anno: path/MOSE/train/Annotations/
+
+ MVImageNet:
+ txt: ./datasets/Preprocess/mvimagenet.txt
+ image_dir: /mnt/workspace/xizhi/data/MVImgNet/
+
+ VitonHD:
+ image_dir: /work/wefa-door/cloth/
+
+ Dresscode:
+ image_dir: /mnt/workspace/xizhi/data/dresscode/DressCode/upper_body/label_maps/
+
+ FashionTryon:
+ image_dir: path/TryOn/FashionTryOn/train
+
+ Lvis:
+ image_dir: path/COCO/train2017
+ json_path: path/lvis_v1/lvis_v1_train.json
+
+ SAM:
+ sub1: path/SAM/0000
+ sub2: path/SAM/0001
+ sub3: path/SAM/0002
+ sub4: path/SAM/0004
+
+ Saliency:
+ MSRA_root: path/Saliency/MSRA10K_Imgs_GT/
+ TR_root: path/Saliency/DUTS-TR/DUTS-TR-Image/
+ TE_root: path/Saliency/DUTS-TE/DUTS-TE-Image/
+ HFlickr_root: path/HFlickr/masks/
+
+Test:
+ DreamBooth:
+ fg_dir: path/DreamBooth/AnyDoor_DreamBooth
+ bg_dir: path/DreamBooth/v1_800
+
+ VitonHDTest:
+ image_dir: /work/wefa-door/cloth
+
+
+
diff --git a/ADOOR_ACE/Interaction-Server/Various-scripts/demo.yaml b/ADOOR_ACE/Interaction-Server/Various-scripts/demo.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..ae30b4452d59aa5a7abb4bf47778245adf3c7c61
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/Various-scripts/demo.yaml
@@ -0,0 +1,4 @@
+pretrained_model: path/epoch=1-step=8687.ckpt
+config_file: configs/anydoor.yaml
+save_memory: False
+use_interactive_seg: True
diff --git a/ADOOR_ACE/Interaction-Server/Various-scripts/gethf.py b/ADOOR_ACE/Interaction-Server/Various-scripts/gethf.py
new file mode 100644
index 0000000000000000000000000000000000000000..228cca62f7aa0e2ffd72689030f461e7c467abaa
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/Various-scripts/gethf.py
@@ -0,0 +1,11 @@
+from huggingface_hub import hf_hub_download
+
+# Define the repository name, owner, and filename
+repository_name = "wefa-door"
+owner = "thisisAce"
+filename = "e66s22300wefa0.ckpt"
+
+# Download the file
+file_path = hf_hub_download(repo_id=f"{owner}/{repository_name}", filename=filename)
+
+print(f"File downloaded to: {file_path}")
diff --git a/ADOOR_ACE/Interaction-Server/Various-scripts/inference.yaml b/ADOOR_ACE/Interaction-Server/Various-scripts/inference.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..08df3bd1401f07001f85b46e29e52db64faca67f
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/Various-scripts/inference.yaml
@@ -0,0 +1,3 @@
+pretrained_model: /work/epoch=8-step=3416.ckpt
+config_file: configs/anydoor.yaml
+save_memory: False
diff --git a/ADOOR_ACE/Interaction-Server/Various-scripts/run_inference copy 2.py b/ADOOR_ACE/Interaction-Server/Various-scripts/run_inference copy 2.py
new file mode 100644
index 0000000000000000000000000000000000000000..4acf5dc788057d0b5382044b1be2793ae1802c5c
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/Various-scripts/run_inference copy 2.py
@@ -0,0 +1,294 @@
+import cv2
+import einops
+import numpy as np
+import torch
+import random
+from pytorch_lightning import seed_everything
+from cldm.model import create_model, load_state_dict
+from cldm.ddim_hacked import DDIMSampler
+from cldm.hack import disable_verbosity, enable_sliced_attention
+from datasets.data_utils import *
+cv2.setNumThreads(0)
+cv2.ocl.setUseOpenCL(False)
+import albumentations as A
+from omegaconf import OmegaConf
+from PIL import Image
+
+
+save_memory = False
+disable_verbosity()
+if save_memory:
+ enable_sliced_attention()
+
+
+config = OmegaConf.load('./configs/inference.yaml')
+model_ckpt = config.pretrained_model
+model_config = config.config_file
+
+model = create_model(model_config ).cpu()
+model.load_state_dict(load_state_dict(model_ckpt, location='cuda'))
+model = model.cuda()
+ddim_sampler = DDIMSampler(model)
+
+
+
+def aug_data_mask(image, mask):
+ transform = A.Compose([
+ A.HorizontalFlip(p=0.5),
+ A.RandomBrightnessContrast(p=0.5),
+ ])
+ transformed = transform(image=image.astype(np.uint8), mask = mask)
+ transformed_image = transformed["image"]
+ transformed_mask = transformed["mask"]
+ return transformed_image, transformed_mask
+
+
+def process_pairs(ref_image, ref_mask, tar_image, tar_mask):
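+    # Build the three model inputs: a masked, square-padded reference crop ("ref"), the cropped target image ("jpg"),
+    # and a collage with the reference's high-frequency (Sobel) map pasted into the target box, its mask appended as a 4th channel ("hint").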
+ # ========= Reference ===========
+ # ref expand
+ ref_box_yyxx = get_bbox_from_mask(ref_mask)
+
+ # ref filter mask
+ ref_mask_3 = np.stack([ref_mask,ref_mask,ref_mask],-1)
+ masked_ref_image = ref_image * ref_mask_3 + np.ones_like(ref_image) * 255 * (1-ref_mask_3)
+
+ y1,y2,x1,x2 = ref_box_yyxx
+ masked_ref_image = masked_ref_image[y1:y2,x1:x2,:]
+ ref_mask = ref_mask[y1:y2,x1:x2]
+
+
+ ratio = np.random.randint(12, 13) / 10
+ masked_ref_image, ref_mask = expand_image_mask(masked_ref_image, ref_mask, ratio=ratio)
+ ref_mask_3 = np.stack([ref_mask,ref_mask,ref_mask],-1)
+
+ # to square and resize
+ masked_ref_image = pad_to_square(masked_ref_image, pad_value = 255, random = False)
+ masked_ref_image = cv2.resize(masked_ref_image, (224,224) ).astype(np.uint8)
+
+ ref_mask_3 = pad_to_square(ref_mask_3 * 255, pad_value = 0, random = False)
+ ref_mask_3 = cv2.resize(ref_mask_3, (224,224) ).astype(np.uint8)
+ ref_mask = ref_mask_3[:,:,0]
+
+ # ref aug
+ masked_ref_image_aug = masked_ref_image #aug_data(masked_ref_image)
+
+ # collage aug
+ masked_ref_image_compose, ref_mask_compose = masked_ref_image, ref_mask #aug_data_mask(masked_ref_image, ref_mask)
+ masked_ref_image_aug = masked_ref_image_compose.copy()
+ ref_mask_3 = np.stack([ref_mask_compose,ref_mask_compose,ref_mask_compose],-1)
+ ref_image_collage = sobel(masked_ref_image_compose, ref_mask_compose/255)
+
+ # ========= Target ===========
+ tar_box_yyxx = get_bbox_from_mask(tar_mask)
+ tar_box_yyxx = expand_bbox(tar_mask, tar_box_yyxx, ratio=[1.1,1.2])
+
+ # crop
+ tar_box_yyxx_crop = expand_bbox(tar_image, tar_box_yyxx, ratio=[1.5, 3]) #1.2 1.6
+ tar_box_yyxx_crop = box2squre(tar_image, tar_box_yyxx_crop) # crop box
+ y1,y2,x1,x2 = tar_box_yyxx_crop
+
+ cropped_target_image = tar_image[y1:y2,x1:x2,:]
+ tar_box_yyxx = box_in_box(tar_box_yyxx, tar_box_yyxx_crop)
+ y1,y2,x1,x2 = tar_box_yyxx
+
+ # collage
+ ref_image_collage = cv2.resize(ref_image_collage, (x2-x1, y2-y1))
+ ref_mask_compose = cv2.resize(ref_mask_compose.astype(np.uint8), (x2-x1, y2-y1))
+ ref_mask_compose = (ref_mask_compose > 128).astype(np.uint8)
+
+ collage = cropped_target_image.copy()
+ collage[y1:y2,x1:x2,:] = ref_image_collage
+
+ collage_mask = cropped_target_image.copy() * 0.0
+ collage_mask[y1:y2,x1:x2,:] = 1.0
+
+ # the size before pad
+ H1, W1 = collage.shape[0], collage.shape[1]
+ cropped_target_image = pad_to_square(cropped_target_image, pad_value = 0, random = False).astype(np.uint8)
+ collage = pad_to_square(collage, pad_value = 0, random = False).astype(np.uint8)
+ collage_mask = pad_to_square(collage_mask, pad_value = -1, random = False).astype(np.uint8)
+
+ # the size after pad
+ H2, W2 = collage.shape[0], collage.shape[1]
+ cropped_target_image = cv2.resize(cropped_target_image, (512,512)).astype(np.float32)
+ collage = cv2.resize(collage, (512,512)).astype(np.float32)
+ collage_mask = (cv2.resize(collage_mask, (512,512)).astype(np.float32) > 0.5).astype(np.float32)
+
+ masked_ref_image_aug = masked_ref_image_aug / 255
+ cropped_target_image = cropped_target_image / 127.5 - 1.0
+ collage = collage / 127.5 - 1.0
+ collage = np.concatenate([collage, collage_mask[:,:,:1] ] , -1)
+
+ item = dict(ref=masked_ref_image_aug.copy(), jpg=cropped_target_image.copy(), hint=collage.copy(), extra_sizes=np.array([H1, W1, H2, W2]), tar_box_yyxx_crop=np.array( tar_box_yyxx_crop ) )
+ return item
+
+
+def crop_back( pred, tar_image, extra_sizes, tar_box_yyxx_crop):
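+    # Undo the square padding and resizing, then paste the generated patch back into the full target image,
+    # leaving a small pixel margin at the crop border untouched.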
+ H1, W1, H2, W2 = extra_sizes
+ y1,y2,x1,x2 = tar_box_yyxx_crop
+ pred = cv2.resize(pred, (W2, H2))
+    m = 5 # margin_pixel
+
+ if W1 == H1:
+ tar_image[y1+m :y2-m, x1+m:x2-m, :] = pred[m:-m, m:-m]
+ return tar_image
+
+ if W1 < W2:
+ pad1 = int((W2 - W1) / 2)
+ pad2 = W2 - W1 - pad1
+ pred = pred[:,pad1: -pad2, :]
+ else:
+ pad1 = int((H2 - H1) / 2)
+ pad2 = H2 - H1 - pad1
+ pred = pred[pad1: -pad2, :, :]
+
+ gen_image = tar_image.copy()
+ gen_image[y1+m :y2-m, x1+m:x2-m, :] = pred[m:-m, m:-m]
+ return gen_image
+
+
+def inference_single_image(ref_image, ref_mask, tar_image, tar_mask, guidance_scale = 5.0):
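+    # Full single-image pipeline: build the ref/jpg/hint inputs, run DDIM sampling with the reference embedding as
+    # cross-attention conditioning and the collage as ControlNet hint, then paste the decoded result back into the target image.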
+ item = process_pairs(ref_image, ref_mask, tar_image, tar_mask)
+ ref = item['ref'] * 255
+ tar = item['jpg'] * 127.5 + 127.5
+ hint = item['hint'] * 127.5 + 127.5
+
+ hint_image = hint[:,:,:-1]
+ hint_mask = item['hint'][:,:,-1] * 255
+ hint_mask = np.stack([hint_mask,hint_mask,hint_mask],-1)
+ ref = cv2.resize(ref.astype(np.uint8), (512,512))
+
+ seed = random.randint(0, 65535)
+ if save_memory:
+ model.low_vram_shift(is_diffusing=False)
+
+ ref = item['ref']
+ tar = item['jpg']
+ hint = item['hint']
+ num_samples = 1
+
+ control = torch.from_numpy(hint.copy()).float().cuda()
+ control = torch.stack([control for _ in range(num_samples)], dim=0)
+ control = einops.rearrange(control, 'b h w c -> b c h w').clone()
+
+
+ clip_input = torch.from_numpy(ref.copy()).float().cuda()
+ clip_input = torch.stack([clip_input for _ in range(num_samples)], dim=0)
+ clip_input = einops.rearrange(clip_input, 'b h w c -> b c h w').clone()
+
+ guess_mode = False
+ H,W = 512,512
+
+ cond = {"c_concat": [control], "c_crossattn": [model.get_learned_conditioning( clip_input )]}
+ un_cond = {"c_concat": None if guess_mode else [control], "c_crossattn": [model.get_learned_conditioning([torch.zeros((1,3,224,224))] * num_samples)]}
+ shape = (4, H // 8, W // 8)
+
+ if save_memory:
+ model.low_vram_shift(is_diffusing=True)
+
+ # ====
+ num_samples = 1 #gr.Slider(label="Images", minimum=1, maximum=12, value=1, step=1)
+ image_resolution = 512 #gr.Slider(label="Image Resolution", minimum=256, maximum=768, value=512, step=64)
+ strength = 1 #gr.Slider(label="Control Strength", minimum=0.0, maximum=2.0, value=1.0, step=0.01)
+ guess_mode = False #gr.Checkbox(label='Guess Mode', value=False)
+ #detect_resolution = 512 #gr.Slider(label="Segmentation Resolution", minimum=128, maximum=1024, value=512, step=1)
+ ddim_steps = 50 #gr.Slider(label="Steps", minimum=1, maximum=100, value=20, step=1)
+ scale = guidance_scale #gr.Slider(label="Guidance Scale", minimum=0.1, maximum=30.0, value=9.0, step=0.1)
+ seed = -1 #gr.Slider(label="Seed", minimum=-1, maximum=2147483647, step=1, randomize=True)
+ eta = 0.0 #gr.Number(label="eta (DDIM)", value=0.0)
+
+ model.control_scales = [strength * (0.825 ** float(12 - i)) for i in range(13)] if guess_mode else ([strength] * 13) # Magic number. IDK why. Perhaps because 0.825**12<0.01 but 0.826**12>0.01
+ samples, intermediates = ddim_sampler.sample(ddim_steps, num_samples,
+ shape, cond, verbose=False, eta=eta,
+ unconditional_guidance_scale=scale,
+ unconditional_conditioning=un_cond)
+ if save_memory:
+ model.low_vram_shift(is_diffusing=False)
+
+ x_samples = model.decode_first_stage(samples)
+ x_samples = (einops.rearrange(x_samples, 'b c h w -> b h w c') * 127.5 + 127.5).cpu().numpy()#.clip(0, 255).astype(np.uint8)
+
+ result = x_samples[0][:,:,::-1]
+ result = np.clip(result,0,255)
+
+ pred = x_samples[0]
+ pred = np.clip(pred,0,255)[1:,:,:]
+ sizes = item['extra_sizes']
+ tar_box_yyxx_crop = item['tar_box_yyxx_crop']
+ gen_image = crop_back(pred, tar_image, sizes, tar_box_yyxx_crop)
+ return gen_image
+
+
+if __name__ == '__main__':
+ '''
+ # ==== Example for inferring a single image ===
+ reference_image_path = './examples/TestDreamBooth/FG/01.png'
+ bg_image_path = './examples/TestDreamBooth/BG/000000309203_GT.png'
+ bg_mask_path = './examples/TestDreamBooth/BG/000000309203_mask.png'
+ save_path = './examples/TestDreamBooth/GEN/gen_res.png'
+
+ # reference image + reference mask
+ # You could use the demo of SAM to extract RGB-A image with masks
+ # https://segment-anything.com/demo
+ image = cv2.imread( reference_image_path, cv2.IMREAD_UNCHANGED)
+ mask = (image[:,:,-1] > 128).astype(np.uint8)
+ image = image[:,:,:-1]
+ image = cv2.cvtColor(image.copy(), cv2.COLOR_BGR2RGB)
+ ref_image = image
+ ref_mask = mask
+
+ # background image
+ back_image = cv2.imread(bg_image_path).astype(np.uint8)
+ back_image = cv2.cvtColor(back_image, cv2.COLOR_BGR2RGB)
+
+ # background mask
+ tar_mask = cv2.imread(bg_mask_path)[:,:,0] > 128
+ tar_mask = tar_mask.astype(np.uint8)
+
+ gen_image = inference_single_image(ref_image, ref_mask, back_image.copy(), tar_mask)
+ h,w = back_image.shape[0], back_image.shape[1]
+ ref_image = cv2.resize(ref_image, (w,h))
+ vis_image = cv2.hconcat([ref_image, back_image, gen_image])
+
+ cv2.imwrite(save_path, vis_image [:,:,::-1])
+ '''
+ #'''
+ # ==== Example for inferring VITON-HD Test dataset ===
+
+ from omegaconf import OmegaConf
+ import os
+ DConf = OmegaConf.load('./configs/datasets.yaml')
+ save_dir = '../VITONGEN'
+ if not os.path.exists(save_dir):
+ os.mkdir(save_dir)
+
+ test_dir = DConf.Test.VitonHDTest.image_dir
+ image_names = os.listdir(test_dir)
+
+ for image_name in image_names:
+ ref_image_path = os.path.join(test_dir, image_name)
+ tar_image_path = ref_image_path.replace('/cloth/', '/image/')
+ ref_mask_path = ref_image_path.replace('/cloth/','/cloth-mask/')
+ tar_mask_path = ref_image_path.replace('/cloth/', '/image-parse-v3/').replace('.jpg','.png')
+
+ ref_image = cv2.imread(ref_image_path)
+ ref_image = cv2.cvtColor(ref_image, cv2.COLOR_BGR2RGB)
+
+ gt_image = cv2.imread(tar_image_path)
+ gt_image = cv2.cvtColor(gt_image, cv2.COLOR_BGR2RGB)
+
+ ref_mask = (cv2.imread(ref_mask_path) > 128).astype(np.uint8)[:,:,0]
+
+ tar_mask = Image.open(tar_mask_path ).convert('P')
+ tar_mask= np.array(tar_mask)
+ tar_mask = tar_mask == 5
+
+ gen_image = inference_single_image(ref_image, ref_mask, gt_image.copy(), tar_mask)
+ gen_path = os.path.join(save_dir, image_name)
+
+ vis_image = cv2.hconcat([ref_image, gt_image, gen_image])
+ cv2.imwrite(gen_path, vis_image[:,:,::-1])
+ #'''
+
+
+
diff --git a/ADOOR_ACE/Interaction-Server/Various-scripts/run_inference_train copy 2.py b/ADOOR_ACE/Interaction-Server/Various-scripts/run_inference_train copy 2.py
new file mode 100644
index 0000000000000000000000000000000000000000..d6777d2d466c7e216365ab37eddee69d98d07024
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/Various-scripts/run_inference_train copy 2.py
@@ -0,0 +1,319 @@
+import cv2
+import einops
+import numpy as np
+import torch
+import random
+from pytorch_lightning import seed_everything
+from cldm.model import create_model, load_state_dict
+from cldm.ddim_hacked import DDIMSampler
+from cldm.hack import disable_verbosity, enable_sliced_attention
+from datasets.data_utils import *
+cv2.setNumThreads(0)
+cv2.ocl.setUseOpenCL(False)
+import albumentations as A
+from omegaconf import OmegaConf
+from PIL import Image
+
+
+save_memory = False
+disable_verbosity()
+if save_memory:
+ enable_sliced_attention()
+
+
+config = OmegaConf.load('./configs/inference.yaml')
+model_ckpt = config.pretrained_model
+model_config = config.config_file
+
+model = create_model(model_config ).cpu()
+model.load_state_dict(load_state_dict(model_ckpt, location='cuda'))
+model = model.cuda()
+ddim_sampler = DDIMSampler(model)
+
+
+
+def aug_data_mask(image, mask):
+ transform = A.Compose([
+ A.HorizontalFlip(p=0.5),
+ A.RandomBrightnessContrast(p=0.5),
+ ])
+ transformed = transform(image=image.astype(np.uint8), mask = mask)
+ transformed_image = transformed["image"]
+ transformed_mask = transformed["mask"]
+ return transformed_image, transformed_mask
+
+
+def process_pairs(ref_image, ref_mask, tar_image, tar_mask):
+ # ========= Reference ===========
+ # ref expand
+ ref_box_yyxx = get_bbox_from_mask(ref_mask)
+
+ # ref filter mask
+ ref_mask_3 = np.stack([ref_mask,ref_mask,ref_mask],-1)
+ masked_ref_image = ref_image * ref_mask_3 + np.ones_like(ref_image) * 255 * (1-ref_mask_3)
+
+ y1,y2,x1,x2 = ref_box_yyxx
+ masked_ref_image = masked_ref_image[y1:y2,x1:x2,:]
+ ref_mask = ref_mask[y1:y2,x1:x2]
+
+
+ ratio = np.random.randint(12, 13) / 10
+ masked_ref_image, ref_mask = expand_image_mask(masked_ref_image, ref_mask, ratio=ratio)
+ ref_mask_3 = np.stack([ref_mask,ref_mask,ref_mask],-1)
+
+ # to square and resize
+ masked_ref_image = pad_to_square(masked_ref_image, pad_value = 255, random = False)
+ masked_ref_image = cv2.resize(masked_ref_image, (224,224) ).astype(np.uint8)
+
+ ref_mask_3 = pad_to_square(ref_mask_3 * 255, pad_value = 0, random = False)
+ ref_mask_3 = cv2.resize(ref_mask_3, (224,224) ).astype(np.uint8)
+ ref_mask = ref_mask_3[:,:,0]
+
+ # ref aug
+ masked_ref_image_aug = masked_ref_image #aug_data(masked_ref_image)
+
+ # collage aug
+ masked_ref_image_compose, ref_mask_compose = masked_ref_image, ref_mask #aug_data_mask(masked_ref_image, ref_mask)
+ masked_ref_image_aug = masked_ref_image_compose.copy()
+ ref_mask_3 = np.stack([ref_mask_compose,ref_mask_compose,ref_mask_compose],-1)
+ ref_image_collage = sobel(masked_ref_image_compose, ref_mask_compose/255)
+
+ # ========= Target ===========
+ tar_box_yyxx = get_bbox_from_mask(tar_mask)
+ tar_box_yyxx = expand_bbox(tar_mask, tar_box_yyxx, ratio=[1.1,1.2])
+
+ # crop
+ tar_box_yyxx_crop = expand_bbox(tar_image, tar_box_yyxx, ratio=[1.5, 3]) #1.2 1.6
+ tar_box_yyxx_crop = box2squre(tar_image, tar_box_yyxx_crop) # crop box
+ y1,y2,x1,x2 = tar_box_yyxx_crop
+
+ cropped_target_image = tar_image[y1:y2,x1:x2,:]
+ tar_box_yyxx = box_in_box(tar_box_yyxx, tar_box_yyxx_crop)
+ y1,y2,x1,x2 = tar_box_yyxx
+
+ # collage
+ ref_image_collage = cv2.resize(ref_image_collage, (x2-x1, y2-y1))
+ ref_mask_compose = cv2.resize(ref_mask_compose.astype(np.uint8), (x2-x1, y2-y1))
+ ref_mask_compose = (ref_mask_compose > 128).astype(np.uint8)
+
+ collage = cropped_target_image.copy()
+ collage[y1:y2,x1:x2,:] = ref_image_collage
+
+ collage_mask = cropped_target_image.copy() * 0.0
+ collage_mask[y1:y2,x1:x2,:] = 1.0
+
+ # the size before pad
+ H1, W1 = collage.shape[0], collage.shape[1]
+ cropped_target_image = pad_to_square(cropped_target_image, pad_value = 0, random = False).astype(np.uint8)
+ collage = pad_to_square(collage, pad_value = 0, random = False).astype(np.uint8)
+ collage_mask = pad_to_square(collage_mask, pad_value = -1, random = False).astype(np.uint8)
+
+ # the size after pad
+ H2, W2 = collage.shape[0], collage.shape[1]
+ cropped_target_image = cv2.resize(cropped_target_image, (512,512)).astype(np.float32)
+ collage = cv2.resize(collage, (512,512)).astype(np.float32)
+ collage_mask = (cv2.resize(collage_mask, (512,512)).astype(np.float32) > 0.5).astype(np.float32)
+
+ masked_ref_image_aug = masked_ref_image_aug / 255
+ cropped_target_image = cropped_target_image / 127.5 - 1.0
+ collage = collage / 127.5 - 1.0
+ collage = np.concatenate([collage, collage_mask[:,:,:1] ] , -1)
+
+ item = dict(ref=masked_ref_image_aug.copy(), jpg=cropped_target_image.copy(), hint=collage.copy(), extra_sizes=np.array([H1, W1, H2, W2]), tar_box_yyxx_crop=np.array( tar_box_yyxx_crop ) )
+ return item
+
+
+def crop_back( pred, tar_image, extra_sizes, tar_box_yyxx_crop):
+ H1, W1, H2, W2 = extra_sizes
+ y1,y2,x1,x2 = tar_box_yyxx_crop
+ pred = cv2.resize(pred, (W2, H2))
+ m = 5 # margin_pixel
+
+ if W1 == H1:
+ tar_image[y1+m :y2-m, x1+m:x2-m, :] = pred[m:-m, m:-m]
+ return tar_image
+
+ if W1 < W2:
+ pad1 = int((W2 - W1) / 2)
+ pad2 = W2 - W1 - pad1
+ pred = pred[:,pad1: -pad2, :]
+ else:
+ pad1 = int((H2 - H1) / 2)
+ pad2 = H2 - H1 - pad1
+ pred = pred[pad1: -pad2, :, :]
+
+ gen_image = tar_image.copy()
+ gen_image[y1+m :y2-m, x1+m:x2-m, :] = pred[m:-m, m:-m]
+ return gen_image
+
+
+def inference_single_image(ref_image, ref_mask, tar_image, tar_mask, guidance_scale = 5.0):
+ item = process_pairs(ref_image, ref_mask, tar_image, tar_mask)
+ ref = item['ref'] * 255
+ tar = item['jpg'] * 127.5 + 127.5
+ hint = item['hint'] * 127.5 + 127.5
+
+ hint_image = hint[:,:,:-1]
+ hint_mask = item['hint'][:,:,-1] * 255
+ hint_mask = np.stack([hint_mask,hint_mask,hint_mask],-1)
+ ref = cv2.resize(ref.astype(np.uint8), (512,512))
+
+ seed = random.randint(0, 65535)
+ if save_memory:
+ model.low_vram_shift(is_diffusing=False)
+
+ ref = item['ref']
+ tar = item['jpg']
+ hint = item['hint']
+ num_samples = 1
+
+ control = torch.from_numpy(hint.copy()).float().cuda()
+ control = torch.stack([control for _ in range(num_samples)], dim=0)
+ control = einops.rearrange(control, 'b h w c -> b c h w').clone()
+
+
+ clip_input = torch.from_numpy(ref.copy()).float().cuda()
+ clip_input = torch.stack([clip_input for _ in range(num_samples)], dim=0)
+ clip_input = einops.rearrange(clip_input, 'b h w c -> b c h w').clone()
+
+ guess_mode = False
+ H,W = 512,512
+
+ cond = {"c_concat": [control], "c_crossattn": [model.get_learned_conditioning( clip_input )]}
+ un_cond = {"c_concat": None if guess_mode else [control], "c_crossattn": [model.get_learned_conditioning([torch.zeros((1,3,224,224))] * num_samples)]}
+ shape = (4, H // 8, W // 8)
+
+ if save_memory:
+ model.low_vram_shift(is_diffusing=True)
+
+ # ====
+ num_samples = 1 #gr.Slider(label="Images", minimum=1, maximum=12, value=1, step=1)
+ image_resolution = 512 #gr.Slider(label="Image Resolution", minimum=256, maximum=768, value=512, step=64)
+ strength = 1 #gr.Slider(label="Control Strength", minimum=0.0, maximum=2.0, value=1.0, step=0.01)
+ guess_mode = False #gr.Checkbox(label='Guess Mode', value=False)
+ #detect_resolution = 512 #gr.Slider(label="Segmentation Resolution", minimum=128, maximum=1024, value=512, step=1)
+ ddim_steps = 50 #gr.Slider(label="Steps", minimum=1, maximum=100, value=20, step=1)
+ scale = guidance_scale #gr.Slider(label="Guidance Scale", minimum=0.1, maximum=30.0, value=9.0, step=0.1)
+ seed = -1 #gr.Slider(label="Seed", minimum=-1, maximum=2147483647, step=1, randomize=True)
+ eta = 0.0 #gr.Number(label="eta (DDIM)", value=0.0)
+
+ model.control_scales = [strength * (0.825 ** float(12 - i)) for i in range(13)] if guess_mode else ([strength] * 13) # Magic number. IDK why. Perhaps because 0.825**12<0.01 but 0.826**12>0.01
+ samples, intermediates = ddim_sampler.sample(ddim_steps, num_samples,
+ shape, cond, verbose=False, eta=eta,
+ unconditional_guidance_scale=scale,
+ unconditional_conditioning=un_cond)
+ if save_memory:
+ model.low_vram_shift(is_diffusing=False)
+
+ x_samples = model.decode_first_stage(samples)
+ x_samples = (einops.rearrange(x_samples, 'b c h w -> b h w c') * 127.5 + 127.5).cpu().numpy()#.clip(0, 255).astype(np.uint8)
+
+ result = x_samples[0][:,:,::-1]
+ result = np.clip(result,0,255)
+
+ pred = x_samples[0]
+ pred = np.clip(pred,0,255)[1:,:,:]
+ sizes = item['extra_sizes']
+ tar_box_yyxx_crop = item['tar_box_yyxx_crop']
+ gen_image = crop_back(pred, tar_image, sizes, tar_box_yyxx_crop)
+ return gen_image
+
+
+if __name__ == '__main__':
+ '''
+ # ==== Example for inferring a single image ===
+ reference_image_path = './examples/TestDreamBooth/FG/01.png'
+ bg_image_path = './examples/TestDreamBooth/BG/000000309203_GT.png'
+ bg_mask_path = './examples/TestDreamBooth/BG/000000309203_mask.png'
+ save_path = './examples/TestDreamBooth/GEN/gen_res.png'
+
+ # reference image + reference mask
+ # You could use the demo of SAM to extract RGB-A image with masks
+ # https://segment-anything.com/demo
+ image = cv2.imread( reference_image_path, cv2.IMREAD_UNCHANGED)
+ mask = (image[:,:,-1] > 128).astype(np.uint8)
+ image = image[:,:,:-1]
+ image = cv2.cvtColor(image.copy(), cv2.COLOR_BGR2RGB)
+ ref_image = image
+ ref_mask = mask
+
+ # background image
+ back_image = cv2.imread(bg_image_path).astype(np.uint8)
+ back_image = cv2.cvtColor(back_image, cv2.COLOR_BGR2RGB)
+
+ # background mask
+ tar_mask = cv2.imread(bg_mask_path)[:,:,0] > 128
+ tar_mask = tar_mask.astype(np.uint8)
+
+ gen_image = inference_single_image(ref_image, ref_mask, back_image.copy(), tar_mask)
+ h,w = back_image.shape[0], back_image.shape[1]
+ ref_image = cv2.resize(ref_image, (w,h))
+ vis_image = cv2.hconcat([ref_image, back_image, gen_image])
+
+ cv2.imwrite(save_path, vis_image [:,:,::-1])
+ '''
+ #'''
+ # ==== Example for inferring VITON-HD training pairs ====
+
+ from omegaconf import OmegaConf
+ import os
+ DConf = OmegaConf.load('./configs/datasets.yaml')
+ save_dir = '/work/wefa-door/INFERRED_TRAIN'
+
+ if not os.path.exists(save_dir):
+ os.mkdir(save_dir)
+
+ test_dir = '/work/wefa-door/cloth_train_inf'
+ image_names = os.listdir(test_dir)
+
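+ # bottoms (parse label 9), saved under the original image name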
+ for image_name in image_names:
+ ref_image_path = os.path.join(test_dir, image_name)
+ tar_image_path = ref_image_path.replace('/cloth_train_inf/', '/image/')
+ ref_mask_path = ref_image_path.replace('/cloth_train_inf/','/cloth-mask/')
+ tar_mask_path = ref_image_path.replace('/cloth_train_inf/', '/image-parse-v3/').replace('.jpg','.png')
+
+ ref_image = cv2.imread(ref_image_path)
+ ref_image = cv2.cvtColor(ref_image, cv2.COLOR_BGR2RGB)
+
+ gt_image = cv2.imread(tar_image_path)
+ gt_image = cv2.cvtColor(gt_image, cv2.COLOR_BGR2RGB)
+
+ ref_mask = (cv2.imread(ref_mask_path) > 128).astype(np.uint8)[:,:,0]
+
+ tar_mask = Image.open(tar_mask_path ).convert('P')
+ tar_mask= np.array(tar_mask)
+ tar_mask = tar_mask == 9
+
+ gen_image = inference_single_image(ref_image, ref_mask, gt_image.copy(), tar_mask)
+ gen_path = os.path.join(save_dir, image_name)
+
+ vis_image = cv2.hconcat([ref_image, gt_image, gen_image])
+ cv2.imwrite(gen_path, vis_image[:,:,::-1])
+ #'''
+
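+ # tops (parse label 5), saved with a '_t_' prefix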
+ for image_name in image_names:
+ ref_image_path = os.path.join(test_dir, image_name)
+ tar_image_path = ref_image_path.replace('/cloth_train_inf/', '/image/')
+ ref_mask_path = ref_image_path.replace('/cloth_train_inf/','/cloth-mask/')
+ tar_mask_path = ref_image_path.replace('/cloth_train_inf/', '/image-parse-v3/').replace('.jpg','.png')
+
+ ref_image = cv2.imread(ref_image_path)
+ ref_image = cv2.cvtColor(ref_image, cv2.COLOR_BGR2RGB)
+
+ gt_image = cv2.imread(tar_image_path)
+ gt_image = cv2.cvtColor(gt_image, cv2.COLOR_BGR2RGB)
+
+ ref_mask = (cv2.imread(ref_mask_path) > 128).astype(np.uint8)[:,:,0]
+
+ tar_mask = Image.open(tar_mask_path ).convert('P')
+ tar_mask= np.array(tar_mask)
+ tar_mask = tar_mask == 5
+
+ gen_image = inference_single_image(ref_image, ref_mask, gt_image.copy(), tar_mask)
+ gen_path = os.path.join(save_dir, '_t_' + image_name)
+
+ vis_image = cv2.hconcat([ref_image, gt_image, gen_image])
+ cv2.imwrite(gen_path, vis_image[:,:,::-1])
+ #'''
+
+
diff --git a/ADOOR_ACE/Interaction-Server/Various-scripts/run_inference_train2 copy.py b/ADOOR_ACE/Interaction-Server/Various-scripts/run_inference_train2 copy.py
new file mode 100644
index 0000000000000000000000000000000000000000..736d633c143da232e4139c6cecadf7f6646147c0
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/Various-scripts/run_inference_train2 copy.py
@@ -0,0 +1,318 @@
+import cv2
+import einops
+import numpy as np
+import torch
+import random
+from pytorch_lightning import seed_everything
+from cldm.model import create_model, load_state_dict
+from cldm.ddim_hacked import DDIMSampler
+from cldm.hack import disable_verbosity, enable_sliced_attention
+from datasets.data_utils import *
+cv2.setNumThreads(0)
+cv2.ocl.setUseOpenCL(False)
+import albumentations as A
+from omegaconf import OmegaConf
+from PIL import Image
+
+
+save_memory = False
+disable_verbosity()
+if save_memory:
+ enable_sliced_attention()
+
+
+config = OmegaConf.load('./configs/inference.yaml')
+model_ckpt = config.pretrained_model
+model_config = config.config_file
+
+model = create_model(model_config ).cpu()
+model.load_state_dict(load_state_dict(model_ckpt, location='cuda'))
+model = model.cuda()
+ddim_sampler = DDIMSampler(model)
+
+
+
+def aug_data_mask(image, mask):
+ transform = A.Compose([
+ A.HorizontalFlip(p=0.5),
+ A.RandomBrightnessContrast(p=0.5),
+ ])
+ transformed = transform(image=image.astype(np.uint8), mask = mask)
+ transformed_image = transformed["image"]
+ transformed_mask = transformed["mask"]
+ return transformed_image, transformed_mask
+
+
+def process_pairs(ref_image, ref_mask, tar_image, tar_mask):
+ # ========= Reference ===========
+ # ref expand
+ ref_box_yyxx = get_bbox_from_mask(ref_mask)
+
+ # ref filter mask
+ ref_mask_3 = np.stack([ref_mask,ref_mask,ref_mask],-1)
+ masked_ref_image = ref_image * ref_mask_3 + np.ones_like(ref_image) * 255 * (1-ref_mask_3)
+
+ y1,y2,x1,x2 = ref_box_yyxx
+ masked_ref_image = masked_ref_image[y1:y2,x1:x2,:]
+ ref_mask = ref_mask[y1:y2,x1:x2]
+
+
+ ratio = np.random.randint(12, 13) / 10
+ masked_ref_image, ref_mask = expand_image_mask(masked_ref_image, ref_mask, ratio=ratio)
+ ref_mask_3 = np.stack([ref_mask,ref_mask,ref_mask],-1)
+
+ # to square and resize
+ masked_ref_image = pad_to_square(masked_ref_image, pad_value = 255, random = False)
+ masked_ref_image = cv2.resize(masked_ref_image, (224,224) ).astype(np.uint8)
+
+ ref_mask_3 = pad_to_square(ref_mask_3 * 255, pad_value = 0, random = False)
+ ref_mask_3 = cv2.resize(ref_mask_3, (224,224) ).astype(np.uint8)
+ ref_mask = ref_mask_3[:,:,0]
+
+ # ref aug
+ masked_ref_image_aug = masked_ref_image #aug_data(masked_ref_image)
+
+ # collage aug
+ masked_ref_image_compose, ref_mask_compose = masked_ref_image, ref_mask #aug_data_mask(masked_ref_image, ref_mask)
+ masked_ref_image_aug = masked_ref_image_compose.copy()
+ ref_mask_3 = np.stack([ref_mask_compose,ref_mask_compose,ref_mask_compose],-1)
+ ref_image_collage = sobel(masked_ref_image_compose, ref_mask_compose/255)
+
+ # ========= Target ===========
+ tar_box_yyxx = get_bbox_from_mask(tar_mask)
+ tar_box_yyxx = expand_bbox(tar_mask, tar_box_yyxx, ratio=[1.1,1.2])
+
+ # crop
+ tar_box_yyxx_crop = expand_bbox(tar_image, tar_box_yyxx, ratio=[1.5, 3]) #1.2 1.6
+ tar_box_yyxx_crop = box2squre(tar_image, tar_box_yyxx_crop) # crop box
+ y1,y2,x1,x2 = tar_box_yyxx_crop
+
+ cropped_target_image = tar_image[y1:y2,x1:x2,:]
+ tar_box_yyxx = box_in_box(tar_box_yyxx, tar_box_yyxx_crop)
+ y1,y2,x1,x2 = tar_box_yyxx
+
+ # collage
+ ref_image_collage = cv2.resize(ref_image_collage, (x2-x1, y2-y1))
+ ref_mask_compose = cv2.resize(ref_mask_compose.astype(np.uint8), (x2-x1, y2-y1))
+ ref_mask_compose = (ref_mask_compose > 128).astype(np.uint8)
+
+ collage = cropped_target_image.copy()
+ collage[y1:y2,x1:x2,:] = ref_image_collage
+
+ collage_mask = cropped_target_image.copy() * 0.0
+ collage_mask[y1:y2,x1:x2,:] = 1.0
+
+ # the size before pad
+ H1, W1 = collage.shape[0], collage.shape[1]
+ cropped_target_image = pad_to_square(cropped_target_image, pad_value = 0, random = False).astype(np.uint8)
+ collage = pad_to_square(collage, pad_value = 0, random = False).astype(np.uint8)
+ collage_mask = pad_to_square(collage_mask, pad_value = -1, random = False).astype(np.uint8)
+
+ # the size after pad
+ H2, W2 = collage.shape[0], collage.shape[1]
+ cropped_target_image = cv2.resize(cropped_target_image, (512,512)).astype(np.float32)
+ collage = cv2.resize(collage, (512,512)).astype(np.float32)
+ collage_mask = (cv2.resize(collage_mask, (512,512)).astype(np.float32) > 0.5).astype(np.float32)
+
+ masked_ref_image_aug = masked_ref_image_aug / 255
+ cropped_target_image = cropped_target_image / 127.5 - 1.0
+ collage = collage / 127.5 - 1.0
+ collage = np.concatenate([collage, collage_mask[:,:,:1] ] , -1)
+
+ item = dict(ref=masked_ref_image_aug.copy(), jpg=cropped_target_image.copy(), hint=collage.copy(), extra_sizes=np.array([H1, W1, H2, W2]), tar_box_yyxx_crop=np.array( tar_box_yyxx_crop ) )
+ return item
+
+
+def crop_back( pred, tar_image, extra_sizes, tar_box_yyxx_crop):
+ H1, W1, H2, W2 = extra_sizes
+ y1,y2,x1,x2 = tar_box_yyxx_crop
+ pred = cv2.resize(pred, (W2, H2))
+ m = 5 # margin_pixel
+
+ if W1 == H1:
+ tar_image[y1+m :y2-m, x1+m:x2-m, :] = pred[m:-m, m:-m]
+ return tar_image
+
+ if W1 < W2:
+ pad1 = int((W2 - W1) / 2)
+ pad2 = W2 - W1 - pad1
+ pred = pred[:,pad1: -pad2, :]
+ else:
+ pad1 = int((H2 - H1) / 2)
+ pad2 = H2 - H1 - pad1
+ pred = pred[pad1: -pad2, :, :]
+
+ gen_image = tar_image.copy()
+ gen_image[y1+m :y2-m, x1+m:x2-m, :] = pred[m:-m, m:-m]
+ return gen_image
+
+
+def inference_single_image(ref_image, ref_mask, tar_image, tar_mask, guidance_scale = 5.0):
+ item = process_pairs(ref_image, ref_mask, tar_image, tar_mask)
+ ref = item['ref'] * 255
+ tar = item['jpg'] * 127.5 + 127.5
+ hint = item['hint'] * 127.5 + 127.5
+
+ hint_image = hint[:,:,:-1]
+ hint_mask = item['hint'][:,:,-1] * 255
+ hint_mask = np.stack([hint_mask,hint_mask,hint_mask],-1)
+ ref = cv2.resize(ref.astype(np.uint8), (512,512))
+
+ seed = random.randint(0, 65535)
+ if save_memory:
+ model.low_vram_shift(is_diffusing=False)
+
+ ref = item['ref']
+ tar = item['jpg']
+ hint = item['hint']
+ num_samples = 1
+
+ control = torch.from_numpy(hint.copy()).float().cuda()
+ control = torch.stack([control for _ in range(num_samples)], dim=0)
+ control = einops.rearrange(control, 'b h w c -> b c h w').clone()
+
+
+ clip_input = torch.from_numpy(ref.copy()).float().cuda()
+ clip_input = torch.stack([clip_input for _ in range(num_samples)], dim=0)
+ clip_input = einops.rearrange(clip_input, 'b h w c -> b c h w').clone()
+
+ guess_mode = False
+ H,W = 512,512
+
+ cond = {"c_concat": [control], "c_crossattn": [model.get_learned_conditioning( clip_input )]}
+ un_cond = {"c_concat": None if guess_mode else [control], "c_crossattn": [model.get_learned_conditioning([torch.zeros((1,3,224,224))] * num_samples)]}
+ shape = (4, H // 8, W // 8)
+
+ if save_memory:
+ model.low_vram_shift(is_diffusing=True)
+
+ # ====
+ num_samples = 1 #gr.Slider(label="Images", minimum=1, maximum=12, value=1, step=1)
+ image_resolution = 512 #gr.Slider(label="Image Resolution", minimum=256, maximum=768, value=512, step=64)
+ strength = 1 #gr.Slider(label="Control Strength", minimum=0.0, maximum=2.0, value=1.0, step=0.01)
+ guess_mode = False #gr.Checkbox(label='Guess Mode', value=False)
+ #detect_resolution = 512 #gr.Slider(label="Segmentation Resolution", minimum=128, maximum=1024, value=512, step=1)
+ ddim_steps = 50 #gr.Slider(label="Steps", minimum=1, maximum=100, value=20, step=1)
+ scale = guidance_scale #gr.Slider(label="Guidance Scale", minimum=0.1, maximum=30.0, value=9.0, step=0.1)
+ seed = -1 #gr.Slider(label="Seed", minimum=-1, maximum=2147483647, step=1, randomize=True)
+ eta = 0.0 #gr.Number(label="eta (DDIM)", value=0.0)
+
+ model.control_scales = [strength * (0.825 ** float(12 - i)) for i in range(13)] if guess_mode else ([strength] * 13) # Magic number. IDK why. Perhaps because 0.825**12<0.01 but 0.826**12>0.01
+ samples, intermediates = ddim_sampler.sample(ddim_steps, num_samples,
+ shape, cond, verbose=False, eta=eta,
+ unconditional_guidance_scale=scale,
+ unconditional_conditioning=un_cond)
+ if save_memory:
+ model.low_vram_shift(is_diffusing=False)
+
+ x_samples = model.decode_first_stage(samples)
+ x_samples = (einops.rearrange(x_samples, 'b c h w -> b h w c') * 127.5 + 127.5).cpu().numpy()#.clip(0, 255).astype(np.uint8)
+
+ result = x_samples[0][:,:,::-1]
+ result = np.clip(result,0,255)
+
+ pred = x_samples[0]
+ pred = np.clip(pred,0,255)[1:,:,:]
+ sizes = item['extra_sizes']
+ tar_box_yyxx_crop = item['tar_box_yyxx_crop']
+ gen_image = crop_back(pred, tar_image, sizes, tar_box_yyxx_crop)
+ return gen_image
+
+
+if __name__ == '__main__':
+ '''
+ # ==== Example for inferring a single image ===
+ reference_image_path = './examples/TestDreamBooth/FG/01.png'
+ bg_image_path = './examples/TestDreamBooth/BG/000000309203_GT.png'
+ bg_mask_path = './examples/TestDreamBooth/BG/000000309203_mask.png'
+ save_path = './examples/TestDreamBooth/GEN/gen_res.png'
+
+ # reference image + reference mask
+ # You could use the demo of SAM to extract RGB-A image with masks
+ # https://segment-anything.com/demo
+ image = cv2.imread( reference_image_path, cv2.IMREAD_UNCHANGED)
+ mask = (image[:,:,-1] > 128).astype(np.uint8)
+ image = image[:,:,:-1]
+ image = cv2.cvtColor(image.copy(), cv2.COLOR_BGR2RGB)
+ ref_image = image
+ ref_mask = mask
+
+ # background image
+ back_image = cv2.imread(bg_image_path).astype(np.uint8)
+ back_image = cv2.cvtColor(back_image, cv2.COLOR_BGR2RGB)
+
+ # background mask
+ tar_mask = cv2.imread(bg_mask_path)[:,:,0] > 128
+ tar_mask = tar_mask.astype(np.uint8)
+
+ gen_image = inference_single_image(ref_image, ref_mask, back_image.copy(), tar_mask)
+ h,w = back_image.shape[0], back_image.shape[1]
+ ref_image = cv2.resize(ref_image, (w,h))
+ vis_image = cv2.hconcat([ref_image, back_image, gen_image])
+
+ cv2.imwrite(save_path, vis_image [:,:,::-1])
+ '''
+ #'''
+ # ==== Example for inferring shuffled VITON-HD training pairs ====
+
+ from omegaconf import OmegaConf
+ import os
+ DConf = OmegaConf.load('./configs/datasets.yaml')
+ save_dir = '/work/wefa-door/INFERRED_TRAIN_SHUFFLED1'
+
+ if not os.path.exists(save_dir):
+ os.mkdir(save_dir)
+
+ test_dir = '/work/wefa-door/cloth_s'
+ image_names = os.listdir(test_dir)
+ # tops
+ for image_name in image_names:
+ ref_image_path = os.path.join(test_dir, image_name)
+ tar_image_path = ref_image_path.replace('/cloth_s/', '/image/')
+ ref_mask_path = ref_image_path.replace('/cloth_s/','/cloth-mask_s/')
+ tar_mask_path = ref_image_path.replace('/cloth_s/', '/image-parse-v3/').replace('.jpg','.png')
+
+ ref_image = cv2.imread(ref_image_path)
+ ref_image = cv2.cvtColor(ref_image, cv2.COLOR_BGR2RGB)
+
+ gt_image = cv2.imread(tar_image_path)
+ gt_image = cv2.cvtColor(gt_image, cv2.COLOR_BGR2RGB)
+
+ ref_mask = (cv2.imread(ref_mask_path) > 128).astype(np.uint8)[:,:,0]
+
+ tar_mask = Image.open(tar_mask_path ).convert('P')
+ tar_mask= np.array(tar_mask)
+ tar_mask = tar_mask == 5
+
+ gen_image = inference_single_image(ref_image, ref_mask, gt_image.copy(), tar_mask)
+ gen_path = os.path.join(save_dir, '_t_' + image_name)
+
+ vis_image = cv2.hconcat([ref_image, gt_image, gen_image])
+ cv2.imwrite(gen_path, vis_image[:,:,::-1])
+ # bottoms
+ for image_name in image_names:
+ ref_image_path = os.path.join(test_dir, image_name)
+ tar_image_path = ref_image_path.replace('/cloth_s/', '/image/')
+ ref_mask_path = ref_image_path.replace('/cloth_s/','/cloth-mask_s/')
+ tar_mask_path = ref_image_path.replace('/cloth_s/', '/image-parse-v3/').replace('.jpg','.png')
+
+ ref_image = cv2.imread(ref_image_path)
+ ref_image = cv2.cvtColor(ref_image, cv2.COLOR_BGR2RGB)
+
+ gt_image = cv2.imread(tar_image_path)
+ gt_image = cv2.cvtColor(gt_image, cv2.COLOR_BGR2RGB)
+
+ ref_mask = (cv2.imread(ref_mask_path) > 128).astype(np.uint8)[:,:,0]
+
+ tar_mask = Image.open(tar_mask_path ).convert('P')
+ tar_mask= np.array(tar_mask)
+ tar_mask = tar_mask == 9
+
+ gen_image = inference_single_image(ref_image, ref_mask, gt_image.copy(), tar_mask)
+ gen_path = os.path.join(save_dir, image_name)
+
+ vis_image = cv2.hconcat([ref_image, gt_image, gen_image])
+ cv2.imwrite(gen_path, vis_image[:,:,::-1])
+
+
+
diff --git a/ADOOR_ACE/Interaction-Server/Various-scripts/run_inference_train_shuffled.py b/ADOOR_ACE/Interaction-Server/Various-scripts/run_inference_train_shuffled.py
new file mode 100644
index 0000000000000000000000000000000000000000..9bc901e8e53333307446b101876c7822f637316d
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/Various-scripts/run_inference_train_shuffled.py
@@ -0,0 +1,318 @@
+import cv2
+import einops
+import numpy as np
+import torch
+import random
+from pytorch_lightning import seed_everything
+from cldm.model import create_model, load_state_dict
+from cldm.ddim_hacked import DDIMSampler
+from cldm.hack import disable_verbosity, enable_sliced_attention
+from datasets.data_utils import *
+cv2.setNumThreads(0)
+cv2.ocl.setUseOpenCL(False)
+import albumentations as A
+from omegaconf import OmegaConf
+from PIL import Image
+
+
+save_memory = False
+disable_verbosity()
+if save_memory:
+ enable_sliced_attention()
+
+
+config = OmegaConf.load('./configs/inference.yaml')
+model_ckpt = config.pretrained_model
+model_config = config.config_file
+
+model = create_model(model_config ).cpu()
+model.load_state_dict(load_state_dict(model_ckpt, location='cuda'))
+model = model.cuda()
+ddim_sampler = DDIMSampler(model)
+
+
+
+def aug_data_mask(image, mask):
+ transform = A.Compose([
+ A.HorizontalFlip(p=0.5),
+ A.RandomBrightnessContrast(p=0.5),
+ ])
+ transformed = transform(image=image.astype(np.uint8), mask = mask)
+ transformed_image = transformed["image"]
+ transformed_mask = transformed["mask"]
+ return transformed_image, transformed_mask
+
+
+def process_pairs(ref_image, ref_mask, tar_image, tar_mask):
+ # ========= Reference ===========
+ # ref expand
+ ref_box_yyxx = get_bbox_from_mask(ref_mask)
+
+ # ref filter mask
+ ref_mask_3 = np.stack([ref_mask,ref_mask,ref_mask],-1)
+ masked_ref_image = ref_image * ref_mask_3 + np.ones_like(ref_image) * 255 * (1-ref_mask_3)
+
+ y1,y2,x1,x2 = ref_box_yyxx
+ masked_ref_image = masked_ref_image[y1:y2,x1:x2,:]
+ ref_mask = ref_mask[y1:y2,x1:x2]
+
+
+ ratio = np.random.randint(12, 13) / 10
+ masked_ref_image, ref_mask = expand_image_mask(masked_ref_image, ref_mask, ratio=ratio)
+ ref_mask_3 = np.stack([ref_mask,ref_mask,ref_mask],-1)
+
+ # to square and resize
+ masked_ref_image = pad_to_square(masked_ref_image, pad_value = 255, random = False)
+ masked_ref_image = cv2.resize(masked_ref_image, (224,224) ).astype(np.uint8)
+
+ ref_mask_3 = pad_to_square(ref_mask_3 * 255, pad_value = 0, random = False)
+ ref_mask_3 = cv2.resize(ref_mask_3, (224,224) ).astype(np.uint8)
+ ref_mask = ref_mask_3[:,:,0]
+
+ # ref aug
+ masked_ref_image_aug = masked_ref_image #aug_data(masked_ref_image)
+
+ # collage aug
+ masked_ref_image_compose, ref_mask_compose = masked_ref_image, ref_mask #aug_data_mask(masked_ref_image, ref_mask)
+ masked_ref_image_aug = masked_ref_image_compose.copy()
+ ref_mask_3 = np.stack([ref_mask_compose,ref_mask_compose,ref_mask_compose],-1)
+ ref_image_collage = sobel(masked_ref_image_compose, ref_mask_compose/255)
+
+ # ========= Target ===========
+ tar_box_yyxx = get_bbox_from_mask(tar_mask)
+ tar_box_yyxx = expand_bbox(tar_mask, tar_box_yyxx, ratio=[1.1,1.2])
+
+ # crop
+ tar_box_yyxx_crop = expand_bbox(tar_image, tar_box_yyxx, ratio=[1.5, 3]) #1.2 1.6
+ tar_box_yyxx_crop = box2squre(tar_image, tar_box_yyxx_crop) # crop box
+ y1,y2,x1,x2 = tar_box_yyxx_crop
+
+ cropped_target_image = tar_image[y1:y2,x1:x2,:]
+ tar_box_yyxx = box_in_box(tar_box_yyxx, tar_box_yyxx_crop)
+ y1,y2,x1,x2 = tar_box_yyxx
+
+ # collage
+ ref_image_collage = cv2.resize(ref_image_collage, (x2-x1, y2-y1))
+ ref_mask_compose = cv2.resize(ref_mask_compose.astype(np.uint8), (x2-x1, y2-y1))
+ ref_mask_compose = (ref_mask_compose > 128).astype(np.uint8)
+
+ collage = cropped_target_image.copy()
+ collage[y1:y2,x1:x2,:] = ref_image_collage
+
+ collage_mask = cropped_target_image.copy() * 0.0
+ collage_mask[y1:y2,x1:x2,:] = 1.0
+
+ # the size before pad
+ H1, W1 = collage.shape[0], collage.shape[1]
+ cropped_target_image = pad_to_square(cropped_target_image, pad_value = 0, random = False).astype(np.uint8)
+ collage = pad_to_square(collage, pad_value = 0, random = False).astype(np.uint8)
+ collage_mask = pad_to_square(collage_mask, pad_value = -1, random = False).astype(np.uint8)
+
+ # the size after pad
+ H2, W2 = collage.shape[0], collage.shape[1]
+ cropped_target_image = cv2.resize(cropped_target_image, (512,512)).astype(np.float32)
+ collage = cv2.resize(collage, (512,512)).astype(np.float32)
+ collage_mask = (cv2.resize(collage_mask, (512,512)).astype(np.float32) > 0.5).astype(np.float32)
+
+ masked_ref_image_aug = masked_ref_image_aug / 255
+ cropped_target_image = cropped_target_image / 127.5 - 1.0
+ collage = collage / 127.5 - 1.0
+ collage = np.concatenate([collage, collage_mask[:,:,:1] ] , -1)
+
+ item = dict(ref=masked_ref_image_aug.copy(), jpg=cropped_target_image.copy(), hint=collage.copy(), extra_sizes=np.array([H1, W1, H2, W2]), tar_box_yyxx_crop=np.array( tar_box_yyxx_crop ) )
+ return item
+
+
+def crop_back( pred, tar_image, extra_sizes, tar_box_yyxx_crop):
+ H1, W1, H2, W2 = extra_sizes
+ y1,y2,x1,x2 = tar_box_yyxx_crop
+ pred = cv2.resize(pred, (W2, H2))
+ m = 5 # margin_pixel
+
+ if W1 == H1:
+ tar_image[y1+m :y2-m, x1+m:x2-m, :] = pred[m:-m, m:-m]
+ return tar_image
+
+ if W1 < W2:
+ pad1 = int((W2 - W1) / 2)
+ pad2 = W2 - W1 - pad1
+ pred = pred[:,pad1: -pad2, :]
+ else:
+ pad1 = int((H2 - H1) / 2)
+ pad2 = H2 - H1 - pad1
+ pred = pred[pad1: -pad2, :, :]
+
+ gen_image = tar_image.copy()
+ gen_image[y1+m :y2-m, x1+m:x2-m, :] = pred[m:-m, m:-m]
+ return gen_image
+
+
+def inference_single_image(ref_image, ref_mask, tar_image, tar_mask, guidance_scale = 5.0):
+ item = process_pairs(ref_image, ref_mask, tar_image, tar_mask)
+ ref = item['ref'] * 255
+ tar = item['jpg'] * 127.5 + 127.5
+ hint = item['hint'] * 127.5 + 127.5
+
+ hint_image = hint[:,:,:-1]
+ hint_mask = item['hint'][:,:,-1] * 255
+ hint_mask = np.stack([hint_mask,hint_mask,hint_mask],-1)
+ ref = cv2.resize(ref.astype(np.uint8), (512,512))
+
+ seed = random.randint(0, 65535)
+ if save_memory:
+ model.low_vram_shift(is_diffusing=False)
+
+ ref = item['ref']
+ tar = item['jpg']
+ hint = item['hint']
+ num_samples = 1
+
+ control = torch.from_numpy(hint.copy()).float().cuda()
+ control = torch.stack([control for _ in range(num_samples)], dim=0)
+ control = einops.rearrange(control, 'b h w c -> b c h w').clone()
+
+
+ clip_input = torch.from_numpy(ref.copy()).float().cuda()
+ clip_input = torch.stack([clip_input for _ in range(num_samples)], dim=0)
+ clip_input = einops.rearrange(clip_input, 'b h w c -> b c h w').clone()
+
+ guess_mode = False
+ H,W = 512,512
+
+ cond = {"c_concat": [control], "c_crossattn": [model.get_learned_conditioning( clip_input )]}
+ un_cond = {"c_concat": None if guess_mode else [control], "c_crossattn": [model.get_learned_conditioning([torch.zeros((1,3,224,224))] * num_samples)]}
+ shape = (4, H // 8, W // 8)
+
+ if save_memory:
+ model.low_vram_shift(is_diffusing=True)
+
+ # ====
+ num_samples = 1 #gr.Slider(label="Images", minimum=1, maximum=12, value=1, step=1)
+ image_resolution = 512 #gr.Slider(label="Image Resolution", minimum=256, maximum=768, value=512, step=64)
+ strength = 1 #gr.Slider(label="Control Strength", minimum=0.0, maximum=2.0, value=1.0, step=0.01)
+ guess_mode = False #gr.Checkbox(label='Guess Mode', value=False)
+ #detect_resolution = 512 #gr.Slider(label="Segmentation Resolution", minimum=128, maximum=1024, value=512, step=1)
+ ddim_steps = 50 #gr.Slider(label="Steps", minimum=1, maximum=100, value=20, step=1)
+ scale = guidance_scale #gr.Slider(label="Guidance Scale", minimum=0.1, maximum=30.0, value=9.0, step=0.1)
+ seed = -1 #gr.Slider(label="Seed", minimum=-1, maximum=2147483647, step=1, randomize=True)
+ eta = 0.0 #gr.Number(label="eta (DDIM)", value=0.0)
+
+ model.control_scales = [strength * (0.825 ** float(12 - i)) for i in range(13)] if guess_mode else ([strength] * 13) # Magic number. IDK why. Perhaps because 0.825**12<0.01 but 0.826**12>0.01
+ samples, intermediates = ddim_sampler.sample(ddim_steps, num_samples,
+ shape, cond, verbose=False, eta=eta,
+ unconditional_guidance_scale=scale,
+ unconditional_conditioning=un_cond)
+ if save_memory:
+ model.low_vram_shift(is_diffusing=False)
+
+ x_samples = model.decode_first_stage(samples)
+ x_samples = (einops.rearrange(x_samples, 'b c h w -> b h w c') * 127.5 + 127.5).cpu().numpy()#.clip(0, 255).astype(np.uint8)
+
+ result = x_samples[0][:,:,::-1]
+ result = np.clip(result,0,255)
+
+ pred = x_samples[0]
+ pred = np.clip(pred,0,255)[1:,:,:]
+ sizes = item['extra_sizes']
+ tar_box_yyxx_crop = item['tar_box_yyxx_crop']
+ gen_image = crop_back(pred, tar_image, sizes, tar_box_yyxx_crop)
+ return gen_image
+
+
+if __name__ == '__main__':
+ '''
+ # ==== Example for inferring a single image ===
+ reference_image_path = './examples/TestDreamBooth/FG/01.png'
+ bg_image_path = './examples/TestDreamBooth/BG/000000309203_GT.png'
+ bg_mask_path = './examples/TestDreamBooth/BG/000000309203_mask.png'
+ save_path = './examples/TestDreamBooth/GEN/gen_res.png'
+
+ # reference image + reference mask
+ # You could use the demo of SAM to extract RGB-A image with masks
+ # https://segment-anything.com/demo
+ image = cv2.imread( reference_image_path, cv2.IMREAD_UNCHANGED)
+ mask = (image[:,:,-1] > 128).astype(np.uint8)
+ image = image[:,:,:-1]
+ image = cv2.cvtColor(image.copy(), cv2.COLOR_BGR2RGB)
+ ref_image = image
+ ref_mask = mask
+
+ # background image
+ back_image = cv2.imread(bg_image_path).astype(np.uint8)
+ back_image = cv2.cvtColor(back_image, cv2.COLOR_BGR2RGB)
+
+ # background mask
+ tar_mask = cv2.imread(bg_mask_path)[:,:,0] > 128
+ tar_mask = tar_mask.astype(np.uint8)
+
+ gen_image = inference_single_image(ref_image, ref_mask, back_image.copy(), tar_mask)
+ h,w = back_image.shape[0], back_image.shape[1]
+ ref_image = cv2.resize(ref_image, (w,h))
+ vis_image = cv2.hconcat([ref_image, back_image, gen_image])
+
+ cv2.imwrite(save_path, vis_image [:,:,::-1])
+ '''
+ #'''
+ # ==== Example for inferring shuffled VITON-HD training pairs ====
+
+ from omegaconf import OmegaConf
+ import os
+ DConf = OmegaConf.load('./configs/datasets.yaml')
+ save_dir = '/work/wefa-door/INFERRED_TRAIN_SHUFFLED'
+
+ if not os.path.exists(save_dir):
+ os.mkdir(save_dir)
+
+ test_dir = '/work/wefa-door/cloth_s'
+ image_names = os.listdir(test_dir)
+ # tops
+ for image_name in image_names:
+ ref_image_path = os.path.join(test_dir, image_name)
+ tar_image_path = ref_image_path.replace('/cloth_s/', '/image/')
+ ref_mask_path = ref_image_path.replace('/cloth_s/','/cloth-mask_s/')
+ tar_mask_path = ref_image_path.replace('/cloth_s/', '/image-parse-v3/').replace('.jpg','.png')
+
+ ref_image = cv2.imread(ref_image_path)
+ ref_image = cv2.cvtColor(ref_image, cv2.COLOR_BGR2RGB)
+
+ gt_image = cv2.imread(tar_image_path)
+ gt_image = cv2.cvtColor(gt_image, cv2.COLOR_BGR2RGB)
+
+ ref_mask = (cv2.imread(ref_mask_path) > 128).astype(np.uint8)[:,:,0]
+
+ tar_mask = Image.open(tar_mask_path ).convert('P')
+ tar_mask= np.array(tar_mask)
+ tar_mask = tar_mask == 5
+
+ gen_image = inference_single_image(ref_image, ref_mask, gt_image.copy(), tar_mask)
+ gen_path = os.path.join(save_dir, '_t_' + image_name)
+
+ vis_image = cv2.hconcat([ref_image, gt_image, gen_image])
+ cv2.imwrite(gen_path, vis_image[:,:,::-1])
+ # bottoms
+ for image_name in image_names:
+ ref_image_path = os.path.join(test_dir, image_name)
+ tar_image_path = ref_image_path.replace('/cloth_s/', '/image/')
+ ref_mask_path = ref_image_path.replace('/cloth_s/','/cloth-mask_s/')
+ tar_mask_path = ref_image_path.replace('/cloth_s/', '/image-parse-v3/').replace('.jpg','.png')
+
+ ref_image = cv2.imread(ref_image_path)
+ ref_image = cv2.cvtColor(ref_image, cv2.COLOR_BGR2RGB)
+
+ gt_image = cv2.imread(tar_image_path)
+ gt_image = cv2.cvtColor(gt_image, cv2.COLOR_BGR2RGB)
+
+ ref_mask = (cv2.imread(ref_mask_path) > 128).astype(np.uint8)[:,:,0]
+
+ tar_mask = Image.open(tar_mask_path ).convert('P')
+ tar_mask= np.array(tar_mask)
+ tar_mask = tar_mask == 9
+
+ gen_image = inference_single_image(ref_image, ref_mask, gt_image.copy(), tar_mask)
+ gen_path = os.path.join(save_dir, image_name)
+
+ vis_image = cv2.hconcat([ref_image, gt_image, gen_image])
+ cv2.imwrite(gen_path, vis_image[:,:,::-1])
+
+
+
diff --git a/ADOOR_ACE/Interaction-Server/Various-scripts/run_train_anydoor copy.py b/ADOOR_ACE/Interaction-Server/Various-scripts/run_train_anydoor copy.py
new file mode 100644
index 0000000000000000000000000000000000000000..565eb7c35d97736baab0a169bad9e794cf8f773f
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/Various-scripts/run_train_anydoor copy.py
@@ -0,0 +1,78 @@
+import pytorch_lightning as pl
+from torch.utils.data import DataLoader
+from datasets.ytb_vos import YoutubeVOSDataset
+from datasets.ytb_vis import YoutubeVISDataset
+from datasets.saliency_modular import SaliencyDataset
+from datasets.vipseg import VIPSegDataset
+from datasets.mvimagenet import MVImageNetDataset
+from datasets.sam import SAMDataset
+from datasets.uvo import UVODataset
+from datasets.uvo_val import UVOValDataset
+from datasets.mose import MoseDataset
+from datasets.vitonhd import VitonHDDataset
+from datasets.fashiontryon import FashionTryonDataset
+from datasets.lvis import LvisDataset
+from cldm.logger import ImageLogger
+from cldm.model import create_model, load_state_dict
+from torch.utils.data import ConcatDataset
+from cldm.hack import disable_verbosity, enable_sliced_attention
+from omegaconf import OmegaConf
+from pytorch_lightning.callbacks import Callback
+
+class InferenceCallback(Callback):
+ def on_epoch_end(self, trainer, pl_module):
+ # Code to run your inference script goes here
+ print(f"Running inference after epoch {trainer.current_epoch}")
+ # Example: Call your inference function here
+ # inference_function(pl_module)
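+ # A possible sketch (illustrative only, not wired into this script): run one of the
+ # standalone inference scripts in a subprocess after each epoch; those scripts read
+ # their checkpoint path from configs/inference.yaml rather than taking arguments.
+ # import subprocess
+ # subprocess.run(['python', 'run_inference_train.py'], check=False)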
+
+save_memory = False
+disable_verbosity()
+if save_memory:
+ enable_sliced_attention()
+
+# Configs
+resume_path = '/work/epoch=8-step=3416.ckpt'
+batch_size = 16
+logger_freq = 1000
+learning_rate = 1e-5
+sd_locked = False
+only_mid_control = False
+n_gpus = 4
+accumulate_grad_batches=1
+
+# First use cpu to load models. Pytorch Lightning will automatically move it to GPUs.
+model = create_model('./configs/anydoor.yaml').cpu()
+model.load_state_dict(load_state_dict(resume_path, location='cpu'))
+model.learning_rate = learning_rate
+model.sd_locked = sd_locked
+model.only_mid_control = only_mid_control
+
+# Datasets
+DConf = OmegaConf.load('./configs/datasets.yaml')
+# dataset1 = YoutubeVOSDataset(**DConf.Train.YoutubeVOS)
+# dataset2 = SaliencyDataset(**DConf.Train.Saliency)
+# dataset3 = VIPSegDataset(**DConf.Train.VIPSeg)
+# dataset4 = YoutubeVISDataset(**DConf.Train.YoutubeVIS)
+# dataset5 = MVImageNetDataset(**DConf.Train.MVImageNet)
+# dataset6 = SAMDataset(**DConf.Train.SAM)
+# dataset7 = UVODataset(**DConf.Train.UVO.train)
+dataset8 = VitonHDDataset(**DConf.Train.VitonHD)
+# dataset9 = UVOValDataset(**DConf.Train.UVO.val)
+# dataset10 = MoseDataset(**DConf.Train.Mose)
+# dataset11 = FashionTryonDataset(**DConf.Train.FashionTryon)
+# dataset12 = LvisDataset(**DConf.Train.Lvis)
+
+# image_data = [dataset2, dataset6, dataset12]
+# video_data = [dataset1, dataset3, dataset4, dataset7, dataset9, dataset10 ]
+# tryon_data = [dataset8, dataset11]
+# threed_data = [dataset5]
+
+# The ratio of each dataset is adjusted by setting the __len__
+# dataset = ConcatDataset( image_data + video_data + tryon_data + threed_data + video_data + tryon_data + threed_data )
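+# Illustrative sketch of that mechanism (not part of this script): a dataset can report
+# an inflated __len__ and wrap the index, so ConcatDataset samples it more often:
+#   def __len__(self): return self.repeat_factor * len(self.samples)
+#   def __getitem__(self, idx): return self.load(self.samples[idx % len(self.samples)])
+# 'repeat_factor', 'samples' and 'load' are placeholder names.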
+dataloader = DataLoader(dataset8, num_workers=8, batch_size=batch_size, shuffle=True)
+logger = ImageLogger(batch_frequency=logger_freq)
+trainer = pl.Trainer(gpus=n_gpus, strategy="ddp", precision=16, accelerator="gpu", callbacks=[logger], progress_bar_refresh_rate=1, accumulate_grad_batches=accumulate_grad_batches)
+
+# Train!
+trainer.fit(model, dataloader)
diff --git a/ADOOR_ACE/Interaction-Server/Various-scripts/shuffle copy.py b/ADOOR_ACE/Interaction-Server/Various-scripts/shuffle copy.py
new file mode 100644
index 0000000000000000000000000000000000000000..c2ebef8cd5f6558f8a2176150679d5ea3a8e027f
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/Various-scripts/shuffle copy.py
@@ -0,0 +1,27 @@
+import os
+import random
+
+# Directory paths
+dir1 = 'cloth_s'
+dir2 = 'cloth-mask_s'
+
+# Ensure both directories have the same set of filenames
+assert sorted(os.listdir(dir1)) == sorted(os.listdir(dir2)), "Directories do not have the same set of files"
+
+# Get the list of filenames without extension from one directory
+filenames = [os.path.splitext(f)[0] for f in os.listdir(dir1)]
+# Shuffle the list of filenames
+random.shuffle(filenames)
+
+# Create mappings for new filenames in both directories
+new_filenames = {os.path.splitext(f)[0]: new_name for f, new_name in zip(sorted(os.listdir(dir1)), filenames)}
+
+# Rename files in both directories using a two-phase rename: the new names are a
+# permutation of the old ones, so renaming in place could overwrite a file that
+# has not been renamed yet.
+for dir in [dir1, dir2]:
+    for old_file in os.listdir(dir):
+        old_base, ext = os.path.splitext(old_file)
+        # Only rename if the file is in the mapping
+        if old_base in new_filenames:
+            tmp_file = '__tmp__' + new_filenames[old_base] + ext
+            os.rename(os.path.join(dir, old_file), os.path.join(dir, tmp_file))
+    # Strip the temporary prefix to obtain the final shuffled names
+    for tmp_file in os.listdir(dir):
+        if tmp_file.startswith('__tmp__'):
+            os.rename(os.path.join(dir, tmp_file), os.path.join(dir, tmp_file[len('__tmp__'):]))
+
diff --git a/ADOOR_ACE/Interaction-Server/Various-scripts/sort.py b/ADOOR_ACE/Interaction-Server/Various-scripts/sort.py
new file mode 100644
index 0000000000000000000000000000000000000000..413d1e61d6fc2cd525ceb597add3da488dce87dd
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/Various-scripts/sort.py
@@ -0,0 +1,41 @@
+import os
+import shutil
+
+# Directories configuration
+dirs = {
+ 'image': 'jpg',
+ 'image-parse-v3': 'png',
+ 'cloth': 'jpg',
+ 'cloth-mask': 'jpg'
+}
+spare_dir = 'spare'
+
+# Ensure the spare directory exists
+os.makedirs(spare_dir, exist_ok=True)
+
+# Function to get base names of files in a directory
+def get_base_names(dir_path, ext):
+ return {os.path.splitext(f)[0] for f in os.listdir(dir_path) if f.endswith('.' + ext)}
+
+# Collect base names from all directories
+base_names = {}
+for dir_name, ext in dirs.items():
+ base_names[dir_name] = get_base_names(dir_name, ext)
+
+# Identify files without a match in all directories
+spare_files = set()
+for dir_name, names in base_names.items():
+ # Find files that do not have a match in every other directory
+ other_dirs = set(dirs.keys()) - {dir_name}
+ for base_name in names:
+ if not all(base_name in base_names[other_dir] for other_dir in other_dirs):
+ spare_files.add((dir_name, base_name + '.' + dirs[dir_name]))
+
+# Move spare files to the spare directory
+for dir_name, file_name in spare_files:
+ src = os.path.join(dir_name, file_name)
+ dst = os.path.join(spare_dir, file_name)
+ shutil.move(src, dst)
+ print(f"Moved {src} to {dst}")
+
+print("Operation completed.")
diff --git a/ADOOR_ACE/Interaction-Server/Various-scripts/tool_add_control_sd21.py b/ADOOR_ACE/Interaction-Server/Various-scripts/tool_add_control_sd21.py
new file mode 100644
index 0000000000000000000000000000000000000000..8231dd7e7ff4c543d9cdb73bc045b9ae308e1738
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/Various-scripts/tool_add_control_sd21.py
@@ -0,0 +1,55 @@
+import sys
+import os
+
+assert len(sys.argv) == 3, 'Args are wrong.'
+
+input_path = sys.argv[1]
+output_path = sys.argv[2]
+
+assert os.path.exists(input_path), 'Input model does not exist.'
+assert not os.path.exists(output_path), 'Output filename already exists.'
+assert os.path.exists(os.path.dirname(output_path)), 'Output path is not valid.'
+
+import torch
+from share import *
+from cldm.model import create_model
+
+
+def get_node_name(name, parent_name):
+ if len(name) <= len(parent_name):
+ return False, ''
+ p = name[:len(parent_name)]
+ if p != parent_name:
+ return False, ''
+ return True, name[len(parent_name):]
+
+
+model = create_model(config_path='./configs/anydoor.yaml')
+
+pretrained_weights = torch.load(input_path)
+if 'state_dict' in pretrained_weights:
+ pretrained_weights = pretrained_weights['state_dict']
+
+scratch_dict = model.state_dict()
+
+target_dict = {}
+for k in scratch_dict.keys():
+
+ is_control, name = get_node_name(k, 'control_')
+ if 'control_model.input_blocks.0.0' in k:
+ print('skipped key: ', k)
+ continue
+
+ if is_control:
+ copy_k = 'model.diffusion_' + name
+ else:
+ copy_k = k
+ if copy_k in pretrained_weights:
+ target_dict[k] = pretrained_weights[copy_k].clone()
+ else:
+ target_dict[k] = scratch_dict[k].clone()
+ print(f'These weights are newly added: {k}')
+
+model.load_state_dict(target_dict, strict=False)
+torch.save(model.state_dict(), output_path)
+print('Done.')
diff --git a/ADOOR_ACE/Interaction-Server/fileman.js b/ADOOR_ACE/Interaction-Server/fileman.js
new file mode 100644
index 0000000000000000000000000000000000000000..8c571c01d0935d353971aff0dedb7ce7265fea7d
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/fileman.js
@@ -0,0 +1,26 @@
+const express = require('express');
+const path = require('path');
+const app = express();
+const port = 9999;
+const dirToJson = require('dir-to-json');
+
+// Serve the ComfyUI input and output directory structures as JSON
+const inputDirectoryPath = path.join('/work/Ace_ComfyUI/input'); // currently unused by the handler below
+app.use('/files', (req, res) => {
+ Promise.all([
+ dirToJson("/work/Ace_ComfyUI/input"),
+ dirToJson("/work/Ace_ComfyUI/output")
+ ]).then(([inputStructure, outputStructure]) => {
+ res.json({
+ 'Input': inputStructure,
+ 'Output': outputStructure
+ });
+ }).catch(error => {
+ res.status(500).send('Could not read directory structure');
+ });
+ });
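+// Example usage: GET http://localhost:9999/files returns a JSON object of the form
+// { "Input": <input directory tree>, "Output": <output directory tree> }.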
+
+// Start the server
+app.listen(port, () => {
+ console.log(`File manager running at http://localhost:${port}`);
+});
diff --git a/ADOOR_ACE/Interaction-Server/new_server.js b/ADOOR_ACE/Interaction-Server/new_server.js
new file mode 100644
index 0000000000000000000000000000000000000000..eb3344492405add279b65d8c894e11dfc29d15ac
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/new_server.js
@@ -0,0 +1,139 @@
+const express = require('express');
+const path = require('path');
+const fs = require('fs');
+const basicAuth = require('basic-auth');
+const app = express();
+const port = 8081;
+const cors = require('cors');
+const { createProxyMiddleware } = require('http-proxy-middleware');
+
+// CORS options
+const corsOptions = {
+ origin: '*', // Allow requests from any origin
+ optionsSuccessStatus: 200 // some legacy browsers (IE11, various SmartTVs) choke on 204
+};
+
+app.use(cors(corsOptions));
+
+const authMiddleware = (req, res, next) => {
+ const unauthorized = (res) => {res.set('WWW-Authenticate', 'Basic realm=Authorization Required');return res.sendStatus(401);};
+ const user = basicAuth(req);
+ if (!user || !user.name || !user.pass) {return unauthorized(res);}
+ if (user.name === 'ace' && user.pass === '2024=ace=2024') {return next();} else {return unauthorized(res);}
+};
+
+app.use(authMiddleware);
+
+// Define the proxy route
+app.use('/api', createProxyMiddleware({
+ target: 'http://localhost:8999', // Target host
+ changeOrigin: true, // Needed for virtual hosted sites
+ pathRewrite: {
+ '^/api': '', // Rewrite URL: remove '/api' (if needed)
+ },
+}));
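+// Example: a request to /api/<route> is forwarded to http://localhost:8999/<route>
+// ('<route>' is an arbitrary placeholder; the '/api' prefix is stripped by pathRewrite).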
+
+const pageHeader = `
+<!DOCTYPE html>
+<html>
+<head>
+  <title>Image Processing Form</title>
+</head>
+<body>
+  <h1>Image Processing Form</h1>
+  <h2>Previews</h2>
+  <h2>Processed Image</h2>
+</body>
+</html>
+`;
+
+app.get('/', (req, res) => {
+ res.send(pageHeader);
+});
+
+app.listen(port, () => {
+ console.log(`Server running at http://localhost:${port}`);
+});
\ No newline at end of file
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/.bin/mime b/ADOOR_ACE/Interaction-Server/node_modules/.bin/mime
new file mode 120000
index 0000000000000000000000000000000000000000..fbb7ee0eed8d1dd0fe3b5a9d6ff41d1c4f044675
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/.bin/mime
@@ -0,0 +1 @@
+../mime/cli.js
\ No newline at end of file
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/.package-lock.json b/ADOOR_ACE/Interaction-Server/node_modules/.package-lock.json
new file mode 100644
index 0000000000000000000000000000000000000000..7e25071c58f526a7f5c156763e9b6fb146681461
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/.package-lock.json
@@ -0,0 +1,990 @@
+{
+ "name": "Interaction-Server",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "node_modules/@types/http-proxy": {
+ "version": "1.17.14",
+ "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.14.tgz",
+ "integrity": "sha512-SSrD0c1OQzlFX7pGu1eXxSEjemej64aaNPRhhVYUGqXh0BtldAAx37MG8btcumvpgKyZp1F5Gn3JkktdxiFv6w==",
+ "dependencies": {
+ "@types/node": "*"
+ }
+ },
+ "node_modules/@types/node": {
+ "version": "20.12.7",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.7.tgz",
+ "integrity": "sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg==",
+ "dependencies": {
+ "undici-types": "~5.26.4"
+ }
+ },
+ "node_modules/accepts": {
+ "version": "1.3.8",
+ "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz",
+ "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==",
+ "dependencies": {
+ "mime-types": "~2.1.34",
+ "negotiator": "0.6.3"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/array-flatten": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
+ "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg=="
+ },
+ "node_modules/basic-auth": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz",
+ "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==",
+ "dependencies": {
+ "safe-buffer": "5.1.2"
+ },
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/batch": {
+ "version": "0.6.1",
+ "resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz",
+ "integrity": "sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw=="
+ },
+ "node_modules/body-parser": {
+ "version": "1.20.2",
+ "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz",
+ "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==",
+ "dependencies": {
+ "bytes": "3.1.2",
+ "content-type": "~1.0.5",
+ "debug": "2.6.9",
+ "depd": "2.0.0",
+ "destroy": "1.2.0",
+ "http-errors": "2.0.0",
+ "iconv-lite": "0.4.24",
+ "on-finished": "2.4.1",
+ "qs": "6.11.0",
+ "raw-body": "2.5.2",
+ "type-is": "~1.6.18",
+ "unpipe": "1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.8",
+ "npm": "1.2.8000 || >= 1.4.16"
+ }
+ },
+ "node_modules/braces": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
+ "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+ "dependencies": {
+ "fill-range": "^7.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/bytes": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
+ "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/call-bind": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz",
+ "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==",
+ "dependencies": {
+ "es-define-property": "^1.0.0",
+ "es-errors": "^1.3.0",
+ "function-bind": "^1.1.2",
+ "get-intrinsic": "^1.2.4",
+ "set-function-length": "^1.2.1"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/content-disposition": {
+ "version": "0.5.4",
+ "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",
+ "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==",
+ "dependencies": {
+ "safe-buffer": "5.2.1"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/content-disposition/node_modules/safe-buffer": {
+ "version": "5.2.1",
+ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
+ "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/feross"
+ },
+ {
+ "type": "patreon",
+ "url": "https://www.patreon.com/feross"
+ },
+ {
+ "type": "consulting",
+ "url": "https://feross.org/support"
+ }
+ ]
+ },
+ "node_modules/content-type": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
+ "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/cookie": {
+ "version": "0.6.0",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
+ "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/cookie-signature": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
+ "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ=="
+ },
+ "node_modules/cors": {
+ "version": "2.8.5",
+ "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz",
+ "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==",
+ "dependencies": {
+ "object-assign": "^4",
+ "vary": "^1"
+ },
+ "engines": {
+ "node": ">= 0.10"
+ }
+ },
+ "node_modules/debug": {
+ "version": "2.6.9",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
+ "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
+ "dependencies": {
+ "ms": "2.0.0"
+ }
+ },
+ "node_modules/define-data-property": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
+ "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
+ "dependencies": {
+ "es-define-property": "^1.0.0",
+ "es-errors": "^1.3.0",
+ "gopd": "^1.0.1"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/depd": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
+ "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/destroy": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
+ "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==",
+ "engines": {
+ "node": ">= 0.8",
+ "npm": "1.2.8000 || >= 1.4.16"
+ }
+ },
+ "node_modules/dir-to-json": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/dir-to-json/-/dir-to-json-1.0.0.tgz",
+ "integrity": "sha512-yDfgiqPrKf4Qje1CgsFRB1Dl1TjV9U8YVo6ykIR1gbRf4E/7ErmrhP1FbUfSccs6hojSgEkLWd7xO6XY7ioplg==",
+ "engines": {
+ "node": ">=14.0.0"
+ }
+ },
+ "node_modules/ee-first": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
+ "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="
+ },
+ "node_modules/encodeurl": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz",
+ "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/es-define-property": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz",
+ "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==",
+ "dependencies": {
+ "get-intrinsic": "^1.2.4"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/es-errors": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
+ "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/escape-html": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
+ "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="
+ },
+ "node_modules/etag": {
+ "version": "1.8.1",
+ "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
+ "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/eventemitter3": {
+ "version": "4.0.7",
+ "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz",
+ "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw=="
+ },
+ "node_modules/express": {
+ "version": "4.19.2",
+ "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz",
+ "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==",
+ "dependencies": {
+ "accepts": "~1.3.8",
+ "array-flatten": "1.1.1",
+ "body-parser": "1.20.2",
+ "content-disposition": "0.5.4",
+ "content-type": "~1.0.4",
+ "cookie": "0.6.0",
+ "cookie-signature": "1.0.6",
+ "debug": "2.6.9",
+ "depd": "2.0.0",
+ "encodeurl": "~1.0.2",
+ "escape-html": "~1.0.3",
+ "etag": "~1.8.1",
+ "finalhandler": "1.2.0",
+ "fresh": "0.5.2",
+ "http-errors": "2.0.0",
+ "merge-descriptors": "1.0.1",
+ "methods": "~1.1.2",
+ "on-finished": "2.4.1",
+ "parseurl": "~1.3.3",
+ "path-to-regexp": "0.1.7",
+ "proxy-addr": "~2.0.7",
+ "qs": "6.11.0",
+ "range-parser": "~1.2.1",
+ "safe-buffer": "5.2.1",
+ "send": "0.18.0",
+ "serve-static": "1.15.0",
+ "setprototypeof": "1.2.0",
+ "statuses": "2.0.1",
+ "type-is": "~1.6.18",
+ "utils-merge": "1.0.1",
+ "vary": "~1.1.2"
+ },
+ "engines": {
+ "node": ">= 0.10.0"
+ }
+ },
+ "node_modules/express/node_modules/safe-buffer": {
+ "version": "5.2.1",
+ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
+ "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/feross"
+ },
+ {
+ "type": "patreon",
+ "url": "https://www.patreon.com/feross"
+ },
+ {
+ "type": "consulting",
+ "url": "https://feross.org/support"
+ }
+ ]
+ },
+ "node_modules/fill-range": {
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
+ "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+ "dependencies": {
+ "to-regex-range": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/finalhandler": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz",
+ "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==",
+ "dependencies": {
+ "debug": "2.6.9",
+ "encodeurl": "~1.0.2",
+ "escape-html": "~1.0.3",
+ "on-finished": "2.4.1",
+ "parseurl": "~1.3.3",
+ "statuses": "2.0.1",
+ "unpipe": "~1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/follow-redirects": {
+ "version": "1.15.6",
+ "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
+ "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
+ "funding": [
+ {
+ "type": "individual",
+ "url": "https://github.com/sponsors/RubenVerborgh"
+ }
+ ],
+ "engines": {
+ "node": ">=4.0"
+ },
+ "peerDependenciesMeta": {
+ "debug": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/forwarded": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
+ "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/fresh": {
+ "version": "0.5.2",
+ "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
+ "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/function-bind": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
+ "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/get-intrinsic": {
+ "version": "1.2.4",
+ "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz",
+ "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==",
+ "dependencies": {
+ "es-errors": "^1.3.0",
+ "function-bind": "^1.1.2",
+ "has-proto": "^1.0.1",
+ "has-symbols": "^1.0.3",
+ "hasown": "^2.0.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/gopd": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz",
+ "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==",
+ "dependencies": {
+ "get-intrinsic": "^1.1.3"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/has-property-descriptors": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
+ "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
+ "dependencies": {
+ "es-define-property": "^1.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/has-proto": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz",
+ "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/has-symbols": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz",
+ "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/hasown": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
+ "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
+ "dependencies": {
+ "function-bind": "^1.1.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/http-errors": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz",
+ "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==",
+ "dependencies": {
+ "depd": "2.0.0",
+ "inherits": "2.0.4",
+ "setprototypeof": "1.2.0",
+ "statuses": "2.0.1",
+ "toidentifier": "1.0.1"
+ },
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/http-proxy": {
+ "version": "1.18.1",
+ "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz",
+ "integrity": "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==",
+ "dependencies": {
+ "eventemitter3": "^4.0.0",
+ "follow-redirects": "^1.0.0",
+ "requires-port": "^1.0.0"
+ },
+ "engines": {
+ "node": ">=8.0.0"
+ }
+ },
+ "node_modules/http-proxy-middleware": {
+ "version": "2.0.6",
+ "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz",
+ "integrity": "sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw==",
+ "dependencies": {
+ "@types/http-proxy": "^1.17.8",
+ "http-proxy": "^1.18.1",
+ "is-glob": "^4.0.1",
+ "is-plain-obj": "^3.0.0",
+ "micromatch": "^4.0.2"
+ },
+ "engines": {
+ "node": ">=12.0.0"
+ },
+ "peerDependencies": {
+ "@types/express": "^4.17.13"
+ },
+ "peerDependenciesMeta": {
+ "@types/express": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/iconv-lite": {
+ "version": "0.4.24",
+ "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
+ "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
+ "dependencies": {
+ "safer-buffer": ">= 2.1.2 < 3"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/inherits": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
+ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
+ },
+ "node_modules/ipaddr.js": {
+ "version": "1.9.1",
+ "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
+ "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==",
+ "engines": {
+ "node": ">= 0.10"
+ }
+ },
+ "node_modules/is-extglob": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
+ "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/is-glob": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
+ "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
+ "dependencies": {
+ "is-extglob": "^2.1.1"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/is-number": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
+ "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
+ "engines": {
+ "node": ">=0.12.0"
+ }
+ },
+ "node_modules/is-plain-obj": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz",
+ "integrity": "sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==",
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/media-typer": {
+ "version": "0.3.0",
+ "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
+ "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/merge-descriptors": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz",
+ "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w=="
+ },
+ "node_modules/methods": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
+ "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/micromatch": {
+ "version": "4.0.5",
+ "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
+ "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
+ "dependencies": {
+ "braces": "^3.0.2",
+ "picomatch": "^2.3.1"
+ },
+ "engines": {
+ "node": ">=8.6"
+ }
+ },
+ "node_modules/mime": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
+ "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==",
+ "bin": {
+ "mime": "cli.js"
+ },
+ "engines": {
+ "node": ">=4"
+ }
+ },
+ "node_modules/mime-db": {
+ "version": "1.52.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
+ "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/mime-types": {
+ "version": "2.1.35",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
+ "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
+ "dependencies": {
+ "mime-db": "1.52.0"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/ms": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
+ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
+ },
+ "node_modules/negotiator": {
+ "version": "0.6.3",
+ "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz",
+ "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/object-assign": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
+ "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/object-inspect": {
+ "version": "1.13.1",
+ "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz",
+ "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==",
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/on-finished": {
+ "version": "2.4.1",
+ "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz",
+ "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==",
+ "dependencies": {
+ "ee-first": "1.1.1"
+ },
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/parseurl": {
+ "version": "1.3.3",
+ "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
+ "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/path-to-regexp": {
+ "version": "0.1.7",
+ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
+ "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ=="
+ },
+ "node_modules/picomatch": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
+ "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
+ "engines": {
+ "node": ">=8.6"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/jonschlinkert"
+ }
+ },
+ "node_modules/proxy-addr": {
+ "version": "2.0.7",
+ "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
+ "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==",
+ "dependencies": {
+ "forwarded": "0.2.0",
+ "ipaddr.js": "1.9.1"
+ },
+ "engines": {
+ "node": ">= 0.10"
+ }
+ },
+ "node_modules/qs": {
+ "version": "6.11.0",
+ "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz",
+ "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==",
+ "dependencies": {
+ "side-channel": "^1.0.4"
+ },
+ "engines": {
+ "node": ">=0.6"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/range-parser": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz",
+ "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/raw-body": {
+ "version": "2.5.2",
+ "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
+ "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
+ "dependencies": {
+ "bytes": "3.1.2",
+ "http-errors": "2.0.0",
+ "iconv-lite": "0.4.24",
+ "unpipe": "1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/requires-port": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
+ "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
+ },
+ "node_modules/safe-buffer": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
+ "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
+ },
+ "node_modules/safer-buffer": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
+ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
+ },
+ "node_modules/send": {
+ "version": "0.18.0",
+ "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz",
+ "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==",
+ "dependencies": {
+ "debug": "2.6.9",
+ "depd": "2.0.0",
+ "destroy": "1.2.0",
+ "encodeurl": "~1.0.2",
+ "escape-html": "~1.0.3",
+ "etag": "~1.8.1",
+ "fresh": "0.5.2",
+ "http-errors": "2.0.0",
+ "mime": "1.6.0",
+ "ms": "2.1.3",
+ "on-finished": "2.4.1",
+ "range-parser": "~1.2.1",
+ "statuses": "2.0.1"
+ },
+ "engines": {
+ "node": ">= 0.8.0"
+ }
+ },
+ "node_modules/send/node_modules/ms": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
+ },
+ "node_modules/serve-index": {
+ "version": "1.9.1",
+ "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz",
+ "integrity": "sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw==",
+ "dependencies": {
+ "accepts": "~1.3.4",
+ "batch": "0.6.1",
+ "debug": "2.6.9",
+ "escape-html": "~1.0.3",
+ "http-errors": "~1.6.2",
+ "mime-types": "~2.1.17",
+ "parseurl": "~1.3.2"
+ },
+ "engines": {
+ "node": ">= 0.8.0"
+ }
+ },
+ "node_modules/serve-index/node_modules/depd": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz",
+ "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/serve-index/node_modules/http-errors": {
+ "version": "1.6.3",
+ "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz",
+ "integrity": "sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A==",
+ "dependencies": {
+ "depd": "~1.1.2",
+ "inherits": "2.0.3",
+ "setprototypeof": "1.1.0",
+ "statuses": ">= 1.4.0 < 2"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/serve-index/node_modules/inherits": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
+ "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw=="
+ },
+ "node_modules/serve-index/node_modules/setprototypeof": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz",
+ "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ=="
+ },
+ "node_modules/serve-index/node_modules/statuses": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz",
+ "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/serve-static": {
+ "version": "1.15.0",
+ "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz",
+ "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==",
+ "dependencies": {
+ "encodeurl": "~1.0.2",
+ "escape-html": "~1.0.3",
+ "parseurl": "~1.3.3",
+ "send": "0.18.0"
+ },
+ "engines": {
+ "node": ">= 0.8.0"
+ }
+ },
+ "node_modules/set-function-length": {
+ "version": "1.2.2",
+ "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
+ "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==",
+ "dependencies": {
+ "define-data-property": "^1.1.4",
+ "es-errors": "^1.3.0",
+ "function-bind": "^1.1.2",
+ "get-intrinsic": "^1.2.4",
+ "gopd": "^1.0.1",
+ "has-property-descriptors": "^1.0.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/setprototypeof": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
+ "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="
+ },
+ "node_modules/side-channel": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz",
+ "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==",
+ "dependencies": {
+ "call-bind": "^1.0.7",
+ "es-errors": "^1.3.0",
+ "get-intrinsic": "^1.2.4",
+ "object-inspect": "^1.13.1"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/statuses": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
+ "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/to-regex-range": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
+ "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
+ "dependencies": {
+ "is-number": "^7.0.0"
+ },
+ "engines": {
+ "node": ">=8.0"
+ }
+ },
+ "node_modules/toidentifier": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
+ "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==",
+ "engines": {
+ "node": ">=0.6"
+ }
+ },
+ "node_modules/type-is": {
+ "version": "1.6.18",
+ "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
+ "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==",
+ "dependencies": {
+ "media-typer": "0.3.0",
+ "mime-types": "~2.1.24"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/undici-types": {
+ "version": "5.26.5",
+ "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
+ "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="
+ },
+ "node_modules/unpipe": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
+ "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/utils-merge": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
+ "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==",
+ "engines": {
+ "node": ">= 0.4.0"
+ }
+ },
+ "node_modules/vary": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
+ "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ }
+ }
+}
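This lock file pins the Interaction-Server's runtime stack: Express 4.19 plus `cors`, `basic-auth`, `serve-index`, `dir-to-json`, and `http-proxy-middleware` 2.0.6. As orientation only, a minimal sketch of how such a stack is commonly wired follows; the route prefix, ports, and upstream URL are assumptions rather than values taken from this diff, and `@types/express`/`@types/cors` (not pinned here) are assumed for type checking.

```ts
// server-sketch.ts — illustrative wiring of the locked dependencies, not the
// project's actual entry point. Ports, paths, and the upstream URL are assumed.
import express = require("express");
import cors = require("cors");
import { createProxyMiddleware } from "http-proxy-middleware";

const app = express();
app.use(cors());          // allow cross-origin calls from a browser front end
app.use(express.json());  // parse JSON request bodies

// Forward a hypothetical /generate route to an assumed inference backend.
app.use(
    "/generate",
    createProxyMiddleware({ target: "http://localhost:7860", changeOrigin: true }),
);

app.listen(3000, () => console.log("sketch server listening on 3000"));
```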
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/http-proxy/LICENSE b/ADOOR_ACE/Interaction-Server/node_modules/@types/http-proxy/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..9e841e7a26e4eb057b24511e7b92d42b257a80e5
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/http-proxy/LICENSE
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) Microsoft Corporation.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/http-proxy/README.md b/ADOOR_ACE/Interaction-Server/node_modules/@types/http-proxy/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..a8dbabf67a83fde03ef776681e7ed4c208913019
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/http-proxy/README.md
@@ -0,0 +1,15 @@
+# Installation
+> `npm install --save @types/http-proxy`
+
+# Summary
+This package contains type definitions for http-proxy (https://github.com/nodejitsu/node-http-proxy).
+
+# Details
+Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/http-proxy.
+
+### Additional Details
+ * Last updated: Tue, 07 Nov 2023 03:09:37 GMT
+ * Dependencies: [@types/node](https://npmjs.com/package/@types/node)
+
+# Credits
+These definitions were written by [Maxime LUCE](https://github.com/SomaticIT), [Florian Oellerich](https://github.com/Raigen), [Daniel Schmidt](https://github.com/DanielMSchmidt), [Jordan Abreu](https://github.com/jabreu610), and [Samuel Bodin](https://github.com/bodinsamuel).
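With these definitions installed alongside `http-proxy` itself, the exported `ServerOptions` interface can be used directly. A short sketch (the target URL below is an assumed value, not something this package defines):

```ts
// options-sketch.ts — illustrative only; the target URL is an assumption
import httpProxy = require("http-proxy");

const options: httpProxy.ServerOptions = {
    target: "http://localhost:8080", // assumed upstream
    xfwd: true,                      // add X-Forwarded-* headers
};

const proxy = httpProxy.createProxyServer(options);
proxy.on("error", (err) => console.error("proxy error:", err.message));
```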
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/http-proxy/index.d.ts b/ADOOR_ACE/Interaction-Server/node_modules/@types/http-proxy/index.d.ts
new file mode 100644
index 0000000000000000000000000000000000000000..6f3f2a725c20d56410dae49079c1c66a6493785a
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/http-proxy/index.d.ts
@@ -0,0 +1,250 @@
+/// <reference types="node" />
+
+import * as events from "events";
+import * as http from "http";
+import * as https from "https";
+import * as net from "net";
+import * as stream from "stream";
+import * as url from "url";
+
+interface ProxyTargetDetailed {
+ host: string;
+ port: number;
+ protocol?: string | undefined;
+ hostname?: string | undefined;
+ socketPath?: string | undefined;
+ key?: string | undefined;
+ passphrase?: string | undefined;
+ pfx?: Buffer | string | undefined;
+ cert?: string | undefined;
+ ca?: string | undefined;
+ ciphers?: string | undefined;
+ secureProtocol?: string | undefined;
+}
+
+declare class Server<TIncomingMessage = http.IncomingMessage, TServerResponse = http.ServerResponse>
+ extends events.EventEmitter
+{
+ /**
+ * Creates the proxy server with specified options.
+ * @param options - Config object passed to the proxy
+ */
+ constructor(options?: Server.ServerOptions);
+
+ /**
+ * Used for proxying regular HTTP(S) requests
+ * @param req - Client request.
+ * @param res - Client response.
+ * @param options - Additional options.
+ */
+ web(
+ req: http.IncomingMessage,
+ res: http.ServerResponse,
+ options?: Server.ServerOptions,
+ callback?: Server.ErrorCallback,
+ ): void;
+
+ /**
+ * Used for proxying regular HTTP(S) requests
+ * @param req - Client request.
+ * @param socket - Client socket.
+ * @param head - Client head.
+ * @param options - Additionnal options.
+ */
+ ws(
+ req: http.IncomingMessage,
+ socket: any,
+ head: any,
+ options?: Server.ServerOptions,
+ callback?: Server.ErrorCallback,
+ ): void;
+
+ /**
+ * A function that wraps the object in a webserver, for your convenience
+ * @param port - Port to listen on
+ * @param hostname - The hostname to listen on
+ */
+ listen(port: number, hostname?: string): Server;
+
+ /**
+ * A function that closes the inner webserver and stops listening on given port
+ */
+ close(callback?: () => void): void;
+
+ /**
+ * Creates the proxy server with specified options.
+ * @param options Config object passed to the proxy
+ * @returns Proxy object with handlers for `ws` and `web` requests
+ */
+ // tslint:disable:no-unnecessary-generics
+ static createProxyServer<TIncomingMessage = http.IncomingMessage, TServerResponse = http.ServerResponse>(
+ options?: Server.ServerOptions,
+ ): Server<TIncomingMessage, TServerResponse>;
+
+ /**
+ * Creates the proxy server with specified options.
+ * @param options Config object passed to the proxy
+ * @returns Proxy object with handlers for `ws` and `web` requests
+ */
+ // tslint:disable:no-unnecessary-generics
+ static createServer<TIncomingMessage = http.IncomingMessage, TServerResponse = http.ServerResponse>(
+ options?: Server.ServerOptions,
+ ): Server<TIncomingMessage, TServerResponse>;
+
+ /**
+ * Creates the proxy server with specified options.
+ * @param options Config object passed to the proxy
+ * @returns Proxy object with handlers for `ws` and `web` requests
+ */
+ // tslint:disable:no-unnecessary-generics
+ static createProxy<TIncomingMessage = http.IncomingMessage, TServerResponse = http.ServerResponse>(
+ options?: Server.ServerOptions,
+ ): Server<TIncomingMessage, TServerResponse>;
+
+ addListener(event: string, listener: () => void): this;
+ on(event: string, listener: () => void): this;
+ on(event: "error", listener: Server.ErrorCallback): this;
+ on(event: "start", listener: Server.StartCallback): this;
+ on(
+ event: "proxyReq",
+ listener: Server.ProxyReqCallback,
+ ): this;
+ on(event: "proxyRes", listener: Server.ProxyResCallback): this;
+ on(event: "proxyReqWs", listener: Server.ProxyReqWsCallback): this;
+ on(event: "econnreset", listener: Server.EconnresetCallback): this;
+ on(event: "end", listener: Server.EndCallback): this;
+ on(event: "open", listener: Server.OpenCallback): this;
+ on(event: "close", listener: Server.CloseCallback): this;
+
+ once(event: string, listener: () => void): this;
+ once(event: "error", listener: Server.ErrorCallback): this;
+ once(event: "start", listener: Server.StartCallback): this;
+ once(
+ event: "proxyReq",
+ listener: Server.ProxyReqCallback,
+ ): this;
+ once(event: "proxyRes", listener: Server.ProxyResCallback): this;
+ once(event: "proxyReqWs", listener: Server.ProxyReqWsCallback): this;
+ once(event: "econnreset", listener: Server.EconnresetCallback): this;
+ once(event: "end", listener: Server.EndCallback): this;
+ once(event: "open", listener: Server.OpenCallback): this;
+ once(event: "close", listener: Server.CloseCallback): this;
+ removeListener(event: string, listener: () => void): this;
+ removeAllListeners(event?: string): this;
+ getMaxListeners(): number;
+ setMaxListeners(n: number): this;
+ listeners(event: string): Array<() => void>;
+ emit(event: string, ...args: any[]): boolean;
+ listenerCount(type: string): number;
+}
+
+declare namespace Server {
+ type ProxyTarget = ProxyTargetUrl | ProxyTargetDetailed;
+ type ProxyTargetUrl = string | Partial<url.Url>;
+
+ interface ServerOptions {
+ /** URL string to be parsed with the url module. */
+ target?: ProxyTarget | undefined;
+ /** URL string to be parsed with the url module. */
+ forward?: ProxyTargetUrl | undefined;
+ /** Object to be passed to http(s).request. */
+ agent?: any;
+ /** Object to be passed to https.createServer(). */
+ ssl?: any;
+ /** If you want to proxy websockets. */
+ ws?: boolean | undefined;
+ /** Adds x- forward headers. */
+ xfwd?: boolean | undefined;
+ /** Verify SSL certificate. */
+ secure?: boolean | undefined;
+ /** Explicitly specify if we are proxying to another proxy. */
+ toProxy?: boolean | undefined;
+ /** Specify whether you want to prepend the target's path to the proxy path. */
+ prependPath?: boolean | undefined;
+ /** Specify whether you want to ignore the proxy path of the incoming request. */
+ ignorePath?: boolean | undefined;
+ /** Local interface string to bind for outgoing connections. */
+ localAddress?: string | undefined;
+ /** Changes the origin of the host header to the target URL. */
+ changeOrigin?: boolean | undefined;
+ /** specify whether you want to keep letter case of response header key */
+ preserveHeaderKeyCase?: boolean | undefined;
+ /** Basic authentication i.e. 'user:password' to compute an Authorization header. */
+ auth?: string | undefined;
+ /** Rewrites the location hostname on (301 / 302 / 307 / 308) redirects, Default: null. */
+ hostRewrite?: string | undefined;
+ /** Rewrites the location host/ port on (301 / 302 / 307 / 308) redirects based on requested host/ port.Default: false. */
+ autoRewrite?: boolean | undefined;
+ /** Rewrites the location protocol on (301 / 302 / 307 / 308) redirects to 'http' or 'https'.Default: null. */
+ protocolRewrite?: string | undefined;
+ /** rewrites domain of set-cookie headers. */
+ cookieDomainRewrite?: false | string | { [oldDomain: string]: string } | undefined;
+ /** rewrites path of set-cookie headers. Default: false */
+ cookiePathRewrite?: false | string | { [oldPath: string]: string } | undefined;
+ /** object with extra headers to be added to target requests. */
+ headers?: { [header: string]: string } | undefined;
+ /** Timeout (in milliseconds) when proxy receives no response from target. Default: 120000 (2 minutes) */
+ proxyTimeout?: number | undefined;
+ /** Timeout (in milliseconds) for incoming requests */
+ timeout?: number | undefined;
+ /** Specify whether you want to follow redirects. Default: false */
+ followRedirects?: boolean | undefined;
+ /** If set to true, none of the webOutgoing passes are called and it's your responsibility to appropriately return the response by listening and acting on the proxyRes event */
+ selfHandleResponse?: boolean | undefined;
+ /** Buffer */
+ buffer?: stream.Stream | undefined;
+ }
+
+ type StartCallback<TIncomingMessage = http.IncomingMessage, TServerResponse = http.ServerResponse> = (
+ req: TIncomingMessage,
+ res: TServerResponse,
+ target: ProxyTargetUrl,
+ ) => void;
+ type ProxyReqCallback<
+ TClientRequest = http.ClientRequest,
+ TIncomingMessage = http.IncomingMessage,
+ TServerResponse = http.ServerResponse,
+ > = (proxyReq: TClientRequest, req: TIncomingMessage, res: TServerResponse, options: ServerOptions) => void;
+ type ProxyResCallback<TIncomingMessage = http.IncomingMessage, TServerResponse = http.ServerResponse> = (
+ proxyRes: TIncomingMessage,
+ req: TIncomingMessage,
+ res: TServerResponse,
+ ) => void;
+ type ProxyReqWsCallback<TClientRequest = http.ClientRequest, TIncomingMessage = http.IncomingMessage> = (
+ proxyReq: TClientRequest,
+ req: TIncomingMessage,
+ socket: net.Socket,
+ options: ServerOptions,
+ head: any,
+ ) => void;
+ type EconnresetCallback<
+ TError = Error,
+ TIncomingMessage = http.IncomingMessage,
+ TServerResponse = http.ServerResponse,
+ > = (
+ err: TError,
+ req: TIncomingMessage,
+ res: TServerResponse,
+ target: ProxyTargetUrl,
+ ) => void;
+ type EndCallback<TIncomingMessage = http.IncomingMessage, TServerResponse = http.ServerResponse> = (
+ req: TIncomingMessage,
+ res: TServerResponse,
+ proxyRes: TIncomingMessage,
+ ) => void;
+ type OpenCallback = (proxySocket: net.Socket) => void;
+ type CloseCallback<TIncomingMessage = http.IncomingMessage> = (
+ proxyRes: TIncomingMessage,
+ proxySocket: net.Socket,
+ proxyHead: any,
+ ) => void;
+ type ErrorCallback<TError = Error, TIncomingMessage = http.IncomingMessage, TServerResponse = http.ServerResponse> =
+ (
+ err: TError,
+ req: TIncomingMessage,
+ res: TServerResponse | net.Socket,
+ target?: ProxyTargetUrl,
+ ) => void;
+}
+
+export = Server;
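The `ServerOptions` fields and callback aliases declared above are easiest to read next to a concrete call site. A sketch of `web()` with an error callback follows; the upstream URL and listen port are assumptions for illustration:

```ts
// web-proxy-sketch.ts — illustrative; upstream URL and port are assumed values
import * as http from "http";
import httpProxy = require("http-proxy");

const proxy = httpProxy.createProxyServer({
    target: "http://localhost:8080", // ServerOptions.target (assumed)
    ws: true,                        // also proxy WebSocket upgrades
    proxyTimeout: 30_000,            // ServerOptions.proxyTimeout, in milliseconds
});

http.createServer((req, res) => {
    // The fourth argument matches Server.ErrorCallback from the typings above.
    proxy.web(req, res, {}, (err) => {
        res.writeHead(502, { "Content-Type": "text/plain" });
        res.end(`Bad gateway: ${err.message}`);
    });
}).listen(3000);
```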
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/http-proxy/package.json b/ADOOR_ACE/Interaction-Server/node_modules/@types/http-proxy/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..997dcbb081a055900fe00dbad2ee0a8faa97ab84
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/http-proxy/package.json
@@ -0,0 +1,47 @@
+{
+ "name": "@types/http-proxy",
+ "version": "1.17.14",
+ "description": "TypeScript definitions for http-proxy",
+ "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/http-proxy",
+ "license": "MIT",
+ "contributors": [
+ {
+ "name": "Maxime LUCE",
+ "githubUsername": "SomaticIT",
+ "url": "https://github.com/SomaticIT"
+ },
+ {
+ "name": "Florian Oellerich",
+ "githubUsername": "Raigen",
+ "url": "https://github.com/Raigen"
+ },
+ {
+ "name": "Daniel Schmidt",
+ "githubUsername": "DanielMSchmidt",
+ "url": "https://github.com/DanielMSchmidt"
+ },
+ {
+ "name": "Jordan Abreu",
+ "githubUsername": "jabreu610",
+ "url": "https://github.com/jabreu610"
+ },
+ {
+ "name": "Samuel Bodin",
+ "githubUsername": "bodinsamuel",
+ "url": "https://github.com/bodinsamuel"
+ }
+ ],
+ "main": "",
+ "types": "index.d.ts",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git",
+ "directory": "types/http-proxy"
+ },
+ "scripts": {},
+ "dependencies": {
+ "@types/node": "*"
+ },
+ "typesPublisherContentHash": "3e198b1ca48b5a5de433fc322508d2fec21a03c1b52c9470ee47b725146db123",
+ "typeScriptVersion": "4.5"
+}
\ No newline at end of file
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/node/LICENSE b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..9e841e7a26e4eb057b24511e7b92d42b257a80e5
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/LICENSE
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) Microsoft Corporation.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/node/README.md b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..a9680f7065f9f6717fa33f19b7b40759abdff669
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/README.md
@@ -0,0 +1,15 @@
+# Installation
+> `npm install --save @types/node`
+
+# Summary
+This package contains type definitions for node (https://nodejs.org/).
+
+# Details
+Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node.
+
+### Additional Details
+ * Last updated: Tue, 09 Apr 2024 21:07:24 GMT
+ * Dependencies: [undici-types](https://npmjs.com/package/undici-types)
+
+# Credits
+These definitions were written by [Microsoft TypeScript](https://github.com/Microsoft), [Alberto Schiabel](https://github.com/jkomyno), [Alvis HT Tang](https://github.com/alvis), [Andrew Makarov](https://github.com/r3nya), [Benjamin Toueg](https://github.com/btoueg), [Chigozirim C.](https://github.com/smac89), [David Junger](https://github.com/touffy), [Deividas Bakanas](https://github.com/DeividasBakanas), [Eugene Y. Q. Shen](https://github.com/eyqs), [Hannes Magnusson](https://github.com/Hannes-Magnusson-CK), [Huw](https://github.com/hoo29), [Kelvin Jin](https://github.com/kjin), [Klaus Meinhardt](https://github.com/ajafff), [Lishude](https://github.com/islishude), [Mariusz Wiktorczyk](https://github.com/mwiktorczyk), [Mohsen Azimi](https://github.com/mohsen1), [Nikita Galkin](https://github.com/galkin), [Parambir Singh](https://github.com/parambirs), [Sebastian Silbermann](https://github.com/eps1lon), [Thomas den Hollander](https://github.com/ThomasdenH), [Wilco Bakker](https://github.com/WilcoBakker), [wwwy3y3](https://github.com/wwwy3y3), [Samuel Ainsworth](https://github.com/samuela), [Kyle Uehlein](https://github.com/kuehlein), [Thanik Bhongbhibhat](https://github.com/bhongy), [Marcin Kopacz](https://github.com/chyzwar), [Trivikram Kamat](https://github.com/trivikr), [Junxiao Shi](https://github.com/yoursunny), [Ilia Baryshnikov](https://github.com/qwelias), [ExE Boss](https://github.com/ExE-Boss), [Piotr Błażejewicz](https://github.com/peterblazejewicz), [Anna Henningsen](https://github.com/addaleax), [Victor Perin](https://github.com/victorperin), [Yongsheng Zhang](https://github.com/ZYSzys), [NodeJS Contributors](https://github.com/NodeJS), [Linus Unnebäck](https://github.com/LinusU), [wafuwafu13](https://github.com/wafuwafu13), [Matteo Collina](https://github.com/mcollina), and [Dmitry Semigradsky](https://github.com/Semigradsky).
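Once installed, these definitions make Node's built-in modules type-check with no further setup; a trivial sketch:

```ts
// node-types-sketch.ts — nothing project-specific is assumed here
import * as http from "http";

const server: http.Server = http.createServer((req, res) => {
    res.writeHead(200, { "Content-Type": "text/plain" });
    res.end("ok");
});

server.listen(0); // 0 lets the OS pick an ephemeral port
```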
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/node/assert.d.ts b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/assert.d.ts
new file mode 100644
index 0000000000000000000000000000000000000000..450b3216c0f0ebc0ee1ae36edaf59ae71e739fbe
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/assert.d.ts
@@ -0,0 +1,1043 @@
+/**
+ * The `node:assert` module provides a set of assertion functions for verifying
+ * invariants.
+ * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/assert.js)
+ */
+declare module "assert" {
+ /**
+ * An alias of {@link ok}.
+ * @since v0.5.9
+ * @param value The input that is checked for being truthy.
+ */
+ function assert(value: unknown, message?: string | Error): asserts value;
+ namespace assert {
+ /**
+ * Indicates the failure of an assertion. All errors thrown by the `node:assert`module will be instances of the `AssertionError` class.
+ */
+ class AssertionError extends Error {
+ /**
+ * Set to the `actual` argument for methods such as {@link assert.strictEqual()}.
+ */
+ actual: unknown;
+ /**
+ * Set to the `expected` argument for methods such as {@link assert.strictEqual()}.
+ */
+ expected: unknown;
+ /**
+ * Set to the passed in operator value.
+ */
+ operator: string;
+ /**
+ * Indicates if the message was auto-generated (`true`) or not.
+ */
+ generatedMessage: boolean;
+ /**
+ * Value is always `ERR_ASSERTION` to show that the error is an assertion error.
+ */
+ code: "ERR_ASSERTION";
+ constructor(options?: {
+ /** If provided, the error message is set to this value. */
+ message?: string | undefined;
+ /** The `actual` property on the error instance. */
+ actual?: unknown | undefined;
+ /** The `expected` property on the error instance. */
+ expected?: unknown | undefined;
+ /** The `operator` property on the error instance. */
+ operator?: string | undefined;
+ /** If provided, the generated stack trace omits frames before this function. */
+ // eslint-disable-next-line @typescript-eslint/ban-types
+ stackStartFn?: Function | undefined;
+ });
+ }
+ /**
+ * This feature is deprecated and will be removed in a future version.
+ * Please consider using alternatives such as the `mock` helper function.
+ * @since v14.2.0, v12.19.0
+ * @deprecated Deprecated
+ */
+ class CallTracker {
+ /**
+ * The wrapper function is expected to be called exactly `exact` times. If the
+ * function has not been called exactly `exact` times when `tracker.verify()` is called, then `tracker.verify()` will throw an
+ * error.
+ *
+ * ```js
+ * import assert from 'node:assert';
+ *
+ * // Creates call tracker.
+ * const tracker = new assert.CallTracker();
+ *
+ * function func() {}
+ *
+ * // Returns a function that wraps func() that must be called exact times
+ * // before tracker.verify().
+ * const callsfunc = tracker.calls(func);
+ * ```
+ * @since v14.2.0, v12.19.0
+ * @param [fn='A no-op function']
+ * @param [exact=1]
+ * @return that wraps `fn`.
+ */
+ calls(exact?: number): () => void;
+ calls<Func extends (...args: any[]) => any>(fn?: Func, exact?: number): Func;
+ /**
+ * Example:
+ *
+ * ```js
+ * import assert from 'node:assert';
+ *
+ * const tracker = new assert.CallTracker();
+ *
+ * function func() {}
+ * const callsfunc = tracker.calls(func);
+ * callsfunc(1, 2, 3);
+ *
+ * assert.deepStrictEqual(tracker.getCalls(callsfunc),
+ * [{ thisArg: undefined, arguments: [1, 2, 3] }]);
+ * ```
+ * @since v18.8.0, v16.18.0
+ * @param fn
+ * @return An Array with all the calls to a tracked function.
+ */
+ getCalls(fn: Function): CallTrackerCall[];
+ /**
+ * The arrays contains information about the expected and actual number of calls of
+ * the functions that have not been called the expected number of times.
+ *
+ * ```js
+ * import assert from 'node:assert';
+ *
+ * // Creates call tracker.
+ * const tracker = new assert.CallTracker();
+ *
+ * function func() {}
+ *
+ * // Returns a function that wraps func() that must be called exact times
+ * // before tracker.verify().
+ * const callsfunc = tracker.calls(func, 2);
+ *
+ * // Returns an array containing information on callsfunc()
+ * console.log(tracker.report());
+ * // [
+ * // {
+ * // message: 'Expected the func function to be executed 2 time(s) but was
+ * // executed 0 time(s).',
+ * // actual: 0,
+ * // expected: 2,
+ * // operator: 'func',
+ * // stack: stack trace
+ * // }
+ * // ]
+ * ```
+ * @since v14.2.0, v12.19.0
+ * @return An Array of objects containing information about the wrapper functions returned by `calls`.
+ */
+ report(): CallTrackerReportInformation[];
+ /**
+ * Reset calls of the call tracker.
+ * If a tracked function is passed as an argument, the calls will be reset for it.
+ * If no arguments are passed, all tracked functions will be reset.
+ *
+ * ```js
+ * import assert from 'node:assert';
+ *
+ * const tracker = new assert.CallTracker();
+ *
+ * function func() {}
+ * const callsfunc = tracker.calls(func);
+ *
+ * callsfunc();
+ * // Tracker was called once
+ * assert.strictEqual(tracker.getCalls(callsfunc).length, 1);
+ *
+ * tracker.reset(callsfunc);
+ * assert.strictEqual(tracker.getCalls(callsfunc).length, 0);
+ * ```
+ * @since v18.8.0, v16.18.0
+ * @param fn a tracked function to reset.
+ */
+ reset(fn?: Function): void;
+ /**
+ * Iterates through the list of functions passed to `tracker.calls()` and will throw an error for functions that
+ * have not been called the expected number of times.
+ *
+ * ```js
+ * import assert from 'node:assert';
+ *
+ * // Creates call tracker.
+ * const tracker = new assert.CallTracker();
+ *
+ * function func() {}
+ *
+ * // Returns a function that wraps func() that must be called exact times
+ * // before tracker.verify().
+ * const callsfunc = tracker.calls(func, 2);
+ *
+ * callsfunc();
+ *
+ * // Will throw an error since callsfunc() was only called once.
+ * tracker.verify();
+ * ```
+ * @since v14.2.0, v12.19.0
+ */
+ verify(): void;
+ }
+ interface CallTrackerCall {
+ thisArg: object;
+ arguments: unknown[];
+ }
+ interface CallTrackerReportInformation {
+ message: string;
+ /** The actual number of times the function was called. */
+ actual: number;
+ /** The number of times the function was expected to be called. */
+ expected: number;
+ /** The name of the function that is wrapped. */
+ operator: string;
+ /** A stack trace of the function. */
+ stack: object;
+ }
+ type AssertPredicate = RegExp | (new() => object) | ((thrown: unknown) => boolean) | object | Error;
+ /**
+ * Throws an `AssertionError` with the provided error message or a default
+ * error message. If the `message` parameter is an instance of an `Error` then
+ * it will be thrown instead of the `AssertionError`.
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.fail();
+ * // AssertionError [ERR_ASSERTION]: Failed
+ *
+ * assert.fail('boom');
+ * // AssertionError [ERR_ASSERTION]: boom
+ *
+ * assert.fail(new TypeError('need array'));
+ * // TypeError: need array
+ * ```
+ *
+ * Using `assert.fail()` with more than two arguments is possible but deprecated.
+ * See below for further details.
+ * @since v0.1.21
+ * @param [message='Failed']
+ */
+ function fail(message?: string | Error): never;
+ /** @deprecated since v10.0.0 - use fail([message]) or other assert functions instead. */
+ function fail(
+ actual: unknown,
+ expected: unknown,
+ message?: string | Error,
+ operator?: string,
+ // eslint-disable-next-line @typescript-eslint/ban-types
+ stackStartFn?: Function,
+ ): never;
+ /**
+ * Tests if `value` is truthy. It is equivalent to`assert.equal(!!value, true, message)`.
+ *
+ * If `value` is not truthy, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is `undefined`, a default
+ * error message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`.
+ * If no arguments are passed in at all `message` will be set to the string:`` 'No value argument passed to `assert.ok()`' ``.
+ *
+ * Be aware that in the `repl` the error message will be different to the one
+ * thrown in a file! See below for further details.
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.ok(true);
+ * // OK
+ * assert.ok(1);
+ * // OK
+ *
+ * assert.ok();
+ * // AssertionError: No value argument passed to `assert.ok()`
+ *
+ * assert.ok(false, 'it\'s false');
+ * // AssertionError: it's false
+ *
+ * // In the repl:
+ * assert.ok(typeof 123 === 'string');
+ * // AssertionError: false == true
+ *
+ * // In a file (e.g. test.js):
+ * assert.ok(typeof 123 === 'string');
+ * // AssertionError: The expression evaluated to a falsy value:
+ * //
+ * // assert.ok(typeof 123 === 'string')
+ *
+ * assert.ok(false);
+ * // AssertionError: The expression evaluated to a falsy value:
+ * //
+ * // assert.ok(false)
+ *
+ * assert.ok(0);
+ * // AssertionError: The expression evaluated to a falsy value:
+ * //
+ * // assert.ok(0)
+ * ```
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * // Using `assert()` works the same:
+ * assert(0);
+ * // AssertionError: The expression evaluated to a falsy value:
+ * //
+ * // assert(0)
+ * ```
+ * @since v0.1.21
+ */
+ function ok(value: unknown, message?: string | Error): asserts value;
+ /**
+ * **Strict assertion mode**
+ *
+ * An alias of {@link strictEqual}.
+ *
+ * **Legacy assertion mode**
+ *
+ * > Stability: 3 - Legacy: Use {@link strictEqual} instead.
+ *
+ * Tests shallow, coercive equality between the `actual` and `expected` parameters
+ * using the [`==` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Equality). `NaN` is specially handled
+ * and treated as being identical if both sides are `NaN`.
+ *
+ * ```js
+ * import assert from 'node:assert';
+ *
+ * assert.equal(1, 1);
+ * // OK, 1 == 1
+ * assert.equal(1, '1');
+ * // OK, 1 == '1'
+ * assert.equal(NaN, NaN);
+ * // OK
+ *
+ * assert.equal(1, 2);
+ * // AssertionError: 1 == 2
+ * assert.equal({ a: { b: 1 } }, { a: { b: 1 } });
+ * // AssertionError: { a: { b: 1 } } == { a: { b: 1 } }
+ * ```
+ *
+ * If the values are not equal, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is undefined, a default
+ * error message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`.
+ * @since v0.1.21
+ */
+ function equal(actual: unknown, expected: unknown, message?: string | Error): void;
+ /**
+ * **Strict assertion mode**
+ *
+ * An alias of {@link notStrictEqual}.
+ *
+ * **Legacy assertion mode**
+ *
+ * > Stability: 3 - Legacy: Use {@link notStrictEqual} instead.
+ *
+ * Tests shallow, coercive inequality with the [`!=` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Inequality). `NaN` is
+ * specially handled and treated as being identical if both sides are `NaN`.
+ *
+ * ```js
+ * import assert from 'node:assert';
+ *
+ * assert.notEqual(1, 2);
+ * // OK
+ *
+ * assert.notEqual(1, 1);
+ * // AssertionError: 1 != 1
+ *
+ * assert.notEqual(1, '1');
+ * // AssertionError: 1 != '1'
+ * ```
+ *
+ * If the values are equal, an `AssertionError` is thrown with a `message` property set equal to the value of the `message` parameter. If the `message` parameter is undefined, a default error
+ * message is assigned. If the `message` parameter is an instance of an `Error` then it will be thrown instead of the `AssertionError`.
+ * @since v0.1.21
+ */
+ function notEqual(actual: unknown, expected: unknown, message?: string | Error): void;
+ /**
+ * **Strict assertion mode**
+ *
+ * An alias of {@link deepStrictEqual}.
+ *
+ * **Legacy assertion mode**
+ *
+ * > Stability: 3 - Legacy: Use {@link deepStrictEqual} instead.
+ *
+ * Tests for deep equality between the `actual` and `expected` parameters. Consider
+ * using {@link deepStrictEqual} instead. {@link deepEqual} can have
+ * surprising results.
+ *
+ * _Deep equality_ means that the enumerable "own" properties of child objects
+ * are also recursively evaluated; primitive values are compared with the [`==` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Equality).
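+ *
+ * A minimal sketch of the coercive behavior (outputs shown are indicative):
+ *
+ * ```js
+ * import assert from 'node:assert';
+ *
+ * // Leaf values are compared with ==, so a number and its string form match:
+ * assert.deepEqual({ a: '1' }, { a: 1 });
+ * // OK
+ *
+ * assert.deepEqual({ a: 1 }, { a: 2 });
+ * // AssertionError
+ * ```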
+ * @since v0.1.21
+ */
+ function deepEqual(actual: unknown, expected: unknown, message?: string | Error): void;
+ /**
+ * **Strict assertion mode**
+ *
+ * An alias of {@link notDeepStrictEqual}.
+ *
+ * **Legacy assertion mode**
+ *
+ * > Stability: 3 - Legacy: Use {@link notDeepStrictEqual} instead.
+ *
+ * Tests for any deep inequality. Opposite of {@link deepEqual}.
+ *
+ * ```js
+ * import assert from 'node:assert';
+ *
+ * const obj1 = {
+ * a: {
+ * b: 1,
+ * },
+ * };
+ * const obj2 = {
+ * a: {
+ * b: 2,
+ * },
+ * };
+ * const obj3 = {
+ * a: {
+ * b: 1,
+ * },
+ * };
+ * const obj4 = { __proto__: obj1 };
+ *
+ * assert.notDeepEqual(obj1, obj1);
+ * // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } }
+ *
+ * assert.notDeepEqual(obj1, obj2);
+ * // OK
+ *
+ * assert.notDeepEqual(obj1, obj3);
+ * // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } }
+ *
+ * assert.notDeepEqual(obj1, obj4);
+ * // OK
+ * ```
+ *
+ * If the values are deeply equal, an `AssertionError` is thrown with a `message` property set equal to the value of the `message` parameter. If the `message` parameter is undefined, a default
+ * error message is assigned. If the `message` parameter is an instance of an `Error` then it will be thrown
+ * instead of the `AssertionError`.
+ * @since v0.1.21
+ */
+ function notDeepEqual(actual: unknown, expected: unknown, message?: string | Error): void;
+ /**
+ * Tests strict equality between the `actual` and `expected` parameters as
+ * determined by [`Object.is()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is).
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.strictEqual(1, 2);
+ * // AssertionError [ERR_ASSERTION]: Expected inputs to be strictly equal:
+ * //
+ * // 1 !== 2
+ *
+ * assert.strictEqual(1, 1);
+ * // OK
+ *
+ * assert.strictEqual('Hello foobar', 'Hello World!');
+ * // AssertionError [ERR_ASSERTION]: Expected inputs to be strictly equal:
+ * // + actual - expected
+ * //
+ * // + 'Hello foobar'
+ * // - 'Hello World!'
+ * // ^
+ *
+ * const apples = 1;
+ * const oranges = 2;
+ * assert.strictEqual(apples, oranges, `apples ${apples} !== oranges ${oranges}`);
+ * // AssertionError [ERR_ASSERTION]: apples 1 !== oranges 2
+ *
+ * assert.strictEqual(1, '1', new TypeError('Inputs are not identical'));
+ * // TypeError: Inputs are not identical
+ * ```
+ *
+ * If the values are not strictly equal, an `AssertionError` is thrown with a `message` property set equal to the value of the `message` parameter. If the `message` parameter is undefined, a
+ * default error message is assigned. If the `message` parameter is an instance of an `Error` then it will be thrown
+ * instead of the `AssertionError`.
+ * @since v0.1.21
+ */
+ function strictEqual<T>(actual: unknown, expected: T, message?: string | Error): asserts actual is T;
+ /**
+ * Tests strict inequality between the `actual` and `expected` parameters as
+ * determined by [`Object.is()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is).
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.notStrictEqual(1, 2);
+ * // OK
+ *
+ * assert.notStrictEqual(1, 1);
+ * // AssertionError [ERR_ASSERTION]: Expected "actual" to be strictly unequal to:
+ * //
+ * // 1
+ *
+ * assert.notStrictEqual(1, '1');
+ * // OK
+ * ```
+ *
+ * If the values are strictly equal, an `AssertionError` is thrown with a `message` property set equal to the value of the `message` parameter. If the `message` parameter is undefined, a
+ * default error message is assigned. If the `message` parameter is an instance of an `Error` then it will be thrown
+ * instead of the `AssertionError`.
+ * @since v0.1.21
+ */
+ function notStrictEqual(actual: unknown, expected: unknown, message?: string | Error): void;
+ /**
+ * Tests for deep equality between the `actual` and `expected` parameters.
+ * "Deep" equality means that the enumerable "own" properties of child objects
+ * are recursively evaluated also by the following rules.
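+ *
+ * A minimal sketch of the strict behavior (outputs shown are indicative):
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.deepStrictEqual({ a: 1 }, { a: 1 });
+ * // OK
+ *
+ * // Fails because 1 and '1' are not identical under Object.is():
+ * assert.deepStrictEqual({ a: 1 }, { a: '1' });
+ * // AssertionError
+ * ```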
+ * @since v1.2.0
+ */
+ function deepStrictEqual<T>(actual: unknown, expected: T, message?: string | Error): asserts actual is T;
+ /**
+ * Tests for deep strict inequality. Opposite of {@link deepStrictEqual}.
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.notDeepStrictEqual({ a: 1 }, { a: '1' });
+ * // OK
+ * ```
+ *
+ * If the values are deeply and strictly equal, an `AssertionError` is thrown
+ * with a `message` property set equal to the value of the `message` parameter. If
+ * the `message` parameter is undefined, a default error message is assigned. If
+ * the `message` parameter is an instance of an `Error` then it will be thrown
+ * instead of the `AssertionError`.
+ * @since v1.2.0
+ */
+ function notDeepStrictEqual(actual: unknown, expected: unknown, message?: string | Error): void;
+ /**
+ * Expects the function `fn` to throw an error.
+ *
+ * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes),
+ * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), a validation function,
+ * a validation object where each property will be tested for strict deep equality,
+ * or an instance of error where each property will be tested for strict deep
+ * equality including the non-enumerable `message` and `name` properties. When
+ * using an object, it is also possible to use a regular expression, when
+ * validating against a string property. See below for examples.
+ *
+ * If specified, `message` will be appended to the message provided by the `AssertionError` if the `fn` call fails to throw or in case the error validation
+ * fails.
+ *
+ * Custom validation object/error instance:
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * const err = new TypeError('Wrong value');
+ * err.code = 404;
+ * err.foo = 'bar';
+ * err.info = {
+ * nested: true,
+ * baz: 'text',
+ * };
+ * err.reg = /abc/i;
+ *
+ * assert.throws(
+ * () => {
+ * throw err;
+ * },
+ * {
+ * name: 'TypeError',
+ * message: 'Wrong value',
+ * info: {
+ * nested: true,
+ * baz: 'text',
+ * },
+ * // Only properties on the validation object will be tested for.
+ * // Using nested objects requires all properties to be present. Otherwise
+ * // the validation is going to fail.
+ * },
+ * );
+ *
+ * // Using regular expressions to validate error properties:
+ * assert.throws(
+ * () => {
+ * throw err;
+ * },
+ * {
+ * // The `name` and `message` properties are strings and using regular
+ * // expressions on those will match against the string. If they fail, an
+ * // error is thrown.
+ * name: /^TypeError$/,
+ * message: /Wrong/,
+ * foo: 'bar',
+ * info: {
+ * nested: true,
+ * // It is not possible to use regular expressions for nested properties!
+ * baz: 'text',
+ * },
+ * // The `reg` property contains a regular expression and only if the
+ * // validation object contains an identical regular expression, it is going
+ * // to pass.
+ * reg: /abc/i,
+ * },
+ * );
+ *
+ * // Fails due to the different `message` and `name` properties:
+ * assert.throws(
+ * () => {
+ * const otherErr = new Error('Not found');
+ * // Copy all enumerable properties from `err` to `otherErr`.
+ * for (const [key, value] of Object.entries(err)) {
+ * otherErr[key] = value;
+ * }
+ * throw otherErr;
+ * },
+ * // The error's `message` and `name` properties will also be checked when using
+ * // an error as validation object.
+ * err,
+ * );
+ * ```
+ *
+ * Validate instanceof using constructor:
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.throws(
+ * () => {
+ * throw new Error('Wrong value');
+ * },
+ * Error,
+ * );
+ * ```
+ *
+ * Validate error message using [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions):
+ *
+ * Using a regular expression runs `.toString` on the error object, and will
+ * therefore also include the error name.
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.throws(
+ * () => {
+ * throw new Error('Wrong value');
+ * },
+ * /^Error: Wrong value$/,
+ * );
+ * ```
+ *
+ * Custom error validation:
+ *
+ * The function must return `true` to indicate all internal validations passed.
+ * It will otherwise fail with an `AssertionError`.
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.throws(
+ * () => {
+ * throw new Error('Wrong value');
+ * },
+ * (err) => {
+ * assert(err instanceof Error);
+ * assert(/value/.test(err));
+ * // Avoid returning anything from validation functions besides `true`.
+ * // Otherwise, it's not clear what part of the validation failed. Instead,
+ * // throw an error about the specific validation that failed (as done in this
+ * // example) and add as much helpful debugging information to that error as
+ * // possible.
+ * return true;
+ * },
+ * 'unexpected error',
+ * );
+ * ```
+ *
+ * `error` cannot be a string. If a string is provided as the second
+ * argument, then `error` is assumed to be omitted and the string will be used for `message` instead. This can lead to easy-to-miss mistakes. Using the same
+ * message as the thrown error message is going to result in an `ERR_AMBIGUOUS_ARGUMENT` error. Please read the example below carefully if you are
+ * considering using a string as the second argument:
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * function throwingFirst() {
+ * throw new Error('First');
+ * }
+ *
+ * function throwingSecond() {
+ * throw new Error('Second');
+ * }
+ *
+ * function notThrowing() {}
+ *
+ * // The second argument is a string and the input function threw an Error.
+ * // The first case will not throw as it does not match for the error message
+ * // thrown by the input function!
+ * assert.throws(throwingFirst, 'Second');
+ * // In the next example the message has no benefit over the message from the
+ * // error and since it is not clear if the user intended to actually match
+ * // against the error message, Node.js throws an `ERR_AMBIGUOUS_ARGUMENT` error.
+ * assert.throws(throwingSecond, 'Second');
+ * // TypeError [ERR_AMBIGUOUS_ARGUMENT]
+ *
+ * // The string is only used (as message) in case the function does not throw:
+ * assert.throws(notThrowing, 'Second');
+ * // AssertionError [ERR_ASSERTION]: Missing expected exception: Second
+ *
+ * // If it was intended to match for the error message do this instead:
+ * // It does not throw because the error messages match.
+ * assert.throws(throwingSecond, /Second$/);
+ *
+ * // If the error message does not match, an AssertionError is thrown.
+ * assert.throws(throwingFirst, /Second$/);
+ * // AssertionError [ERR_ASSERTION]
+ * ```
+ *
+ * Due to the confusing, error-prone notation, avoid using a string as the second
+ * argument.
+ * @since v0.1.21
+ */
+ function throws(block: () => unknown, message?: string | Error): void;
+ function throws(block: () => unknown, error: AssertPredicate, message?: string | Error): void;
+ /**
+ * Asserts that the function `fn` does not throw an error.
+ *
+ * Using `assert.doesNotThrow()` is actually not useful because there
+ * is no benefit in catching an error and then rethrowing it. Instead, consider
+ * adding a comment next to the specific code path that should not throw and keep
+ * error messages as expressive as possible.
+ *
+ * When `assert.doesNotThrow()` is called, it will immediately call the `fn` function.
+ *
+ * If an error is thrown and it is the same type as that specified by the `error` parameter, then an `AssertionError` is thrown. If the error is of a
+ * different type, or if the `error` parameter is undefined, the error is
+ * propagated back to the caller.
+ *
+ * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes),
+ * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), or a validation
+ * function. See {@link throws} for more details.
+ *
+ * The following, for instance, will throw the `TypeError` because there is no
+ * matching error type in the assertion:
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.doesNotThrow(
+ * () => {
+ * throw new TypeError('Wrong value');
+ * },
+ * SyntaxError,
+ * );
+ * ```
+ *
+ * However, the following will result in an `AssertionError` with the message
+ * 'Got unwanted exception...':
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.doesNotThrow(
+ * () => {
+ * throw new TypeError('Wrong value');
+ * },
+ * TypeError,
+ * );
+ * ```
+ *
+ * If an `AssertionError` is thrown and a value is provided for the `message` parameter, the value of `message` will be appended to the `AssertionError` message:
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.doesNotThrow(
+ * () => {
+ * throw new TypeError('Wrong value');
+ * },
+ * /Wrong value/,
+ * 'Whoops',
+ * );
+ * // Throws: AssertionError: Got unwanted exception: Whoops
+ * ```
+ * @since v0.1.21
+ */
+ function doesNotThrow(block: () => unknown, message?: string | Error): void;
+ function doesNotThrow(block: () => unknown, error: AssertPredicate, message?: string | Error): void;
+ /**
+ * Throws `value` if `value` is not `undefined` or `null`. This is useful when
+ * testing the `error` argument in callbacks. The stack trace contains all frames
+ * from the error passed to `ifError()` including the potential new frames for `ifError()` itself.
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.ifError(null);
+ * // OK
+ * assert.ifError(0);
+ * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: 0
+ * assert.ifError('error');
+ * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: 'error'
+ * assert.ifError(new Error());
+ * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: Error
+ *
+ * // Create some random error frames.
+ * let err;
+ * (function errorFrame() {
+ * err = new Error('test error');
+ * })();
+ *
+ * (function ifErrorFrame() {
+ * assert.ifError(err);
+ * })();
+ * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: test error
+ * // at ifErrorFrame
+ * // at errorFrame
+ * ```
+ * @since v0.1.97
+ */
+ function ifError(value: unknown): asserts value is null | undefined;
+ /**
+ * Awaits the `asyncFn` promise or, if `asyncFn` is a function, immediately
+ * calls the function and awaits the returned promise to complete. It will then
+ * check that the promise is rejected.
+ *
+ * If `asyncFn` is a function and it throws an error synchronously, `assert.rejects()` will return a rejected `Promise` with that error. If the
+ * function does not return a promise, `assert.rejects()` will return a rejected `Promise` with an `ERR_INVALID_RETURN_VALUE` error. In both cases the error
+ * handler is skipped.
+ *
+ * Aside from the async nature of awaiting the completion, it behaves identically to {@link throws}.
+ *
+ * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes),
+ * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), a validation function,
+ * an object where each property will be tested for, or an instance of error where
+ * each property will be tested for, including the non-enumerable `message` and `name` properties.
+ *
+ * If specified, `message` will be the message provided by the `AssertionError` if the `asyncFn` fails to reject.
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * await assert.rejects(
+ * async () => {
+ * throw new TypeError('Wrong value');
+ * },
+ * {
+ * name: 'TypeError',
+ * message: 'Wrong value',
+ * },
+ * );
+ * ```
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * await assert.rejects(
+ * async () => {
+ * throw new TypeError('Wrong value');
+ * },
+ * (err) => {
+ * assert.strictEqual(err.name, 'TypeError');
+ * assert.strictEqual(err.message, 'Wrong value');
+ * return true;
+ * },
+ * );
+ * ```
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.rejects(
+ * Promise.reject(new Error('Wrong value')),
+ * Error,
+ * ).then(() => {
+ * // ...
+ * });
+ * ```
+ *
+ * `error` cannot be a string. If a string is provided as the second
+ * argument, then `error` is assumed to be omitted and the string will be used for `message` instead. This can lead to easy-to-miss mistakes. Please read the
+ * example in {@link throws} carefully if you are considering using a string as the second
+ * argument.
+ * @since v10.0.0
+ */
+ function rejects(block: (() => Promise<unknown>) | Promise<unknown>, message?: string | Error): Promise<void>;
+ function rejects(
+ block: (() => Promise<unknown>) | Promise<unknown>,
+ error: AssertPredicate,
+ message?: string | Error,
+ ): Promise<void>;
+ /**
+ * Awaits the `asyncFn` promise or, if `asyncFn` is a function, immediately
+ * calls the function and awaits the returned promise to complete. It will then
+ * check that the promise is not rejected.
+ *
+ * If `asyncFn` is a function and it throws an error synchronously, `assert.doesNotReject()` will return a rejected `Promise` with that error. If
+ * the function does not return a promise, `assert.doesNotReject()` will return a
+ * rejected `Promise` with an `ERR_INVALID_RETURN_VALUE` error. In both cases
+ * the error handler is skipped.
+ *
+ * Using `assert.doesNotReject()` is actually not useful because there is little
+ * benefit in catching a rejection and then rejecting it again. Instead, consider
+ * adding a comment next to the specific code path that should not reject and keep
+ * error messages as expressive as possible.
+ *
+ * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes),
+ * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), or a validation
+ * function. See {@link throws} for more details.
+ *
+ * Aside from the async nature of awaiting the completion, it behaves identically to {@link doesNotThrow}.
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * await assert.doesNotReject(
+ * async () => {
+ * throw new TypeError('Wrong value');
+ * },
+ * SyntaxError,
+ * );
+ * ```
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.doesNotReject(Promise.reject(new TypeError('Wrong value')))
+ * .then(() => {
+ * // ...
+ * });
+ * ```
+ * @since v10.0.0
+ */
+ function doesNotReject(
+ block: (() => Promise<unknown>) | Promise<unknown>,
+ message?: string | Error,
+ ): Promise<void>;
+ function doesNotReject(
+ block: (() => Promise<unknown>) | Promise<unknown>,
+ error: AssertPredicate,
+ message?: string | Error,
+ ): Promise<void>;
+ /**
+ * Expects the `string` input to match the regular expression.
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.match('I will fail', /pass/);
+ * // AssertionError [ERR_ASSERTION]: The input did not match the regular ...
+ *
+ * assert.match(123, /pass/);
+ * // AssertionError [ERR_ASSERTION]: The "string" argument must be of type string.
+ *
+ * assert.match('I will pass', /pass/);
+ * // OK
+ * ```
+ *
+ * If the values do not match, or if the `string` argument is of another type than `string`, an `AssertionError` is thrown with a `message` property set equal
+ * to the value of the `message` parameter. If the `message` parameter is
+ * undefined, a default error message is assigned. If the `message` parameter is an
+ * instance of an `Error` then it will be thrown instead of the `AssertionError`.
+ * @since v13.6.0, v12.16.0
+ */
+ function match(value: string, regExp: RegExp, message?: string | Error): void;
+ /**
+ * Expects the `string` input not to match the regular expression.
+ *
+ * ```js
+ * import assert from 'node:assert/strict';
+ *
+ * assert.doesNotMatch('I will fail', /fail/);
+ * // AssertionError [ERR_ASSERTION]: The input was expected to not match the ...
+ *
+ * assert.doesNotMatch(123, /pass/);
+ * // AssertionError [ERR_ASSERTION]: The "string" argument must be of type string.
+ *
+ * assert.doesNotMatch('I will pass', /different/);
+ * // OK
+ * ```
+ *
+ * If the values do match, or if the `string` argument is of another type than `string`, an `AssertionError` is thrown with a `message` property set equal
+ * to the value of the `message` parameter. If the `message` parameter is
+ * undefined, a default error message is assigned. If the `message` parameter is an
+ * instance of an `Error` then it will be thrown instead of the `AssertionError`.
+ * @since v13.6.0, v12.16.0
+ */
+ function doesNotMatch(value: string, regExp: RegExp, message?: string | Error): void;
+ /**
+ * In strict assertion mode, non-strict methods behave like their corresponding strict methods. For example,
+ * {@link deepEqual} will behave like {@link deepStrictEqual}.
+ *
+ * In strict assertion mode, error messages for objects display a diff. In legacy assertion mode, error
+ * messages for objects display the objects, often truncated.
+ *
+ * To use strict assertion mode:
+ *
+ * ```js
+ * import { strict as assert } from 'node:assert';
+ * import assert from 'node:assert/strict';
+ * ```
+ *
+ * Example error diff:
+ *
+ * ```js
+ * import { strict as assert } from 'node:assert';
+ *
+ * assert.deepEqual([[[1, 2, 3]], 4, 5], [[[1, 2, '3']], 4, 5]);
+ * // AssertionError: Expected inputs to be strictly deep-equal:
+ * // + actual - expected ... Lines skipped
+ * //
+ * // [
+ * // [
+ * // ...
+ * // 2,
+ * // + 3
+ * // - '3'
+ * // ],
+ * // ...
+ * // 5
+ * // ]
+ * ```
+ *
+ * To deactivate the colors, use the `NO_COLOR` or `NODE_DISABLE_COLORS` environment variables. This will also
+ * deactivate the colors in the REPL. For more on color support in terminal environments, read the tty
+ * `getColorDepth()` documentation.
+ *
+ * @since v15.0.0, v13.9.0, v12.16.2, v9.9.0
+ */
+ namespace strict {
+ type AssertionError = assert.AssertionError;
+ type AssertPredicate = assert.AssertPredicate;
+ type CallTrackerCall = assert.CallTrackerCall;
+ type CallTrackerReportInformation = assert.CallTrackerReportInformation;
+ }
+ const strict:
+ & Omit<
+ typeof assert,
+ | "equal"
+ | "notEqual"
+ | "deepEqual"
+ | "notDeepEqual"
+ | "ok"
+ | "strictEqual"
+ | "deepStrictEqual"
+ | "ifError"
+ | "strict"
+ >
+ & {
+ (value: unknown, message?: string | Error): asserts value;
+ equal: typeof strictEqual;
+ notEqual: typeof notStrictEqual;
+ deepEqual: typeof deepStrictEqual;
+ notDeepEqual: typeof notDeepStrictEqual;
+ // Mapped types and assertion functions are incompatible?
+ // TS2775: Assertions require every name in the call target
+ // to be declared with an explicit type annotation.
+ ok: typeof ok;
+ strictEqual: typeof strictEqual;
+ deepStrictEqual: typeof deepStrictEqual;
+ ifError: typeof ifError;
+ strict: typeof strict;
+ };
+ }
+ export = assert;
+}
+declare module "node:assert" {
+ import assert = require("assert");
+ export = assert;
+}
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/node/assert/strict.d.ts b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/assert/strict.d.ts
new file mode 100644
index 0000000000000000000000000000000000000000..f333913a4565f7067b05ddbc415490e585f2d1e1
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/assert/strict.d.ts
@@ -0,0 +1,8 @@
+declare module "assert/strict" {
+ import { strict } from "node:assert";
+ export = strict;
+}
+declare module "node:assert/strict" {
+ import { strict } from "node:assert";
+ export = strict;
+}
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/node/async_hooks.d.ts b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/async_hooks.d.ts
new file mode 100644
index 0000000000000000000000000000000000000000..0667a6150af717a759e15e4fe04b6d29fe533411
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/async_hooks.d.ts
@@ -0,0 +1,539 @@
+/**
+ * We strongly discourage the use of the `async_hooks` API.
+ * Other APIs that can cover most of its use cases include:
+ *
+ * * `AsyncLocalStorage` tracks async context
+ * * `process.getActiveResourcesInfo()` tracks active resources
+ *
+ * The `node:async_hooks` module provides an API to track asynchronous resources.
+ * It can be accessed using:
+ *
+ * ```js
+ * import async_hooks from 'node:async_hooks';
+ * ```
+ * @experimental
+ * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/async_hooks.js)
+ */
+declare module "async_hooks" {
+ /**
+ * ```js
+ * import { executionAsyncId } from 'node:async_hooks';
+ * import fs from 'node:fs';
+ *
+ * console.log(executionAsyncId()); // 1 - bootstrap
+ * const path = '.';
+ * fs.open(path, 'r', (err, fd) => {
+ * console.log(executionAsyncId()); // 6 - open()
+ * });
+ * ```
+ *
+ * The ID returned from `executionAsyncId()` is related to execution timing, not
+ * causality (which is covered by `triggerAsyncId()`):
+ *
+ * ```js
+ * const server = net.createServer((conn) => {
+ * // Returns the ID of the server, not of the new connection, because the
+ * // callback runs in the execution scope of the server's MakeCallback().
+ * async_hooks.executionAsyncId();
+ *
+ * }).listen(port, () => {
+ * // Returns the ID of a TickObject (process.nextTick()) because all
+ * // callbacks passed to .listen() are wrapped in a nextTick().
+ * async_hooks.executionAsyncId();
+ * });
+ * ```
+ *
+ * Promise contexts may not get precise `executionAsyncIds` by default.
+ * See the section on `promise execution tracking`.
+ * @since v8.1.0
+ * @return The `asyncId` of the current execution context. Useful to track when something calls.
+ */
+ function executionAsyncId(): number;
+ /**
+ * Resource objects returned by `executionAsyncResource()` are most often internal
+ * Node.js handle objects with undocumented APIs. Using any functions or properties
+ * on the object is likely to crash your application and should be avoided.
+ *
+ * Using `executionAsyncResource()` in the top-level execution context will
+ * return an empty object as there is no handle or request object to use,
+ * but having an object representing the top-level can be helpful.
+ *
+ * ```js
+ * import { open } from 'node:fs';
+ * import { executionAsyncId, executionAsyncResource } from 'node:async_hooks';
+ *
+ * console.log(executionAsyncId(), executionAsyncResource()); // 1 {}
+ * open(new URL(import.meta.url), 'r', (err, fd) => {
+ * console.log(executionAsyncId(), executionAsyncResource()); // 7 FSReqWrap
+ * });
+ * ```
+ *
+ * This can be used to implement continuation local storage without the
+ * use of a tracking `Map` to store the metadata:
+ *
+ * ```js
+ * import { createServer } from 'node:http';
+ * import {
+ * executionAsyncId,
+ * executionAsyncResource,
+ * createHook,
+ * } from 'async_hooks';
+ * const sym = Symbol('state'); // Private symbol to avoid pollution
+ *
+ * createHook({
+ * init(asyncId, type, triggerAsyncId, resource) {
+ * const cr = executionAsyncResource();
+ * if (cr) {
+ * resource[sym] = cr[sym];
+ * }
+ * },
+ * }).enable();
+ *
+ * const server = createServer((req, res) => {
+ * executionAsyncResource()[sym] = { state: req.url };
+ * setTimeout(function() {
+ * res.end(JSON.stringify(executionAsyncResource()[sym]));
+ * }, 100);
+ * }).listen(3000);
+ * ```
+ * @since v13.9.0, v12.17.0
+ * @return The resource representing the current execution. Useful to store data within the resource.
+ */
+ function executionAsyncResource(): object;
+ /**
+ * ```js
+ * const server = net.createServer((conn) => {
+ * // The resource that caused (or triggered) this callback to be called
+ * // was that of the new connection. Thus the return value of triggerAsyncId()
+ * // is the asyncId of "conn".
+ * async_hooks.triggerAsyncId();
+ *
+ * }).listen(port, () => {
+ * // Even though all callbacks passed to .listen() are wrapped in a nextTick()
+ * // the callback itself exists because the call to the server's .listen()
+ * // was made. So the return value would be the ID of the server.
+ * async_hooks.triggerAsyncId();
+ * });
+ * ```
+ *
+ * Promise contexts may not get valid `triggerAsyncId`s by default. See
+ * the section on `promise execution tracking`.
+ * @return The ID of the resource responsible for calling the callback that is currently being executed.
+ */
+ function triggerAsyncId(): number;
+ interface HookCallbacks {
+ /**
+ * Called when a class is constructed that has the possibility to emit an asynchronous event.
+ * @param asyncId a unique ID for the async resource
+ * @param type the type of the async resource
+ * @param triggerAsyncId the unique ID of the async resource in whose execution context this async resource was created
+ * @param resource reference to the resource representing the async operation, needs to be released during destroy
+ */
+ init?(asyncId: number, type: string, triggerAsyncId: number, resource: object): void;
+ /**
+ * When an asynchronous operation is initiated or completes, a callback is called to notify the user.
+ * The before callback is called just before said callback is executed.
+ * @param asyncId the unique identifier assigned to the resource about to execute the callback.
+ */
+ before?(asyncId: number): void;
+ /**
+ * Called immediately after the callback specified in before is completed.
+ * @param asyncId the unique identifier assigned to the resource which has executed the callback.
+ */
+ after?(asyncId: number): void;
+ /**
+ * Called when a promise has resolve() called. This may not be in the same execution id
+ * as the promise itself.
+ * @param asyncId the unique id for the promise that was resolve()d.
+ */
+ promiseResolve?(asyncId: number): void;
+ /**
+ * Called after the resource corresponding to asyncId is destroyed
+ * @param asyncId a unique ID for the async resource
+ */
+ destroy?(asyncId: number): void;
+ }
+ interface AsyncHook {
+ /**
+ * Enable the callbacks for a given AsyncHook instance. If no callbacks are provided, enabling is a no-op.
+ */
+ enable(): this;
+ /**
+ * Disable the callbacks for a given AsyncHook instance from the global pool of AsyncHook callbacks to be executed. Once a hook has been disabled, it will not be called again until enabled.
+ */
+ disable(): this;
+ }
+ /**
+ * Registers functions to be called for different lifetime events of each async
+ * operation.
+ *
+ * The callbacks `init()`/`before()`/`after()`/`destroy()` are called for the
+ * respective asynchronous event during a resource's lifetime.
+ *
+ * All callbacks are optional. For example, if only resource cleanup needs to
+ * be tracked, then only the `destroy` callback needs to be passed. The
+ * specifics of all functions that can be passed to `callbacks` are in the `Hook Callbacks` section.
+ *
+ * ```js
+ * import { createHook } from 'node:async_hooks';
+ *
+ * const asyncHook = createHook({
+ * init(asyncId, type, triggerAsyncId, resource) { },
+ * destroy(asyncId) { },
+ * });
+ * ```
+ *
+ * The callbacks will be inherited via the prototype chain:
+ *
+ * ```js
+ * class MyAsyncCallbacks {
+ * init(asyncId, type, triggerAsyncId, resource) { }
+ * destroy(asyncId) {}
+ * }
+ *
+ * class MyAddedCallbacks extends MyAsyncCallbacks {
+ * before(asyncId) { }
+ * after(asyncId) { }
+ * }
+ *
+ * const asyncHook = async_hooks.createHook(new MyAddedCallbacks());
+ * ```
+ *
+ * Because promises are asynchronous resources whose lifecycle is tracked
+ * via the async hooks mechanism, the `init()`, `before()`, `after()`, and `destroy()` callbacks _must not_ be async functions that return promises.
+ * @since v8.1.0
+ * @param callbacks The `Hook Callbacks` to register
+ * @return Instance used for disabling and enabling hooks
+ */
+ function createHook(callbacks: HookCallbacks): AsyncHook;
+ interface AsyncResourceOptions {
+ /**
+ * The ID of the execution context that created this async event.
+ * @default executionAsyncId()
+ */
+ triggerAsyncId?: number | undefined;
+ /**
+ * Disables automatic `emitDestroy` when the object is garbage collected.
+ * This usually does not need to be set (even if `emitDestroy` is called
+ * manually), unless the resource's `asyncId` is retrieved and the
+ * sensitive API's `emitDestroy` is called with it.
+ * @default false
+ */
+ requireManualDestroy?: boolean | undefined;
+ }
+ /**
+ * The class `AsyncResource` is designed to be extended by the embedder's async
+ * resources. Using this, users can easily trigger the lifetime events of their
+ * own resources.
+ *
+ * The `init` hook will trigger when an `AsyncResource` is instantiated.
+ *
+ * The following is an overview of the `AsyncResource` API.
+ *
+ * ```js
+ * import { AsyncResource, executionAsyncId } from 'node:async_hooks';
+ *
+ * // AsyncResource() is meant to be extended. Instantiating a
+ * // new AsyncResource() also triggers init. If triggerAsyncId is omitted then
+ * // async_hooks.executionAsyncId() is used.
+ * const asyncResource = new AsyncResource(
+ * type, { triggerAsyncId: executionAsyncId(), requireManualDestroy: false },
+ * );
+ *
+ * // Run a function in the execution context of the resource. This will
+ * // * establish the context of the resource
+ * // * trigger the AsyncHooks before callbacks
+ * // * call the provided function `fn` with the supplied arguments
+ * // * trigger the AsyncHooks after callbacks
+ * // * restore the original execution context
+ * asyncResource.runInAsyncScope(fn, thisArg, ...args);
+ *
+ * // Call AsyncHooks destroy callbacks.
+ * asyncResource.emitDestroy();
+ *
+ * // Return the unique ID assigned to the AsyncResource instance.
+ * asyncResource.asyncId();
+ *
+ * // Return the trigger ID for the AsyncResource instance.
+ * asyncResource.triggerAsyncId();
+ * ```
+ */
+ class AsyncResource {
+ /**
+ * AsyncResource() is meant to be extended. Instantiating a
+ * new AsyncResource() also triggers init. If triggerAsyncId is omitted then
+ * async_hooks.executionAsyncId() is used.
+ * @param type The type of async event.
+ * @param triggerAsyncId The ID of the execution context that created
+ * this async event (default: `executionAsyncId()`), or an
+ * AsyncResourceOptions object (since v9.3.0)
+ */
+ constructor(type: string, triggerAsyncId?: number | AsyncResourceOptions);
+ /**
+ * Binds the given function to the current execution context.
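+ *
+ * A brief illustrative sketch (the callback and timing are arbitrary):
+ *
+ * ```js
+ * import { AsyncResource } from 'node:async_hooks';
+ *
+ * // `bound` always runs in the execution context that was active
+ * // when AsyncResource.bind() was called, regardless of where it is invoked.
+ * const bound = AsyncResource.bind(() => {
+ * // ... code that relies on the captured async context
+ * });
+ * setTimeout(bound, 100);
+ * ```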
+ * @since v14.8.0, v12.19.0
+ * @param fn The function to bind to the current execution context.
+ * @param type An optional name to associate with the underlying `AsyncResource`.
+ */
+ static bind<Func extends (this: ThisArg, ...args: any[]) => any, ThisArg>(
+ fn: Func,
+ type?: string,
+ thisArg?: ThisArg,
+ ): Func;
+ /**
+ * Binds the given function to execute to this `AsyncResource`'s scope.
+ * @since v14.8.0, v12.19.0
+ * @param fn The function to bind to the current `AsyncResource`.
+ */
+ bind<Func extends (...args: any[]) => any>(fn: Func): Func;
+ /**
+ * Call the provided function with the provided arguments in the execution context
+ * of the async resource. This will establish the context, trigger the AsyncHooks
+ * before callbacks, call the function, trigger the AsyncHooks after callbacks, and
+ * then restore the original execution context.
+ * @since v9.6.0
+ * @param fn The function to call in the execution context of this async resource.
+ * @param thisArg The receiver to be used for the function call.
+ * @param args Optional arguments to pass to the function.
+ */
+ runInAsyncScope<This, Result>(
+ fn: (this: This, ...args: any[]) => Result,
+ thisArg?: This,
+ ...args: any[]
+ ): Result;
+ /**
+ * Call all `destroy` hooks. This should only ever be called once. An error will
+ * be thrown if it is called more than once. This **must** be manually called. If
+ * the resource is left to be collected by the GC then the `destroy` hooks will
+ * never be called.
+ * @return A reference to `asyncResource`.
+ */
+ emitDestroy(): this;
+ /**
+ * @return The unique `asyncId` assigned to the resource.
+ */
+ asyncId(): number;
+ /**
+ * @return The same `triggerAsyncId` that is passed to the `AsyncResource` constructor.
+ */
+ triggerAsyncId(): number;
+ }
+ /**
+ * This class creates stores that stay coherent through asynchronous operations.
+ *
+ * While you can create your own implementation on top of the `node:async_hooks` module, `AsyncLocalStorage` should be preferred as it is a performant and memory
+ * safe implementation that involves significant optimizations that are non-obvious
+ * to implement.
+ *
+ * The following example uses `AsyncLocalStorage` to build a simple logger
+ * that assigns IDs to incoming HTTP requests and includes them in messages
+ * logged within each request.
+ *
+ * ```js
+ * import http from 'node:http';
+ * import { AsyncLocalStorage } from 'node:async_hooks';
+ *
+ * const asyncLocalStorage = new AsyncLocalStorage();
+ *
+ * function logWithId(msg) {
+ * const id = asyncLocalStorage.getStore();
+ * console.log(`${id !== undefined ? id : '-'}:`, msg);
+ * }
+ *
+ * let idSeq = 0;
+ * http.createServer((req, res) => {
+ * asyncLocalStorage.run(idSeq++, () => {
+ * logWithId('start');
+ * // Imagine any chain of async operations here
+ * setImmediate(() => {
+ * logWithId('finish');
+ * res.end();
+ * });
+ * });
+ * }).listen(8080);
+ *
+ * http.get('http://localhost:8080');
+ * http.get('http://localhost:8080');
+ * // Prints:
+ * // 0: start
+ * // 1: start
+ * // 0: finish
+ * // 1: finish
+ * ```
+ *
+ * Each instance of `AsyncLocalStorage` maintains an independent storage context.
+ * Multiple instances can safely exist simultaneously without risk of interfering
+ * with each other's data.
+ * @since v13.10.0, v12.17.0
+ */
+ class AsyncLocalStorage<T> {
+ /**
+ * Binds the given function to the current execution context.
+ * @since v19.8.0
+ * @experimental
+ * @param fn The function to bind to the current execution context.
+ * @return A new function that calls `fn` within the captured execution context.
+ */
+ static bind<Func extends (...args: any[]) => any>(fn: Func): Func;
+ /**
+ * Captures the current execution context and returns a function that accepts a
+ * function as an argument. Whenever the returned function is called, it
+ * calls the function passed to it within the captured context.
+ *
+ * ```js
+ * const asyncLocalStorage = new AsyncLocalStorage();
+ * const runInAsyncScope = asyncLocalStorage.run(123, () => AsyncLocalStorage.snapshot());
+ * const result = asyncLocalStorage.run(321, () => runInAsyncScope(() => asyncLocalStorage.getStore()));
+ * console.log(result); // returns 123
+ * ```
+ *
+ * AsyncLocalStorage.snapshot() can replace the use of AsyncResource for simple
+ * async context tracking purposes, for example:
+ *
+ * ```js
+ * class Foo {
+ * #runInAsyncScope = AsyncLocalStorage.snapshot();
+ *
+ * get() { return this.#runInAsyncScope(() => asyncLocalStorage.getStore()); }
+ * }
+ *
+ * const foo = asyncLocalStorage.run(123, () => new Foo());
+ * console.log(asyncLocalStorage.run(321, () => foo.get())); // returns 123
+ * ```
+ * @since v19.8.0
+ * @experimental
+ * @return A new function with the signature `(fn: (...args) : R, ...args) : R`.
+ */
+ static snapshot(): <R, TArgs extends any[]>(fn: (...args: TArgs) => R, ...args: TArgs) => R;
+ /**
+ * Disables the instance of `AsyncLocalStorage`. All subsequent calls
+ * to `asyncLocalStorage.getStore()` will return `undefined` until `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()` is called again.
+ *
+ * When calling `asyncLocalStorage.disable()`, all current contexts linked to the
+ * instance will be exited.
+ *
+ * Calling `asyncLocalStorage.disable()` is required before the `asyncLocalStorage` can be garbage collected. This does not apply to stores
+ * provided by the `asyncLocalStorage`, as those objects are garbage collected
+ * along with the corresponding async resources.
+ *
+ * Use this method when the `asyncLocalStorage` is not in use anymore
+ * in the current process.
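+ *
+ * A minimal sketch (store values are illustrative):
+ *
+ * ```js
+ * import { AsyncLocalStorage } from 'node:async_hooks';
+ *
+ * const asyncLocalStorage = new AsyncLocalStorage();
+ * asyncLocalStorage.enterWith({ id: 1 });
+ * asyncLocalStorage.getStore(); // Returns { id: 1 }
+ *
+ * asyncLocalStorage.disable();
+ * asyncLocalStorage.getStore(); // Returns undefined
+ * ```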
+ * @since v13.10.0, v12.17.0
+ * @experimental
+ */
+ disable(): void;
+ /**
+ * Returns the current store.
+ * If called outside of an asynchronous context initialized by
+ * calling `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()`, it
+ * returns `undefined`.
+ * @since v13.10.0, v12.17.0
+ */
+ getStore(): T | undefined;
+ /**
+ * Runs a function synchronously within a context and returns its
+ * return value. The store is not accessible outside of the callback function.
+ * The store is accessible to any asynchronous operations created within the
+ * callback.
+ *
+ * The optional `args` are passed to the callback function.
+ *
+ * If the callback function throws an error, the error is thrown by `run()` too.
+ * The stacktrace is not impacted by this call and the context is exited.
+ *
+ * Example:
+ *
+ * ```js
+ * const store = { id: 2 };
+ * try {
+ * asyncLocalStorage.run(store, () => {
+ * asyncLocalStorage.getStore(); // Returns the store object
+ * setTimeout(() => {
+ * asyncLocalStorage.getStore(); // Returns the store object
+ * }, 200);
+ * throw new Error();
+ * });
+ * } catch (e) {
+ * asyncLocalStorage.getStore(); // Returns undefined
+ * // The error will be caught here
+ * }
+ * ```
+ * @since v13.10.0, v12.17.0
+ */
+ run<R>(store: T, callback: () => R): R;
+ run<R, TArgs extends any[]>(store: T, callback: (...args: TArgs) => R, ...args: TArgs): R;
+ /**
+ * Runs a function synchronously outside of a context and returns its
+ * return value. The store is not accessible within the callback function or
+ * the asynchronous operations created within the callback. Any `getStore()` call done within the callback function will always return `undefined`.
+ *
+ * The optional `args` are passed to the callback function.
+ *
+ * If the callback function throws an error, the error is thrown by `exit()` too.
+ * The stacktrace is not impacted by this call and the context is re-entered.
+ *
+ * Example:
+ *
+ * ```js
+ * // Within a call to run
+ * try {
+ * asyncLocalStorage.getStore(); // Returns the store object or value
+ * asyncLocalStorage.exit(() => {
+ * asyncLocalStorage.getStore(); // Returns undefined
+ * throw new Error();
+ * });
+ * } catch (e) {
+ * asyncLocalStorage.getStore(); // Returns the same object or value
+ * // The error will be caught here
+ * }
+ * ```
+ * @since v13.10.0, v12.17.0
+ * @experimental
+ */
+ exit<R, TArgs extends any[]>(callback: (...args: TArgs) => R, ...args: TArgs): R;
+ /**
+ * Transitions into the context for the remainder of the current
+ * synchronous execution and then persists the store through any following
+ * asynchronous calls.
+ *
+ * Example:
+ *
+ * ```js
+ * const store = { id: 1 };
+ * // Replaces previous store with the given store object
+ * asyncLocalStorage.enterWith(store);
+ * asyncLocalStorage.getStore(); // Returns the store object
+ * someAsyncOperation(() => {
+ * asyncLocalStorage.getStore(); // Returns the same object
+ * });
+ * ```
+ *
+ * This transition will continue for the _entire_ synchronous execution.
+ * This means that if, for example, the context is entered within an event
+ * handler, subsequent event handlers will also run within that context unless
+ * specifically bound to another context with an `AsyncResource`. That is why `run()` should be preferred over `enterWith()` unless there are strong reasons
+ * to use the latter method.
+ *
+ * ```js
+ * const store = { id: 1 };
+ *
+ * emitter.on('my-event', () => {
+ * asyncLocalStorage.enterWith(store);
+ * });
+ * emitter.on('my-event', () => {
+ * asyncLocalStorage.getStore(); // Returns the same object
+ * });
+ *
+ * asyncLocalStorage.getStore(); // Returns undefined
+ * emitter.emit('my-event');
+ * asyncLocalStorage.getStore(); // Returns the same object
+ * ```
+ * @since v13.11.0, v12.17.0
+ * @experimental
+ */
+ enterWith(store: T): void;
+ }
+}
+declare module "node:async_hooks" {
+ export * from "async_hooks";
+}
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/node/buffer.d.ts b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/buffer.d.ts
new file mode 100644
index 0000000000000000000000000000000000000000..255e2688a0da8e2777cae1e5e7fe7c35eadb636d
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/buffer.d.ts
@@ -0,0 +1,2363 @@
+/**
+ * `Buffer` objects are used to represent a fixed-length sequence of bytes. Many
+ * Node.js APIs support `Buffer`s.
+ *
+ * The `Buffer` class is a subclass of JavaScript's [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) class and
+ * extends it with methods that cover additional use cases. Node.js APIs accept
+ * plain [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) s wherever `Buffer`s are supported as well.
+ *
+ * While the `Buffer` class is available within the global scope, it is still
+ * recommended to explicitly reference it via an import or require statement.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Creates a zero-filled Buffer of length 10.
+ * const buf1 = Buffer.alloc(10);
+ *
+ * // Creates a Buffer of length 10,
+ * // filled with bytes which all have the value `1`.
+ * const buf2 = Buffer.alloc(10, 1);
+ *
+ * // Creates an uninitialized buffer of length 10.
+ * // This is faster than calling Buffer.alloc() but the returned
+ * // Buffer instance might contain old data that needs to be
+ * // overwritten using fill(), write(), or other functions that fill the Buffer's
+ * // contents.
+ * const buf3 = Buffer.allocUnsafe(10);
+ *
+ * // Creates a Buffer containing the bytes [1, 2, 3].
+ * const buf4 = Buffer.from([1, 2, 3]);
+ *
+ * // Creates a Buffer containing the bytes [1, 1, 1, 1] – the entries
+ * // are all truncated using `(value & 255)` to fit into the range 0–255.
+ * const buf5 = Buffer.from([257, 257.5, -255, '1']);
+ *
+ * // Creates a Buffer containing the UTF-8-encoded bytes for the string 'tést':
+ * // [0x74, 0xc3, 0xa9, 0x73, 0x74] (in hexadecimal notation)
+ * // [116, 195, 169, 115, 116] (in decimal notation)
+ * const buf6 = Buffer.from('tést');
+ *
+ * // Creates a Buffer containing the Latin-1 bytes [0x74, 0xe9, 0x73, 0x74].
+ * const buf7 = Buffer.from('tést', 'latin1');
+ * ```
+ * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/buffer.js)
+ */
+declare module "buffer" {
+ import { BinaryLike } from "node:crypto";
+ import { ReadableStream as WebReadableStream } from "node:stream/web";
+ /**
+ * This function returns `true` if `input` contains only valid UTF-8-encoded data,
+ * including the case in which `input` is empty.
+ *
+ * Throws if the `input` is a detached array buffer.
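+ *
+ * A minimal sketch (byte values are illustrative):
+ *
+ * ```js
+ * import { Buffer, isUtf8 } from 'node:buffer';
+ *
+ * isUtf8(Buffer.from('hello', 'utf8')); // true
+ * isUtf8(Buffer.from([0xff, 0xfe, 0xfd])); // false, not a valid UTF-8 sequence
+ * ```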
+ * @since v19.4.0, v18.14.0
+ * @param input The input to validate.
+ */
+ export function isUtf8(input: Buffer | ArrayBuffer | NodeJS.TypedArray): boolean;
+ /**
+ * This function returns `true` if `input` contains only valid ASCII-encoded data,
+ * including the case in which `input` is empty.
+ *
+ * Throws if the `input` is a detached array buffer.
+ * @since v19.6.0, v18.15.0
+ * @param input The input to validate.
+ */
+ export function isAscii(input: Buffer | ArrayBuffer | NodeJS.TypedArray): boolean;
+ export const INSPECT_MAX_BYTES: number;
+ export const kMaxLength: number;
+ export const kStringMaxLength: number;
+ export const constants: {
+ MAX_LENGTH: number;
+ MAX_STRING_LENGTH: number;
+ };
+ export type TranscodeEncoding =
+ | "ascii"
+ | "utf8"
+ | "utf-8"
+ | "utf16le"
+ | "utf-16le"
+ | "ucs2"
+ | "ucs-2"
+ | "latin1"
+ | "binary";
+ /**
+ * Re-encodes the given `Buffer` or `Uint8Array` instance from one character
+ * encoding to another. Returns a new `Buffer` instance.
+ *
+ * Throws if the `fromEnc` or `toEnc` specify invalid character encodings or if
+ * conversion from `fromEnc` to `toEnc` is not permitted.
+ *
+ * Encodings supported by `buffer.transcode()` are: `'ascii'`, `'utf8'`, `'utf16le'`, `'ucs2'`, `'latin1'`, and `'binary'`.
+ *
+ * The transcoding process will use substitution characters if a given byte
+ * sequence cannot be adequately represented in the target encoding. For instance:
+ *
+ * ```js
+ * import { Buffer, transcode } from 'node:buffer';
+ *
+ * const newBuf = transcode(Buffer.from('€'), 'utf8', 'ascii');
+ * console.log(newBuf.toString('ascii'));
+ * // Prints: '?'
+ * ```
+ *
+ * Because the Euro (`€`) sign is not representable in US-ASCII, it is replaced
+ * with `?` in the transcoded `Buffer`.
+ * @since v7.1.0
+ * @param source A `Buffer` or `Uint8Array` instance.
+ * @param fromEnc The current encoding.
+ * @param toEnc The target encoding.
+ */
+ export function transcode(source: Uint8Array, fromEnc: TranscodeEncoding, toEnc: TranscodeEncoding): Buffer;
+ export const SlowBuffer: {
+ /** @deprecated since v6.0.0, use `Buffer.allocUnsafeSlow()` */
+ new(size: number): Buffer;
+ prototype: Buffer;
+ };
+ /**
+ * Resolves a `'blob:nodedata:...'` URL to an associated `Blob` object registered
+ * using a prior call to `URL.createObjectURL()`.
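+ *
+ * A minimal sketch (assumes the global `URL.createObjectURL()` is available):
+ *
+ * ```js
+ * import { Blob, resolveObjectURL } from 'node:buffer';
+ *
+ * const blob = new Blob(['hello']);
+ * const id = URL.createObjectURL(blob);
+ * const resolved = resolveObjectURL(id);
+ * // `resolved` refers to the registered Blob, or is undefined for an unknown id.
+ * ```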
+ * @since v16.7.0
+ * @experimental
+ * @param id A `'blob:nodedata:...'` URL string returned by a prior call to `URL.createObjectURL()`.
+ */
+ export function resolveObjectURL(id: string): Blob | undefined;
+ export { Buffer };
+ /**
+ * @experimental
+ */
+ export interface BlobOptions {
+ /**
+ * One of either `'transparent'` or `'native'`. When set to `'native'`, line endings in string source parts
+ * will be converted to the platform native line-ending as specified by `require('node:os').EOL`.
+ */
+ endings?: "transparent" | "native";
+ /**
+ * The Blob content-type. The intent is for `type` to convey
+ * the MIME media type of the data, however no validation of the type format
+ * is performed.
+ */
+ type?: string | undefined;
+ }
+ /**
+ * A [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) encapsulates immutable, raw data that can be safely shared across
+ * multiple worker threads.
+ * @since v15.7.0, v14.18.0
+ */
+ export class Blob {
+ /**
+ * The total size of the `Blob` in bytes.
+ * @since v15.7.0, v14.18.0
+ */
+ readonly size: number;
+ /**
+ * The content-type of the `Blob`.
+ * @since v15.7.0, v14.18.0
+ */
+ readonly type: string;
+ /**
+ * Creates a new `Blob` object containing a concatenation of the given sources.
+ *
+ * {ArrayBuffer}, {TypedArray}, {DataView}, and {Buffer} sources are copied into
+ * the 'Blob' and can therefore be safely modified after the 'Blob' is created.
+ *
+ * String sources are also copied into the `Blob`.
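+ *
+ * A minimal sketch (sizes and types shown are illustrative):
+ *
+ * ```js
+ * import { Blob } from 'node:buffer';
+ *
+ * const blob = new Blob(['hello, ', 'world'], { type: 'text/plain' });
+ * console.log(blob.size); // 12
+ * console.log(blob.type); // 'text/plain'
+ * ```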
+ */
+ constructor(sources: Array<ArrayBuffer | BinaryLike | Blob>, options?: BlobOptions);
+ /**
+ * Returns a promise that fulfills with an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) containing a copy of
+ * the `Blob` data.
+ * @since v15.7.0, v14.18.0
+ */
+ arrayBuffer(): Promise<ArrayBuffer>;
+ /**
+ * Creates and returns a new `Blob` containing a subset of this `Blob` objects
+ * data. The original `Blob` is not altered.
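+ *
+ * A minimal sketch (indices and content are illustrative):
+ *
+ * ```js
+ * import { Blob } from 'node:buffer';
+ *
+ * const blob = new Blob(['hello world']);
+ * const sliced = blob.slice(0, 5, 'text/plain');
+ * sliced.text().then((text) => {
+ * console.log(text); // 'hello'
+ * });
+ * ```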
+ * @since v15.7.0, v14.18.0
+ * @param start The starting index.
+ * @param end The ending index.
+ * @param type The content-type for the new `Blob`
+ */
+ slice(start?: number, end?: number, type?: string): Blob;
+ /**
+ * Returns a promise that fulfills with the contents of the `Blob` decoded as a
+ * UTF-8 string.
+ * @since v15.7.0, v14.18.0
+ */
+ text(): Promise<string>;
+ /**
+ * Returns a new `ReadableStream` that allows the content of the `Blob` to be read.
+ * @since v16.7.0
+ */
+ stream(): WebReadableStream;
+ }
+ export interface FileOptions {
+ /**
+ * One of either `'transparent'` or `'native'`. When set to `'native'`, line endings in string source parts will be
+ * converted to the platform native line-ending as specified by `require('node:os').EOL`.
+ */
+ endings?: "native" | "transparent";
+ /** The File content-type. */
+ type?: string;
+ /** The last modified date of the file. `Default`: Date.now(). */
+ lastModified?: number;
+ }
+ /**
+ * A [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) provides information about files.
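+ *
+ * A minimal sketch (file name and contents are illustrative):
+ *
+ * ```js
+ * import { File } from 'node:buffer';
+ *
+ * const file = new File(['hello'], 'hello.txt', { type: 'text/plain' });
+ * console.log(file.name); // 'hello.txt'
+ * console.log(file.size); // 5
+ * console.log(file.lastModified); // A timestamp such as Date.now()
+ * ```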
+ * @since v19.2.0, v18.13.0
+ */
+ export class File extends Blob {
+ constructor(sources: Array<BinaryLike | Blob>, fileName: string, options?: FileOptions);
+ /**
+ * The name of the `File`.
+ * @since v19.2.0, v18.13.0
+ */
+ readonly name: string;
+ /**
+ * The last modified date of the `File`.
+ * @since v19.2.0, v18.13.0
+ */
+ readonly lastModified: number;
+ }
+ export import atob = globalThis.atob;
+ export import btoa = globalThis.btoa;
+ import { Blob as NodeBlob } from "buffer";
+ // This conditional type will be the existing global Blob in a browser, or
+ // the copy below in a Node environment.
+ type __Blob = typeof globalThis extends { onmessage: any; Blob: any } ? {} : NodeBlob;
+ global {
+ namespace NodeJS {
+ export { BufferEncoding };
+ }
+ // Buffer class
+ type BufferEncoding =
+ | "ascii"
+ | "utf8"
+ | "utf-8"
+ | "utf16le"
+ | "utf-16le"
+ | "ucs2"
+ | "ucs-2"
+ | "base64"
+ | "base64url"
+ | "latin1"
+ | "binary"
+ | "hex";
+ type WithImplicitCoercion<T> =
+ | T
+ | {
+ valueOf(): T;
+ };
+ /**
+ * Raw data is stored in instances of the Buffer class.
+ * A Buffer is similar to an array of integers but corresponds to a raw memory allocation outside the V8 heap. A Buffer cannot be resized.
+ * Valid string encodings: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'base64url'|'binary'(deprecated)|'hex'
+ */
+ interface BufferConstructor {
+ /**
+ * Allocates a new buffer containing the given {str}.
+ *
+ * @param str String to store in buffer.
+ * @param encoding encoding to use, optional. Default is 'utf8'
+ * @deprecated since v10.0.0 - Use `Buffer.from(string[, encoding])` instead.
+ */
+ new(str: string, encoding?: BufferEncoding): Buffer;
+ /**
+ * Allocates a new buffer of {size} octets.
+ *
+ * @param size count of octets to allocate.
+ * @deprecated since v10.0.0 - Use `Buffer.alloc()` instead (also see `Buffer.allocUnsafe()`).
+ */
+ new(size: number): Buffer;
+ /**
+ * Allocates a new buffer containing the given {array} of octets.
+ *
+ * @param array The octets to store.
+ * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead.
+ */
+ new(array: Uint8Array): Buffer;
+ /**
+ * Produces a Buffer backed by the same allocated memory as
+ * the given {ArrayBuffer}/{SharedArrayBuffer}.
+ *
+ * @param arrayBuffer The ArrayBuffer with which to share memory.
+ * @deprecated since v10.0.0 - Use `Buffer.from(arrayBuffer[, byteOffset[, length]])` instead.
+ */
+ new(arrayBuffer: ArrayBuffer | SharedArrayBuffer): Buffer;
+ /**
+ * Allocates a new buffer containing the given {array} of octets.
+ *
+ * @param array The octets to store.
+ * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead.
+ */
+ new(array: readonly any[]): Buffer;
+ /**
+ * Copies the passed {buffer} data onto a new {Buffer} instance.
+ *
+ * @param buffer The buffer to copy.
+ * @deprecated since v10.0.0 - Use `Buffer.from(buffer)` instead.
+ */
+ new(buffer: Buffer): Buffer;
+ /**
+ * Allocates a new `Buffer` using an `array` of bytes in the range `0` – `255`.
+ * Array entries outside that range will be truncated to fit into it.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'.
+ * const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]);
+ * ```
+ *
+ * If `array` is an `Array`\-like object (that is, one with a `length` property of
+ * type `number`), it is treated as if it is an array, unless it is a `Buffer` or
+ * a `Uint8Array`. This means all other `TypedArray` variants get treated as an`Array`. To create a `Buffer` from the bytes backing a `TypedArray`, use `Buffer.copyBytesFrom()`.
+ *
+ * A `TypeError` will be thrown if `array` is not an `Array` or another type
+ * appropriate for `Buffer.from()` variants.
+ *
+ * `Buffer.from(array)` and `Buffer.from(string)` may also use the internal`Buffer` pool like `Buffer.allocUnsafe()` does.
+ * @since v5.10.0
+ */
+ from(
+ arrayBuffer: WithImplicitCoercion<ArrayBuffer | SharedArrayBuffer>,
+ byteOffset?: number,
+ length?: number,
+ ): Buffer;
+ /**
+ * Creates a new Buffer using the passed {data}
+ * @param data data to create a new Buffer
+ */
+ from(data: Uint8Array | readonly number[]): Buffer;
+ from(data: WithImplicitCoercion<Uint8Array | readonly number[]>): Buffer;
+ /**
+ * Creates a new Buffer containing the given JavaScript string {str}.
+ * If provided, the {encoding} parameter identifies the character encoding.
+ * If not provided, {encoding} defaults to 'utf8'.
+ */
+ from(
+ str:
+ | WithImplicitCoercion<string>
+ | {
+ [Symbol.toPrimitive](hint: "string"): string;
+ },
+ encoding?: BufferEncoding,
+ ): Buffer;
+ /**
+ * Creates a new Buffer using the passed {data}
+ * @param values to create a new Buffer
+ */
+ of(...items: number[]): Buffer;
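+ // Usage sketch for Buffer.of() (illustration only, not part of the upstream docs):
+ //
+ //   Buffer.of(0x62, 0x75, 0x66); // <Buffer 62 75 66>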
+ /**
+ * Returns `true` if `obj` is a `Buffer`, `false` otherwise.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * Buffer.isBuffer(Buffer.alloc(10)); // true
+ * Buffer.isBuffer(Buffer.from('foo')); // true
+ * Buffer.isBuffer('a string'); // false
+ * Buffer.isBuffer([]); // false
+ * Buffer.isBuffer(new Uint8Array(1024)); // false
+ * ```
+ * @since v0.1.101
+ */
+ isBuffer(obj: any): obj is Buffer;
+ /**
+ * Returns `true` if `encoding` is the name of a supported character encoding,
+ * or `false` otherwise.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * console.log(Buffer.isEncoding('utf8'));
+ * // Prints: true
+ *
+ * console.log(Buffer.isEncoding('hex'));
+ * // Prints: true
+ *
+ * console.log(Buffer.isEncoding('utf/8'));
+ * // Prints: false
+ *
+ * console.log(Buffer.isEncoding(''));
+ * // Prints: false
+ * ```
+ * @since v0.9.1
+ * @param encoding A character encoding name to check.
+ */
+ isEncoding(encoding: string): encoding is BufferEncoding;
+ /**
+ * Returns the byte length of a string when encoded using `encoding`.
+ * This is not the same as [`String.prototype.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), which does not account
+ * for the encoding that is used to convert the string into bytes.
+ *
+ * For `'base64'`, `'base64url'`, and `'hex'`, this function assumes valid input.
+ * For strings that contain non-base64/hex-encoded data (e.g. whitespace), the
+ * return value might be greater than the length of a `Buffer` created from the
+ * string.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const str = '\u00bd + \u00bc = \u00be';
+ *
+ * console.log(`${str}: ${str.length} characters, ` +
+ * `${Buffer.byteLength(str, 'utf8')} bytes`);
+ * // Prints: ½ + ¼ = ¾: 9 characters, 12 bytes
+ * ```
+ *
+ * When `string` is a
+ * `Buffer`/[`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView)/[`TypedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray)/[`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer)/[`SharedArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer), the byte length as reported by `.byteLength` is returned.
+ * @since v0.1.90
+ * @param string A value to calculate the length of.
+ * @param [encoding='utf8'] If `string` is a string, this is its encoding.
+ * @return The number of bytes contained within `string`.
+ */
+ byteLength(
+ string: string | Buffer | NodeJS.ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
+ encoding?: BufferEncoding,
+ ): number;
+ /**
+ * Returns a new `Buffer` which is the result of concatenating all the `Buffer`instances in the `list` together.
+ *
+ * If the list has no items, or if the `totalLength` is 0, then a new zero-length`Buffer` is returned.
+ *
+ * If `totalLength` is not provided, it is calculated from the `Buffer` instances
+ * in `list` by adding their lengths.
+ *
+ * If `totalLength` is provided, it is coerced to an unsigned integer. If the
+ * combined length of the `Buffer`s in `list` exceeds `totalLength`, the result is
+ * truncated to `totalLength`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Create a single `Buffer` from a list of three `Buffer` instances.
+ *
+ * const buf1 = Buffer.alloc(10);
+ * const buf2 = Buffer.alloc(14);
+ * const buf3 = Buffer.alloc(18);
+ * const totalLength = buf1.length + buf2.length + buf3.length;
+ *
+ * console.log(totalLength);
+ * // Prints: 42
+ *
+ * const bufA = Buffer.concat([buf1, buf2, buf3], totalLength);
+ *
+ * console.log(bufA);
+ * // Prints: <Buffer 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ...>
+ * console.log(bufA.length);
+ * // Prints: 42
+ * ```
+ *
+ * `Buffer.concat()` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does.
+ * @since v0.7.11
+ * @param list List of `Buffer` or {@link Uint8Array} instances to concatenate.
+ * @param totalLength Total length of the `Buffer` instances in `list` when concatenated.
+ */
+ concat(list: readonly Uint8Array[], totalLength?: number): Buffer;
+ /**
+ * Copies the underlying memory of `view` into a new `Buffer`.
+ *
+ * ```js
+ * const u16 = new Uint16Array([0, 0xffff]);
+ * const buf = Buffer.copyBytesFrom(u16, 1, 1);
+ * u16[1] = 0;
+ * console.log(buf.length); // 2
+ * console.log(buf[0]); // 255
+ * console.log(buf[1]); // 255
+ * ```
+ * @since v19.8.0
+ * @param view The {TypedArray} to copy.
+ * @param [offset=0] The starting offset within `view`.
+ * @param [length=view.length - offset] The number of elements from `view` to copy.
+ */
+ copyBytesFrom(view: NodeJS.TypedArray, offset?: number, length?: number): Buffer;
+ /**
+ * Compares `buf1` to `buf2`, typically for the purpose of sorting arrays of`Buffer` instances. This is equivalent to calling `buf1.compare(buf2)`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf1 = Buffer.from('1234');
+ * const buf2 = Buffer.from('0123');
+ * const arr = [buf1, buf2];
+ *
+ * console.log(arr.sort(Buffer.compare));
+ * // Prints: [ <Buffer 30 31 32 33>, <Buffer 31 32 33 34> ]
+ * // (This result is equal to: [buf2, buf1].)
+ * ```
+ * @since v0.11.13
+ * @return Either `-1`, `0`, or `1`, depending on the result of the comparison. See `compare` for details.
+ */
+ compare(buf1: Uint8Array, buf2: Uint8Array): -1 | 0 | 1;
+ /**
+ * Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the`Buffer` will be zero-filled.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.alloc(5);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 00 00 00 00 00>
+ * ```
+ *
+ * If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown.
+ *
+ * If `fill` is specified, the allocated `Buffer` will be initialized by calling `buf.fill(fill)`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.alloc(5, 'a');
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 61 61 61 61 61>
+ * ```
+ *
+ * If both `fill` and `encoding` are specified, the allocated `Buffer` will be
+ * initialized by calling `buf.fill(fill, encoding)`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64>
+ * ```
+ *
+ * Calling `Buffer.alloc()` can be measurably slower than the alternative `Buffer.allocUnsafe()` but ensures that the newly created `Buffer` instance
+ * contents will never contain sensitive data from previous allocations, including
+ * data that might not have been allocated for `Buffer`s.
+ *
+ * A `TypeError` will be thrown if `size` is not a number.
+ * @since v5.10.0
+ * @param size The desired length of the new `Buffer`.
+ * @param [fill=0] A value to pre-fill the new `Buffer` with.
+ * @param [encoding='utf8'] If `fill` is a string, this is its encoding.
+ */
+ alloc(size: number, fill?: string | Uint8Array | number, encoding?: BufferEncoding): Buffer;
+ /**
+ * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown.
+ *
+ * The underlying memory for `Buffer` instances created in this way is _not_
+ * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `Buffer.alloc()` instead to initialize`Buffer` instances with zeroes.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(10);
+ *
+ * console.log(buf);
+ * // Prints (contents may vary): <Buffer a0 8b 28 3f 01 00 00 00 50 32>
+ *
+ * buf.fill(0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 00 00 00 00 00 00 00 00 00 00>
+ * ```
+ *
+ * A `TypeError` will be thrown if `size` is not a number.
+ *
+ * The `Buffer` module pre-allocates an internal `Buffer` instance of
+ * size `Buffer.poolSize` that is used as a pool for the fast allocation of new `Buffer` instances created using `Buffer.allocUnsafe()`, `Buffer.from(array)`,
+ * and `Buffer.concat()` only when `size` is less than `Buffer.poolSize >>> 1` (floor of `Buffer.poolSize` divided by two).
+ *
+ * Use of this pre-allocated internal memory pool is a key difference between
+ * calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`.
+ * Specifically, `Buffer.alloc(size, fill)` will _never_ use the internal `Buffer`pool, while `Buffer.allocUnsafe(size).fill(fill)`_will_ use the internal`Buffer` pool if `size` is less
+ * than or equal to half `Buffer.poolSize`. The
+ * difference is subtle but can be important when an application requires the
+ * additional performance that `Buffer.allocUnsafe()` provides.
+ * @since v5.10.0
+ * @param size The desired length of the new `Buffer`.
+ */
+ allocUnsafe(size: number): Buffer;
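+ // A minimal sketch of the pooling difference described above (illustration only;
+ // 256 bytes is well below the default `Buffer.poolSize >>> 1` threshold):
+ //
+ //   const a = Buffer.alloc(256, 0);            // never pooled, zero-filled
+ //   const b = Buffer.allocUnsafe(256).fill(0); // may be sliced from the shared pool
+ //   // Both hold 256 zeroed bytes; only the allocation strategy differs.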
+ /**
+ * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. A zero-length `Buffer` is created if
+ * `size` is 0.
+ *
+ * The underlying memory for `Buffer` instances created in this way is _not_
+ * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `buf.fill(0)` to initialize
+ * such `Buffer` instances with zeroes.
+ *
+ * When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances,
+ * allocations under 4 KiB are sliced from a single pre-allocated `Buffer`. This
+ * allows applications to avoid the garbage collection overhead of creating many
+ * individually allocated `Buffer` instances. This approach improves both
+ * performance and memory usage by eliminating the need to track and clean up as
+ * many individual `ArrayBuffer` objects.
+ *
+ * However, in the case where a developer may need to retain a small chunk of
+ * memory from a pool for an indeterminate amount of time, it may be appropriate
+ * to create an un-pooled `Buffer` instance using `Buffer.allocUnsafeSlow()` and
+ * then copying out the relevant bits.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Need to keep around a few small chunks of memory.
+ * const store = [];
+ *
+ * socket.on('readable', () => {
+ * let data;
+ * while (null !== (data = readable.read())) {
+ * // Allocate for retained data.
+ * const sb = Buffer.allocUnsafeSlow(10);
+ *
+ * // Copy the data into the new allocation.
+ * data.copy(sb, 0, 0, 10);
+ *
+ * store.push(sb);
+ * }
+ * });
+ * ```
+ *
+ * A `TypeError` will be thrown if `size` is not a number.
+ * @since v5.12.0
+ * @param size The desired length of the new `Buffer`.
+ */
+ allocUnsafeSlow(size: number): Buffer;
+ /**
+ * This is the size (in bytes) of pre-allocated internal `Buffer` instances used
+ * for pooling. This value may be modified.
+ * @since v0.11.3
+ */
+ poolSize: number;
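+ // Illustration only (not from the upstream docs): the pool size can be inspected or
+ // tuned; 8192 bytes is the historical default and may differ between Node versions.
+ //
+ //   console.log(Buffer.poolSize); // typically 8192
+ //   Buffer.poolSize = 16 * 1024;  // opt into a larger internal pool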
+ }
+ interface Buffer extends Uint8Array {
+ /**
+ * Writes `string` to `buf` at `offset` according to the character encoding in`encoding`. The `length` parameter is the number of bytes to write. If `buf` did
+ * not contain enough space to fit the entire string, only part of `string` will be
+ * written. However, partially encoded characters will not be written.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.alloc(256);
+ *
+ * const len = buf.write('\u00bd + \u00bc = \u00be', 0);
+ *
+ * console.log(`${len} bytes: ${buf.toString('utf8', 0, len)}`);
+ * // Prints: 12 bytes: ½ + ¼ = ¾
+ *
+ * const buffer = Buffer.alloc(10);
+ *
+ * const length = buffer.write('abcd', 8);
+ *
+ * console.log(`${length} bytes: ${buffer.toString('utf8', 8, 10)}`);
+ * // Prints: 2 bytes : ab
+ * ```
+ * @since v0.1.90
+ * @param string String to write to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write `string`.
+ * @param [length=buf.length - offset] Maximum number of bytes to write (written bytes will not exceed `buf.length - offset`).
+ * @param [encoding='utf8'] The character encoding of `string`.
+ * @return Number of bytes written.
+ */
+ write(string: string, encoding?: BufferEncoding): number;
+ write(string: string, offset: number, encoding?: BufferEncoding): number;
+ write(string: string, offset: number, length: number, encoding?: BufferEncoding): number;
+ /**
+ * Decodes `buf` to a string according to the specified character encoding in`encoding`. `start` and `end` may be passed to decode only a subset of `buf`.
+ *
+ * If `encoding` is `'utf8'` and a byte sequence in the input is not valid UTF-8,
+ * then each invalid byte is replaced with the replacement character `U+FFFD`.
+ *
+ * The maximum length of a string instance (in UTF-16 code units) is available
+ * as {@link constants.MAX_STRING_LENGTH}.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf1 = Buffer.allocUnsafe(26);
+ *
+ * for (let i = 0; i < 26; i++) {
+ * // 97 is the decimal ASCII value for 'a'.
+ * buf1[i] = i + 97;
+ * }
+ *
+ * console.log(buf1.toString('utf8'));
+ * // Prints: abcdefghijklmnopqrstuvwxyz
+ * console.log(buf1.toString('utf8', 0, 5));
+ * // Prints: abcde
+ *
+ * const buf2 = Buffer.from('tést');
+ *
+ * console.log(buf2.toString('hex'));
+ * // Prints: 74c3a97374
+ * console.log(buf2.toString('utf8', 0, 3));
+ * // Prints: té
+ * console.log(buf2.toString(undefined, 0, 3));
+ * // Prints: té
+ * ```
+ * @since v0.1.90
+ * @param [encoding='utf8'] The character encoding to use.
+ * @param [start=0] The byte offset to start decoding at.
+ * @param [end=buf.length] The byte offset to stop decoding at (not inclusive).
+ */
+ toString(encoding?: BufferEncoding, start?: number, end?: number): string;
+ /**
+ * Returns a JSON representation of `buf`. [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify) implicitly calls
+ * this function when stringifying a `Buffer` instance.
+ *
+ * `Buffer.from()` accepts objects in the format returned from this method.
+ * In particular, `Buffer.from(buf.toJSON())` works like `Buffer.from(buf)`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]);
+ * const json = JSON.stringify(buf);
+ *
+ * console.log(json);
+ * // Prints: {"type":"Buffer","data":[1,2,3,4,5]}
+ *
+ * const copy = JSON.parse(json, (key, value) => {
+ * return value && value.type === 'Buffer' ?
+ * Buffer.from(value) :
+ * value;
+ * });
+ *
+ * console.log(copy);
+ * // Prints: <Buffer 01 02 03 04 05>
+ * ```
+ * @since v0.9.2
+ */
+ toJSON(): {
+ type: "Buffer";
+ data: number[];
+ };
+ /**
+ * Returns `true` if both `buf` and `otherBuffer` have exactly the same bytes,`false` otherwise. Equivalent to `buf.compare(otherBuffer) === 0`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf1 = Buffer.from('ABC');
+ * const buf2 = Buffer.from('414243', 'hex');
+ * const buf3 = Buffer.from('ABCD');
+ *
+ * console.log(buf1.equals(buf2));
+ * // Prints: true
+ * console.log(buf1.equals(buf3));
+ * // Prints: false
+ * ```
+ * @since v0.11.13
+ * @param otherBuffer A `Buffer` or {@link Uint8Array} with which to compare `buf`.
+ */
+ equals(otherBuffer: Uint8Array): boolean;
+ /**
+ * Compares `buf` with `target` and returns a number indicating whether `buf`comes before, after, or is the same as `target` in sort order.
+ * Comparison is based on the actual sequence of bytes in each `Buffer`.
+ *
+ * * `0` is returned if `target` is the same as `buf`
+ * * `1` is returned if `target` should come _before_`buf` when sorted.
+ * * `-1` is returned if `target` should come _after_`buf` when sorted.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf1 = Buffer.from('ABC');
+ * const buf2 = Buffer.from('BCD');
+ * const buf3 = Buffer.from('ABCD');
+ *
+ * console.log(buf1.compare(buf1));
+ * // Prints: 0
+ * console.log(buf1.compare(buf2));
+ * // Prints: -1
+ * console.log(buf1.compare(buf3));
+ * // Prints: -1
+ * console.log(buf2.compare(buf1));
+ * // Prints: 1
+ * console.log(buf2.compare(buf3));
+ * // Prints: 1
+ * console.log([buf1, buf2, buf3].sort(Buffer.compare));
+ * // Prints: [ <Buffer 41 42 43>, <Buffer 41 42 43 44>, <Buffer 42 43 44> ]
+ * // (This result is equal to: [buf1, buf3, buf2].)
+ * ```
+ *
+ * The optional `targetStart`, `targetEnd`, `sourceStart`, and `sourceEnd`arguments can be used to limit the comparison to specific ranges within `target`and `buf` respectively.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf1 = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8, 9]);
+ * const buf2 = Buffer.from([5, 6, 7, 8, 9, 1, 2, 3, 4]);
+ *
+ * console.log(buf1.compare(buf2, 5, 9, 0, 4));
+ * // Prints: 0
+ * console.log(buf1.compare(buf2, 0, 6, 4));
+ * // Prints: -1
+ * console.log(buf1.compare(buf2, 5, 6, 5));
+ * // Prints: 1
+ * ```
+ *
+ * `ERR_OUT_OF_RANGE` is thrown if `targetStart < 0`, `sourceStart < 0`,`targetEnd > target.byteLength`, or `sourceEnd > source.byteLength`.
+ * @since v0.11.13
+ * @param target A `Buffer` or {@link Uint8Array} with which to compare `buf`.
+ * @param [targetStart=0] The offset within `target` at which to begin comparison.
+ * @param [targetEnd=target.length] The offset within `target` at which to end comparison (not inclusive).
+ * @param [sourceStart=0] The offset within `buf` at which to begin comparison.
+ * @param [sourceEnd=buf.length] The offset within `buf` at which to end comparison (not inclusive).
+ */
+ compare(
+ target: Uint8Array,
+ targetStart?: number,
+ targetEnd?: number,
+ sourceStart?: number,
+ sourceEnd?: number,
+ ): -1 | 0 | 1;
+ /**
+ * Copies data from a region of `buf` to a region in `target`, even if the `target`memory region overlaps with `buf`.
+ *
+ * [`TypedArray.prototype.set()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/set) performs the same operation, and is available
+ * for all TypedArrays, including Node.js `Buffer`s, although it takes
+ * different function arguments.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Create two `Buffer` instances.
+ * const buf1 = Buffer.allocUnsafe(26);
+ * const buf2 = Buffer.allocUnsafe(26).fill('!');
+ *
+ * for (let i = 0; i < 26; i++) {
+ * // 97 is the decimal ASCII value for 'a'.
+ * buf1[i] = i + 97;
+ * }
+ *
+ * // Copy `buf1` bytes 16 through 19 into `buf2` starting at byte 8 of `buf2`.
+ * buf1.copy(buf2, 8, 16, 20);
+ * // This is equivalent to:
+ * // buf2.set(buf1.subarray(16, 20), 8);
+ *
+ * console.log(buf2.toString('ascii', 0, 25));
+ * // Prints: !!!!!!!!qrst!!!!!!!!!!!!!
+ * ```
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Create a `Buffer` and copy data from one region to an overlapping region
+ * // within the same `Buffer`.
+ *
+ * const buf = Buffer.allocUnsafe(26);
+ *
+ * for (let i = 0; i < 26; i++) {
+ * // 97 is the decimal ASCII value for 'a'.
+ * buf[i] = i + 97;
+ * }
+ *
+ * buf.copy(buf, 0, 4, 10);
+ *
+ * console.log(buf.toString());
+ * // Prints: efghijghijklmnopqrstuvwxyz
+ * ```
+ * @since v0.1.90
+ * @param target A `Buffer` or {@link Uint8Array} to copy into.
+ * @param [targetStart=0] The offset within `target` at which to begin writing.
+ * @param [sourceStart=0] The offset within `buf` from which to begin copying.
+ * @param [sourceEnd=buf.length] The offset within `buf` at which to stop copying (not inclusive).
+ * @return The number of bytes copied.
+ */
+ copy(target: Uint8Array, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
+ /**
+ * Returns a new `Buffer` that references the same memory as the original, but
+ * offset and cropped by the `start` and `end` indices.
+ *
+ * This method is not compatible with the `Uint8Array.prototype.slice()`,
+ * which is a superclass of `Buffer`. To copy the slice, use`Uint8Array.prototype.slice()`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from('buffer');
+ *
+ * const copiedBuf = Uint8Array.prototype.slice.call(buf);
+ * copiedBuf[0]++;
+ * console.log(copiedBuf.toString());
+ * // Prints: cuffer
+ *
+ * console.log(buf.toString());
+ * // Prints: buffer
+ *
+ * // With buf.slice(), the original buffer is modified.
+ * const notReallyCopiedBuf = buf.slice();
+ * notReallyCopiedBuf[0]++;
+ * console.log(notReallyCopiedBuf.toString());
+ * // Prints: cuffer
+ * console.log(buf.toString());
+ * // Also prints: cuffer (!)
+ * ```
+ * @since v0.3.0
+ * @deprecated Use `subarray` instead.
+ * @param [start=0] Where the new `Buffer` will start.
+ * @param [end=buf.length] Where the new `Buffer` will end (not inclusive).
+ */
+ slice(start?: number, end?: number): Buffer;
+ /**
+ * Returns a new `Buffer` that references the same memory as the original, but
+ * offset and cropped by the `start` and `end` indices.
+ *
+ * Specifying `end` greater than `buf.length` will return the same result as
+ * that of `end` equal to `buf.length`.
+ *
+ * This method is inherited from [`TypedArray.prototype.subarray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray).
+ *
+ * Modifying the new `Buffer` slice will modify the memory in the original `Buffer`because the allocated memory of the two objects overlap.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Create a `Buffer` with the ASCII alphabet, take a slice, and modify one byte
+ * // from the original `Buffer`.
+ *
+ * const buf1 = Buffer.allocUnsafe(26);
+ *
+ * for (let i = 0; i < 26; i++) {
+ * // 97 is the decimal ASCII value for 'a'.
+ * buf1[i] = i + 97;
+ * }
+ *
+ * const buf2 = buf1.subarray(0, 3);
+ *
+ * console.log(buf2.toString('ascii', 0, buf2.length));
+ * // Prints: abc
+ *
+ * buf1[0] = 33;
+ *
+ * console.log(buf2.toString('ascii', 0, buf2.length));
+ * // Prints: !bc
+ * ```
+ *
+ * Specifying negative indexes causes the slice to be generated relative to the
+ * end of `buf` rather than the beginning.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from('buffer');
+ *
+ * console.log(buf.subarray(-6, -1).toString());
+ * // Prints: buffe
+ * // (Equivalent to buf.subarray(0, 5).)
+ *
+ * console.log(buf.subarray(-6, -2).toString());
+ * // Prints: buff
+ * // (Equivalent to buf.subarray(0, 4).)
+ *
+ * console.log(buf.subarray(-5, -2).toString());
+ * // Prints: uff
+ * // (Equivalent to buf.subarray(1, 4).)
+ * ```
+ * @since v3.0.0
+ * @param [start=0] Where the new `Buffer` will start.
+ * @param [end=buf.length] Where the new `Buffer` will end (not inclusive).
+ */
+ subarray(start?: number, end?: number): Buffer;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as big-endian.
+ *
+ * `value` is interpreted and written as a two's complement signed integer.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(8);
+ *
+ * buf.writeBigInt64BE(0x0102030405060708n, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 01 02 03 04 05 06 07 08>
+ * ```
+ * @since v12.0.0, v10.20.0
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeBigInt64BE(value: bigint, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as little-endian.
+ *
+ * `value` is interpreted and written as a two's complement signed integer.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(8);
+ *
+ * buf.writeBigInt64LE(0x0102030405060708n, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 08 07 06 05 04 03 02 01>
+ * ```
+ * @since v12.0.0, v10.20.0
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeBigInt64LE(value: bigint, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as big-endian.
+ *
+ * This function is also available under the `writeBigUint64BE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(8);
+ *
+ * buf.writeBigUInt64BE(0xdecafafecacefaden, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer de ca fa fe ca ce fa de>
+ * ```
+ * @since v12.0.0, v10.20.0
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeBigUInt64BE(value: bigint, offset?: number): number;
+ /**
+ * @alias Buffer.writeBigUInt64BE
+ * @since v14.10.0, v12.19.0
+ */
+ writeBigUint64BE(value: bigint, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as little-endian
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(8);
+ *
+ * buf.writeBigUInt64LE(0xdecafafecacefaden, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer de fa ce ca fe fa ca de>
+ * ```
+ *
+ * This function is also available under the `writeBigUint64LE` alias.
+ * @since v12.0.0, v10.20.0
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeBigUInt64LE(value: bigint, offset?: number): number;
+ /**
+ * @alias Buffer.writeBigUInt64LE
+ * @since v14.10.0, v12.19.0
+ */
+ writeBigUint64LE(value: bigint, offset?: number): number;
+ /**
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined
+ * when `value` is anything other than an unsigned integer.
+ *
+ * This function is also available under the `writeUintLE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(6);
+ *
+ * buf.writeUIntLE(0x1234567890ab, 0, 6);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer ab 90 78 56 34 12>
+ * ```
+ * @since v0.5.5
+ * @param value Number to be written to `buf`.
+ * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`.
+ * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeUIntLE(value: number, offset: number, byteLength: number): number;
+ /**
+ * @alias Buffer.writeUIntLE
+ * @since v14.9.0, v12.19.0
+ */
+ writeUintLE(value: number, offset: number, byteLength: number): number;
+ /**
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined
+ * when `value` is anything other than an unsigned integer.
+ *
+ * This function is also available under the `writeUintBE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(6);
+ *
+ * buf.writeUIntBE(0x1234567890ab, 0, 6);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 12 34 56 78 90 ab>
+ * ```
+ * @since v0.5.5
+ * @param value Number to be written to `buf`.
+ * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`.
+ * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeUIntBE(value: number, offset: number, byteLength: number): number;
+ /**
+ * @alias Buffer.writeUIntBE
+ * @since v14.9.0, v12.19.0
+ */
+ writeUintBE(value: number, offset: number, byteLength: number): number;
+ /**
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined
+ * when `value` is anything other than a signed integer.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(6);
+ *
+ * buf.writeIntLE(0x1234567890ab, 0, 6);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer ab 90 78 56 34 12>
+ * ```
+ * @since v0.11.15
+ * @param value Number to be written to `buf`.
+ * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`.
+ * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeIntLE(value: number, offset: number, byteLength: number): number;
+ /**
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined when`value` is anything other than a
+ * signed integer.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(6);
+ *
+ * buf.writeIntBE(0x1234567890ab, 0, 6);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 12 34 56 78 90 ab>
+ * ```
+ * @since v0.11.15
+ * @param value Number to be written to `buf`.
+ * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`.
+ * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeIntBE(value: number, offset: number, byteLength: number): number;
+ /**
+ * Reads an unsigned, big-endian 64-bit integer from `buf` at the specified`offset`.
+ *
+ * This function is also available under the `readBigUint64BE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]);
+ *
+ * console.log(buf.readBigUInt64BE(0));
+ * // Prints: 4294967295n
+ * ```
+ * @since v12.0.0, v10.20.0
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`.
+ */
+ readBigUInt64BE(offset?: number): bigint;
+ /**
+ * @alias Buffer.readBigUInt64BE
+ * @since v14.10.0, v12.19.0
+ */
+ readBigUint64BE(offset?: number): bigint;
+ /**
+ * Reads an unsigned, little-endian 64-bit integer from `buf` at the specified`offset`.
+ *
+ * This function is also available under the `readBigUint64LE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]);
+ *
+ * console.log(buf.readBigUInt64LE(0));
+ * // Prints: 18446744069414584320n
+ * ```
+ * @since v12.0.0, v10.20.0
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`.
+ */
+ readBigUInt64LE(offset?: number): bigint;
+ /**
+ * @alias Buffer.readBigUInt64LE
+ * @since v14.10.0, v12.19.0
+ */
+ readBigUint64LE(offset?: number): bigint;
+ /**
+ * Reads a signed, big-endian 64-bit integer from `buf` at the specified `offset`.
+ *
+ * Integers read from a `Buffer` are interpreted as two's complement signed
+ * values.
+ * @since v12.0.0, v10.20.0
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`.
+ */
+ readBigInt64BE(offset?: number): bigint;
+ /**
+ * Reads a signed, little-endian 64-bit integer from `buf` at the specified`offset`.
+ *
+ * Integers read from a `Buffer` are interpreted as two's complement signed
+ * values.
+ * @since v12.0.0, v10.20.0
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`.
+ */
+ readBigInt64LE(offset?: number): bigint;
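+ // The two signed 64-bit readers above ship without examples; a small sketch follows
+ // (illustration only, values follow from the two's-complement reads documented above):
+ //
+ //   const buf = Buffer.from([0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08]);
+ //   buf.readBigInt64BE(0); // 72623859790382856n  (0x0102030405060708n)
+ //   buf.readBigInt64LE(0); // 578437695752307201n (0x0807060504030201n)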
+ /**
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as an unsigned, little-endian integer supporting
+ * up to 48 bits of accuracy.
+ *
+ * This function is also available under the `readUintLE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]);
+ *
+ * console.log(buf.readUIntLE(0, 6).toString(16));
+ * // Prints: ab9078563412
+ * ```
+ * @since v0.11.15
+ * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`.
+ * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`.
+ */
+ readUIntLE(offset: number, byteLength: number): number;
+ /**
+ * @alias Buffer.readUIntLE
+ * @since v14.9.0, v12.19.0
+ */
+ readUintLE(offset: number, byteLength: number): number;
+ /**
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as an unsigned big-endian integer supporting
+ * up to 48 bits of accuracy.
+ *
+ * This function is also available under the `readUintBE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]);
+ *
+ * console.log(buf.readUIntBE(0, 6).toString(16));
+ * // Prints: 1234567890ab
+ * console.log(buf.readUIntBE(1, 6).toString(16));
+ * // Throws ERR_OUT_OF_RANGE.
+ * ```
+ * @since v0.11.15
+ * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`.
+ * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`.
+ */
+ readUIntBE(offset: number, byteLength: number): number;
+ /**
+ * @alias Buffer.readUIntBE
+ * @since v14.9.0, v12.19.0
+ */
+ readUintBE(offset: number, byteLength: number): number;
+ /**
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as a little-endian, two's complement signed value
+ * supporting up to 48 bits of accuracy.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]);
+ *
+ * console.log(buf.readIntLE(0, 6).toString(16));
+ * // Prints: -546f87a9cbee
+ * ```
+ * @since v0.11.15
+ * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`.
+ * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`.
+ */
+ readIntLE(offset: number, byteLength: number): number;
+ /**
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as a big-endian, two's complement signed value
+ * supporting up to 48 bits of accuracy.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]);
+ *
+ * console.log(buf.readIntBE(0, 6).toString(16));
+ * // Prints: 1234567890ab
+ * console.log(buf.readIntBE(1, 6).toString(16));
+ * // Throws ERR_OUT_OF_RANGE.
+ * console.log(buf.readIntBE(1, 0).toString(16));
+ * // Throws ERR_OUT_OF_RANGE.
+ * ```
+ * @since v0.11.15
+ * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`.
+ * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`.
+ */
+ readIntBE(offset: number, byteLength: number): number;
+ /**
+ * Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
+ *
+ * This function is also available under the `readUint8` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([1, -2]);
+ *
+ * console.log(buf.readUInt8(0));
+ * // Prints: 1
+ * console.log(buf.readUInt8(1));
+ * // Prints: 254
+ * console.log(buf.readUInt8(2));
+ * // Throws ERR_OUT_OF_RANGE.
+ * ```
+ * @since v0.5.0
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`.
+ */
+ readUInt8(offset?: number): number;
+ /**
+ * @alias Buffer.readUInt8
+ * @since v14.9.0, v12.19.0
+ */
+ readUint8(offset?: number): number;
+ /**
+ * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified`offset`.
+ *
+ * This function is also available under the `readUint16LE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0x12, 0x34, 0x56]);
+ *
+ * console.log(buf.readUInt16LE(0).toString(16));
+ * // Prints: 3412
+ * console.log(buf.readUInt16LE(1).toString(16));
+ * // Prints: 5634
+ * console.log(buf.readUInt16LE(2).toString(16));
+ * // Throws ERR_OUT_OF_RANGE.
+ * ```
+ * @since v0.5.5
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`.
+ */
+ readUInt16LE(offset?: number): number;
+ /**
+ * @alias Buffer.readUInt16LE
+ * @since v14.9.0, v12.19.0
+ */
+ readUint16LE(offset?: number): number;
+ /**
+ * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified`offset`.
+ *
+ * This function is also available under the `readUint16BE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0x12, 0x34, 0x56]);
+ *
+ * console.log(buf.readUInt16BE(0).toString(16));
+ * // Prints: 1234
+ * console.log(buf.readUInt16BE(1).toString(16));
+ * // Prints: 3456
+ * ```
+ * @since v0.5.5
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`.
+ */
+ readUInt16BE(offset?: number): number;
+ /**
+ * @alias Buffer.readUInt16BE
+ * @since v14.9.0, v12.19.0
+ */
+ readUint16BE(offset?: number): number;
+ /**
+ * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified`offset`.
+ *
+ * This function is also available under the `readUint32LE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]);
+ *
+ * console.log(buf.readUInt32LE(0).toString(16));
+ * // Prints: 78563412
+ * console.log(buf.readUInt32LE(1).toString(16));
+ * // Throws ERR_OUT_OF_RANGE.
+ * ```
+ * @since v0.5.5
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`.
+ */
+ readUInt32LE(offset?: number): number;
+ /**
+ * @alias Buffer.readUInt32LE
+ * @since v14.9.0, v12.19.0
+ */
+ readUint32LE(offset?: number): number;
+ /**
+ * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified`offset`.
+ *
+ * This function is also available under the `readUint32BE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]);
+ *
+ * console.log(buf.readUInt32BE(0).toString(16));
+ * // Prints: 12345678
+ * ```
+ * @since v0.5.5
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`.
+ */
+ readUInt32BE(offset?: number): number;
+ /**
+ * @alias Buffer.readUInt32BE
+ * @since v14.9.0, v12.19.0
+ */
+ readUint32BE(offset?: number): number;
+ /**
+ * Reads a signed 8-bit integer from `buf` at the specified `offset`.
+ *
+ * Integers read from a `Buffer` are interpreted as two's complement signed values.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([-1, 5]);
+ *
+ * console.log(buf.readInt8(0));
+ * // Prints: -1
+ * console.log(buf.readInt8(1));
+ * // Prints: 5
+ * console.log(buf.readInt8(2));
+ * // Throws ERR_OUT_OF_RANGE.
+ * ```
+ * @since v0.5.0
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`.
+ */
+ readInt8(offset?: number): number;
+ /**
+ * Reads a signed, little-endian 16-bit integer from `buf` at the specified`offset`.
+ *
+ * Integers read from a `Buffer` are interpreted as two's complement signed values.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0, 5]);
+ *
+ * console.log(buf.readInt16LE(0));
+ * // Prints: 1280
+ * console.log(buf.readInt16LE(1));
+ * // Throws ERR_OUT_OF_RANGE.
+ * ```
+ * @since v0.5.5
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`.
+ */
+ readInt16LE(offset?: number): number;
+ /**
+ * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`.
+ *
+ * Integers read from a `Buffer` are interpreted as two's complement signed values.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0, 5]);
+ *
+ * console.log(buf.readInt16BE(0));
+ * // Prints: 5
+ * ```
+ * @since v0.5.5
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`.
+ */
+ readInt16BE(offset?: number): number;
+ /**
+ * Reads a signed, little-endian 32-bit integer from `buf` at the specified`offset`.
+ *
+ * Integers read from a `Buffer` are interpreted as two's complement signed values.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0, 0, 0, 5]);
+ *
+ * console.log(buf.readInt32LE(0));
+ * // Prints: 83886080
+ * console.log(buf.readInt32LE(1));
+ * // Throws ERR_OUT_OF_RANGE.
+ * ```
+ * @since v0.5.5
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`.
+ */
+ readInt32LE(offset?: number): number;
+ /**
+ * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`.
+ *
+ * Integers read from a `Buffer` are interpreted as two's complement signed values.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0, 0, 0, 5]);
+ *
+ * console.log(buf.readInt32BE(0));
+ * // Prints: 5
+ * ```
+ * @since v0.5.5
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`.
+ */
+ readInt32BE(offset?: number): number;
+ /**
+ * Reads a 32-bit, little-endian float from `buf` at the specified `offset`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([1, 2, 3, 4]);
+ *
+ * console.log(buf.readFloatLE(0));
+ * // Prints: 1.539989614439558e-36
+ * console.log(buf.readFloatLE(1));
+ * // Throws ERR_OUT_OF_RANGE.
+ * ```
+ * @since v0.11.15
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`.
+ */
+ readFloatLE(offset?: number): number;
+ /**
+ * Reads a 32-bit, big-endian float from `buf` at the specified `offset`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([1, 2, 3, 4]);
+ *
+ * console.log(buf.readFloatBE(0));
+ * // Prints: 2.387939260590663e-38
+ * ```
+ * @since v0.11.15
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`.
+ */
+ readFloatBE(offset?: number): number;
+ /**
+ * Reads a 64-bit, little-endian double from `buf` at the specified `offset`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]);
+ *
+ * console.log(buf.readDoubleLE(0));
+ * // Prints: 5.447603722011605e-270
+ * console.log(buf.readDoubleLE(1));
+ * // Throws ERR_OUT_OF_RANGE.
+ * ```
+ * @since v0.11.15
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`.
+ */
+ readDoubleLE(offset?: number): number;
+ /**
+ * Reads a 64-bit, big-endian double from `buf` at the specified `offset`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]);
+ *
+ * console.log(buf.readDoubleBE(0));
+ * // Prints: 8.20788039913184e-304
+ * ```
+ * @since v0.11.15
+ * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`.
+ */
+ readDoubleBE(offset?: number): number;
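+ /**
+ * Reverses the bytes of `buf` in place and returns a reference to `buf`.
+ * Inherited from `Uint8Array.prototype.reverse()`. (Descriptive note added here;
+ * this member is undocumented upstream.)
+ *
+ * ```js
+ * const buf = Buffer.from([1, 2, 3]);
+ * buf.reverse(); // <Buffer 03 02 01>
+ * ```
+ */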
+ reverse(): this;
+ /**
+ * Interprets `buf` as an array of unsigned 16-bit integers and swaps the
+ * byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 2.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]);
+ *
+ * console.log(buf1);
+ * // Prints: <Buffer 01 02 03 04 05 06 07 08>
+ *
+ * buf1.swap16();
+ *
+ * console.log(buf1);
+ * // Prints: <Buffer 02 01 04 03 06 05 08 07>
+ *
+ * const buf2 = Buffer.from([0x1, 0x2, 0x3]);
+ *
+ * buf2.swap16();
+ * // Throws ERR_INVALID_BUFFER_SIZE.
+ * ```
+ *
+ * One convenient use of `buf.swap16()` is to perform a fast in-place conversion
+ * between UTF-16 little-endian and UTF-16 big-endian:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from('This is little-endian UTF-16', 'utf16le');
+ * buf.swap16(); // Convert to big-endian UTF-16 text.
+ * ```
+ * @since v5.10.0
+ * @return A reference to `buf`.
+ */
+ swap16(): Buffer;
+ /**
+ * Interprets `buf` as an array of unsigned 32-bit integers and swaps the
+ * byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 4.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]);
+ *
+ * console.log(buf1);
+ * // Prints: <Buffer 01 02 03 04 05 06 07 08>
+ *
+ * buf1.swap32();
+ *
+ * console.log(buf1);
+ * // Prints: <Buffer 04 03 02 01 08 07 06 05>
+ *
+ * const buf2 = Buffer.from([0x1, 0x2, 0x3]);
+ *
+ * buf2.swap32();
+ * // Throws ERR_INVALID_BUFFER_SIZE.
+ * ```
+ * @since v5.10.0
+ * @return A reference to `buf`.
+ */
+ swap32(): Buffer;
+ /**
+ * Interprets `buf` as an array of 64-bit numbers and swaps byte order _in-place_.
+ * Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 8.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]);
+ *
+ * console.log(buf1);
+ * // Prints: <Buffer 01 02 03 04 05 06 07 08>
+ *
+ * buf1.swap64();
+ *
+ * console.log(buf1);
+ * // Prints: <Buffer 08 07 06 05 04 03 02 01>
+ *
+ * const buf2 = Buffer.from([0x1, 0x2, 0x3]);
+ *
+ * buf2.swap64();
+ * // Throws ERR_INVALID_BUFFER_SIZE.
+ * ```
+ * @since v6.3.0
+ * @return A reference to `buf`.
+ */
+ swap64(): Buffer;
+ /**
+ * Writes `value` to `buf` at the specified `offset`. `value` must be a
+ * valid unsigned 8-bit integer. Behavior is undefined when `value` is anything
+ * other than an unsigned 8-bit integer.
+ *
+ * This function is also available under the `writeUint8` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(4);
+ *
+ * buf.writeUInt8(0x3, 0);
+ * buf.writeUInt8(0x4, 1);
+ * buf.writeUInt8(0x23, 2);
+ * buf.writeUInt8(0x42, 3);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 03 04 23 42>
+ * ```
+ * @since v0.5.0
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeUInt8(value: number, offset?: number): number;
+ /**
+ * @alias Buffer.writeUInt8
+ * @since v14.9.0, v12.19.0
+ */
+ writeUint8(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid unsigned 16-bit integer. Behavior is undefined when `value` is
+ * anything other than an unsigned 16-bit integer.
+ *
+ * This function is also available under the `writeUint16LE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(4);
+ *
+ * buf.writeUInt16LE(0xdead, 0);
+ * buf.writeUInt16LE(0xbeef, 2);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer ad de ef be>
+ * ```
+ * @since v0.5.5
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeUInt16LE(value: number, offset?: number): number;
+ /**
+ * @alias Buffer.writeUInt16LE
+ * @since v14.9.0, v12.19.0
+ */
+ writeUint16LE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid unsigned 16-bit integer. Behavior is undefined when `value`is anything other than an
+ * unsigned 16-bit integer.
+ *
+ * This function is also available under the `writeUint16BE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(4);
+ *
+ * buf.writeUInt16BE(0xdead, 0);
+ * buf.writeUInt16BE(0xbeef, 2);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer de ad be ef>
+ * ```
+ * @since v0.5.5
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeUInt16BE(value: number, offset?: number): number;
+ /**
+ * @alias Buffer.writeUInt16BE
+ * @since v14.9.0, v12.19.0
+ */
+ writeUint16BE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid unsigned 32-bit integer. Behavior is undefined when `value` is
+ * anything other than an unsigned 32-bit integer.
+ *
+ * This function is also available under the `writeUint32LE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(4);
+ *
+ * buf.writeUInt32LE(0xfeedface, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer ce fa ed fe>
+ * ```
+ * @since v0.5.5
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeUInt32LE(value: number, offset?: number): number;
+ /**
+ * @alias Buffer.writeUInt32LE
+ * @since v14.9.0, v12.19.0
+ */
+ writeUint32LE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid unsigned 32-bit integer. Behavior is undefined when `value`is anything other than an
+ * unsigned 32-bit integer.
+ *
+ * This function is also available under the `writeUint32BE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(4);
+ *
+ * buf.writeUInt32BE(0xfeedface, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer fe ed fa ce>
+ * ```
+ * @since v0.5.5
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeUInt32BE(value: number, offset?: number): number;
+ /**
+ * @alias Buffer.writeUInt32BE
+ * @since v14.9.0, v12.19.0
+ */
+ writeUint32BE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset`. `value` must be a valid
+ * signed 8-bit integer. Behavior is undefined when `value` is anything other than
+ * a signed 8-bit integer.
+ *
+ * `value` is interpreted and written as a two's complement signed integer.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(2);
+ *
+ * buf.writeInt8(2, 0);
+ * buf.writeInt8(-2, 1);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 02 fe>
+ * ```
+ * @since v0.5.0
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeInt8(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid signed 16-bit integer. Behavior is undefined when `value` is
+ * anything other than a signed 16-bit integer.
+ *
+ * The `value` is interpreted and written as a two's complement signed integer.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(2);
+ *
+ * buf.writeInt16LE(0x0304, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 04 03>
+ * ```
+ * @since v0.5.5
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeInt16LE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid signed 16-bit integer. Behavior is undefined when `value` is
+ * anything other than a signed 16-bit integer.
+ *
+ * The `value` is interpreted and written as a two's complement signed integer.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(2);
+ *
+ * buf.writeInt16BE(0x0102, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 01 02>
+ * ```
+ * @since v0.5.5
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeInt16BE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid signed 32-bit integer. Behavior is undefined when `value` is
+ * anything other than a signed 32-bit integer.
+ *
+ * The `value` is interpreted and written as a two's complement signed integer.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(4);
+ *
+ * buf.writeInt32LE(0x05060708, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 08 07 06 05>
+ * ```
+ * @since v0.5.5
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeInt32LE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid signed 32-bit integer. Behavior is undefined when `value` is
+ * anything other than a signed 32-bit integer.
+ *
+ * The `value` is interpreted and written as a two's complement signed integer.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(4);
+ *
+ * buf.writeInt32BE(0x01020304, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 01 02 03 04>
+ * ```
+ * @since v0.5.5
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeInt32BE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as little-endian. Behavior is
+ * undefined when `value` is anything other than a JavaScript number.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(4);
+ *
+ * buf.writeFloatLE(0xcafebabe, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer bb fe 4a 4f>
+ * ```
+ * @since v0.11.15
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeFloatLE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as big-endian. Behavior is
+ * undefined when `value` is anything other than a JavaScript number.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(4);
+ *
+ * buf.writeFloatBE(0xcafebabe, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 4f 4a fe bb>
+ * ```
+ * @since v0.11.15
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeFloatBE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a JavaScript number. Behavior is undefined when `value` is anything
+ * other than a JavaScript number.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(8);
+ *
+ * buf.writeDoubleLE(123.456, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 77 be 9f 1a 2f dd 5e 40>
+ * ```
+ * @since v0.11.15
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeDoubleLE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a JavaScript number. Behavior is undefined when `value` is anything
+ * other than a JavaScript number.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(8);
+ *
+ * buf.writeDoubleBE(123.456, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 40 5e dd 2f 1a 9f be 77>
+ * ```
+ * @since v0.11.15
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeDoubleBE(value: number, offset?: number): number;
+ /**
+ * Fills `buf` with the specified `value`. If the `offset` and `end` are not given,
+ * the entire `buf` will be filled:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Fill a `Buffer` with the ASCII character 'h'.
+ *
+ * const b = Buffer.allocUnsafe(50).fill('h');
+ *
+ * console.log(b.toString());
+ * // Prints: hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh
+ *
+ * // Fill a buffer with empty string
+ * const c = Buffer.allocUnsafe(5).fill('');
+ *
+ * console.log(c.fill(''));
+ * // Prints: <Buffer 00 00 00 00 00>
+ * ```
+ *
+ * `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or
+ * integer. If the resulting integer is greater than `255` (decimal), `buf` will be
+ * filled with `value & 255`.
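+ *
+ * For example (an illustrative sketch, not part of the upstream example set), a
+ * numeric `value` above `255` wraps to `value & 255`:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // 300 & 255 === 44 (0x2c), so every byte is written as 0x2c.
+ * console.log(Buffer.allocUnsafe(3).fill(300));
+ * // Prints: <Buffer 2c 2c 2c>
+ * ```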
+ *
+ * If the final write of a `fill()` operation falls on a multi-byte character,
+ * then only the bytes of that character that fit into `buf` are written:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Fill a `Buffer` with character that takes up two bytes in UTF-8.
+ *
+ * console.log(Buffer.allocUnsafe(5).fill('\u0222'));
+ * // Prints: <Buffer c8 a2 c8 a2 c8>
+ * ```
+ *
+ * If `value` contains invalid characters, it is truncated; if no valid
+ * fill data remains, an exception is thrown:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(5);
+ *
+ * console.log(buf.fill('a'));
+ * // Prints: <Buffer 61 61 61 61 61>
+ * console.log(buf.fill('aazz', 'hex'));
+ * // Prints: <Buffer aa aa aa aa aa>
+ * console.log(buf.fill('zz', 'hex'));
+ * // Throws an exception.
+ * ```
+ * @since v0.5.0
+ * @param value The value with which to fill `buf`. Empty value (string, Uint8Array, Buffer) is coerced to `0`.
+ * @param [offset=0] Number of bytes to skip before starting to fill `buf`.
+ * @param [end=buf.length] Where to stop filling `buf` (not inclusive).
+ * @param [encoding='utf8'] The encoding for `value` if `value` is a string.
+ * @return A reference to `buf`.
+ */
+ fill(value: string | Uint8Array | number, offset?: number, end?: number, encoding?: BufferEncoding): this;
+ /**
+ * If `value` is:
+ *
+ * * a string, `value` is interpreted according to the character encoding in `encoding`.
+ * * a `Buffer` or [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array), `value` will be used in its entirety.
+ * To compare a partial `Buffer`, use `buf.subarray`.
+ * * a number, `value` will be interpreted as an unsigned 8-bit integer
+ * value between `0` and `255`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from('this is a buffer');
+ *
+ * console.log(buf.indexOf('this'));
+ * // Prints: 0
+ * console.log(buf.indexOf('is'));
+ * // Prints: 2
+ * console.log(buf.indexOf(Buffer.from('a buffer')));
+ * // Prints: 8
+ * console.log(buf.indexOf(97));
+ * // Prints: 8 (97 is the decimal ASCII value for 'a')
+ * console.log(buf.indexOf(Buffer.from('a buffer example')));
+ * // Prints: -1
+ * console.log(buf.indexOf(Buffer.from('a buffer example').slice(0, 8)));
+ * // Prints: 8
+ *
+ * const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le');
+ *
+ * console.log(utf16Buffer.indexOf('\u03a3', 0, 'utf16le'));
+ * // Prints: 4
+ * console.log(utf16Buffer.indexOf('\u03a3', -4, 'utf16le'));
+ * // Prints: 6
+ * ```
+ *
+ * If `value` is not a string, number, or `Buffer`, this method will throw a `TypeError`. If `value` is a number, it will be coerced to a valid byte value,
+ * an integer between 0 and 255.
+ *
+ * If `byteOffset` is not a number, it will be coerced to a number. If the result
+ * of coercion is `NaN` or `0`, then the entire buffer will be searched. This
+ * behavior matches [`String.prototype.indexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/indexOf).
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const b = Buffer.from('abcdef');
+ *
+ * // Passing a value that's a number, but not a valid byte.
+ * // Prints: 2, equivalent to searching for 99 or 'c'.
+ * console.log(b.indexOf(99.9));
+ * console.log(b.indexOf(256 + 99));
+ *
+ * // Passing a byteOffset that coerces to NaN or 0.
+ * // Prints: 1, searching the whole buffer.
+ * console.log(b.indexOf('b', undefined));
+ * console.log(b.indexOf('b', {}));
+ * console.log(b.indexOf('b', null));
+ * console.log(b.indexOf('b', []));
+ * ```
+ *
+ * If `value` is an empty string or empty `Buffer` and `byteOffset` is less
+ * than `buf.length`, `byteOffset` will be returned. If `value` is empty and `byteOffset` is at least `buf.length`, `buf.length` will be returned.
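+ *
+ * A brief sketch of that rule (illustrative, not from the upstream docs):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from('abcdef');
+ *
+ * console.log(buf.indexOf('', 3));
+ * // Prints: 3 (byteOffset is less than buf.length)
+ * console.log(buf.indexOf('', 99));
+ * // Prints: 6 (byteOffset is at least buf.length, so buf.length is returned)
+ * ```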
+ * @since v1.5.0
+ * @param value What to search for.
+ * @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`.
+ * @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`.
+ * @return The index of the first occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`.
+ */
+ indexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number;
+ /**
+ * Identical to `buf.indexOf()`, except the last occurrence of `value` is found
+ * rather than the first occurrence.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from('this buffer is a buffer');
+ *
+ * console.log(buf.lastIndexOf('this'));
+ * // Prints: 0
+ * console.log(buf.lastIndexOf('buffer'));
+ * // Prints: 17
+ * console.log(buf.lastIndexOf(Buffer.from('buffer')));
+ * // Prints: 17
+ * console.log(buf.lastIndexOf(97));
+ * // Prints: 15 (97 is the decimal ASCII value for 'a')
+ * console.log(buf.lastIndexOf(Buffer.from('yolo')));
+ * // Prints: -1
+ * console.log(buf.lastIndexOf('buffer', 5));
+ * // Prints: 5
+ * console.log(buf.lastIndexOf('buffer', 4));
+ * // Prints: -1
+ *
+ * const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le');
+ *
+ * console.log(utf16Buffer.lastIndexOf('\u03a3', undefined, 'utf16le'));
+ * // Prints: 6
+ * console.log(utf16Buffer.lastIndexOf('\u03a3', -5, 'utf16le'));
+ * // Prints: 4
+ * ```
+ *
+ * If `value` is not a string, number, or `Buffer`, this method will throw a `TypeError`. If `value` is a number, it will be coerced to a valid byte value,
+ * an integer between 0 and 255.
+ *
+ * If `byteOffset` is not a number, it will be coerced to a number. Any arguments
+ * that coerce to `NaN`, like `{}` or `undefined`, will search the whole buffer.
+ * This behavior matches [`String.prototype.lastIndexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/lastIndexOf).
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const b = Buffer.from('abcdef');
+ *
+ * // Passing a value that's a number, but not a valid byte.
+ * // Prints: 2, equivalent to searching for 99 or 'c'.
+ * console.log(b.lastIndexOf(99.9));
+ * console.log(b.lastIndexOf(256 + 99));
+ *
+ * // Passing a byteOffset that coerces to NaN.
+ * // Prints: 1, searching the whole buffer.
+ * console.log(b.lastIndexOf('b', undefined));
+ * console.log(b.lastIndexOf('b', {}));
+ *
+ * // Passing a byteOffset that coerces to 0.
+ * // Prints: -1, equivalent to passing 0.
+ * console.log(b.lastIndexOf('b', null));
+ * console.log(b.lastIndexOf('b', []));
+ * ```
+ *
+ * If `value` is an empty string or empty `Buffer`, `byteOffset` will be returned.
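+ *
+ * The empty-value rule in code form (an illustrative sketch):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * console.log(Buffer.from('abcdef').lastIndexOf('', 3));
+ * // Prints: 3 (the byteOffset is returned for an empty value)
+ * ```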
+ * @since v6.0.0
+ * @param value What to search for.
+ * @param [byteOffset=buf.length - 1] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`.
+ * @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`.
+ * @return The index of the last occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`.
+ */
+ lastIndexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number;
+ /**
+ * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) of `[index, byte]` pairs from the contents
+ * of `buf`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Log the entire contents of a `Buffer`.
+ *
+ * const buf = Buffer.from('buffer');
+ *
+ * for (const pair of buf.entries()) {
+ * console.log(pair);
+ * }
+ * // Prints:
+ * // [0, 98]
+ * // [1, 117]
+ * // [2, 102]
+ * // [3, 102]
+ * // [4, 101]
+ * // [5, 114]
+ * ```
+ * @since v1.1.0
+ */
+ entries(): IterableIterator<[number, number]>;
+ /**
+ * Equivalent to `buf.indexOf() !== -1`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from('this is a buffer');
+ *
+ * console.log(buf.includes('this'));
+ * // Prints: true
+ * console.log(buf.includes('is'));
+ * // Prints: true
+ * console.log(buf.includes(Buffer.from('a buffer')));
+ * // Prints: true
+ * console.log(buf.includes(97));
+ * // Prints: true (97 is the decimal ASCII value for 'a')
+ * console.log(buf.includes(Buffer.from('a buffer example')));
+ * // Prints: false
+ * console.log(buf.includes(Buffer.from('a buffer example').slice(0, 8)));
+ * // Prints: true
+ * console.log(buf.includes('this', 4));
+ * // Prints: false
+ * ```
+ * @since v5.3.0
+ * @param value What to search for.
+ * @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`.
+ * @param [encoding='utf8'] If `value` is a string, this is its encoding.
+ * @return `true` if `value` was found in `buf`, `false` otherwise.
+ */
+ includes(value: string | number | Buffer, byteOffset?: number, encoding?: BufferEncoding): boolean;
+ /**
+ * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) of `buf` keys (indices).
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from('buffer');
+ *
+ * for (const key of buf.keys()) {
+ * console.log(key);
+ * }
+ * // Prints:
+ * // 0
+ * // 1
+ * // 2
+ * // 3
+ * // 4
+ * // 5
+ * ```
+ * @since v1.1.0
+ */
+ keys(): IterableIterator<number>;
+ /**
+ * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) for `buf` values (bytes). This function is
+ * called automatically when a `Buffer` is used in a `for..of` statement.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from('buffer');
+ *
+ * for (const value of buf.values()) {
+ * console.log(value);
+ * }
+ * // Prints:
+ * // 98
+ * // 117
+ * // 102
+ * // 102
+ * // 101
+ * // 114
+ *
+ * for (const value of buf) {
+ * console.log(value);
+ * }
+ * // Prints:
+ * // 98
+ * // 117
+ * // 102
+ * // 102
+ * // 101
+ * // 114
+ * ```
+ * @since v1.1.0
+ */
+ values(): IterableIterator<number>;
+ }
+ var Buffer: BufferConstructor;
+ /**
+ * Decodes a string of Base64-encoded data into bytes, and encodes those bytes
+ * into a string using Latin-1 (ISO-8859-1).
+ *
+ * The `data` may be any JavaScript value that can be coerced into a string.
+ *
+ * **This function is only provided for compatibility with legacy web platform APIs**
+ * **and should never be used in new code, because they use strings to represent**
+ * **binary data and predate the introduction of typed arrays in JavaScript.**
+ * **For code running using Node.js APIs, converting between base64-encoded strings**
+ * **and binary data should be performed using `Buffer.from(str, 'base64')` and `buf.toString('base64')`.**
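+ *
+ * A minimal sketch of the recommended `Buffer`-based replacement (the sample
+ * string is illustrative):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const encoded = 'aGVsbG8=';
+ * console.log(atob(encoded));
+ * // Prints: hello
+ * console.log(Buffer.from(encoded, 'base64').toString('latin1'));
+ * // Prints: hello
+ * ```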
+ * @since v15.13.0, v14.17.0
+ * @legacy Use `Buffer.from(data, 'base64')` instead.
+ * @param data The Base64-encoded input string.
+ */
+ function atob(data: string): string;
+ /**
+ * Decodes a string into bytes using Latin-1 (ISO-8859-1), and encodes those bytes
+ * into a string using Base64.
+ *
+ * The `data` may be any JavaScript value that can be coerced into a string.
+ *
+ * **This function is only provided for compatibility with legacy web platform APIs**
+ * **and should never be used in new code, because they use strings to represent**
+ * **binary data and predate the introduction of typed arrays in JavaScript.**
+ * **For code running using Node.js APIs, converting between base64-encoded strings**
+ * **and binary data should be performed using `Buffer.from(str, 'base64')` and `buf.toString('base64')`.**
+ * @since v15.13.0, v14.17.0
+ * @legacy Use `buf.toString('base64')` instead.
+ * @param data An ASCII (Latin1) string.
+ */
+ function btoa(data: string): string;
+ interface Blob extends __Blob {}
+ /**
+ * `Blob` class is a global reference for `require('node:buffer').Blob`
+ * https://nodejs.org/api/buffer.html#class-blob
+ * @since v18.0.0
+ */
+ var Blob: typeof globalThis extends {
+ onmessage: any;
+ Blob: infer T;
+ } ? T
+ : typeof NodeBlob;
+ }
+}
+declare module "node:buffer" {
+ export * from "buffer";
+}
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/node/child_process.d.ts b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/child_process.d.ts
new file mode 100644
index 0000000000000000000000000000000000000000..e5078b8524729156c1757afd9d8e52e08e4a2b10
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/child_process.d.ts
@@ -0,0 +1,1542 @@
+/**
+ * The `node:child_process` module provides the ability to spawn subprocesses in
+ * a manner that is similar, but not identical, to [`popen(3)`](http://man7.org/linux/man-pages/man3/popen.3.html). This capability
+ * is primarily provided by the {@link spawn} function:
+ *
+ * ```js
+ * const { spawn } = require('node:child_process');
+ * const ls = spawn('ls', ['-lh', '/usr']);
+ *
+ * ls.stdout.on('data', (data) => {
+ * console.log(`stdout: ${data}`);
+ * });
+ *
+ * ls.stderr.on('data', (data) => {
+ * console.error(`stderr: ${data}`);
+ * });
+ *
+ * ls.on('close', (code) => {
+ * console.log(`child process exited with code ${code}`);
+ * });
+ * ```
+ *
+ * By default, pipes for `stdin`, `stdout`, and `stderr` are established between
+ * the parent Node.js process and the spawned subprocess. These pipes have
+ * limited (and platform-specific) capacity. If the subprocess writes to
+ * stdout in excess of that limit without the output being captured, the
+ * subprocess blocks waiting for the pipe buffer to accept more data. This is
+ * identical to the behavior of pipes in the shell. Use the `{ stdio: 'ignore' }` option if the output will not be consumed.
+ *
+ * The command lookup is performed using the `options.env.PATH` environment
+ * variable if `env` is in the `options` object. Otherwise, `process.env.PATH` is
+ * used. If `options.env` is set without `PATH`, lookup on Unix is performed
+ * on a default search path of `/usr/bin:/bin` (see your operating system's
+ * manual for execvpe/execvp); on Windows, the current process's `PATH`
+ * environment variable is used.
+ *
+ * On Windows, environment variables are case-insensitive. Node.js
+ * lexicographically sorts the `env` keys and uses the first one that
+ * case-insensitively matches. Only the first (in lexicographic order) entry will be
+ * passed to the subprocess. This might lead to issues on Windows when passing
+ * objects to the `env` option that have multiple variants of the same key, such as `PATH` and `Path`.
+ *
+ * The {@link spawn} method spawns the child process asynchronously,
+ * without blocking the Node.js event loop. The {@link spawnSync} function provides equivalent functionality in a synchronous manner that blocks
+ * the event loop until the spawned process either exits or is terminated.
+ *
+ * For convenience, the `node:child_process` module provides a handful of
+ * synchronous and asynchronous alternatives to {@link spawn} and {@link spawnSync}. Each of these alternatives are implemented on
+ * top of {@link spawn} or {@link spawnSync}.
+ *
+ * * {@link exec}: spawns a shell and runs a command within that
+ * shell, passing the `stdout` and `stderr` to a callback function when
+ * complete.
+ * * {@link execFile}: similar to {@link exec} except
+ * that it spawns the command directly without first spawning a shell by
+ * default.
+ * * {@link fork}: spawns a new Node.js process and invokes a
+ * specified module with an IPC communication channel established that allows
+ * sending messages between parent and child.
+ * * {@link execSync}: a synchronous version of {@link exec} that will block the Node.js event loop.
+ * * {@link execFileSync}: a synchronous version of {@link execFile} that will block the Node.js event loop.
+ *
+ * For certain use cases, such as automating shell scripts, the synchronous
+ * counterparts may be more convenient. In many cases, however, the synchronous
+ * methods can have a significant impact on performance due to stalling the
+ * event loop while spawned processes complete.
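+ *
+ * As a brief, illustrative sketch (it assumes a `node` executable is on the
+ * `PATH`; error handling is omitted), the same command can be run through
+ * either of the two main asynchronous helpers described above:
+ *
+ * ```js
+ * const { exec, execFile } = require('node:child_process');
+ *
+ * // exec() runs the command line through a shell.
+ * exec('node --version', (error, stdout) => {
+ * console.log(`exec: ${stdout.trim()}`);
+ * });
+ *
+ * // execFile() spawns the executable directly, without a shell.
+ * execFile('node', ['--version'], (error, stdout) => {
+ * console.log(`execFile: ${stdout.trim()}`);
+ * });
+ * ```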
+ * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/child_process.js)
+ */
+declare module "child_process" {
+ import { ObjectEncodingOptions } from "node:fs";
+ import { Abortable, EventEmitter } from "node:events";
+ import * as net from "node:net";
+ import { Pipe, Readable, Stream, Writable } from "node:stream";
+ import { URL } from "node:url";
+ type Serializable = string | object | number | boolean | bigint;
+ type SendHandle = net.Socket | net.Server;
+ /**
+ * Instances of the `ChildProcess` represent spawned child processes.
+ *
+ * Instances of `ChildProcess` are not intended to be created directly. Rather,
+ * use the {@link spawn}, {@link exec},{@link execFile}, or {@link fork} methods to create
+ * instances of `ChildProcess`.
+ * @since v2.2.0
+ */
+ class ChildProcess extends EventEmitter {
+ /**
+ * A `Writable Stream` that represents the child process's `stdin`.
+ *
+ * If a child process waits to read all of its input, the child will not continue
+ * until this stream has been closed via `end()`.
+ *
+ * If the child was spawned with `stdio[0]` set to anything other than `'pipe'`,
+ * then this will be `null`.
+ *
+ * `subprocess.stdin` is an alias for `subprocess.stdio[0]`. Both properties will
+ * refer to the same value.
+ *
+ * The `subprocess.stdin` property can be `null` or `undefined` if the child process could not be successfully spawned.
+ * @since v0.1.90
+ */
+ stdin: Writable | null;
+ /**
+ * A `Readable Stream` that represents the child process's `stdout`.
+ *
+ * If the child was spawned with `stdio[1]` set to anything other than `'pipe'`,
+ * then this will be `null`.
+ *
+ * `subprocess.stdout` is an alias for `subprocess.stdio[1]`. Both properties will
+ * refer to the same value.
+ *
+ * ```js
+ * const { spawn } = require('node:child_process');
+ *
+ * const subprocess = spawn('ls');
+ *
+ * subprocess.stdout.on('data', (data) => {
+ * console.log(`Received chunk ${data}`);
+ * });
+ * ```
+ *
+ * The `subprocess.stdout` property can be `null` or `undefined` if the child process could not be successfully spawned.
+ * @since v0.1.90
+ */
+ stdout: Readable | null;
+ /**
+ * A `Readable Stream` that represents the child process's `stderr`.
+ *
+ * If the child was spawned with `stdio[2]` set to anything other than `'pipe'`,
+ * then this will be `null`.
+ *
+ * `subprocess.stderr` is an alias for `subprocess.stdio[2]`. Both properties will
+ * refer to the same value.
+ *
+ * The `subprocess.stderr` property can be `null` or `undefined` if the child process could not be successfully spawned.
+ * @since v0.1.90
+ */
+ stderr: Readable | null;
+ /**
+ * The `subprocess.channel` property is a reference to the child's IPC channel. If
+ * no IPC channel exists, this property is `undefined`.
+ * @since v7.1.0
+ */
+ readonly channel?: Pipe | null | undefined;
+ /**
+ * A sparse array of pipes to the child process, corresponding with positions in
+ * the `stdio` option passed to {@link spawn} that have been set
+ * to the value `'pipe'`. `subprocess.stdio[0]`, `subprocess.stdio[1]`, and `subprocess.stdio[2]` are also available as `subprocess.stdin`, `subprocess.stdout`, and `subprocess.stderr`,
+ * respectively.
+ *
+ * In the following example, only the child's fd `1` (stdout) is configured as a
+ * pipe, so only the parent's `subprocess.stdio[1]` is a stream, all other values
+ * in the array are `null`.
+ *
+ * ```js
+ * const assert = require('node:assert');
+ * const fs = require('node:fs');
+ * const child_process = require('node:child_process');
+ *
+ * const subprocess = child_process.spawn('ls', {
+ * stdio: [
+ * 0, // Use parent's stdin for child.
+ * 'pipe', // Pipe child's stdout to parent.
+ * fs.openSync('err.out', 'w'), // Direct child's stderr to a file.
+ * ],
+ * });
+ *
+ * assert.strictEqual(subprocess.stdio[0], null);
+ * assert.strictEqual(subprocess.stdio[0], subprocess.stdin);
+ *
+ * assert(subprocess.stdout);
+ * assert.strictEqual(subprocess.stdio[1], subprocess.stdout);
+ *
+ * assert.strictEqual(subprocess.stdio[2], null);
+ * assert.strictEqual(subprocess.stdio[2], subprocess.stderr);
+ * ```
+ *
+ * The `subprocess.stdio` property can be `undefined` if the child process could
+ * not be successfully spawned.
+ * @since v0.7.10
+ */
+ readonly stdio: [
+ Writable | null,
+ // stdin
+ Readable | null,
+ // stdout
+ Readable | null,
+ // stderr
+ Readable | Writable | null | undefined,
+ // extra
+ Readable | Writable | null | undefined, // extra
+ ];
+ /**
+ * The `subprocess.killed` property indicates whether the child process
+ * successfully received a signal from `subprocess.kill()`. The `killed` property
+ * does not indicate that the child process has been terminated.
+ * @since v0.5.10
+ */
+ readonly killed: boolean;
+ /**
+ * Returns the process identifier (PID) of the child process. If the child process
+ * fails to spawn due to errors, then the value is `undefined` and `error` is
+ * emitted.
+ *
+ * ```js
+ * const { spawn } = require('node:child_process');
+ * const grep = spawn('grep', ['ssh']);
+ *
+ * console.log(`Spawned child pid: ${grep.pid}`);
+ * grep.stdin.end();
+ * ```
+ * @since v0.1.90
+ */
+ readonly pid?: number | undefined;
+ /**
+ * The `subprocess.connected` property indicates whether it is still possible to
+ * send and receive messages from a child process. When `subprocess.connected` is `false`, it is no longer possible to send or receive messages.
+ * @since v0.7.2
+ */
+ readonly connected: boolean;
+ /**
+ * The `subprocess.exitCode` property indicates the exit code of the child process.
+ * If the child process is still running, the field will be `null`.
+ */
+ readonly exitCode: number | null;
+ /**
+ * The `subprocess.signalCode` property indicates the signal received by
+ * the child process if any, else `null`.
+ */
+ readonly signalCode: NodeJS.Signals | null;
+ /**
+ * The `subprocess.spawnargs` property represents the full list of command-line
+ * arguments the child process was launched with.
+ */
+ readonly spawnargs: string[];
+ /**
+ * The `subprocess.spawnfile` property indicates the executable file name of
+ * the child process that is launched.
+ *
+ * For {@link fork}, its value will be equal to `process.execPath`.
+ * For {@link spawn}, its value will be the name of
+ * the executable file.
+ * For {@link exec}, its value will be the name of the shell
+ * in which the child process is launched.
+ */
+ readonly spawnfile: string;
+ /**
+ * The `subprocess.kill()` method sends a signal to the child process. If no
+ * argument is given, the process will be sent the `'SIGTERM'` signal. See [`signal(7)`](http://man7.org/linux/man-pages/man7/signal.7.html) for a list of available signals. This function
+ * returns `true` if [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) succeeds, and `false` otherwise.
+ *
+ * ```js
+ * const { spawn } = require('node:child_process');
+ * const grep = spawn('grep', ['ssh']);
+ *
+ * grep.on('close', (code, signal) => {
+ * console.log(
+ * `child process terminated due to receipt of signal ${signal}`);
+ * });
+ *
+ * // Send SIGHUP to process.
+ * grep.kill('SIGHUP');
+ * ```
+ *
+ * The `ChildProcess` object may emit an `'error'` event if the signal
+ * cannot be delivered. Sending a signal to a child process that has already exited
+ * is not an error but may have unforeseen consequences. Specifically, if the
+ * process identifier (PID) has been reassigned to another process, the signal will
+ * be delivered to that process instead which can have unexpected results.
+ *
+ * While the function is called `kill`, the signal delivered to the child process
+ * may not actually terminate the process.
+ *
+ * See [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) for reference.
+ *
+ * On Windows, where POSIX signals do not exist, the `signal` argument will be
+ * ignored, and the process will be killed forcefully and abruptly (similar to `'SIGKILL'`).
+ * See `Signal Events` for more details.
+ *
+ * On Linux, child processes of child processes will not be terminated
+ * when attempting to kill their parent. This is likely to happen when running a
+ * new process in a shell or with the use of the `shell` option of `ChildProcess`:
+ *
+ * ```js
+ * 'use strict';
+ * const { spawn } = require('node:child_process');
+ *
+ * const subprocess = spawn(
+ * 'sh',
+ * [
+ * '-c',
+ * `node -e "setInterval(() => {
+ * console.log(process.pid, 'is alive')
+ * }, 500);"`,
+ * ], {
+ * stdio: ['inherit', 'inherit', 'inherit'],
+ * },
+ * );
+ *
+ * setTimeout(() => {
+ * subprocess.kill(); // Does not terminate the Node.js process in the shell.
+ * }, 2000);
+ * ```
+ * @since v0.1.90
+ */
+ kill(signal?: NodeJS.Signals | number): boolean;
+ /**
+ * Calls {@link ChildProcess.kill} with `'SIGTERM'`.
+ * @since v20.5.0
+ */
+ [Symbol.dispose](): void;
+ /**
+ * When an IPC channel has been established between the parent and child (
+ * i.e. when using {@link fork}), the `subprocess.send()` method can
+ * be used to send messages to the child process. When the child process is a
+ * Node.js instance, these messages can be received via the `'message'` event.
+ *
+ * The message goes through serialization and parsing. The resulting
+ * message might not be the same as what is originally sent.
+ *
+ * For example, in the parent script:
+ *
+ * ```js
+ * const cp = require('node:child_process');
+ * const n = cp.fork(`${__dirname}/sub.js`);
+ *
+ * n.on('message', (m) => {
+ * console.log('PARENT got message:', m);
+ * });
+ *
+ * // Causes the child to print: CHILD got message: { hello: 'world' }
+ * n.send({ hello: 'world' });
+ * ```
+ *
+ * And then the child script, `'sub.js'` might look like this:
+ *
+ * ```js
+ * process.on('message', (m) => {
+ * console.log('CHILD got message:', m);
+ * });
+ *
+ * // Causes the parent to print: PARENT got message: { foo: 'bar', baz: null }
+ * process.send({ foo: 'bar', baz: NaN });
+ * ```
+ *
+ * Child Node.js processes will have a `process.send()` method of their own
+ * that allows the child to send messages back to the parent.
+ *
+ * There is a special case when sending a `{cmd: 'NODE_foo'}` message. Messages
+ * containing a `NODE_` prefix in the `cmd` property are reserved for use within
+ * Node.js core and will not be emitted in the child's `'message'` event. Rather, such messages are emitted using the `'internalMessage'` event and are consumed internally by Node.js.
+ * Applications should avoid using such messages or listening for `'internalMessage'` events, as they are subject to change without notice.
+ *
+ * The optional `sendHandle` argument that may be passed to `subprocess.send()` is
+ * for passing a TCP server or socket object to the child process. The child will
+ * receive the object as the second argument passed to the callback function
+ * registered on the `'message'` event. Any data that is received
+ * and buffered in the socket will not be sent to the child.
+ *
+ * The optional `callback` is a function that is invoked after the message is
+ * sent but before the child may have received it. The function is called with a
+ * single argument: `null` on success, or an `Error` object on failure.
+ *
+ * If no `callback` function is provided and the message cannot be sent, an `'error'` event will be emitted by the `ChildProcess` object. This can
+ * happen, for instance, when the child process has already exited.
+ *
+ * `subprocess.send()` will return `false` if the channel has closed or when the
+ * backlog of unsent messages exceeds a threshold that makes it unwise to send
+ * more. Otherwise, the method returns `true`. The `callback` function can be
+ * used to implement flow control.
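+ *
+ * A small, illustrative sketch of that flow control (the `'worker.js'` module
+ * name is hypothetical):
+ *
+ * ```js
+ * const { fork } = require('node:child_process');
+ * const subprocess = fork('worker.js');
+ *
+ * const ok = subprocess.send({ cmd: 'work' }, (err) => {
+ * if (err) console.error('send failed:', err);
+ * });
+ * if (!ok) {
+ * // The backlog is full; pause before sending further messages.
+ * }
+ * ```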
+ *
+ * #### Example: sending a server object
+ *
+ * The `sendHandle` argument can be used, for instance, to pass the handle of
+ * a TCP server object to the child process as illustrated in the example below:
+ *
+ * ```js
+ * const subprocess = require('node:child_process').fork('subprocess.js');
+ *
+ * // Open up the server object and send the handle.
+ * const server = require('node:net').createServer();
+ * server.on('connection', (socket) => {
+ * socket.end('handled by parent');
+ * });
+ * server.listen(1337, () => {
+ * subprocess.send('server', server);
+ * });
+ * ```
+ *
+ * The child would then receive the server object as:
+ *
+ * ```js
+ * process.on('message', (m, server) => {
+ * if (m === 'server') {
+ * server.on('connection', (socket) => {
+ * socket.end('handled by child');
+ * });
+ * }
+ * });
+ * ```
+ *
+ * Once the server is shared between the parent and child, some connections
+ * can be handled by the parent and some by the child.
+ *
+ * While the example above uses a server created using the `node:net` module, `node:dgram` module servers use exactly the same workflow with the exceptions of
+ * listening on a `'message'` event instead of `'connection'` and using `server.bind()` instead of `server.listen()`. This is, however, only
+ * supported on Unix platforms.
+ *
+ * #### Example: sending a socket object
+ *
+ * Similarly, the `sendHandle` argument can be used to pass the handle of a
+ * socket to the child process. The example below spawns two children that each
+ * handle connections with "normal" or "special" priority:
+ *
+ * ```js
+ * const { fork } = require('node:child_process');
+ * const normal = fork('subprocess.js', ['normal']);
+ * const special = fork('subprocess.js', ['special']);
+ *
+ * // Open up the server and send sockets to child. Use pauseOnConnect to prevent
+ * // the sockets from being read before they are sent to the child process.
+ * const server = require('node:net').createServer({ pauseOnConnect: true });
+ * server.on('connection', (socket) => {
+ *
+ * // If this is special priority...
+ * if (socket.remoteAddress === '74.125.127.100') {
+ * special.send('socket', socket);
+ * return;
+ * }
+ * // This is normal priority.
+ * normal.send('socket', socket);
+ * });
+ * server.listen(1337);
+ * ```
+ *
+ * The `subprocess.js` would receive the socket handle as the second argument
+ * passed to the event callback function:
+ *
+ * ```js
+ * process.on('message', (m, socket) => {
+ * if (m === 'socket') {
+ * if (socket) {
+ * // Check that the client socket exists.
+ * // It is possible for the socket to be closed between the time it is
+ * // sent and the time it is received in the child process.
+ * socket.end(`Request handled with ${process.argv[2]} priority`);
+ * }
+ * }
+ * });
+ * ```
+ *
+ * Do not use `.maxConnections` on a socket that has been passed to a subprocess.
+ * The parent cannot track when the socket is destroyed.
+ *
+ * Any `'message'` handlers in the subprocess should verify that `socket` exists,
+ * as the connection may have been closed during the time it takes to send the
+ * connection to the child.
+ * @since v0.5.9
+ * @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles. `options` supports the following properties:
+ */
+ send(message: Serializable, callback?: (error: Error | null) => void): boolean;
+ send(message: Serializable, sendHandle?: SendHandle, callback?: (error: Error | null) => void): boolean;
+ send(
+ message: Serializable,
+ sendHandle?: SendHandle,
+ options?: MessageOptions,
+ callback?: (error: Error | null) => void,
+ ): boolean;
+ /**
+ * Closes the IPC channel between parent and child, allowing the child to exit
+ * gracefully once there are no other connections keeping it alive. After calling
+ * this method the `subprocess.connected` and `process.connected` properties in
+ * both the parent and child (respectively) will be set to `false`, and it will be
+ * no longer possible to pass messages between the processes.
+ *
+ * The `'disconnect'` event will be emitted when there are no messages in the
+ * process of being received. This will most often be triggered immediately after
+ * calling `subprocess.disconnect()`.
+ *
+ * When the child process is a Node.js instance (e.g. spawned using {@link fork}), the `process.disconnect()` method can be invoked
+ * within the child process to close the IPC channel as well.
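+ *
+ * An illustrative sketch (the `'worker.js'` module name is hypothetical):
+ *
+ * ```js
+ * const { fork } = require('node:child_process');
+ * const subprocess = fork('worker.js');
+ *
+ * subprocess.on('disconnect', () => {
+ * console.log('IPC channel closed; send() is no longer possible.');
+ * });
+ * subprocess.disconnect();
+ * ```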
+ * @since v0.7.2
+ */
+ disconnect(): void;
+ /**
+ * By default, the parent will wait for the detached child to exit. To prevent the
+ * parent from waiting for a given `subprocess` to exit, use the `subprocess.unref()` method. Doing so will cause the parent's event loop to not
+ * include the child in its reference count, allowing the parent to exit
+ * independently of the child, unless there is an established IPC channel between
+ * the child and the parent.
+ *
+ * ```js
+ * const { spawn } = require('node:child_process');
+ *
+ * const subprocess = spawn(process.argv[0], ['child_program.js'], {
+ * detached: true,
+ * stdio: 'ignore',
+ * });
+ *
+ * subprocess.unref();
+ * ```
+ * @since v0.7.10
+ */
+ unref(): void;
+ /**
+ * Calling `subprocess.ref()` after making a call to `subprocess.unref()` will
+ * restore the removed reference count for the child process, forcing the parent
+ * to wait for the child to exit before exiting itself.
+ *
+ * ```js
+ * const { spawn } = require('node:child_process');
+ *
+ * const subprocess = spawn(process.argv[0], ['child_program.js'], {
+ * detached: true,
+ * stdio: 'ignore',
+ * });
+ *
+ * subprocess.unref();
+ * subprocess.ref();
+ * ```
+ * @since v0.7.10
+ */
+ ref(): void;
+ /**
+ * events.EventEmitter
+ * 1. close
+ * 2. disconnect
+ * 3. error
+ * 4. exit
+ * 5. message
+ * 6. spawn
+ */
+ addListener(event: string, listener: (...args: any[]) => void): this;
+ addListener(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this;
+ addListener(event: "disconnect", listener: () => void): this;
+ addListener(event: "error", listener: (err: Error) => void): this;
+ addListener(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this;
+ addListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this;
+ addListener(event: "spawn", listener: () => void): this;
+ emit(event: string | symbol, ...args: any[]): boolean;
+ emit(event: "close", code: number | null, signal: NodeJS.Signals | null): boolean;
+ emit(event: "disconnect"): boolean;
+ emit(event: "error", err: Error): boolean;
+ emit(event: "exit", code: number | null, signal: NodeJS.Signals | null): boolean;
+ emit(event: "message", message: Serializable, sendHandle: SendHandle): boolean;
+ emit(event: "spawn", listener: () => void): boolean;
+ on(event: string, listener: (...args: any[]) => void): this;
+ on(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this;
+ on(event: "disconnect", listener: () => void): this;
+ on(event: "error", listener: (err: Error) => void): this;
+ on(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this;
+ on(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this;
+ on(event: "spawn", listener: () => void): this;
+ once(event: string, listener: (...args: any[]) => void): this;
+ once(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this;
+ once(event: "disconnect", listener: () => void): this;
+ once(event: "error", listener: (err: Error) => void): this;
+ once(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this;
+ once(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this;
+ once(event: "spawn", listener: () => void): this;
+ prependListener(event: string, listener: (...args: any[]) => void): this;
+ prependListener(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this;
+ prependListener(event: "disconnect", listener: () => void): this;
+ prependListener(event: "error", listener: (err: Error) => void): this;
+ prependListener(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this;
+ prependListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this;
+ prependListener(event: "spawn", listener: () => void): this;
+ prependOnceListener(event: string, listener: (...args: any[]) => void): this;
+ prependOnceListener(
+ event: "close",
+ listener: (code: number | null, signal: NodeJS.Signals | null) => void,
+ ): this;
+ prependOnceListener(event: "disconnect", listener: () => void): this;
+ prependOnceListener(event: "error", listener: (err: Error) => void): this;
+ prependOnceListener(
+ event: "exit",
+ listener: (code: number | null, signal: NodeJS.Signals | null) => void,
+ ): this;
+ prependOnceListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this;
+ prependOnceListener(event: "spawn", listener: () => void): this;
+ }
+ // return this object when stdio option is undefined or not specified
+ interface ChildProcessWithoutNullStreams extends ChildProcess {
+ stdin: Writable;
+ stdout: Readable;
+ stderr: Readable;
+ readonly stdio: [
+ Writable,
+ Readable,
+ Readable,
+ // stderr
+ Readable | Writable | null | undefined,
+ // extra, no modification
+ Readable | Writable | null | undefined, // extra, no modification
+ ];
+ }
+ // return this object when stdio option is a tuple of 3
+ interface ChildProcessByStdio<I extends null | Writable, O extends null | Readable, E extends null | Readable>
+ extends ChildProcess
+ {
+ stdin: I;
+ stdout: O;
+ stderr: E;
+ readonly stdio: [
+ I,
+ O,
+ E,
+ Readable | Writable | null | undefined,
+ // extra, no modification
+ Readable | Writable | null | undefined, // extra, no modification
+ ];
+ }
+ interface MessageOptions {
+ keepOpen?: boolean | undefined;
+ }
+ type IOType = "overlapped" | "pipe" | "ignore" | "inherit";
+ type StdioOptions = IOType | Array<IOType | "ipc" | Stream | number | null | undefined>;
+ type SerializationType = "json" | "advanced";
+ interface MessagingOptions extends Abortable {
+ /**
+ * Specify the kind of serialization used for sending messages between processes.
+ * @default 'json'
+ */
+ serialization?: SerializationType | undefined;
+ /**
+ * The signal value to be used when the spawned process will be killed by the abort signal.
+ * @default 'SIGTERM'
+ */
+ killSignal?: NodeJS.Signals | number | undefined;
+ /**
+ * In milliseconds the maximum amount of time the process is allowed to run.
+ */
+ timeout?: number | undefined;
+ }
+ interface ProcessEnvOptions {
+ uid?: number | undefined;
+ gid?: number | undefined;
+ cwd?: string | URL | undefined;
+ env?: NodeJS.ProcessEnv | undefined;
+ }
+ interface CommonOptions extends ProcessEnvOptions {
+ /**
+ * @default false
+ */
+ windowsHide?: boolean | undefined;
+ /**
+ * @default 0
+ */
+ timeout?: number | undefined;
+ }
+ interface CommonSpawnOptions extends CommonOptions, MessagingOptions, Abortable {
+ argv0?: string | undefined;
+ /**
+ * Can be set to 'pipe', 'inherit', 'overlapped', or 'ignore', or an array of these strings.
+ * If passed as an array, the first element is used for `stdin`, the second for
+ * `stdout`, and the third for `stderr`. A fourth element can be used to
+ * specify the `stdio` behavior beyond the standard streams. See
+ * {@link ChildProcess.stdio} for more information.
+ *
+ * @default 'pipe'
+ */
+ stdio?: StdioOptions | undefined;
+ shell?: boolean | string | undefined;
+ windowsVerbatimArguments?: boolean | undefined;
+ }
+ interface SpawnOptions extends CommonSpawnOptions {
+ detached?: boolean | undefined;
+ }
+ interface SpawnOptionsWithoutStdio extends SpawnOptions {
+ stdio?: StdioPipeNamed | StdioPipe[] | undefined;
+ }
+ type StdioNull = "inherit" | "ignore" | Stream;
+ type StdioPipeNamed = "pipe" | "overlapped";
+ type StdioPipe = undefined | null | StdioPipeNamed;
+ interface SpawnOptionsWithStdioTuple<
+ Stdin extends StdioNull | StdioPipe,
+ Stdout extends StdioNull | StdioPipe,
+ Stderr extends StdioNull | StdioPipe,
+ > extends SpawnOptions {
+ stdio: [Stdin, Stdout, Stderr];
+ }
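+ // Illustrative note (not part of the upstream typings): when `stdio` is given as a
+ // literal tuple, the spawn() overloads below use SpawnOptionsWithStdioTuple to narrow
+ // the stream types on the returned child. For example:
+ //
+ // const p = spawn('ls', { stdio: ['pipe', 'pipe', 'ignore'] });
+ // // p is ChildProcessByStdio<Writable, Readable, null>, so p.stderr is null.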
+ /**
+ * The `child_process.spawn()` method spawns a new process using the given `command`, with command-line arguments in `args`. If omitted, `args` defaults
+ * to an empty array.
+ *
+ * **If the `shell` option is enabled, do not pass unsanitized user input to this**
+ * **function. Any input containing shell metacharacters may be used to trigger**
+ * **arbitrary command execution.**
+ *
+ * A third argument may be used to specify additional options, with these defaults:
+ *
+ * ```js
+ * const defaults = {
+ * cwd: undefined,
+ * env: process.env,
+ * };
+ * ```
+ *
+ * Use `cwd` to specify the working directory from which the process is spawned.
+ * If not given, the default is to inherit the current working directory. If given,
+ * but the path does not exist, the child process emits an `ENOENT` error
+ * and exits immediately. `ENOENT` is also emitted when the command
+ * does not exist.
+ *
+ * Use `env` to specify environment variables that will be visible to the new
+ * process, the default is `process.env`.
+ *
+ * `undefined` values in `env` will be ignored.
+ *
+ * Example of running `ls -lh /usr`, capturing `stdout`, `stderr`, and the
+ * exit code:
+ *
+ * ```js
+ * const { spawn } = require('node:child_process');
+ * const ls = spawn('ls', ['-lh', '/usr']);
+ *
+ * ls.stdout.on('data', (data) => {
+ * console.log(`stdout: ${data}`);
+ * });
+ *
+ * ls.stderr.on('data', (data) => {
+ * console.error(`stderr: ${data}`);
+ * });
+ *
+ * ls.on('close', (code) => {
+ * console.log(`child process exited with code ${code}`);
+ * });
+ * ```
+ *
+ * Example: A very elaborate way to run `ps ax | grep ssh`
+ *
+ * ```js
+ * const { spawn } = require('node:child_process');
+ * const ps = spawn('ps', ['ax']);
+ * const grep = spawn('grep', ['ssh']);
+ *
+ * ps.stdout.on('data', (data) => {
+ * grep.stdin.write(data);
+ * });
+ *
+ * ps.stderr.on('data', (data) => {
+ * console.error(`ps stderr: ${data}`);
+ * });
+ *
+ * ps.on('close', (code) => {
+ * if (code !== 0) {
+ * console.log(`ps process exited with code ${code}`);
+ * }
+ * grep.stdin.end();
+ * });
+ *
+ * grep.stdout.on('data', (data) => {
+ * console.log(data.toString());
+ * });
+ *
+ * grep.stderr.on('data', (data) => {
+ * console.error(`grep stderr: ${data}`);
+ * });
+ *
+ * grep.on('close', (code) => {
+ * if (code !== 0) {
+ * console.log(`grep process exited with code ${code}`);
+ * }
+ * });
+ * ```
+ *
+ * Example of checking for failed `spawn`:
+ *
+ * ```js
+ * const { spawn } = require('node:child_process');
+ * const subprocess = spawn('bad_command');
+ *
+ * subprocess.on('error', (err) => {
+ * console.error('Failed to start subprocess.');
+ * });
+ * ```
+ *
+ * Certain platforms (macOS, Linux) will use the value of `argv[0]` for the process
+ * title while others (Windows, SunOS) will use `command`.
+ *
+ * Node.js overwrites `argv[0]` with `process.execPath` on startup, so `process.argv[0]` in a Node.js child process will not match the `argv0` parameter passed to `spawn` from the parent. Retrieve
+ * it with the `process.argv0` property instead.
+ *
+ * If the `signal` option is enabled, calling `.abort()` on the corresponding `AbortController` is similar to calling `.kill()` on the child process except
+ * the error passed to the callback will be an `AbortError`:
+ *
+ * ```js
+ * const { spawn } = require('node:child_process');
+ * const controller = new AbortController();
+ * const { signal } = controller;
+ * const grep = spawn('grep', ['ssh'], { signal });
+ * grep.on('error', (err) => {
+ * // This will be called with err being an AbortError if the controller aborts
+ * });
+ * controller.abort(); // Stops the child process
+ * ```
+ * @since v0.1.90
+ * @param command The command to run.
+ * @param args List of string arguments.
+ */
+ function spawn(command: string, options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioPipe>,
+ ): ChildProcessByStdio<Writable, Readable, Readable>;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioNull>,
+ ): ChildProcessByStdio<Writable, Readable, null>;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioPipe>,
+ ): ChildProcessByStdio<Writable, null, Readable>;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioPipe>,
+ ): ChildProcessByStdio<null, Readable, Readable>;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioNull>,
+ ): ChildProcessByStdio<Writable, null, null>;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioNull>,
+ ): ChildProcessByStdio<null, Readable, null>;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioPipe>,
+ ): ChildProcessByStdio<null, null, Readable>;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioNull>,
+ ): ChildProcessByStdio<null, null, null>;
+ function spawn(command: string, options: SpawnOptions): ChildProcess;
+ // overloads of spawn with 'args'
+ function spawn(
+ command: string,
+ args?: readonly string[],
+ options?: SpawnOptionsWithoutStdio,
+ ): ChildProcessWithoutNullStreams;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioPipe>,
+ ): ChildProcessByStdio<Writable, Readable, Readable>;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioNull>,
+ ): ChildProcessByStdio<Writable, Readable, null>;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioPipe>,
+ ): ChildProcessByStdio<Writable, null, Readable>;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioPipe>,
+ ): ChildProcessByStdio<null, Readable, Readable>;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioNull>,
+ ): ChildProcessByStdio<Writable, null, null>;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioNull>,
+ ): ChildProcessByStdio<null, Readable, null>;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioPipe>,
+ ): ChildProcessByStdio<null, null, Readable>;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioNull>,
+ ): ChildProcessByStdio<null, null, null>;
+ function spawn(command: string, args: readonly string[], options: SpawnOptions): ChildProcess;
+ interface ExecOptions extends CommonOptions {
+ shell?: string | undefined;
+ signal?: AbortSignal | undefined;
+ maxBuffer?: number | undefined;
+ killSignal?: NodeJS.Signals | number | undefined;
+ }
+ interface ExecOptionsWithStringEncoding extends ExecOptions {
+ encoding: BufferEncoding;
+ }
+ interface ExecOptionsWithBufferEncoding extends ExecOptions {
+ encoding: BufferEncoding | null; // specify `null`.
+ }
+ interface ExecException extends Error {
+ cmd?: string | undefined;
+ killed?: boolean | undefined;
+ code?: number | undefined;
+ signal?: NodeJS.Signals | undefined;
+ stdout?: string;
+ stderr?: string;
+ }
+ /**
+ * Spawns a shell then executes the `command` within that shell, buffering any
+ * generated output. The `command` string passed to the exec function is processed
+ * directly by the shell and special characters (vary based on [shell](https://en.wikipedia.org/wiki/List_of_command-line_interpreters))
+ * need to be dealt with accordingly:
+ *
+ * ```js
+ * const { exec } = require('node:child_process');
+ *
+ * exec('"/path/to/test file/test.sh" arg1 arg2');
+ * // Double quotes are used so that the space in the path is not interpreted as
+ * // a delimiter of multiple arguments.
+ *
+ * exec('echo "The \\$HOME variable is $HOME"');
+ * // The $HOME variable is escaped in the first instance, but not in the second.
+ * ```
+ *
+ * **Never pass unsanitized user input to this function. Any input containing shell**
+ * **metacharacters may be used to trigger arbitrary command execution.**
+ *
+ * If a `callback` function is provided, it is called with the arguments `(error, stdout, stderr)`. On success, `error` will be `null`. On error, `error` will be an instance of `Error`. The
+ * `error.code` property will be
+ * the exit code of the process. By convention, any exit code other than `0` indicates an error. `error.signal` will be the signal that terminated the
+ * process.
+ *
+ * The `stdout` and `stderr` arguments passed to the callback will contain the
+ * stdout and stderr output of the child process. By default, Node.js will decode
+ * the output as UTF-8 and pass strings to the callback. The `encoding` option
+ * can be used to specify the character encoding used to decode the stdout and
+ * stderr output. If `encoding` is `'buffer'`, or an unrecognized character
+ * encoding, `Buffer` objects will be passed to the callback instead.
+ *
+ * ```js
+ * const { exec } = require('node:child_process');
+ * exec('cat *.js missing_file | wc -l', (error, stdout, stderr) => {
+ * if (error) {
+ * console.error(`exec error: ${error}`);
+ * return;
+ * }
+ * console.log(`stdout: ${stdout}`);
+ * console.error(`stderr: ${stderr}`);
+ * });
+ * ```
+ *
+ * If `timeout` is greater than `0`, the parent will send the signal
+ * identified by the `killSignal` property (the default is `'SIGTERM'`) if the
+ * child runs longer than `timeout` milliseconds.
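+ *
+ * For instance (an illustrative sketch; it assumes a POSIX `sleep` command):
+ *
+ * ```js
+ * const { exec } = require('node:child_process');
+ *
+ * // If the command runs longer than 500 ms, it is sent SIGKILL.
+ * exec('sleep 10', { timeout: 500, killSignal: 'SIGKILL' }, (error) => {
+ * console.error(error); // error reports that the process was killed
+ * });
+ * ```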
+ *
+ * Unlike the [`exec(3)`](http://man7.org/linux/man-pages/man3/exec.3.html) POSIX system call, `child_process.exec()` does not replace
+ * the existing process and uses a shell to execute the command.
+ *
+ * If this method is invoked as its `util.promisify()`ed version, it returns
+ * a `Promise` for an `Object` with `stdout` and `stderr` properties. The returned `ChildProcess` instance is attached to the `Promise` as a `child` property. In
+ * case of an error (including any error resulting in an exit code other than 0), a
+ * rejected promise is returned, with the same `error` object given in the
+ * callback, but with two additional properties `stdout` and `stderr`.
+ *
+ * ```js
+ * const util = require('node:util');
+ * const exec = util.promisify(require('node:child_process').exec);
+ *
+ * async function lsExample() {
+ * const { stdout, stderr } = await exec('ls');
+ * console.log('stdout:', stdout);
+ * console.error('stderr:', stderr);
+ * }
+ * lsExample();
+ * ```
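+ *
+ * The attached `child` property can be reached from the returned promise as well
+ * (a brief, illustrative sketch):
+ *
+ * ```js
+ * const util = require('node:util');
+ * const exec = util.promisify(require('node:child_process').exec);
+ *
+ * const promise = exec('ls');
+ * console.log(`Spawned pid: ${promise.child.pid}`);
+ * promise.then(({ stdout }) => console.log(stdout));
+ * ```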
+ *
+ * If the `signal` option is enabled, calling `.abort()` on the corresponding `AbortController` is similar to calling `.kill()` on the child process except
+ * the error passed to the callback will be an `AbortError`:
+ *
+ * ```js
+ * const { exec } = require('node:child_process');
+ * const controller = new AbortController();
+ * const { signal } = controller;
+ * const child = exec('grep ssh', { signal }, (error) => {
+ * console.error(error); // an AbortError
+ * });
+ * controller.abort();
+ * ```
+ * @since v0.1.90
+ * @param command The command to run, with space-separated arguments.
+ * @param callback called with the output when process terminates.
+ */
+ function exec(
+ command: string,
+ callback?: (error: ExecException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`.
+ function exec(
+ command: string,
+ options: {
+ encoding: "buffer" | null;
+ } & ExecOptions,
+ callback?: (error: ExecException | null, stdout: Buffer, stderr: Buffer) => void,
+ ): ChildProcess;
+ // `options` with well known `encoding` means stdout/stderr are definitely `string`.
+ function exec(
+ command: string,
+ options: {
+ encoding: BufferEncoding;
+ } & ExecOptions,
+ callback?: (error: ExecException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`.
+ // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`.
+ function exec(
+ command: string,
+ options: {
+ encoding: string;
+ } & ExecOptions,
+ callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void,
+ ): ChildProcess;
+ // `options` without an `encoding` means stdout/stderr are definitely `string`.
+ function exec(
+ command: string,
+ options: ExecOptions,
+ callback?: (error: ExecException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ // fallback if nothing else matches. Worst case is always `string | Buffer`.
+ function exec(
+ command: string,
+ options: (ObjectEncodingOptions & ExecOptions) | undefined | null,
+ callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void,
+ ): ChildProcess;
+ interface PromiseWithChild<T> extends Promise<T> {
+ child: ChildProcess;
+ }
+ namespace exec {
+ function __promisify__(command: string): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ command: string,
+ options: {
+ encoding: "buffer" | null;
+ } & ExecOptions,
+ ): PromiseWithChild<{
+ stdout: Buffer;
+ stderr: Buffer;
+ }>;
+ function __promisify__(
+ command: string,
+ options: {
+ encoding: BufferEncoding;
+ } & ExecOptions,
+ ): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ command: string,
+ options: ExecOptions,
+ ): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ command: string,
+ options?: (ObjectEncodingOptions & ExecOptions) | null,
+ ): PromiseWithChild<{
+ stdout: string | Buffer;
+ stderr: string | Buffer;
+ }>;
+ }
+ interface ExecFileOptions extends CommonOptions, Abortable {
+ maxBuffer?: number | undefined;
+ killSignal?: NodeJS.Signals | number | undefined;
+ windowsVerbatimArguments?: boolean | undefined;
+ shell?: boolean | string | undefined;
+ signal?: AbortSignal | undefined;
+ }
+ interface ExecFileOptionsWithStringEncoding extends ExecFileOptions {
+ encoding: BufferEncoding;
+ }
+ interface ExecFileOptionsWithBufferEncoding extends ExecFileOptions {
+ encoding: "buffer" | null;
+ }
+ interface ExecFileOptionsWithOtherEncoding extends ExecFileOptions {
+ encoding: string;
+ }
+ type ExecFileException =
+ & Omit<ExecException, "code">
+ & Omit<NodeJS.ErrnoException, "code">
+ & { code?: string | number | undefined | null };
+ /**
+ * The `child_process.execFile()` function is similar to {@link exec} except that it does not spawn a shell by default. Rather, the specified
+ * executable `file` is spawned directly as a new process making it slightly more
+ * efficient than {@link exec}.
+ *
+ * The same options as {@link exec} are supported. Since a shell is
+ * not spawned, behaviors such as I/O redirection and file globbing are not
+ * supported.
+ *
+ * ```js
+ * const { execFile } = require('node:child_process');
+ * const child = execFile('node', ['--version'], (error, stdout, stderr) => {
+ * if (error) {
+ * throw error;
+ * }
+ * console.log(stdout);
+ * });
+ * ```
+ *
+ * The `stdout` and `stderr` arguments passed to the callback will contain the
+ * stdout and stderr output of the child process. By default, Node.js will decode
+ * the output as UTF-8 and pass strings to the callback. The `encoding` option
+ * can be used to specify the character encoding used to decode the stdout and
+ * stderr output. If `encoding` is `'buffer'`, or an unrecognized character
+ * encoding, `Buffer` objects will be passed to the callback instead.
+ *
+ * If this method is invoked as its `util.promisify()`ed version, it returns
+ * a `Promise` for an `Object` with `stdout` and `stderr` properties. The returned `ChildProcess` instance is attached to the `Promise` as a `child` property. In
+ * case of an error (including any error resulting in an exit code other than 0), a
+ * rejected promise is returned, with the same `error` object given in the
+ * callback, but with two additional properties `stdout` and `stderr`.
+ *
+ * ```js
+ * const util = require('node:util');
+ * const execFile = util.promisify(require('node:child_process').execFile);
+ * async function getVersion() {
+ * const { stdout } = await execFile('node', ['--version']);
+ * console.log(stdout);
+ * }
+ * getVersion();
+ * ```
+ *
+ * **If the `shell` option is enabled, do not pass unsanitized user input to this**
+ * **function. Any input containing shell metacharacters may be used to trigger**
+ * **arbitrary command execution.**
+ *
+ * If the `signal` option is enabled, calling `.abort()` on the corresponding `AbortController` is similar to calling `.kill()` on the child process except
+ * the error passed to the callback will be an `AbortError`:
+ *
+ * ```js
+ * const { execFile } = require('node:child_process');
+ * const controller = new AbortController();
+ * const { signal } = controller;
+ * const child = execFile('node', ['--version'], { signal }, (error) => {
+ * console.error(error); // an AbortError
+ * });
+ * controller.abort();
+ * ```
+ * @since v0.1.91
+ * @param file The name or path of the executable file to run.
+ * @param args List of string arguments.
+ * @param callback Called with the output when process terminates.
+ */
+ function execFile(file: string): ChildProcess;
+ function execFile(
+ file: string,
+ options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null,
+ ): ChildProcess;
+ function execFile(file: string, args?: readonly string[] | null): ChildProcess;
+ function execFile(
+ file: string,
+ args: readonly string[] | undefined | null,
+ options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null,
+ ): ChildProcess;
+ // no `options` definitely means stdout/stderr are `string`.
+ function execFile(
+ file: string,
+ callback: (error: ExecFileException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ function execFile(
+ file: string,
+ args: readonly string[] | undefined | null,
+ callback: (error: ExecFileException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`.
+ function execFile(
+ file: string,
+ options: ExecFileOptionsWithBufferEncoding,
+ callback: (error: ExecFileException | null, stdout: Buffer, stderr: Buffer) => void,
+ ): ChildProcess;
+ function execFile(
+ file: string,
+ args: readonly string[] | undefined | null,
+ options: ExecFileOptionsWithBufferEncoding,
+ callback: (error: ExecFileException | null, stdout: Buffer, stderr: Buffer) => void,
+ ): ChildProcess;
+ // `options` with well known `encoding` means stdout/stderr are definitely `string`.
+ function execFile(
+ file: string,
+ options: ExecFileOptionsWithStringEncoding,
+ callback: (error: ExecFileException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ function execFile(
+ file: string,
+ args: readonly string[] | undefined | null,
+ options: ExecFileOptionsWithStringEncoding,
+ callback: (error: ExecFileException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`.
+ // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`.
+ function execFile(
+ file: string,
+ options: ExecFileOptionsWithOtherEncoding,
+ callback: (error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void,
+ ): ChildProcess;
+ function execFile(
+ file: string,
+ args: readonly string[] | undefined | null,
+ options: ExecFileOptionsWithOtherEncoding,
+ callback: (error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void,
+ ): ChildProcess;
+ // `options` without an `encoding` means stdout/stderr are definitely `string`.
+ function execFile(
+ file: string,
+ options: ExecFileOptions,
+ callback: (error: ExecFileException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ function execFile(
+ file: string,
+ args: readonly string[] | undefined | null,
+ options: ExecFileOptions,
+ callback: (error: ExecFileException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ // fallback if nothing else matches. Worst case is always `string | Buffer`.
+ function execFile(
+ file: string,
+ options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null,
+ callback:
+ | ((error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void)
+ | undefined
+ | null,
+ ): ChildProcess;
+ function execFile(
+ file: string,
+ args: readonly string[] | undefined | null,
+ options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null,
+ callback:
+ | ((error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void)
+ | undefined
+ | null,
+ ): ChildProcess;
+ namespace execFile {
+ function __promisify__(file: string): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ file: string,
+ args: readonly string[] | undefined | null,
+ ): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ file: string,
+ options: ExecFileOptionsWithBufferEncoding,
+ ): PromiseWithChild<{
+ stdout: Buffer;
+ stderr: Buffer;
+ }>;
+ function __promisify__(
+ file: string,
+ args: readonly string[] | undefined | null,
+ options: ExecFileOptionsWithBufferEncoding,
+ ): PromiseWithChild<{
+ stdout: Buffer;
+ stderr: Buffer;
+ }>;
+ function __promisify__(
+ file: string,
+ options: ExecFileOptionsWithStringEncoding,
+ ): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ file: string,
+ args: readonly string[] | undefined | null,
+ options: ExecFileOptionsWithStringEncoding,
+ ): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ file: string,
+ options: ExecFileOptionsWithOtherEncoding,
+ ): PromiseWithChild<{
+ stdout: string | Buffer;
+ stderr: string | Buffer;
+ }>;
+ function __promisify__(
+ file: string,
+ args: readonly string[] | undefined | null,
+ options: ExecFileOptionsWithOtherEncoding,
+ ): PromiseWithChild<{
+ stdout: string | Buffer;
+ stderr: string | Buffer;
+ }>;
+ function __promisify__(
+ file: string,
+ options: ExecFileOptions,
+ ): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ file: string,
+ args: readonly string[] | undefined | null,
+ options: ExecFileOptions,
+ ): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ file: string,
+ options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null,
+ ): PromiseWithChild<{
+ stdout: string | Buffer;
+ stderr: string | Buffer;
+ }>;
+ function __promisify__(
+ file: string,
+ args: readonly string[] | undefined | null,
+ options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null,
+ ): PromiseWithChild<{
+ stdout: string | Buffer;
+ stderr: string | Buffer;
+ }>;
+ }
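The callback-based `execFile` overloads above narrow the output types the same way. A minimal sketch, assuming `node` is on the PATH:

```ts
// Sketch only: options.encoding decides which execFile overload applies.
import { execFile } from "node:child_process";

// No options: the callback receives string output.
execFile("node", ["--version"], (error, stdout) => {
    if (error) throw error;
    console.log(stdout.trim());
});

// encoding: "buffer" matches ExecFileOptionsWithBufferEncoding, so stdout is a Buffer.
execFile("node", ["--version"], { encoding: "buffer" }, (error, stdout) => {
    if (error) throw error;
    console.log(stdout.byteLength);
});
```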
+ interface ForkOptions extends ProcessEnvOptions, MessagingOptions, Abortable {
+ execPath?: string | undefined;
+ execArgv?: string[] | undefined;
+ silent?: boolean | undefined;
+ /**
+ * Can be set to 'pipe', 'inherit', 'overlapped', or 'ignore', or an array of these strings.
+ * If passed as an array, the first element is used for `stdin`, the second for
+ * `stdout`, and the third for `stderr`. A fourth element can be used to
+ * specify the `stdio` behavior beyond the standard streams. See
+ * {@link ChildProcess.stdio} for more information.
+ *
+ * @default 'pipe'
+ */
+ stdio?: StdioOptions | undefined;
+ detached?: boolean | undefined;
+ windowsVerbatimArguments?: boolean | undefined;
+ }
+ /**
+ * The `child_process.fork()` method is a special case of {@link spawn} used specifically to spawn new Node.js processes.
+ * Like {@link spawn}, a `ChildProcess` object is returned. The
+ * returned `ChildProcess` will have an additional communication channel
+ * built-in that allows messages to be passed back and forth between the parent and
+ * child. See `subprocess.send()` for details.
+ *
+ * Keep in mind that spawned Node.js child processes are
+ * independent of the parent with exception of the IPC communication channel
+ * that is established between the two. Each process has its own memory, with
+ * their own V8 instances. Because of the additional resource allocations
+ * required, spawning a large number of child Node.js processes is not
+ * recommended.
+ *
+ * By default, `child_process.fork()` will spawn new Node.js instances using the `process.execPath` of the parent process. The `execPath` property in the `options` object allows for an alternative
+ * execution path to be used.
+ *
+ * Node.js processes launched with a custom `execPath` will communicate with the
+ * parent process using the file descriptor (fd) identified using the
+ * environment variable `NODE_CHANNEL_FD` on the child process.
+ *
+ * Unlike the [`fork(2)`](http://man7.org/linux/man-pages/man2/fork.2.html) POSIX system call, `child_process.fork()` does not clone the
+ * current process.
+ *
+ * The `shell` option available in {@link spawn} is not supported by `child_process.fork()` and will be ignored if set.
+ *
+ * If the `signal` option is enabled, calling `.abort()` on the corresponding `AbortController` is similar to calling `.kill()` on the child process except
+ * the error passed to the callback will be an `AbortError`:
+ *
+ * ```js
+ * if (process.argv[2] === 'child') {
+ * setTimeout(() => {
+ * console.log(`Hello from ${process.argv[2]}!`);
+ * }, 1_000);
+ * } else {
+ * const { fork } = require('node:child_process');
+ * const controller = new AbortController();
+ * const { signal } = controller;
+ * const child = fork(__filename, ['child'], { signal });
+ * child.on('error', (err) => {
+ * // This will be called with err being an AbortError if the controller aborts
+ * });
+ * controller.abort(); // Stops the child process
+ * }
+ * ```
+ * @since v0.5.0
+ * @param modulePath The module to run in the child.
+ * @param args List of string arguments.
+ */
+ function fork(modulePath: string, options?: ForkOptions): ChildProcess;
+ function fork(modulePath: string, args?: readonly string[], options?: ForkOptions): ChildProcess;
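A minimal sketch of `fork()` with `ForkOptions` and the built-in IPC channel; `worker.js` and its message protocol are hypothetical:

```ts
// Sketch only: fork a hypothetical worker.js, pipe its stdio, and exchange IPC messages.
import { fork } from "node:child_process";

const child = fork("worker.js", ["--mode", "batch"], {
    silent: true, // pipe the child's stdio instead of inheriting it
    execArgv: ["--max-old-space-size=256"],
});

child.on("message", (msg) => console.log("from worker:", msg));
child.on("exit", (code) => console.log("worker exited with", code));
child.send({ cmd: "start" });
```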
+ interface SpawnSyncOptions extends CommonSpawnOptions {
+ input?: string | NodeJS.ArrayBufferView | undefined;
+ maxBuffer?: number | undefined;
+ encoding?: BufferEncoding | "buffer" | null | undefined;
+ }
+ interface SpawnSyncOptionsWithStringEncoding extends SpawnSyncOptions {
+ encoding: BufferEncoding;
+ }
+ interface SpawnSyncOptionsWithBufferEncoding extends SpawnSyncOptions {
+ encoding?: "buffer" | null | undefined;
+ }
+ interface SpawnSyncReturns<T> {
+ pid: number;
+ output: Array<T | null>;
+ stdout: T;
+ stderr: T;
+ status: number | null;
+ signal: NodeJS.Signals | null;
+ error?: Error | undefined;
+ }
+ /**
+ * The `child_process.spawnSync()` method is generally identical to {@link spawn} with the exception that the function will not return
+ * until the child process has fully closed. When a timeout has been encountered
+ * and `killSignal` is sent, the method won't return until the process has
+ * completely exited. If the process intercepts and handles the `SIGTERM` signal
+ * and doesn't exit, the parent process will wait until the child process has
+ * exited.
+ *
+ * **If the `shell` option is enabled, do not pass unsanitized user input to this**
+ * **function. Any input containing shell metacharacters may be used to trigger**
+ * **arbitrary command execution.**
+ * @since v0.11.12
+ * @param command The command to run.
+ * @param args List of string arguments.
+ */
+ function spawnSync(command: string): SpawnSyncReturns<Buffer>;
+ function spawnSync(command: string, options: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns<string>;
+ function spawnSync(command: string, options: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns<Buffer>;
+ function spawnSync(command: string, options?: SpawnSyncOptions): SpawnSyncReturns<string | Buffer>;
+ function spawnSync(command: string, args: readonly string[]): SpawnSyncReturns<Buffer>;
+ function spawnSync(
+ command: string,
+ args: readonly string[],
+ options: SpawnSyncOptionsWithStringEncoding,
+ ): SpawnSyncReturns<string>;
+ function spawnSync(
+ command: string,
+ args: readonly string[],
+ options: SpawnSyncOptionsWithBufferEncoding,
+ ): SpawnSyncReturns<Buffer>;
+ function spawnSync(
+ command: string,
+ args?: readonly string[],
+ options?: SpawnSyncOptions,
+ ): SpawnSyncReturns<string | Buffer>;
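A minimal sketch of how the `spawnSync` overloads above select `SpawnSyncReturns<string>` versus `SpawnSyncReturns<Buffer>`:

```ts
// Sketch only: encoding determines the stdout/stderr element type of SpawnSyncReturns.
import { spawnSync } from "node:child_process";

const asText = spawnSync("node", ["--version"], { encoding: "utf8" });
if (asText.error) throw asText.error;
console.log(asText.status, asText.stdout.trim()); // stdout: string

const asBytes = spawnSync("node", ["--version"]); // no encoding: stdout is a Buffer
console.log(asBytes.stdout.byteLength);
```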
+ interface CommonExecOptions extends CommonOptions {
+ input?: string | NodeJS.ArrayBufferView | undefined;
+ /**
+ * Can be set to 'pipe', 'inherit', or 'ignore', or an array of these strings.
+ * If passed as an array, the first element is used for `stdin`, the second for
+ * `stdout`, and the third for `stderr`. A fourth element can be used to
+ * specify the `stdio` behavior beyond the standard streams. See
+ * {@link ChildProcess.stdio} for more information.
+ *
+ * @default 'pipe'
+ */
+ stdio?: StdioOptions | undefined;
+ killSignal?: NodeJS.Signals | number | undefined;
+ maxBuffer?: number | undefined;
+ encoding?: BufferEncoding | "buffer" | null | undefined;
+ }
+ interface ExecSyncOptions extends CommonExecOptions {
+ shell?: string | undefined;
+ }
+ interface ExecSyncOptionsWithStringEncoding extends ExecSyncOptions {
+ encoding: BufferEncoding;
+ }
+ interface ExecSyncOptionsWithBufferEncoding extends ExecSyncOptions {
+ encoding?: "buffer" | null | undefined;
+ }
+ /**
+ * The `child_process.execSync()` method is generally identical to {@link exec} with the exception that the method will not return
+ * until the child process has fully closed. When a timeout has been encountered
+ * and `killSignal` is sent, the method won't return until the process has
+ * completely exited. If the child process intercepts and handles the `SIGTERM` signal and doesn't exit, the parent process will wait until the child process
+ * has exited.
+ *
+ * If the process times out or has a non-zero exit code, this method will throw.
+ * The `Error` object will contain the entire result from {@link spawnSync}.
+ *
+ * **Never pass unsanitized user input to this function. Any input containing shell**
+ * **metacharacters may be used to trigger arbitrary command execution.**
+ * @since v0.11.12
+ * @param command The command to run.
+ * @return The stdout from the command.
+ */
+ function execSync(command: string): Buffer;
+ function execSync(command: string, options: ExecSyncOptionsWithStringEncoding): string;
+ function execSync(command: string, options: ExecSyncOptionsWithBufferEncoding): Buffer;
+ function execSync(command: string, options?: ExecSyncOptions): string | Buffer;
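Similarly for `execSync`, a minimal sketch:

```ts
// Sketch only: execSync returns a Buffer by default and a string when a BufferEncoding is given.
import { execSync } from "node:child_process";

const asBuffer = execSync("node --version");
const asString = execSync("node --version", { encoding: "utf8" });
console.log(asBuffer.byteLength, asString.trim());
```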
+ interface ExecFileSyncOptions extends CommonExecOptions {
+ shell?: boolean | string | undefined;
+ }
+ interface ExecFileSyncOptionsWithStringEncoding extends ExecFileSyncOptions {
+ encoding: BufferEncoding;
+ }
+ interface ExecFileSyncOptionsWithBufferEncoding extends ExecFileSyncOptions {
+ encoding?: "buffer" | null; // specify `null`.
+ }
+ /**
+ * The `child_process.execFileSync()` method is generally identical to {@link execFile} with the exception that the method will not
+ * return until the child process has fully closed. When a timeout has been
+ * encountered and `killSignal` is sent, the method won't return until the process
+ * has completely exited.
+ *
+ * If the child process intercepts and handles the `SIGTERM` signal and
+ * does not exit, the parent process will still wait until the child process has
+ * exited.
+ *
+ * If the process times out or has a non-zero exit code, this method will throw an `Error` that will include the full result of the underlying {@link spawnSync}.
+ *
+ * **If the `shell` option is enabled, do not pass unsanitized user input to this**
+ * **function. Any input containing shell metacharacters may be used to trigger**
+ * **arbitrary command execution.**
+ * @since v0.11.12
+ * @param file The name or path of the executable file to run.
+ * @param args List of string arguments.
+ * @return The stdout from the command.
+ */
+ function execFileSync(file: string): Buffer;
+ function execFileSync(file: string, options: ExecFileSyncOptionsWithStringEncoding): string;
+ function execFileSync(file: string, options: ExecFileSyncOptionsWithBufferEncoding): Buffer;
+ function execFileSync(file: string, options?: ExecFileSyncOptions): string | Buffer;
+ function execFileSync(file: string, args: readonly string[]): Buffer;
+ function execFileSync(
+ file: string,
+ args: readonly string[],
+ options: ExecFileSyncOptionsWithStringEncoding,
+ ): string;
+ function execFileSync(
+ file: string,
+ args: readonly string[],
+ options: ExecFileSyncOptionsWithBufferEncoding,
+ ): Buffer;
+ function execFileSync(file: string, args?: readonly string[], options?: ExecFileSyncOptions): string | Buffer;
+}
+declare module "node:child_process" {
+ export * from "child_process";
+}
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/node/cluster.d.ts b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/cluster.d.ts
new file mode 100644
index 0000000000000000000000000000000000000000..60fbbd4cc9216972e19307363c59c9cd5bab9cb1
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/cluster.d.ts
@@ -0,0 +1,578 @@
+/**
+ * Clusters of Node.js processes can be used to run multiple instances of Node.js
+ * that can distribute workloads among their application threads. When process isolation
+ * is not needed, use the [`worker_threads`](https://nodejs.org/docs/latest-v20.x/api/worker_threads.html)
+ * module instead, which allows running multiple application threads within a single Node.js instance.
+ *
+ * The cluster module allows easy creation of child processes that all share
+ * server ports.
+ *
+ * ```js
+ * import cluster from 'node:cluster';
+ * import http from 'node:http';
+ * import { availableParallelism } from 'node:os';
+ * import process from 'node:process';
+ *
+ * const numCPUs = availableParallelism();
+ *
+ * if (cluster.isPrimary) {
+ * console.log(`Primary ${process.pid} is running`);
+ *
+ * // Fork workers.
+ * for (let i = 0; i < numCPUs; i++) {
+ * cluster.fork();
+ * }
+ *
+ * cluster.on('exit', (worker, code, signal) => {
+ * console.log(`worker ${worker.process.pid} died`);
+ * });
+ * } else {
+ * // Workers can share any TCP connection
+ * // In this case it is an HTTP server
+ * http.createServer((req, res) => {
+ * res.writeHead(200);
+ * res.end('hello world\n');
+ * }).listen(8000);
+ *
+ * console.log(`Worker ${process.pid} started`);
+ * }
+ * ```
+ *
+ * Running Node.js will now share port 8000 between the workers:
+ *
+ * ```console
+ * $ node server.js
+ * Primary 3596 is running
+ * Worker 4324 started
+ * Worker 4520 started
+ * Worker 6056 started
+ * Worker 5644 started
+ * ```
+ *
+ * On Windows, it is not yet possible to set up a named pipe server in a worker.
+ * @see [source](https://github.com/nodejs/node/blob/v20.11.1/lib/cluster.js)
+ */
+declare module "cluster" {
+ import * as child from "node:child_process";
+ import EventEmitter = require("node:events");
+ import * as net from "node:net";
+ type SerializationType = "json" | "advanced";
+ export interface ClusterSettings {
+ /**
+ * List of string arguments passed to the Node.js executable.
+ * @default process.execArgv
+ */
+ execArgv?: string[] | undefined;
+ /**
+ * File path to worker file.
+ * @default process.argv[1]
+ */
+ exec?: string | undefined;
+ /**
+ * String arguments passed to worker.
+ * @default process.argv.slice(2)
+ */
+ args?: string[] | undefined;
+ /**
+ * Whether or not to send output to parent's stdio.
+ * @default false
+ */
+ silent?: boolean | undefined;
+ /**
+ * Configures the stdio of forked processes. Because the cluster module relies on IPC to function, this configuration must
+ * contain an `'ipc'` entry. When this option is provided, it overrides `silent`. See [`child_process.spawn()`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#child_processspawncommand-args-options)'s
+ * [`stdio`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#optionsstdio).
+ */
+ stdio?: any[] | undefined;
+ /**
+ * Sets the user identity of the process. (See [`setuid(2)`](https://man7.org/linux/man-pages/man2/setuid.2.html).)
+ */
+ uid?: number | undefined;
+ /**
+ * Sets the group identity of the process. (See [`setgid(2)`](https://man7.org/linux/man-pages/man2/setgid.2.html).)
+ */
+ gid?: number | undefined;
+ /**
+ * Sets inspector port of worker. This can be a number, or a function that takes no arguments and returns a number.
+ * By default each worker gets its own port, incremented from the primary's `process.debugPort`.
+ */
+ inspectPort?: number | (() => number) | undefined;
+ /**
+ * Specify the kind of serialization used for sending messages between processes. Possible values are `'json'` and `'advanced'`.
+ * See [Advanced serialization for `child_process`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#advanced-serialization) for more details.
+ * @default 'json'
+ */
+ serialization?: SerializationType | undefined;
+ /**
+ * Current working directory of the worker process.
+ * @default undefined (inherits from parent process)
+ */
+ cwd?: string | undefined;
+ /**
+ * Hide the forked processes console window that would normally be created on Windows systems.
+ * @default false
+ */
+ windowsHide?: boolean | undefined;
+ }
+ export interface Address {
+ address: string;
+ port: number;
+ /**
+ * The `addressType` is one of:
+ *
+ * * `4` (TCPv4)
+ * * `6` (TCPv6)
+ * * `-1` (Unix domain socket)
+ * * `'udp4'` or `'udp6'` (UDPv4 or UDPv6)
+ */
+ addressType: 4 | 6 | -1 | "udp4" | "udp6";
+ }
+ /**
+ * A `Worker` object contains all public information and methods about a worker.
+ * In the primary it can be obtained using `cluster.workers`. In a worker
+ * it can be obtained using `cluster.worker`.
+ * @since v0.7.0
+ */
+ export class Worker extends EventEmitter {
+ /**
+ * Each new worker is given its own unique id; this id is stored in the `id` property.
+ *
+ * While a worker is alive, this is the key that indexes it in `cluster.workers`.
+ * @since v0.8.0
+ */
+ id: number;
+ /**
+ * All workers are created using [`child_process.fork()`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#child_processforkmodulepath-args-options), the returned object
+ * from this function is stored as `.process`. In a worker, the global `process` is stored.
+ *
+ * See: [Child Process module](https://nodejs.org/docs/latest-v20.x/api/child_process.html#child_processforkmodulepath-args-options).
+ *
+ * Workers will call `process.exit(0)` if the `'disconnect'` event occurs
+ * on `process` and `.exitedAfterDisconnect` is not `true`. This protects against
+ * accidental disconnection.
+ * @since v0.7.0
+ */
+ process: child.ChildProcess;
+ /**
+ * Send a message to a worker or primary, optionally with a handle.
+ *
+ * In the primary, this sends a message to a specific worker. It is identical to [`ChildProcess.send()`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#subprocesssendmessage-sendhandle-options-callback).
+ *
+ * In a worker, this sends a message to the primary. It is identical to `process.send()`.
+ *
+ * This example will echo back all messages from the primary:
+ *
+ * ```js
+ * if (cluster.isPrimary) {
+ * const worker = cluster.fork();
+ * worker.send('hi there');
+ *
+ * } else if (cluster.isWorker) {
+ * process.on('message', (msg) => {
+ * process.send(msg);
+ * });
+ * }
+ * ```
+ * @since v0.7.0
+ * @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles.
+ */
+ send(message: child.Serializable, callback?: (error: Error | null) => void): boolean;
+ send(
+ message: child.Serializable,
+ sendHandle: child.SendHandle,
+ callback?: (error: Error | null) => void,
+ ): boolean;
+ send(
+ message: child.Serializable,
+ sendHandle: child.SendHandle,
+ options?: child.MessageOptions,
+ callback?: (error: Error | null) => void,
+ ): boolean;
+ /**
+ * This function will kill the worker. In the primary worker, it does this by
+ * disconnecting the `worker.process`, and once disconnected, killing with `signal`. In the worker, it does it by killing the process with `signal`.
+ *
+ * The `kill()` function kills the worker process without waiting for a graceful
+ * disconnect, it has the same behavior as `worker.process.kill()`.
+ *
+ * This method is aliased as `worker.destroy()` for backwards compatibility.
+ *
+ * In a worker, `process.kill()` exists, but it is not this function;
+ * it is [`kill()`](https://nodejs.org/docs/latest-v20.x/api/process.html#processkillpid-signal).
+ * @since v0.9.12
+ * @param [signal='SIGTERM'] Name of the kill signal to send to the worker process.
+ */
+ kill(signal?: string): void;
+ destroy(signal?: string): void;
+ /**
+ * In a worker, this function will close all servers, wait for the `'close'` event
+ * on those servers, and then disconnect the IPC channel.
+ *
+ * In the primary, an internal message is sent to the worker causing it to call `.disconnect()` on itself.
+ *
+ * Causes `.exitedAfterDisconnect` to be set.
+ *
+ * After a server is closed, it will no longer accept new connections,
+ * but connections may be accepted by any other listening worker. Existing
+ * connections will be allowed to close as usual. When no more connections exist,
+ * see `server.close()`, the IPC channel to the worker will close allowing it
+ * to die gracefully.
+ *
+ * The above applies _only_ to server connections, client connections are not
+ * automatically closed by workers, and disconnect does not wait for them to close
+ * before exiting.
+ *
+ * In a worker, `process.disconnect` exists, but it is not this function;
+ * it is `disconnect()`.
+ *
+ * Because long living server connections may block workers from disconnecting, it
+ * may be useful to send a message, so application specific actions may be taken to
+ * close them. It also may be useful to implement a timeout, killing a worker if
+ * the `'disconnect'` event has not been emitted after some time.
+ *
+ * ```js
+ * if (cluster.isPrimary) {
+ * const worker = cluster.fork();
+ * let timeout;
+ *
+ * worker.on('listening', (address) => {
+ * worker.send('shutdown');
+ * worker.disconnect();
+ * timeout = setTimeout(() => {
+ * worker.kill();
+ * }, 2000);
+ * });
+ *
+ * worker.on('disconnect', () => {
+ * clearTimeout(timeout);
+ * });
+ *
+ * } else if (cluster.isWorker) {
+ * const net = require('node:net');
+ * const server = net.createServer((socket) => {
+ * // Connections never end
+ * });
+ *
+ * server.listen(8000);
+ *
+ * process.on('message', (msg) => {
+ * if (msg === 'shutdown') {
+ * // Initiate graceful close of any connections to server
+ * }
+ * });
+ * }
+ * ```
+ * @since v0.7.7
+ * @return A reference to `worker`.
+ */
+ disconnect(): void;
+ /**
+ * This function returns `true` if the worker is connected to its primary via its
+ * IPC channel, `false` otherwise. A worker is connected to its primary after it
+ * has been created. It is disconnected after the `'disconnect'` event is emitted.
+ * @since v0.11.14
+ */
+ isConnected(): boolean;
+ /**
+ * This function returns `true` if the worker's process has terminated (either
+ * because of exiting or being signaled). Otherwise, it returns `false`.
+ *
+ * ```js
+ * import cluster from 'node:cluster';
+ * import http from 'node:http';
+ * import { availableParallelism } from 'node:os';
+ * import process from 'node:process';
+ *
+ * const numCPUs = availableParallelism();
+ *
+ * if (cluster.isPrimary) {
+ * console.log(`Primary ${process.pid} is running`);
+ *
+ * // Fork workers.
+ * for (let i = 0; i < numCPUs; i++) {
+ * cluster.fork();
+ * }
+ *
+ * cluster.on('fork', (worker) => {
+ * console.log('worker is dead:', worker.isDead());
+ * });
+ *
+ * cluster.on('exit', (worker, code, signal) => {
+ * console.log('worker is dead:', worker.isDead());
+ * });
+ * } else {
+ * // Workers can share any TCP connection. In this case, it is an HTTP server.
+ * http.createServer((req, res) => {
+ * res.writeHead(200);
+ * res.end(`Current process\n ${process.pid}`);
+ * process.kill(process.pid);
+ * }).listen(8000);
+ * }
+ * ```
+ * @since v0.11.14
+ */
+ isDead(): boolean;
+ /**
+ * This property is `true` if the worker exited due to `.disconnect()`.
+ * If the worker exited any other way, it is `false`. If the
+ * worker has not exited, it is `undefined`.
+ *
+ * The boolean `worker.exitedAfterDisconnect` allows distinguishing between
+ * voluntary and accidental exit, the primary may choose not to respawn a worker
+ * based on this value.
+ *
+ * ```js
+ * cluster.on('exit', (worker, code, signal) => {
+ * if (worker.exitedAfterDisconnect === true) {
+ * console.log('Oh, it was just voluntary – no need to worry');
+ * }
+ * });
+ *
+ * // kill worker
+ * worker.kill();
+ * ```
+ * @since v6.0.0
+ */
+ exitedAfterDisconnect: boolean;
+ /**
+ * events.EventEmitter
+ * 1. disconnect
+ * 2. error
+ * 3. exit
+ * 4. listening
+ * 5. message
+ * 6. online
+ */
+ addListener(event: string, listener: (...args: any[]) => void): this;
+ addListener(event: "disconnect", listener: () => void): this;
+ addListener(event: "error", listener: (error: Error) => void): this;
+ addListener(event: "exit", listener: (code: number, signal: string) => void): this;
+ addListener(event: "listening", listener: (address: Address) => void): this;
+ addListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
+ addListener(event: "online", listener: () => void): this;
+ emit(event: string | symbol, ...args: any[]): boolean;
+ emit(event: "disconnect"): boolean;
+ emit(event: "error", error: Error): boolean;
+ emit(event: "exit", code: number, signal: string): boolean;
+ emit(event: "listening", address: Address): boolean;
+ emit(event: "message", message: any, handle: net.Socket | net.Server): boolean;
+ emit(event: "online"): boolean;
+ on(event: string, listener: (...args: any[]) => void): this;
+ on(event: "disconnect", listener: () => void): this;
+ on(event: "error", listener: (error: Error) => void): this;
+ on(event: "exit", listener: (code: number, signal: string) => void): this;
+ on(event: "listening", listener: (address: Address) => void): this;
+ on(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
+ on(event: "online", listener: () => void): this;
+ once(event: string, listener: (...args: any[]) => void): this;
+ once(event: "disconnect", listener: () => void): this;
+ once(event: "error", listener: (error: Error) => void): this;
+ once(event: "exit", listener: (code: number, signal: string) => void): this;
+ once(event: "listening", listener: (address: Address) => void): this;
+ once(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
+ once(event: "online", listener: () => void): this;
+ prependListener(event: string, listener: (...args: any[]) => void): this;
+ prependListener(event: "disconnect", listener: () => void): this;
+ prependListener(event: "error", listener: (error: Error) => void): this;
+ prependListener(event: "exit", listener: (code: number, signal: string) => void): this;
+ prependListener(event: "listening", listener: (address: Address) => void): this;
+ prependListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
+ prependListener(event: "online", listener: () => void): this;
+ prependOnceListener(event: string, listener: (...args: any[]) => void): this;
+ prependOnceListener(event: "disconnect", listener: () => void): this;
+ prependOnceListener(event: "error", listener: (error: Error) => void): this;
+ prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this;
+ prependOnceListener(event: "listening", listener: (address: Address) => void): this;
+ prependOnceListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
+ prependOnceListener(event: "online", listener: () => void): this;
+ }
+ export interface Cluster extends EventEmitter {
+ disconnect(callback?: () => void): void;
+ /**
+ * Spawn a new worker process.
+ *
+ * This can only be called from the primary process.
+ * @param env Key/value pairs to add to worker process environment.
+ * @since v0.6.0
+ */
+ fork(env?: any): Worker;
+ /** @deprecated since v16.0.0 - use isPrimary. */
+ readonly isMaster: boolean;
+ /**
+ * True if the process is a primary. This is determined by the `process.env.NODE_UNIQUE_ID`. If `process.env.NODE_UNIQUE_ID`
+ * is undefined, then `isPrimary` is `true`.
+ * @since v16.0.0
+ */
+ readonly isPrimary: boolean;
+ /**
+ * True if the process is not a primary (it is the negation of `cluster.isPrimary`).
+ * @since v0.6.0
+ */
+ readonly isWorker: boolean;
+ /**
+ * The scheduling policy, either `cluster.SCHED_RR` for round-robin or `cluster.SCHED_NONE` to leave it to the operating system. This is a
+ * global setting and effectively frozen once either the first worker is spawned, or [`.setupPrimary()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clustersetupprimarysettings)
+ * is called, whichever comes first.
+ *
+ * `SCHED_RR` is the default on all operating systems except Windows. Windows will change to `SCHED_RR` once libuv is able to effectively distribute
+ * IOCP handles without incurring a large performance hit.
+ *
+ * `cluster.schedulingPolicy` can also be set through the `NODE_CLUSTER_SCHED_POLICY` environment variable. Valid values are `'rr'` and `'none'`.
+ * @since v0.11.2
+ */
+ schedulingPolicy: number;
+ /**
+ * After calling [`.setupPrimary()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clustersetupprimarysettings)
+ * (or [`.fork()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clusterforkenv)) this settings object will contain
+ * the settings, including the default values.
+ *
+ * This object is not intended to be changed or set manually.
+ * @since v0.7.1
+ */
+ readonly settings: ClusterSettings;
+ /** @deprecated since v16.0.0 - use [`.setupPrimary()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clustersetupprimarysettings) instead. */
+ setupMaster(settings?: ClusterSettings): void;
+ /**
+ * `setupPrimary` is used to change the default 'fork' behavior. Once called, the settings will be present in `cluster.settings`.
+ *
+ * Any settings changes only affect future calls to [`.fork()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clusterforkenv)
+ * and have no effect on workers that are already running.
+ *
+ * The only attribute of a worker that cannot be set via `.setupPrimary()` is the `env` passed to
+ * [`.fork()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clusterforkenv).
+ *
+ * The defaults above apply to the first call only; the defaults for later calls are the current values at the time
+ * `cluster.setupPrimary()` is called.
+ *
+ * ```js
+ * import cluster from 'node:cluster';
+ *
+ * cluster.setupPrimary({
+ * exec: 'worker.js',
+ * args: ['--use', 'https'],
+ * silent: true,
+ * });
+ * cluster.fork(); // https worker
+ * cluster.setupPrimary({
+ * exec: 'worker.js',
+ * args: ['--use', 'http'],
+ * });
+ * cluster.fork(); // http worker
+ * ```
+ *
+ * This can only be called from the primary process.
+ * @since v16.0.0
+ */
+ setupPrimary(settings?: ClusterSettings): void;
+ /**
+ * A reference to the current worker object. Not available in the primary process.
+ *
+ * ```js
+ * import cluster from 'node:cluster';
+ *
+ * if (cluster.isPrimary) {
+ * console.log('I am primary');
+ * cluster.fork();
+ * cluster.fork();
+ * } else if (cluster.isWorker) {
+ * console.log(`I am worker #${cluster.worker.id}`);
+ * }
+ * ```
+ * @since v0.7.0
+ */
+ readonly worker?: Worker | undefined;
+ /**
+ * A hash that stores the active worker objects, keyed by `id` field. This makes it easy to loop through all the workers. It is only available in the primary process.
+ *
+ * A worker is removed from `cluster.workers` after the worker has disconnected _and_ exited. The order between these two events cannot be determined in advance. However, it
+ * is guaranteed that the removal from the `cluster.workers` list happens before the last `'disconnect'` or `'exit'` event is emitted.
+ *
+ * ```js
+ * import cluster from 'node:cluster';
+ *
+ * for (const worker of Object.values(cluster.workers)) {
+ * worker.send('big announcement to all workers');
+ * }
+ * ```
+ * @since v0.7.0
+ */
+ readonly workers?: NodeJS.Dict<Worker> | undefined;
+ readonly SCHED_NONE: number;
+ readonly SCHED_RR: number;
+ /**
+ * events.EventEmitter
+ * 1. disconnect
+ * 2. exit
+ * 3. fork
+ * 4. listening
+ * 5. message
+ * 6. online
+ * 7. setup
+ */
+ addListener(event: string, listener: (...args: any[]) => void): this;
+ addListener(event: "disconnect", listener: (worker: Worker) => void): this;
+ addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
+ addListener(event: "fork", listener: (worker: Worker) => void): this;
+ addListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
+ addListener(
+ event: "message",
+ listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void,
+ ): this; // the handle is a net.Socket or net.Server object, or undefined.
+ addListener(event: "online", listener: (worker: Worker) => void): this;
+ addListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
+ emit(event: string | symbol, ...args: any[]): boolean;
+ emit(event: "disconnect", worker: Worker): boolean;
+ emit(event: "exit", worker: Worker, code: number, signal: string): boolean;
+ emit(event: "fork", worker: Worker): boolean;
+ emit(event: "listening", worker: Worker, address: Address): boolean;
+ emit(event: "message", worker: Worker, message: any, handle: net.Socket | net.Server): boolean;
+ emit(event: "online", worker: Worker): boolean;
+ emit(event: "setup", settings: ClusterSettings): boolean;
+ on(event: string, listener: (...args: any[]) => void): this;
+ on(event: "disconnect", listener: (worker: Worker) => void): this;
+ on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
+ on(event: "fork", listener: (worker: Worker) => void): this;
+ on(event: "listening", listener: (worker: Worker, address: Address) => void): this;
+ on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
+ on(event: "online", listener: (worker: Worker) => void): this;
+ on(event: "setup", listener: (settings: ClusterSettings) => void): this;
+ once(event: string, listener: (...args: any[]) => void): this;
+ once(event: "disconnect", listener: (worker: Worker) => void): this;
+ once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
+ once(event: "fork", listener: (worker: Worker) => void): this;
+ once(event: "listening", listener: (worker: Worker, address: Address) => void): this;
+ once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
+ once(event: "online", listener: (worker: Worker) => void): this;
+ once(event: "setup", listener: (settings: ClusterSettings) => void): this;
+ prependListener(event: string, listener: (...args: any[]) => void): this;
+ prependListener(event: "disconnect", listener: (worker: Worker) => void): this;
+ prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
+ prependListener(event: "fork", listener: (worker: Worker) => void): this;
+ prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
+ // the handle is a net.Socket or net.Server object, or undefined.
+ prependListener(
+ event: "message",
+ listener: (worker: Worker, message: any, handle?: net.Socket | net.Server) => void,
+ ): this;
+ prependListener(event: "online", listener: (worker: Worker) => void): this;
+ prependListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
+ prependOnceListener(event: string, listener: (...args: any[]) => void): this;
+ prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): this;
+ prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
+ prependOnceListener(event: "fork", listener: (worker: Worker) => void): this;
+ prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
+ // the handle is a net.Socket or net.Server object, or undefined.
+ prependOnceListener(
+ event: "message",
+ listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void,
+ ): this;
+ prependOnceListener(event: "online", listener: (worker: Worker) => void): this;
+ prependOnceListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
+ }
+ const cluster: Cluster;
+ export default cluster;
+}
+declare module "node:cluster" {
+ export * from "cluster";
+ export { default as default } from "cluster";
+}
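A minimal usage sketch for the `Cluster`/`Worker` types above: broadcasting a shutdown from the primary via the typed `cluster.workers` dictionary, then escalating to `kill()`. The `'shutdown'` message protocol and the timings are hypothetical.

```ts
// Sketch only: graceful shutdown broadcast over cluster.workers (NodeJS.Dict<Worker>).
import cluster from "node:cluster";

if (cluster.isPrimary) {
    cluster.fork();
    cluster.fork();

    setTimeout(() => {
        for (const worker of Object.values(cluster.workers ?? {})) {
            if (!worker) continue; // Dict values may be undefined
            worker.send("shutdown");
            worker.disconnect();
            const timer = setTimeout(() => worker.kill(), 2_000);
            worker.once("exit", () => clearTimeout(timer));
        }
    }, 5_000);
} else {
    process.on("message", (msg) => {
        if (msg === "shutdown") process.exit(0);
    });
}
```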
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/node/console.d.ts b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/console.d.ts
new file mode 100644
index 0000000000000000000000000000000000000000..3d6a7d039511b8cbfec2fbf794e2a6fab757ad0b
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/console.d.ts
@@ -0,0 +1,452 @@
+/**
+ * The `node:console` module provides a simple debugging console that is similar to
+ * the JavaScript console mechanism provided by web browsers.
+ *
+ * The module exports two specific components:
+ *
+ * * A `Console` class with methods such as `console.log()`, `console.error()`, and `console.warn()` that can be used to write to any Node.js stream.
+ * * A global `console` instance configured to write to [`process.stdout`](https://nodejs.org/docs/latest-v20.x/api/process.html#processstdout) and
+ * [`process.stderr`](https://nodejs.org/docs/latest-v20.x/api/process.html#processstderr). The global `console` can be used without calling `require('node:console')`.
+ *
+ * _**Warning**_: The global console object's methods are neither consistently
+ * synchronous like the browser APIs they resemble, nor are they consistently
+ * asynchronous like all other Node.js streams. See the [`note on process I/O`](https://nodejs.org/docs/latest-v20.x/api/process.html#a-note-on-process-io) for
+ * more information.
+ *
+ * Example using the global `console`:
+ *
+ * ```js
+ * console.log('hello world');
+ * // Prints: hello world, to stdout
+ * console.log('hello %s', 'world');
+ * // Prints: hello world, to stdout
+ * console.error(new Error('Whoops, something bad happened'));
+ * // Prints error message and stack trace to stderr:
+ * // Error: Whoops, something bad happened
+ * // at [eval]:5:15
+ * // at Script.runInThisContext (node:vm:132:18)
+ * // at Object.runInThisContext (node:vm:309:38)
+ * // at node:internal/process/execution:77:19
+ * // at [eval]-wrapper:6:22
+ * // at evalScript (node:internal/process/execution:76:60)
+ * // at node:internal/main/eval_string:23:3
+ *
+ * const name = 'Will Robinson';
+ * console.warn(`Danger ${name}! Danger!`);
+ * // Prints: Danger Will Robinson! Danger!, to stderr
+ * ```
+ *
+ * Example using the `Console` class:
+ *
+ * ```js
+ * const out = getStreamSomehow();
+ * const err = getStreamSomehow();
+ * const myConsole = new console.Console(out, err);
+ *
+ * myConsole.log('hello world');
+ * // Prints: hello world, to out
+ * myConsole.log('hello %s', 'world');
+ * // Prints: hello world, to out
+ * myConsole.error(new Error('Whoops, something bad happened'));
+ * // Prints: [Error: Whoops, something bad happened], to err
+ *
+ * const name = 'Will Robinson';
+ * myConsole.warn(`Danger ${name}! Danger!`);
+ * // Prints: Danger Will Robinson! Danger!, to err
+ * ```
+ * @see [source](https://github.com/nodejs/node/blob/v20.12.1/lib/console.js)
+ */
+declare module "console" {
+ import console = require("node:console");
+ export = console;
+}
+declare module "node:console" {
+ import { InspectOptions } from "node:util";
+ global {
+ // This needs to be global to avoid TS2403 in case lib.dom.d.ts is present in the same build
+ interface Console {
+ Console: console.ConsoleConstructor;
+ /**
+ * `console.assert()` writes a message if `value` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy) or omitted. It only
+ * writes a message and does not otherwise affect execution. The output always
+ * starts with `"Assertion failed"`. If provided, `message` is formatted using
+ * [`util.format()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilformatformat-args).
+ *
+ * If `value` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy), nothing happens.
+ *
+ * ```js
+ * console.assert(true, 'does nothing');
+ *
+ * console.assert(false, 'Whoops %s work', 'didn\'t');
+ * // Assertion failed: Whoops didn't work
+ *
+ * console.assert();
+ * // Assertion failed
+ * ```
+ * @since v0.1.101
+ * @param value The value tested for being truthy.
+ * @param message All arguments besides `value` are used as error message.
+ */
+ assert(value: any, message?: string, ...optionalParams: any[]): void;
+ /**
+ * When `stdout` is a TTY, calling `console.clear()` will attempt to clear the
+ * TTY. When `stdout` is not a TTY, this method does nothing.
+ *
+ * The specific operation of `console.clear()` can vary across operating systems
+ * and terminal types. For most Linux operating systems, `console.clear()` operates similarly to the `clear` shell command. On Windows, `console.clear()` will clear only the output in the
+ * current terminal viewport for the Node.js
+ * binary.
+ * @since v8.3.0
+ */
+ clear(): void;
+ /**
+ * Maintains an internal counter specific to `label` and outputs to `stdout` the
+ * number of times `console.count()` has been called with the given `label`.
+ *
+ * ```js
+ * > console.count()
+ * default: 1
+ * undefined
+ * > console.count('default')
+ * default: 2
+ * undefined
+ * > console.count('abc')
+ * abc: 1
+ * undefined
+ * > console.count('xyz')
+ * xyz: 1
+ * undefined
+ * > console.count('abc')
+ * abc: 2
+ * undefined
+ * > console.count()
+ * default: 3
+ * undefined
+ * >
+ * ```
+ * @since v8.3.0
+ * @param [label='default'] The display label for the counter.
+ */
+ count(label?: string): void;
+ /**
+ * Resets the internal counter specific to `label`.
+ *
+ * ```js
+ * > console.count('abc');
+ * abc: 1
+ * undefined
+ * > console.countReset('abc');
+ * undefined
+ * > console.count('abc');
+ * abc: 1
+ * undefined
+ * >
+ * ```
+ * @since v8.3.0
+ * @param [label='default'] The display label for the counter.
+ */
+ countReset(label?: string): void;
+ /**
+ * The `console.debug()` function is an alias for {@link log}.
+ * @since v8.0.0
+ */
+ debug(message?: any, ...optionalParams: any[]): void;
+ /**
+ * Uses [`util.inspect()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilinspectobject-options) on `obj` and prints the resulting string to `stdout`.
+ * This function bypasses any custom `inspect()` function defined on `obj`.
+ * @since v0.1.101
+ */
+ dir(obj: any, options?: InspectOptions): void;
+ /**
+ * This method calls `console.log()` passing it the arguments received.
+ * This method does not produce any XML formatting.
+ * @since v8.0.0
+ */
+ dirxml(...data: any[]): void;
+ /**
+ * Prints to `stderr` with newline. Multiple arguments can be passed, with the
+ * first used as the primary message and all additional used as substitution
+ * values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html)
+ * (the arguments are all passed to [`util.format()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilformatformat-args)).
+ *
+ * ```js
+ * const code = 5;
+ * console.error('error #%d', code);
+ * // Prints: error #5, to stderr
+ * console.error('error', code);
+ * // Prints: error 5, to stderr
+ * ```
+ *
+ * If formatting elements (e.g. `%d`) are not found in the first string then
+ * [`util.inspect()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilinspectobject-options) is called on each argument and the
+ * resulting string values are concatenated. See [`util.format()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilformatformat-args)
+ * for more information.
+ * @since v0.1.100
+ */
+ error(message?: any, ...optionalParams: any[]): void;
+ /**
+ * Increases indentation of subsequent lines by `groupIndentation` spaces.
+ *
+ * If one or more `label`s are provided, those are printed first without the
+ * additional indentation.
+ * @since v8.5.0
+ */
+ group(...label: any[]): void;
+ /**
+ * An alias for {@link group}.
+ * @since v8.5.0
+ */
+ groupCollapsed(...label: any[]): void;
+ /**
+ * Decreases indentation of subsequent lines by `groupIndentation` spaces.
+ * @since v8.5.0
+ */
+ groupEnd(): void;
+ /**
+ * The `console.info()` function is an alias for {@link log}.
+ * @since v0.1.100
+ */
+ info(message?: any, ...optionalParams: any[]): void;
+ /**
+ * Prints to `stdout` with newline. Multiple arguments can be passed, with the
+ * first used as the primary message and all additional used as substitution
+ * values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html)
+ * (the arguments are all passed to [`util.format()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilformatformat-args)).
+ *
+ * ```js
+ * const count = 5;
+ * console.log('count: %d', count);
+ * // Prints: count: 5, to stdout
+ * console.log('count:', count);
+ * // Prints: count: 5, to stdout
+ * ```
+ *
+ * See [`util.format()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilformatformat-args) for more information.
+ * @since v0.1.100
+ */
+ log(message?: any, ...optionalParams: any[]): void;
+ /**
+ * Try to construct a table with the columns of the properties of `tabularData` (or use `properties`) and rows of `tabularData` and log it. Falls back to just
+ * logging the argument if it can't be parsed as tabular.
+ *
+ * ```js
+ * // These can't be parsed as tabular data
+ * console.table(Symbol());
+ * // Symbol()
+ *
+ * console.table(undefined);
+ * // undefined
+ *
+ * console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }]);
+ * // ┌─────────┬─────┬─────┐
+ * // │ (index) │ a │ b │
+ * // ├─────────┼─────┼─────┤
+ * // │ 0 │ 1 │ 'Y' │
+ * // │ 1 │ 'Z' │ 2 │
+ * // └─────────┴─────┴─────┘
+ *
+ * console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }], ['a']);
+ * // ┌─────────┬─────┐
+ * // │ (index) │ a │
+ * // ├─────────┼─────┤
+ * // │ 0 │ 1 │
+ * // │ 1 │ 'Z' │
+ * // └─────────┴─────┘
+ * ```
+ * @since v10.0.0
+ * @param properties Alternate properties for constructing the table.
+ */
+ table(tabularData: any, properties?: readonly string[]): void;
+ /**
+ * Starts a timer that can be used to compute the duration of an operation. Timers
+ * are identified by a unique `label`. Use the same `label` when calling {@link timeEnd} to stop the timer and output the elapsed time in
+ * suitable time units to `stdout`. For example, if the elapsed
+ * time is 3869ms, `console.timeEnd()` displays "3.869s".
+ * @since v0.1.104
+ * @param [label='default']
+ */
+ time(label?: string): void;
+ /**
+ * Stops a timer that was previously started by calling {@link time} and
+ * prints the result to `stdout`:
+ *
+ * ```js
+ * console.time('bunch-of-stuff');
+ * // Do a bunch of stuff.
+ * console.timeEnd('bunch-of-stuff');
+ * // Prints: bunch-of-stuff: 225.438ms
+ * ```
+ * @since v0.1.104
+ * @param [label='default']
+ */
+ timeEnd(label?: string): void;
+ /**
+ * For a timer that was previously started by calling {@link time}, prints
+ * the elapsed time and other `data` arguments to `stdout`:
+ *
+ * ```js
+ * console.time('process');
+ * const value = expensiveProcess1(); // Returns 42
+ * console.timeLog('process', value);
+ * // Prints "process: 365.227ms 42".
+ * doExpensiveProcess2(value);
+ * console.timeEnd('process');
+ * ```
+ * @since v10.7.0
+ * @param [label='default']
+ */
+ timeLog(label?: string, ...data: any[]): void;
+ /**
+ * Prints to `stderr` the string `'Trace: '`, followed by the [`util.format()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilformatformat-args)
+ * formatted message and stack trace to the current position in the code.
+ *
+ * ```js
+ * console.trace('Show me');
+ * // Prints: (stack trace will vary based on where trace is called)
+ * // Trace: Show me
+ * // at repl:2:9
+ * // at REPLServer.defaultEval (repl.js:248:27)
+ * // at bound (domain.js:287:14)
+ * // at REPLServer.runBound [as eval] (domain.js:300:12)
+ * // at REPLServer.<anonymous> (repl.js:412:12)
+ * // at emitOne (events.js:82:20)
+ * // at REPLServer.emit (events.js:169:7)
+ * // at REPLServer.Interface._onLine (readline.js:210:10)
+ * // at REPLServer.Interface._line (readline.js:549:8)
+ * // at REPLServer.Interface._ttyWrite (readline.js:826:14)
+ * ```
+ * @since v0.1.104
+ */
+ trace(message?: any, ...optionalParams: any[]): void;
+ /**
+ * The `console.warn()` function is an alias for {@link error}.
+ * @since v0.1.100
+ */
+ warn(message?: any, ...optionalParams: any[]): void;
+ // --- Inspector mode only ---
+ /**
+ * This method does not display anything unless used in the inspector. The `console.profile()`
+ * method starts a JavaScript CPU profile with an optional label until {@link profileEnd}
+ * is called. The profile is then added to the Profile panel of the inspector.
+ *
+ * ```js
+ * console.profile('MyLabel');
+ * // Some code
+ * console.profileEnd('MyLabel');
+ * // Adds the profile 'MyLabel' to the Profiles panel of the inspector.
+ * ```
+ * @since v8.0.0
+ */
+ profile(label?: string): void;
+ /**
+ * This method does not display anything unless used in the inspector. Stops the current
+ * JavaScript CPU profiling session if one has been started and prints the report to the
+ * Profiles panel of the inspector. See {@link profile} for an example.
+ *
+ * If this method is called without a label, the most recently started profile is stopped.
+ * @since v8.0.0
+ */
+ profileEnd(label?: string): void;
+ /**
+ * This method does not display anything unless used in the inspector. The `console.timeStamp()`
+ * method adds an event with the label `'label'` to the Timeline panel of the inspector.
+ * @since v8.0.0
+ */
+ timeStamp(label?: string): void;
+ }
+ /**
+ * The `console` module provides a simple debugging console that is similar to the
+ * JavaScript console mechanism provided by web browsers.
+ *
+ * The module exports two specific components:
+ *
+ * * A `Console` class with methods such as `console.log()`, `console.error()` and `console.warn()` that can be used to write to any Node.js stream.
+ * * A global `console` instance configured to write to [`process.stdout`](https://nodejs.org/docs/latest-v20.x/api/process.html#processstdout) and
+ * [`process.stderr`](https://nodejs.org/docs/latest-v20.x/api/process.html#processstderr). The global `console` can be used without calling `require('console')`.
+ *
+ * _**Warning**_: The global console object's methods are neither consistently
+ * synchronous like the browser APIs they resemble, nor are they consistently
+ * asynchronous like all other Node.js streams. See the [`note on process I/O`](https://nodejs.org/docs/latest-v20.x/api/process.html#a-note-on-process-io) for
+ * more information.
+ *
+ * Example using the global `console`:
+ *
+ * ```js
+ * console.log('hello world');
+ * // Prints: hello world, to stdout
+ * console.log('hello %s', 'world');
+ * // Prints: hello world, to stdout
+ * console.error(new Error('Whoops, something bad happened'));
+ * // Prints error message and stack trace to stderr:
+ * // Error: Whoops, something bad happened
+ * // at [eval]:5:15
+ * // at Script.runInThisContext (node:vm:132:18)
+ * // at Object.runInThisContext (node:vm:309:38)
+ * // at node:internal/process/execution:77:19
+ * // at [eval]-wrapper:6:22
+ * // at evalScript (node:internal/process/execution:76:60)
+ * // at node:internal/main/eval_string:23:3
+ *
+ * const name = 'Will Robinson';
+ * console.warn(`Danger ${name}! Danger!`);
+ * // Prints: Danger Will Robinson! Danger!, to stderr
+ * ```
+ *
+ * Example using the `Console` class:
+ *
+ * ```js
+ * const out = getStreamSomehow();
+ * const err = getStreamSomehow();
+ * const myConsole = new console.Console(out, err);
+ *
+ * myConsole.log('hello world');
+ * // Prints: hello world, to out
+ * myConsole.log('hello %s', 'world');
+ * // Prints: hello world, to out
+ * myConsole.error(new Error('Whoops, something bad happened'));
+ * // Prints: [Error: Whoops, something bad happened], to err
+ *
+ * const name = 'Will Robinson';
+ * myConsole.warn(`Danger ${name}! Danger!`);
+ * // Prints: Danger Will Robinson! Danger!, to err
+ * ```
+ * @see [source](https://github.com/nodejs/node/blob/v20.11.1/lib/console.js)
+ */
+ namespace console {
+ interface ConsoleConstructorOptions {
+ stdout: NodeJS.WritableStream;
+ stderr?: NodeJS.WritableStream | undefined;
+ /**
+ * Ignore errors when writing to the underlying streams.
+ * @default true
+ */
+ ignoreErrors?: boolean | undefined;
+ /**
+ * Set color support for this `Console` instance. Setting to true enables coloring while inspecting
+ * values. Setting to `false` disables coloring while inspecting values. Setting to `'auto'` makes color
+ * support depend on the value of the `isTTY` property and the value returned by `getColorDepth()` on the
+ * respective stream. This option cannot be used if `inspectOptions.colors` is set as well.
+ * @default auto
+ */
+ colorMode?: boolean | "auto" | undefined;
+ /**
+ * Specifies options that are passed along to
+ * [`util.inspect()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilinspectobject-options).
+ */
+ inspectOptions?: InspectOptions | undefined;
+ /**
+ * Set group indentation.
+ * @default 2
+ */
+ groupIndentation?: number | undefined;
+ }
+ interface ConsoleConstructor {
+ prototype: Console;
+ new(stdout: NodeJS.WritableStream, stderr?: NodeJS.WritableStream, ignoreErrors?: boolean): Console;
+ new(options: ConsoleConstructorOptions): Console;
+ }
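+ /*
+ * A minimal sketch of building a `Console` from `ConsoleConstructorOptions`; the
+ * log-file paths here are placeholders.
+ *
+ * ```js
+ * const { Console } = require('node:console');
+ * const { createWriteStream } = require('node:fs');
+ *
+ * const logger = new Console({
+ *   stdout: createWriteStream('./app.log'),
+ *   stderr: createWriteStream('./app-error.log'),
+ *   colorMode: false,      // never write ANSI color codes into the files
+ *   groupIndentation: 4,   // indent console.group() output by 4 spaces
+ * });
+ *
+ * logger.group('request');
+ * logger.log('handled in %d ms', 42);
+ * logger.groupEnd();
+ * // Writes the grouped, uncolored lines to app.log
+ * ```
+ */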
+ }
+ var console: Console;
+ }
+ export = globalThis.console;
+}
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/node/constants.d.ts b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/constants.d.ts
new file mode 100644
index 0000000000000000000000000000000000000000..c3ac2b826436020bc945c03bb85c9a9479245f2f
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/constants.d.ts
@@ -0,0 +1,19 @@
+/** @deprecated since v6.3.0 - use constants property exposed by the relevant module instead. */
+declare module "constants" {
+ import { constants as osConstants, SignalConstants } from "node:os";
+ import { constants as cryptoConstants } from "node:crypto";
+ import { constants as fsConstants } from "node:fs";
+
+ const exp:
+ & typeof osConstants.errno
+ & typeof osConstants.priority
+ & SignalConstants
+ & typeof cryptoConstants
+ & typeof fsConstants;
+ export = exp;
+}
+
+declare module "node:constants" {
+ import constants = require("constants");
+ export = constants;
+}
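+/*
+ * A short sketch of the recommended replacement: read constants from the owning
+ * module (here `node:fs`) rather than from this deprecated module; the path is
+ * only an example.
+ *
+ * ```js
+ * const { access, constants } = require('node:fs');
+ *
+ * // R_OK comes from fs.constants instead of require('constants').R_OK.
+ * access('/etc/hosts', constants.R_OK, (err) => {
+ *   console.log(err ? 'not readable' : 'readable');
+ * });
+ * ```
+ */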
diff --git a/ADOOR_ACE/Interaction-Server/node_modules/@types/node/crypto.d.ts b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/crypto.d.ts
new file mode 100644
index 0000000000000000000000000000000000000000..d121e4705a2d94db4cd35580b26394588213f1b1
--- /dev/null
+++ b/ADOOR_ACE/Interaction-Server/node_modules/@types/node/crypto.d.ts
@@ -0,0 +1,4487 @@
+/**
+ * The `node:crypto` module provides cryptographic functionality that includes a
+ * set of wrappers for OpenSSL's hash, HMAC, cipher, decipher, sign, and verify
+ * functions.
+ *
+ * ```js
+ * const { createHmac } = await import('node:crypto');
+ *
+ * const secret = 'abcdefg';
+ * const hash = createHmac('sha256', secret)
+ * .update('I love cupcakes')
+ * .digest('hex');
+ * console.log(hash);
+ * // Prints:
+ * // c0fa1bc00531bd78ef38c628449c5102aeabd49b5dc3a2a516ea6ea959d6658e
+ * ```
+ * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/crypto.js)
+ */
+declare module "crypto" {
+ import * as stream from "node:stream";
+ import { PeerCertificate } from "node:tls";
+ /**
+ * SPKAC is a Certificate Signing Request mechanism originally implemented by
+ * Netscape and was specified formally as part of HTML5's `keygen` element.
+ *
+ * `<keygen>` is deprecated since [HTML 5.2](https://www.w3.org/TR/html52/changes.html#features-removed) and new projects
+ * should not use this element anymore.
+ *
+ * The `node:crypto` module provides the `Certificate` class for working with SPKAC
+ * data. The most common usage is handling output generated by the HTML5 `<keygen>` element. Node.js uses [OpenSSL's SPKAC
+ * implementation](https://www.openssl.org/docs/man3.0/man1/openssl-spkac.html) internally.
+ * @since v0.11.8
+ */
+ class Certificate {
+ /**
+ * ```js
+ * const { Certificate } = await import('node:crypto');
+ * const spkac = getSpkacSomehow();
+ * const challenge = Certificate.exportChallenge(spkac);
+ * console.log(challenge.toString('utf8'));
+ * // Prints: the challenge as a UTF8 string
+ * ```
+ * @since v9.0.0
+ * @param encoding The `encoding` of the `spkac` string.
+ * @return The challenge component of the `spkac` data structure, which includes a public key and a challenge.
+ */
+ static exportChallenge(spkac: BinaryLike): Buffer;
+ /**
+ * ```js
+ * const { Certificate } = await import('node:crypto');
+ * const spkac = getSpkacSomehow();
+ * const publicKey = Certificate.exportPublicKey(spkac);
+ * console.log(publicKey);
+ * // Prints: the public key as <Buffer ...>
+ * ```
+ * @since v9.0.0
+ * @param encoding The `encoding` of the `spkac` string.
+ * @return The public key component of the `spkac` data structure, which includes a public key and a challenge.
+ */
+ static exportPublicKey(spkac: BinaryLike, encoding?: string): Buffer;
+ /**
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { Certificate } = await import('node:crypto');
+ *
+ * const spkac = getSpkacSomehow();
+ * console.log(Certificate.verifySpkac(Buffer.from(spkac)));
+ * // Prints: true or false
+ * ```
+ * @since v9.0.0
+ * @param encoding The `encoding` of the `spkac` string.
+ * @return `true` if the given `spkac` data structure is valid, `false` otherwise.
+ */
+ static verifySpkac(spkac: NodeJS.ArrayBufferView): boolean;
+ /**
+ * @deprecated
+ * @param spkac
+ * @returns The challenge component of the `spkac` data structure,
+ * which includes a public key and a challenge.
+ */
+ exportChallenge(spkac: BinaryLike): Buffer;
+ /**
+ * @deprecated
+ * @param spkac
+ * @param encoding The encoding of the spkac string.
+ * @returns The public key component of the `spkac` data structure,
+ * which includes a public key and a challenge.
+ */
+ exportPublicKey(spkac: BinaryLike, encoding?: string): Buffer;
+ /**
+ * @deprecated
+ * @param spkac
+ * @returns `true` if the given `spkac` data structure is valid,
+ * `false` otherwise.
+ */
+ verifySpkac(spkac: NodeJS.ArrayBufferView): boolean;
+ }
+ namespace constants {
+ // https://nodejs.org/dist/latest-v20.x/docs/api/crypto.html#crypto-constants
+ const OPENSSL_VERSION_NUMBER: number;
+ /** Applies multiple bug workarounds within OpenSSL. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html for detail. */
+ const SSL_OP_ALL: number;
+ /** Instructs OpenSSL to allow a non-[EC]DHE-based key exchange mode for TLS v1.3 */
+ const SSL_OP_ALLOW_NO_DHE_KEX: number;
+ /** Allows legacy insecure renegotiation between OpenSSL and unpatched clients or servers. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html. */
+ const SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION: number;
+ /** Attempts to use the server's preferences instead of the client's when selecting a cipher. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html. */
+ const SSL_OP_CIPHER_SERVER_PREFERENCE: number;
+ /** Instructs OpenSSL to use Cisco's version identifier of DTLS_BAD_VER. */
+ const SSL_OP_CISCO_ANYCONNECT: number;
+ /** Instructs OpenSSL to turn on cookie exchange. */
+ const SSL_OP_COOKIE_EXCHANGE: number;
+ /** Instructs OpenSSL to add server-hello extension from an early version of the cryptopro draft. */
+ const SSL_OP_CRYPTOPRO_TLSEXT_BUG: number;
+ /** Instructs OpenSSL to disable a SSL 3.0/TLS 1.0 vulnerability workaround added in OpenSSL 0.9.6d. */
+ const SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS: number;
+ /** Allows initial connection to servers that do not support RI. */
+ const SSL_OP_LEGACY_SERVER_CONNECT: number;
+ /** Instructs OpenSSL to disable support for SSL/TLS compression. */
+ const SSL_OP_NO_COMPRESSION: number;
+ /** Instructs OpenSSL to disable encrypt-then-MAC. */
+ const SSL_OP_NO_ENCRYPT_THEN_MAC: number;
+ const SSL_OP_NO_QUERY_MTU: number;
+ /** Instructs OpenSSL to disable renegotiation. */
+ const SSL_OP_NO_RENEGOTIATION: number;
+ /** Instructs OpenSSL to always start a new session when performing renegotiation. */
+ const SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION: number;
+ /** Instructs OpenSSL to turn off SSL v2 */
+ const SSL_OP_NO_SSLv2: number;
+ /** Instructs OpenSSL to turn off SSL v3 */
+ const SSL_OP_NO_SSLv3: number;
+ /** Instructs OpenSSL to disable use of RFC4507bis tickets. */
+ const SSL_OP_NO_TICKET: number;
+ /** Instructs OpenSSL to turn off TLS v1 */
+ const SSL_OP_NO_TLSv1: number;
+ /** Instructs OpenSSL to turn off TLS v1.1 */
+ const SSL_OP_NO_TLSv1_1: number;
+ /** Instructs OpenSSL to turn off TLS v1.2 */
+ const SSL_OP_NO_TLSv1_2: number;
+ /** Instructs OpenSSL to turn off TLS v1.3 */
+ const SSL_OP_NO_TLSv1_3: number;
+ /** Instructs OpenSSL server to prioritize ChaCha20-Poly1305 when the client does. This option has no effect if `SSL_OP_CIPHER_SERVER_PREFERENCE` is not enabled. */
+ const SSL_OP_PRIORITIZE_CHACHA: number;
+ /** Instructs OpenSSL to disable version rollback attack detection. */
+ const SSL_OP_TLS_ROLLBACK_BUG: number;
+ const ENGINE_METHOD_RSA: number;
+ const ENGINE_METHOD_DSA: number;
+ const ENGINE_METHOD_DH: number;
+ const ENGINE_METHOD_RAND: number;
+ const ENGINE_METHOD_EC: number;
+ const ENGINE_METHOD_CIPHERS: number;
+ const ENGINE_METHOD_DIGESTS: number;
+ const ENGINE_METHOD_PKEY_METHS: number;
+ const ENGINE_METHOD_PKEY_ASN1_METHS: number;
+ const ENGINE_METHOD_ALL: number;
+ const ENGINE_METHOD_NONE: number;
+ const DH_CHECK_P_NOT_SAFE_PRIME: number;
+ const DH_CHECK_P_NOT_PRIME: number;
+ const DH_UNABLE_TO_CHECK_GENERATOR: number;
+ const DH_NOT_SUITABLE_GENERATOR: number;
+ const RSA_PKCS1_PADDING: number;
+ const RSA_SSLV23_PADDING: number;
+ const RSA_NO_PADDING: number;
+ const RSA_PKCS1_OAEP_PADDING: number;
+ const RSA_X931_PADDING: number;
+ const RSA_PKCS1_PSS_PADDING: number;
+ /** Sets the salt length for RSA_PKCS1_PSS_PADDING to the digest size when signing or verifying. */
+ const RSA_PSS_SALTLEN_DIGEST: number;
+ /** Sets the salt length for RSA_PKCS1_PSS_PADDING to the maximum permissible value when signing data. */
+ const RSA_PSS_SALTLEN_MAX_SIGN: number;
+ /** Causes the salt length for RSA_PKCS1_PSS_PADDING to be determined automatically when verifying a signature. */
+ const RSA_PSS_SALTLEN_AUTO: number;
+ const POINT_CONVERSION_COMPRESSED: number;
+ const POINT_CONVERSION_UNCOMPRESSED: number;
+ const POINT_CONVERSION_HYBRID: number;
+ /** Specifies the built-in default cipher list used by Node.js (colon-separated values). */
+ const defaultCoreCipherList: string;
+ /** Specifies the active default cipher list used by the current Node.js process (colon-separated values). */
+ const defaultCipherList: string;
+ }
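+ /*
+ * A small sketch of how these flags are typically consumed: OR-ed together and
+ * passed to `tls.createServer()` via `secureOptions`. The `key`/`cert` a real
+ * server needs are omitted here.
+ *
+ * ```js
+ * const { constants } = await import('node:crypto');
+ * const { createServer } = await import('node:tls');
+ *
+ * // Disable the legacy protocol versions by combining the option flags.
+ * const secureOptions = constants.SSL_OP_NO_TLSv1 | constants.SSL_OP_NO_TLSv1_1;
+ * const server = createServer({ secureOptions }); // plus key, cert, ... in a real server
+ *
+ * console.log(constants.defaultCipherList.split(':').length);
+ * // Prints: the number of entries in the active default cipher list.
+ * ```
+ */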
+ interface HashOptions extends stream.TransformOptions {
+ /**
+ * For XOF hash functions such as `shake256`, the
+ * outputLength option can be used to specify the desired output length in bytes.
+ */
+ outputLength?: number | undefined;
+ }
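+ /*
+ * A brief sketch of the `outputLength` option with an XOF digest (assuming the
+ * bundled OpenSSL exposes `shake256`):
+ *
+ * ```js
+ * const { createHash } = await import('node:crypto');
+ *
+ * // Request a 16-byte digest from the extendable-output function shake256.
+ * const digest = createHash('shake256', { outputLength: 16 })
+ *   .update('some data to hash')
+ *   .digest('hex');
+ * console.log(digest.length);
+ * // Prints: 32 (16 bytes rendered as hex)
+ * ```
+ */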
+ /** @deprecated since v10.0.0 */
+ const fips: boolean;
+ /**
+ * Creates and returns a `Hash` object that can be used to generate hash digests
+ * using the given `algorithm`. Optional `options` argument controls stream
+ * behavior. For XOF hash functions such as `'shake256'`, the `outputLength` option
+ * can be used to specify the desired output length in bytes.
+ *
+ * The `algorithm` is dependent on the available algorithms supported by the
+ * version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc.
+ * On recent releases of OpenSSL, `openssl list -digest-algorithms` will
+ * display the available digest algorithms.
+ *
+ * Example: generating the sha256 sum of a file
+ *
+ * ```js
+ * import {
+ * createReadStream,
+ * } from 'node:fs';
+ * import { argv } from 'node:process';
+ * const {
+ * createHash,
+ * } = await import('node:crypto');
+ *
+ * const filename = argv[2];
+ *
+ * const hash = createHash('sha256');
+ *
+ * const input = createReadStream(filename);
+ * input.on('readable', () => {
+ * // Only one element is going to be produced by the
+ * // hash stream.
+ * const data = input.read();
+ * if (data)
+ * hash.update(data);
+ * else {
+ * console.log(`${hash.digest('hex')} ${filename}`);
+ * }
+ * });
+ * ```
+ * @since v0.1.92
+ * @param options `stream.transform` options
+ */
+ function createHash(algorithm: string, options?: HashOptions): Hash;
+ /**
+ * Creates and returns an `Hmac` object that uses the given `algorithm` and `key`.
+ * Optional `options` argument controls stream behavior.
+ *
+ * The `algorithm` is dependent on the available algorithms supported by the
+ * version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc.
+ * On recent releases of OpenSSL, `openssl list -digest-algorithms` will
+ * display the available digest algorithms.
+ *
+ * The `key` is the HMAC key used to generate the cryptographic HMAC hash. If it is
+ * a `KeyObject`, its type must be `secret`. If it is a string, please consider `caveats when using strings as inputs to cryptographic APIs`. If it was
+ * obtained from a cryptographically secure source of entropy, such as {@link randomBytes} or {@link generateKey}, its length should not
+ * exceed the block size of `algorithm` (e.g., 512 bits for SHA-256).
+ *
+ * Example: generating the sha256 HMAC of a file
+ *
+ * ```js
+ * import {
+ * createReadStream,
+ * } from 'node:fs';
+ * import { argv } from 'node:process';
+ * const {
+ * createHmac,
+ * } = await import('node:crypto');
+ *
+ * const filename = argv[2];
+ *
+ * const hmac = createHmac('sha256', 'a secret');
+ *
+ * const input = createReadStream(filename);
+ * input.on('readable', () => {
+ * // Only one element is going to be produced by the
+ * // hash stream.
+ * const data = input.read();
+ * if (data)
+ * hmac.update(data);
+ * else {
+ * console.log(`${hmac.digest('hex')} ${filename}`);
+ * }
+ * });
+ * ```
+ * @since v0.1.94
+ * @param options `stream.transform` options
+ */
+ function createHmac(algorithm: string, key: BinaryLike | KeyObject, options?: stream.TransformOptions): Hmac;
+ // https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings
+ type BinaryToTextEncoding = "base64" | "base64url" | "hex" | "binary";
+ type CharacterEncoding = "utf8" | "utf-8" | "utf16le" | "utf-16le" | "latin1";
+ type LegacyCharacterEncoding = "ascii" | "binary" | "ucs2" | "ucs-2";
+ type Encoding = BinaryToTextEncoding | CharacterEncoding | LegacyCharacterEncoding;
+ type ECDHKeyFormat = "compressed" | "uncompressed" | "hybrid";
+ /**
+ * The `Hash` class is a utility for creating hash digests of data. It can be
+ * used in one of two ways:
+ *
+ * * As a `stream` that is both readable and writable, where data is written
+ * to produce a computed hash digest on the readable side, or
+ * * Using the `hash.update()` and `hash.digest()` methods to produce the
+ * computed hash.
+ *
+ * The {@link createHash} method is used to create `Hash` instances. `Hash` objects are not to be created directly using the `new` keyword.
+ *
+ * Example: Using `Hash` objects as streams:
+ *
+ * ```js
+ * const {
+ * createHash,
+ * } = await import('node:crypto');
+ *
+ * const hash = createHash('sha256');
+ *
+ * hash.on('readable', () => {
+ * // Only one element is going to be produced by the
+ * // hash stream.
+ * const data = hash.read();
+ * if (data) {
+ * console.log(data.toString('hex'));
+ * // Prints:
+ * // 6a2da20943931e9834fc12cfe5bb47bbd9ae43489a30726962b576f4e3993e50
+ * }
+ * });
+ *
+ * hash.write('some data to hash');
+ * hash.end();
+ * ```
+ *
+ * Example: Using `Hash` and piped streams:
+ *
+ * ```js
+ * import { createReadStream } from 'node:fs';
+ * import { stdout } from 'node:process';
+ * const { createHash } = await import('node:crypto');
+ *
+ * const hash = createHash('sha256');
+ *
+ * const input = createReadStream('test.js');
+ * input.pipe(hash).setEncoding('hex').pipe(stdout);
+ * ```
+ *
+ * Example: Using the `hash.update()` and `hash.digest()` methods:
+ *
+ * ```js
+ * const {
+ * createHash,
+ * } = await import('node:crypto');
+ *
+ * const hash = createHash('sha256');
+ *
+ * hash.update('some data to hash');
+ * console.log(hash.digest('hex'));
+ * // Prints:
+ * // 6a2da20943931e9834fc12cfe5bb47bbd9ae43489a30726962b576f4e3993e50
+ * ```
+ * @since v0.1.92
+ */
+ class Hash extends stream.Transform {
+ private constructor();
+ /**
+ * Creates a new `Hash` object that contains a deep copy of the internal state
+ * of the current `Hash` object.
+ *
+ * The optional `options` argument controls stream behavior. For XOF hash
+ * functions such as `'shake256'`, the `outputLength` option can be used to
+ * specify the desired output length in bytes.
+ *
+ * An error is thrown when an attempt is made to copy the `Hash` object after
+ * its `hash.digest()` method has been called.
+ *
+ * ```js
+ * // Calculate a rolling hash.
+ * const {
+ * createHash,
+ * } = await import('node:crypto');
+ *
+ * const hash = createHash('sha256');
+ *
+ * hash.update('one');
+ * console.log(hash.copy().digest('hex'));
+ *
+ * hash.update('two');
+ * console.log(hash.copy().digest('hex'));
+ *
+ * hash.update('three');
+ * console.log(hash.copy().digest('hex'));
+ *
+ * // Etc.
+ * ```
+ * @since v13.1.0
+ * @param options `stream.transform` options
+ */
+ copy(options?: HashOptions): Hash;
+ /**
+ * Updates the hash content with the given `data`, the encoding of which
+ * is given in `inputEncoding`.
+ * If `encoding` is not provided, and the `data` is a string, an
+ * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or `DataView`, then `inputEncoding` is ignored.
+ *
+ * This can be called many times with new data as it is streamed.
+ * @since v0.1.92
+ * @param inputEncoding The `encoding` of the `data` string.
+ */
+ update(data: BinaryLike): Hash;
+ update(data: string, inputEncoding: Encoding): Hash;
+ /**
+ * Calculates the digest of all of the data passed to be hashed (using the `hash.update()` method).
+ * If `encoding` is provided a string will be returned; otherwise
+ * a `Buffer` is returned.
+ *
+ * The `Hash` object can not be used again after the `hash.digest()` method has been
+ * called. Multiple calls will cause an error to be thrown.
+ * @since v0.1.92
+ * @param encoding The `encoding` of the return value.
+ */
+ digest(): Buffer;
+ digest(encoding: BinaryToTextEncoding): string;
+ }
+ /**
+ * The `Hmac` class is a utility for creating cryptographic HMAC digests. It can
+ * be used in one of two ways:
+ *
+ * * As a `stream` that is both readable and writable, where data is written
+ * to produce a computed HMAC digest on the readable side, or
+ * * Using the `hmac.update()` and `hmac.digest()` methods to produce the
+ * computed HMAC digest.
+ *
+ * The {@link createHmac} method is used to create `Hmac` instances. `Hmac` objects are not to be created directly using the `new` keyword.
+ *
+ * Example: Using `Hmac` objects as streams:
+ *
+ * ```js
+ * const {
+ * createHmac,
+ * } = await import('node:crypto');
+ *
+ * const hmac = createHmac('sha256', 'a secret');
+ *
+ * hmac.on('readable', () => {
+ * // Only one element is going to be produced by the
+ * // hash stream.
+ * const data = hmac.read();
+ * if (data) {
+ * console.log(data.toString('hex'));
+ * // Prints:
+ * // 7fd04df92f636fd450bc841c9418e5825c17f33ad9c87c518115a45971f7f77e
+ * }
+ * });
+ *
+ * hmac.write('some data to hash');
+ * hmac.end();
+ * ```
+ *
+ * Example: Using `Hmac` and piped streams:
+ *
+ * ```js
+ * import { createReadStream } from 'node:fs';
+ * import { stdout } from 'node:process';
+ * const {
+ * createHmac,
+ * } = await import('node:crypto');
+ *
+ * const hmac = createHmac('sha256', 'a secret');
+ *
+ * const input = createReadStream('test.js');
+ * input.pipe(hmac).pipe(stdout);
+ * ```
+ *
+ * Example: Using the `hmac.update()` and `hmac.digest()` methods:
+ *
+ * ```js
+ * const {
+ * createHmac,
+ * } = await import('node:crypto');
+ *
+ * const hmac = createHmac('sha256', 'a secret');
+ *
+ * hmac.update('some data to hash');
+ * console.log(hmac.digest('hex'));
+ * // Prints:
+ * // 7fd04df92f636fd450bc841c9418e5825c17f33ad9c87c518115a45971f7f77e
+ * ```
+ * @since v0.1.94
+ */
+ class Hmac extends stream.Transform {
+ private constructor();
+ /**
+ * Updates the `Hmac` content with the given `data`, the encoding of which
+ * is given in `inputEncoding`.
+ * If `encoding` is not provided, and the `data` is a string, an
+ * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or `DataView`, then `inputEncoding` is ignored.
+ *
+ * This can be called many times with new data as it is streamed.
+ * @since v0.1.94
+ * @param inputEncoding The `encoding` of the `data` string.
+ */
+ update(data: BinaryLike): Hmac;
+ update(data: string, inputEncoding: Encoding): Hmac;
+ /**
+ * Calculates the HMAC digest of all of the data passed using `hmac.update()`.
+ * If `encoding` is
+ * provided a string is returned; otherwise a `Buffer` is returned.
+ *
+ * The `Hmac` object can not be used again after `hmac.digest()` has been
+ * called. Multiple calls to `hmac.digest()` will result in an error being thrown.
+ * @since v0.1.94
+ * @param encoding The `encoding` of the return value.
+ */
+ digest(): Buffer;
+ digest(encoding: BinaryToTextEncoding): string;
+ }
+ type KeyObjectType = "secret" | "public" | "private";
+ interface KeyExportOptions<T extends KeyFormat> {
+ type: "pkcs1" | "spki" | "pkcs8" | "sec1";
+ format: T;
+ cipher?: string | undefined;
+ passphrase?: string | Buffer | undefined;
+ }
+ interface JwkKeyExportOptions {
+ format: "jwk";
+ }
+ interface JsonWebKey {
+ crv?: string | undefined;
+ d?: string | undefined;
+ dp?: string | undefined;
+ dq?: string | undefined;
+ e?: string | undefined;
+ k?: string | undefined;
+ kty?: string | undefined;
+ n?: string | undefined;
+ p?: string | undefined;
+ q?: string | undefined;
+ qi?: string | undefined;
+ x?: string | undefined;
+ y?: string | undefined;
+ [key: string]: unknown;
+ }
+ interface AsymmetricKeyDetails {
+ /**
+ * Key size in bits (RSA, DSA).
+ */
+ modulusLength?: number | undefined;
+ /**
+ * Public exponent (RSA).
+ */
+ publicExponent?: bigint | undefined;
+ /**
+ * Name of the message digest (RSA-PSS).
+ */
+ hashAlgorithm?: string | undefined;
+ /**
+ * Name of the message digest used by MGF1 (RSA-PSS).
+ */
+ mgf1HashAlgorithm?: string | undefined;
+ /**
+ * Minimal salt length in bytes (RSA-PSS).
+ */
+ saltLength?: number | undefined;
+ /**
+ * Size of q in bits (DSA).
+ */
+ divisorLength?: number | undefined;
+ /**
+ * Name of the curve (EC).
+ */
+ namedCurve?: string | undefined;
+ }
+ /**
+ * Node.js uses a `KeyObject` class to represent a symmetric or asymmetric key,
+ * and each kind of key exposes different functions. The {@link createSecretKey}, {@link createPublicKey} and {@link createPrivateKey} methods are used to create `KeyObject` instances. `KeyObject`
+ * objects are not to be created directly using the `new` keyword.
+ *
+ * Most applications should consider using the new `KeyObject` API instead of
+ * passing keys as strings or `Buffer`s due to improved security features.
+ *
+ * `KeyObject` instances can be passed to other threads via `postMessage()`.
+ * The receiver obtains a cloned `KeyObject`, and the `KeyObject` does not need to
+ * be listed in the `transferList` argument.
+ * @since v11.6.0
+ */
+ class KeyObject {
+ private constructor();
+ /**
+ * Example: Converting a `CryptoKey` instance to a `KeyObject`:
+ *
+ * ```js
+ * const { KeyObject } = await import('node:crypto');
+ * const { subtle } = globalThis.crypto;
+ *
+ * const key = await subtle.generateKey({
+ * name: 'HMAC',
+ * hash: 'SHA-256',
+ * length: 256,
+ * }, true, ['sign', 'verify']);
+ *
+ * const keyObject = KeyObject.from(key);
+ * console.log(keyObject.symmetricKeySize);
+ * // Prints: 32 (symmetric key size in bytes)
+ * ```
+ * @since v15.0.0
+ */
+ static from(key: webcrypto.CryptoKey): KeyObject;
+ /**
+ * For asymmetric keys, this property represents the type of the key. Supported key
+ * types are:
+ *
+ * * `'rsa'` (OID 1.2.840.113549.1.1.1)
+ * * `'rsa-pss'` (OID 1.2.840.113549.1.1.10)
+ * * `'dsa'` (OID 1.2.840.10040.4.1)
+ * * `'ec'` (OID 1.2.840.10045.2.1)
+ * * `'x25519'` (OID 1.3.101.110)
+ * * `'x448'` (OID 1.3.101.111)
+ * * `'ed25519'` (OID 1.3.101.112)
+ * * `'ed448'` (OID 1.3.101.113)
+ * * `'dh'` (OID 1.2.840.113549.1.3.1)
+ *
+ * This property is `undefined` for unrecognized `KeyObject` types and symmetric
+ * keys.
+ * @since v11.6.0
+ */
+ asymmetricKeyType?: KeyType | undefined;
+ /**
+ * For asymmetric keys, this property represents the size of the embedded key in
+ * bytes. This property is `undefined` for symmetric keys.
+ */
+ asymmetricKeySize?: number | undefined;
+ /**
+ * This property exists only on asymmetric keys. Depending on the type of the key,
+ * this object contains information about the key. None of the information obtained
+ * through this property can be used to uniquely identify a key or to compromise
+ * the security of the key.
+ *
+ * For RSA-PSS keys, if the key material contains a `RSASSA-PSS-params` sequence,
+ * the `hashAlgorithm`, `mgf1HashAlgorithm`, and `saltLength` properties will be
+ * set.
+ *
+ * Other key details might be exposed via this API using additional attributes.
+ * @since v15.7.0
+ */
+ asymmetricKeyDetails?: AsymmetricKeyDetails | undefined;
+ /**
+ * For symmetric keys, the following encoding options can be used:
+ *
+ * For public keys, the following encoding options can be used:
+ *
+ * For private keys, the following encoding options can be used:
+ *
+ * The result type depends on the selected encoding format, when PEM the
+ * result is a string, when DER it will be a buffer containing the data
+ * encoded as DER, when [JWK](https://tools.ietf.org/html/rfc7517) it will be an object.
+ *
+ * When [JWK](https://tools.ietf.org/html/rfc7517) encoding format was selected, all other encoding options are
+ * ignored.
+ *
+ * PKCS#1, SEC1, and PKCS#8 type keys can be encrypted by using a combination of
+ * the `cipher` and `format` options. The PKCS#8 `type` can be used with any `format` to encrypt any key algorithm (RSA, EC, or DH) by specifying a `cipher`. PKCS#1 and SEC1 can only be
+ * encrypted by specifying a `cipher` when the PEM `format` is used. For maximum compatibility, use PKCS#8 for
+ * encrypted private keys. Since PKCS#8 defines its own
+ * encryption mechanism, PEM-level encryption is not supported when encrypting
+ * a PKCS#8 key. See [RFC 5208](https://www.rfc-editor.org/rfc/rfc5208.txt) for PKCS#8 encryption and [RFC 1421](https://www.rfc-editor.org/rfc/rfc1421.txt) for
+ * PKCS#1 and SEC1 encryption.
+ * @since v11.6.0
+ */
+ export(options: KeyExportOptions<"pem">): string | Buffer;
+ export(options?: KeyExportOptions<"der">): Buffer;
+ export(options?: JwkKeyExportOptions): JsonWebKey;
+ /**
+ * Returns `true` or `false` depending on whether the keys have exactly the same
+ * type, value, and parameters. This method is not [constant time](https://en.wikipedia.org/wiki/Timing_attack).
+ * @since v17.7.0, v16.15.0
+ * @param otherKeyObject A `KeyObject` with which to compare `keyObject`.
+ */
+ equals(otherKeyObject: KeyObject): boolean;
+ /**
+ * For secret keys, this property represents the size of the key in bytes. This
+ * property is `undefined` for asymmetric keys.
+ * @since v11.6.0
+ */
+ symmetricKeySize?: number | undefined;
+ /**
+ * Depending on the type of this `KeyObject`, this property is either `'secret'` for secret (symmetric) keys, `'public'` for public (asymmetric) keys
+ * or `'private'` for private (asymmetric) keys.
+ * @since v11.6.0
+ */
+ type: KeyObjectType;
+ }
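+ /*
+ * A compact sketch of inspecting and exporting `KeyObject`s. Without the
+ * `publicKeyEncoding`/`privateKeyEncoding` options, `generateKeyPairSync()` returns
+ * `KeyObject` instances directly.
+ *
+ * ```js
+ * const { generateKeyPairSync } = await import('node:crypto');
+ *
+ * const { publicKey, privateKey } = generateKeyPairSync('rsa', {
+ *   modulusLength: 2048,
+ * });
+ *
+ * console.log(publicKey.type, publicKey.asymmetricKeyType);
+ * // Prints: public rsa
+ * console.log(privateKey.asymmetricKeyDetails.modulusLength);
+ * // Prints: 2048
+ *
+ * // PEM-encoded SPKI for the public key, JWK for the private key.
+ * const pem = publicKey.export({ type: 'spki', format: 'pem' });
+ * const jwk = privateKey.export({ format: 'jwk' });
+ * console.log(typeof pem, jwk.kty);
+ * // Prints: string RSA
+ * ```
+ */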
+ type CipherCCMTypes = "aes-128-ccm" | "aes-192-ccm" | "aes-256-ccm" | "chacha20-poly1305";
+ type CipherGCMTypes = "aes-128-gcm" | "aes-192-gcm" | "aes-256-gcm";
+ type CipherOCBTypes = "aes-128-ocb" | "aes-192-ocb" | "aes-256-ocb";
+ type BinaryLike = string | NodeJS.ArrayBufferView;
+ type CipherKey = BinaryLike | KeyObject;
+ interface CipherCCMOptions extends stream.TransformOptions {
+ authTagLength: number;
+ }
+ interface CipherGCMOptions extends stream.TransformOptions {
+ authTagLength?: number | undefined;
+ }
+ interface CipherOCBOptions extends stream.TransformOptions {
+ authTagLength: number;
+ }
+ /**
+ * Creates and returns a `Cipher` object that uses the given `algorithm` and `password`.
+ *
+ * The `options` argument controls stream behavior and is optional except when a
+ * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the `authTagLength` option is required and specifies the length of the
+ * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength` option is not required but can be used to set the length of the authentication
+ * tag that will be returned by `getAuthTag()` and defaults to 16 bytes.
+ * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes.
+ *
+ * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On
+ * recent OpenSSL releases, `openssl list -cipher-algorithms` will
+ * display the available cipher algorithms.
+ *
+ * The `password` is used to derive the cipher key and initialization vector (IV).
+ * The value must be either a `'latin1'` encoded string, a `Buffer`, a `TypedArray`, or a `DataView`.
+ *
+ * **This function is semantically insecure for all**
+ * **supported ciphers and fatally flawed for ciphers in counter mode (such as CTR,**
+ * **GCM, or CCM).**
+ *
+ * The implementation of `crypto.createCipher()` derives keys using the OpenSSL
+ * function [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) with the digest algorithm set to MD5, one
+ * iteration, and no salt. The lack of salt allows dictionary attacks as the same
+ * password always creates the same key. The low iteration count and
+ * non-cryptographically secure hash algorithm allow passwords to be tested very
+ * rapidly.
+ *
+ * In line with OpenSSL's recommendation to use a more modern algorithm instead of [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) it is recommended that
+ * developers derive a key and IV on
+ * their own using {@link scrypt} and to use {@link createCipheriv} to create the `Cipher` object. Users should not use ciphers with counter mode
+ * (e.g. CTR, GCM, or CCM) in `crypto.createCipher()`. A warning is emitted when
+ * they are used in order to avoid the risk of IV reuse that causes
+ * vulnerabilities. For the case when IV is reused in GCM, see [Nonce-Disrespecting Adversaries](https://github.com/nonce-disrespect/nonce-disrespect) for details.
+ * @since v0.1.94
+ * @deprecated Since v10.0.0 - Use {@link createCipheriv} instead.
+ * @param options `stream.transform` options
+ */
+ function createCipher(algorithm: CipherCCMTypes, password: BinaryLike, options: CipherCCMOptions): CipherCCM;
+ /** @deprecated since v10.0.0 use `createCipheriv()` */
+ function createCipher(algorithm: CipherGCMTypes, password: BinaryLike, options?: CipherGCMOptions): CipherGCM;
+ /** @deprecated since v10.0.0 use `createCipheriv()` */
+ function createCipher(algorithm: string, password: BinaryLike, options?: stream.TransformOptions): Cipher;
+ /**
+ * Creates and returns a `Cipher` object, with the given `algorithm`, `key` and
+ * initialization vector (`iv`).
+ *
+ * The `options` argument controls stream behavior and is optional except when a
+ * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the `authTagLength` option is required and specifies the length of the
+ * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength` option is not required but can be used to set the length of the authentication
+ * tag that will be returned by `getAuthTag()` and defaults to 16 bytes.
+ * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes.
+ *
+ * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On
+ * recent OpenSSL releases, `openssl list -cipher-algorithms` will
+ * display the available cipher algorithms.
+ *
+ * The `key` is the raw key used by the `algorithm` and `iv` is an [initialization vector](https://en.wikipedia.org/wiki/Initialization_vector). Both arguments must be `'utf8'` encoded
+ * strings, `Buffers`, `TypedArray`, or `DataView`s. The `key` may optionally be
+ * a `KeyObject` of type `secret`. If the cipher does not need
+ * an initialization vector, `iv` may be `null`.
+ *
+ * When passing strings for `key` or `iv`, please consider `caveats when using strings as inputs to cryptographic APIs`.
+ *
+ * Initialization vectors should be unpredictable and unique; ideally, they will be
+ * cryptographically random. They do not have to be secret: IVs are typically just
+ * added to ciphertext messages unencrypted. It may sound contradictory that
+ * something has to be unpredictable and unique, but does not have to be secret;
+ * remember that an attacker must not be able to predict ahead of time what a
+ * given IV will be.
+ * @since v0.1.94
+ * @param options `stream.transform` options
+ */
+ function createCipheriv(
+ algorithm: CipherCCMTypes,
+ key: CipherKey,
+ iv: BinaryLike,
+ options: CipherCCMOptions,
+ ): CipherCCM;
+ function createCipheriv(
+ algorithm: CipherOCBTypes,
+ key: CipherKey,
+ iv: BinaryLike,
+ options: CipherOCBOptions,
+ ): CipherOCB;
+ function createCipheriv(
+ algorithm: CipherGCMTypes,
+ key: CipherKey,
+ iv: BinaryLike,
+ options?: CipherGCMOptions,
+ ): CipherGCM;
+ function createCipheriv(
+ algorithm: string,
+ key: CipherKey,
+ iv: BinaryLike | null,
+ options?: stream.TransformOptions,
+ ): Cipher;
+ /**
+ * Instances of the `Cipher` class are used to encrypt data. The class can be
+ * used in one of two ways:
+ *
+ * * As a `stream` that is both readable and writable, where plain unencrypted
+ * data is written to produce encrypted data on the readable side, or
+ * * Using the `cipher.update()` and `cipher.final()` methods to produce
+ * the encrypted data.
+ *
+ * The {@link createCipher} or {@link createCipheriv} methods are
+ * used to create `Cipher` instances. `Cipher` objects are not to be created
+ * directly using the `new` keyword.
+ *
+ * Example: Using `Cipher` objects as streams:
+ *
+ * ```js
+ * const {
+ * scrypt,
+ * randomFill,
+ * createCipheriv,
+ * } = await import('node:crypto');
+ *
+ * const algorithm = 'aes-192-cbc';
+ * const password = 'Password used to generate key';
+ *
+ * // First, we'll generate the key. The key length is dependent on the algorithm.
+ * // In this case for aes192, it is 24 bytes (192 bits).
+ * scrypt(password, 'salt', 24, (err, key) => {
+ * if (err) throw err;
+ * // Then, we'll generate a random initialization vector
+ * randomFill(new Uint8Array(16), (err, iv) => {
+ * if (err) throw err;
+ *
+ * // Once we have the key and iv, we can create and use the cipher...
+ * const cipher = createCipheriv(algorithm, key, iv);
+ *
+ * let encrypted = '';
+ * cipher.setEncoding('hex');
+ *
+ * cipher.on('data', (chunk) => encrypted += chunk);
+ * cipher.on('end', () => console.log(encrypted));
+ *
+ * cipher.write('some clear text data');
+ * cipher.end();
+ * });
+ * });
+ * ```
+ *
+ * Example: Using `Cipher` and piped streams:
+ *
+ * ```js
+ * import {
+ * createReadStream,
+ * createWriteStream,
+ * } from 'node:fs';
+ *
+ * import {
+ * pipeline,
+ * } from 'node:stream';
+ *
+ * const {
+ * scrypt,
+ * randomFill,
+ * createCipheriv,
+ * } = await import('node:crypto');
+ *
+ * const algorithm = 'aes-192-cbc';
+ * const password = 'Password used to generate key';
+ *
+ * // First, we'll generate the key. The key length is dependent on the algorithm.
+ * // In this case for aes192, it is 24 bytes (192 bits).
+ * scrypt(password, 'salt', 24, (err, key) => {
+ * if (err) throw err;
+ * // Then, we'll generate a random initialization vector
+ * randomFill(new Uint8Array(16), (err, iv) => {
+ * if (err) throw err;
+ *
+ * const cipher = createCipheriv(algorithm, key, iv);
+ *
+ * const input = createReadStream('test.js');
+ * const output = createWriteStream('test.enc');
+ *
+ * pipeline(input, cipher, output, (err) => {
+ * if (err) throw err;
+ * });
+ * });
+ * });
+ * ```
+ *
+ * Example: Using the `cipher.update()` and `cipher.final()` methods:
+ *
+ * ```js
+ * const {
+ * scrypt,
+ * randomFill,
+ * createCipheriv,
+ * } = await import('node:crypto');
+ *
+ * const algorithm = 'aes-192-cbc';
+ * const password = 'Password used to generate key';
+ *
+ * // First, we'll generate the key. The key length is dependent on the algorithm.
+ * // In this case for aes192, it is 24 bytes (192 bits).
+ * scrypt(password, 'salt', 24, (err, key) => {
+ * if (err) throw err;
+ * // Then, we'll generate a random initialization vector
+ * randomFill(new Uint8Array(16), (err, iv) => {
+ * if (err) throw err;
+ *
+ * const cipher = createCipheriv(algorithm, key, iv);
+ *
+ * let encrypted = cipher.update('some clear text data', 'utf8', 'hex');
+ * encrypted += cipher.final('hex');
+ * console.log(encrypted);
+ * });
+ * });
+ * ```
+ * @since v0.1.94
+ */
+ class Cipher extends stream.Transform {
+ private constructor();
+ /**
+ * Updates the cipher with `data`. If the `inputEncoding` argument is given,
+ * the `data` argument is a string using the specified encoding. If the `inputEncoding` argument is not given, `data` must be a `Buffer`, `TypedArray`, or `DataView`. If `data` is a `Buffer`,
+ * `TypedArray`, or `DataView`, then `inputEncoding` is ignored.
+ *
+ * The `outputEncoding` specifies the output format of the enciphered
+ * data. If the `outputEncoding` is specified, a string using the specified encoding is returned. If no `outputEncoding` is provided, a `Buffer` is returned.
+ *
+ * The `cipher.update()` method can be called multiple times with new data until `cipher.final()` is called. Calling `cipher.update()` after `cipher.final()` will result in an error being
+ * thrown.
+ * @since v0.1.94
+ * @param inputEncoding The `encoding` of the data.
+ * @param outputEncoding The `encoding` of the return value.
+ */
+ update(data: BinaryLike): Buffer;
+ update(data: string, inputEncoding: Encoding): Buffer;
+ update(data: NodeJS.ArrayBufferView, inputEncoding: undefined, outputEncoding: Encoding): string;
+ update(data: string, inputEncoding: Encoding | undefined, outputEncoding: Encoding): string;
+ /**
+ * Once the `cipher.final()` method has been called, the `Cipher` object can no
+ * longer be used to encrypt data. Attempts to call `cipher.final()` more than
+ * once will result in an error being thrown.
+ * @since v0.1.94
+ * @param outputEncoding The `encoding` of the return value.
+ * @return Any remaining enciphered contents. If `outputEncoding` is specified, a string is returned. If an `outputEncoding` is not provided, a {@link Buffer} is returned.
+ */
+ final(): Buffer;
+ final(outputEncoding: BufferEncoding): string;
+ /**
+ * When using block encryption algorithms, the `Cipher` class will automatically
+ * add padding to the input data to the appropriate block size. To disable the
+ * default padding call `cipher.setAutoPadding(false)`.
+ *
+ * When `autoPadding` is `false`, the length of the entire input data must be a
+ * multiple of the cipher's block size or `cipher.final()` will throw an error.
+ * Disabling automatic padding is useful for non-standard padding, for instance
+ * using `0x0` instead of PKCS padding.
+ *
+ * The `cipher.setAutoPadding()` method must be called before `cipher.final()`.
+ * @since v0.7.1
+ * @param [autoPadding=true]
+ * @return for method chaining.
+ */
+ setAutoPadding(autoPadding?: boolean): this;
+ }
+ interface CipherCCM extends Cipher {
+ setAAD(
+ buffer: NodeJS.ArrayBufferView,
+ options: {
+ plaintextLength: number;
+ },
+ ): this;
+ getAuthTag(): Buffer;
+ }
+ interface CipherGCM extends Cipher {
+ setAAD(
+ buffer: NodeJS.ArrayBufferView,
+ options?: {
+ plaintextLength: number;
+ },
+ ): this;
+ getAuthTag(): Buffer;
+ }
+ interface CipherOCB extends Cipher {
+ setAAD(
+ buffer: NodeJS.ArrayBufferView,
+ options?: {
+ plaintextLength: number;
+ },
+ ): this;
+ getAuthTag(): Buffer;
+ }
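+ /*
+ * A sketch of authenticated encryption in GCM mode using the interfaces above:
+ * attach additional authenticated data, then read the tag after `final()`. The
+ * key, IV, ciphertext, tag, and AAD must all reach the decrypting side.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { randomBytes, createCipheriv } = await import('node:crypto');
+ *
+ * const key = randomBytes(32); // 256-bit key for aes-256-gcm
+ * const iv = randomBytes(12);  // a 96-bit IV is conventional for GCM
+ *
+ * const cipher = createCipheriv('aes-256-gcm', key, iv);
+ * cipher.setAAD(Buffer.from('header'));
+ *
+ * const ciphertext = Buffer.concat([
+ *   cipher.update('some clear text data', 'utf8'),
+ *   cipher.final(),
+ * ]);
+ * const authTag = cipher.getAuthTag(); // 16 bytes by default
+ * ```
+ */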
+ /**
+ * Creates and returns a `Decipher` object that uses the given `algorithm` and `password` (key).
+ *
+ * The `options` argument controls stream behavior and is optional except when a
+ * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the `authTagLength` option is required and specifies the length of the
+ * authentication tag in bytes, see `CCM mode`.
+ * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes.
+ *
+ * **This function is semantically insecure for all**
+ * **supported ciphers and fatally flawed for ciphers in counter mode (such as CTR,**
+ * **GCM, or CCM).**
+ *
+ * The implementation of `crypto.createDecipher()` derives keys using the OpenSSL
+ * function [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) with the digest algorithm set to MD5, one
+ * iteration, and no salt. The lack of salt allows dictionary attacks as the same
+ * password always creates the same key. The low iteration count and
+ * non-cryptographically secure hash algorithm allow passwords to be tested very
+ * rapidly.
+ *
+ * In line with OpenSSL's recommendation to use a more modern algorithm instead of [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) it is recommended that
+ * developers derive a key and IV on
+ * their own using {@link scrypt} and to use {@link createDecipheriv} to create the `Decipher` object.
+ * @since v0.1.94
+ * @deprecated Since v10.0.0 - Use {@link createDecipheriv} instead.
+ * @param options `stream.transform` options
+ */
+ function createDecipher(algorithm: CipherCCMTypes, password: BinaryLike, options: CipherCCMOptions): DecipherCCM;
+ /** @deprecated since v10.0.0 use `createDecipheriv()` */
+ function createDecipher(algorithm: CipherGCMTypes, password: BinaryLike, options?: CipherGCMOptions): DecipherGCM;
+ /** @deprecated since v10.0.0 use `createDecipheriv()` */
+ function createDecipher(algorithm: string, password: BinaryLike, options?: stream.TransformOptions): Decipher;
+ /**
+ * Creates and returns a `Decipher` object that uses the given `algorithm`, `key` and initialization vector (`iv`).
+ *
+ * The `options` argument controls stream behavior and is optional except when a
+ * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the `authTagLength` option is required and specifies the length of the
+ * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength` option is not required but can be used to restrict accepted authentication tags
+ * to those with the specified length.
+ * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes.
+ *
+ * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On
+ * recent OpenSSL releases, `openssl list -cipher-algorithms` will
+ * display the available cipher algorithms.
+ *
+ * The `key` is the raw key used by the `algorithm` and `iv` is an [initialization vector](https://en.wikipedia.org/wiki/Initialization_vector). Both arguments must be `'utf8'` encoded
+ * strings, `Buffers`, `TypedArray`, or `DataView`s. The `key` may optionally be
+ * a `KeyObject` of type `secret`. If the cipher does not need
+ * an initialization vector, `iv` may be `null`.
+ *
+ * When passing strings for `key` or `iv`, please consider `caveats when using strings as inputs to cryptographic APIs`.
+ *
+ * Initialization vectors should be unpredictable and unique; ideally, they will be
+ * cryptographically random. They do not have to be secret: IVs are typically just
+ * added to ciphertext messages unencrypted. It may sound contradictory that
+ * something has to be unpredictable and unique, but does not have to be secret;
+ * remember that an attacker must not be able to predict ahead of time what a given
+ * IV will be.
+ * @since v0.1.94
+ * @param options `stream.transform` options
+ */
+ function createDecipheriv(
+ algorithm: CipherCCMTypes,
+ key: CipherKey,
+ iv: BinaryLike,
+ options: CipherCCMOptions,
+ ): DecipherCCM;
+ function createDecipheriv(
+ algorithm: CipherOCBTypes,
+ key: CipherKey,
+ iv: BinaryLike,
+ options: CipherOCBOptions,
+ ): DecipherOCB;
+ function createDecipheriv(
+ algorithm: CipherGCMTypes,
+ key: CipherKey,
+ iv: BinaryLike,
+ options?: CipherGCMOptions,
+ ): DecipherGCM;
+ function createDecipheriv(
+ algorithm: string,
+ key: CipherKey,
+ iv: BinaryLike | null,
+ options?: stream.TransformOptions,
+ ): Decipher;
+ /**
+ * Instances of the `Decipher` class are used to decrypt data. The class can be
+ * used in one of two ways:
+ *
+ * * As a `stream` that is both readable and writable, where plain encrypted
+ * data is written to produce unencrypted data on the readable side, or
+ * * Using the `decipher.update()` and `decipher.final()` methods to
+ * produce the unencrypted data.
+ *
+ * The {@link createDecipher} or {@link createDecipheriv} methods are
+ * used to create `Decipher` instances. `Decipher` objects are not to be created
+ * directly using the `new` keyword.
+ *
+ * Example: Using `Decipher` objects as streams:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const {
+ * scryptSync,
+ * createDecipheriv,
+ * } = await import('node:crypto');
+ *
+ * const algorithm = 'aes-192-cbc';
+ * const password = 'Password used to generate key';
+ * // Key length is dependent on the algorithm. In this case for aes192, it is
+ * // 24 bytes (192 bits).
+ * // Use the async `crypto.scrypt()` instead.
+ * const key = scryptSync(password, 'salt', 24);
+ * // The IV is usually passed along with the ciphertext.
+ * const iv = Buffer.alloc(16, 0); // Initialization vector.
+ *
+ * const decipher = createDecipheriv(algorithm, key, iv);
+ *
+ * let decrypted = '';
+ * decipher.on('readable', () => {
+ * let chunk;
+ * while (null !== (chunk = decipher.read())) {
+ * decrypted += chunk.toString('utf8');
+ * }
+ * });
+ * decipher.on('end', () => {
+ * console.log(decrypted);
+ * // Prints: some clear text data
+ * });
+ *
+ * // Encrypted with same algorithm, key and iv.
+ * const encrypted =
+ * 'e5f79c5915c02171eec6b212d5520d44480993d7d622a7c4c2da32f6efda0ffa';
+ * decipher.write(encrypted, 'hex');
+ * decipher.end();
+ * ```
+ *
+ * Example: Using `Decipher` and piped streams:
+ *
+ * ```js
+ * import {
+ * createReadStream,
+ * createWriteStream,
+ * } from 'node:fs';
+ * import { Buffer } from 'node:buffer';
+ * const {
+ * scryptSync,
+ * createDecipheriv,
+ * } = await import('node:crypto');
+ *
+ * const algorithm = 'aes-192-cbc';
+ * const password = 'Password used to generate key';
+ * // Use the async `crypto.scrypt()` instead.
+ * const key = scryptSync(password, 'salt', 24);
+ * // The IV is usually passed along with the ciphertext.
+ * const iv = Buffer.alloc(16, 0); // Initialization vector.
+ *
+ * const decipher = createDecipheriv(algorithm, key, iv);
+ *
+ * const input = createReadStream('test.enc');
+ * const output = createWriteStream('test.js');
+ *
+ * input.pipe(decipher).pipe(output);
+ * ```
+ *
+ * Example: Using the `decipher.update()` and `decipher.final()` methods:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const {
+ * scryptSync,
+ * createDecipheriv,
+ * } = await import('node:crypto');
+ *
+ * const algorithm = 'aes-192-cbc';
+ * const password = 'Password used to generate key';
+ * // Use the async `crypto.scrypt()` instead.
+ * const key = scryptSync(password, 'salt', 24);
+ * // The IV is usually passed along with the ciphertext.
+ * const iv = Buffer.alloc(16, 0); // Initialization vector.
+ *
+ * const decipher = createDecipheriv(algorithm, key, iv);
+ *
+ * // Encrypted using same algorithm, key and iv.
+ * const encrypted =
+ * 'e5f79c5915c02171eec6b212d5520d44480993d7d622a7c4c2da32f6efda0ffa';
+ * let decrypted = decipher.update(encrypted, 'hex', 'utf8');
+ * decrypted += decipher.final('utf8');
+ * console.log(decrypted);
+ * // Prints: some clear text data
+ * ```
+ * @since v0.1.94
+ */
+ class Decipher extends stream.Transform {
+ private constructor();
+ /**
+ * Updates the decipher with `data`. If the `inputEncoding` argument is given,
+ * the `data` argument is a string using the specified encoding. If the `inputEncoding` argument is not given, `data` must be a `Buffer`. If `data` is a `Buffer` then `inputEncoding` is
+ * ignored.
+ *
+ * The `outputEncoding` specifies the output format of the enciphered
+ * data. If the `outputEncoding` is specified, a string using the specified encoding is returned. If no `outputEncoding` is provided, a `Buffer` is returned.
+ *
+ * The `decipher.update()` method can be called multiple times with new data until `decipher.final()` is called. Calling `decipher.update()` after `decipher.final()` will result in an error
+ * being thrown.
+ * @since v0.1.94
+ * @param inputEncoding The `encoding` of the `data` string.
+ * @param outputEncoding The `encoding` of the return value.
+ */
+ update(data: NodeJS.ArrayBufferView): Buffer;
+ update(data: string, inputEncoding: Encoding): Buffer;
+ update(data: NodeJS.ArrayBufferView, inputEncoding: undefined, outputEncoding: Encoding): string;
+ update(data: string, inputEncoding: Encoding | undefined, outputEncoding: Encoding): string;
+ /**
+ * Once the `decipher.final()` method has been called, the `Decipher` object can
+ * no longer be used to decrypt data. Attempts to call `decipher.final()` more
+ * than once will result in an error being thrown.
+ * @since v0.1.94
+ * @param outputEncoding The `encoding` of the return value.
+ * @return Any remaining deciphered contents. If `outputEncoding` is specified, a string is returned. If an `outputEncoding` is not provided, a {@link Buffer} is returned.
+ */
+ final(): Buffer;
+ final(outputEncoding: BufferEncoding): string;
+ /**
+ * When data has been encrypted without standard block padding, calling `decipher.setAutoPadding(false)` will disable automatic padding to prevent `decipher.final()` from checking for and
+ * removing padding.
+ *
+ * Turning auto padding off will only work if the input data's length is a
+ * multiple of the cipher's block size.
+ *
+ * The `decipher.setAutoPadding()` method must be called before `decipher.final()`.
+ * @since v0.7.1
+ * @param [autoPadding=true]
+ * @return for method chaining.
+ */
+ setAutoPadding(auto_padding?: boolean): this;
+ }
+ interface DecipherCCM extends Decipher {
+ setAuthTag(buffer: NodeJS.ArrayBufferView): this;
+ setAAD(
+ buffer: NodeJS.ArrayBufferView,
+ options: {
+ plaintextLength: number;
+ },
+ ): this;
+ }
+ interface DecipherGCM extends Decipher {
+ setAuthTag(buffer: NodeJS.ArrayBufferView): this;
+ setAAD(
+ buffer: NodeJS.ArrayBufferView,
+ options?: {
+ plaintextLength: number;
+ },
+ ): this;
+ }
+ interface DecipherOCB extends Decipher {
+ setAuthTag(buffer: NodeJS.ArrayBufferView): this;
+ setAAD(
+ buffer: NodeJS.ArrayBufferView,
+ options?: {
+ plaintextLength: number;
+ },
+ ): this;
+ }
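+ /*
+ * The matching decryption sketch for the GCM encryption sketch further above;
+ * `key`, `iv`, `ciphertext`, and `authTag` are assumed to come from the
+ * encrypting side.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { createDecipheriv } = await import('node:crypto');
+ *
+ * const decipher = createDecipheriv('aes-256-gcm', key, iv);
+ * decipher.setAAD(Buffer.from('header'));
+ * decipher.setAuthTag(authTag);
+ *
+ * const plaintext = Buffer.concat([
+ *   decipher.update(ciphertext),
+ *   decipher.final(), // throws if the tag or the AAD does not match
+ * ]).toString('utf8');
+ * console.log(plaintext);
+ * // Prints: some clear text data
+ * ```
+ */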
+ interface PrivateKeyInput {
+ key: string | Buffer;
+ format?: KeyFormat | undefined;
+ type?: "pkcs1" | "pkcs8" | "sec1" | undefined;
+ passphrase?: string | Buffer | undefined;
+ encoding?: string | undefined;
+ }
+ interface PublicKeyInput {
+ key: string | Buffer;
+ format?: KeyFormat | undefined;
+ type?: "pkcs1" | "spki" | undefined;
+ encoding?: string | undefined;
+ }
+ /**
+ * Asynchronously generates a new random secret key of the given `length`. The `type` will determine which validations will be performed on the `length`.
+ *
+ * ```js
+ * const {
+ * generateKey,
+ * } = await import('node:crypto');
+ *
+ * generateKey('hmac', { length: 512 }, (err, key) => {
+ * if (err) throw err;
+ * console.log(key.export().toString('hex')); // 46e..........620
+ * });
+ * ```
+ *
+ * The size of a generated HMAC key should not exceed the block size of the
+ * underlying hash function. See {@link createHmac} for more information.
+ * @since v15.0.0
+ * @param type The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`.
+ */
+ function generateKey(
+ type: "hmac" | "aes",
+ options: {
+ length: number;
+ },
+ callback: (err: Error | null, key: KeyObject) => void,
+ ): void;
+ /**
+ * Synchronously generates a new random secret key of the given `length`. The `type` will determine which validations will be performed on the `length`.
+ *
+ * ```js
+ * const {
+ * generateKeySync,
+ * } = await import('node:crypto');
+ *
+ * const key = generateKeySync('hmac', { length: 512 });
+ * console.log(key.export().toString('hex')); // e89..........41e
+ * ```
+ *
+ * The size of a generated HMAC key should not exceed the block size of the
+ * underlying hash function. See {@link createHmac} for more information.
+ * @since v15.0.0
+ * @param type The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`.
+ */
+ function generateKeySync(
+ type: "hmac" | "aes",
+ options: {
+ length: number;
+ },
+ ): KeyObject;
+ interface JsonWebKeyInput {
+ key: JsonWebKey;
+ format: "jwk";
+ }
+ /**
+ * Creates and returns a new key object containing a private key. If `key` is a
+ * string or `Buffer`, `format` is assumed to be `'pem'`; otherwise, `key` must be an object with the properties described above.
+ *
+ * If the private key is encrypted, a `passphrase` must be specified. The length
+ * of the passphrase is limited to 1024 bytes.
+ * @since v11.6.0
+ */
+ function createPrivateKey(key: PrivateKeyInput | string | Buffer | JsonWebKeyInput): KeyObject;
+ /**
+ * Creates and returns a new key object containing a public key. If `key` is a
+ * string or `Buffer`, `format` is assumed to be `'pem'`; if `key` is a `KeyObject` with type `'private'`, the public key is derived from the given private key;
+ * otherwise, `key` must be an object with the properties described above.
+ *
+ * If the format is `'pem'`, the `'key'` may also be an X.509 certificate.
+ *
+ * Because public keys can be derived from private keys, a private key may be
+ * passed instead of a public key. In that case, this function behaves as if {@link createPrivateKey} had been called, except that the type of the
+ * returned `KeyObject` will be `'public'` and that the private key cannot be
+ * extracted from the returned `KeyObject`. Similarly, if a `KeyObject` with type `'private'` is given, a new `KeyObject` with type `'public'` will be returned
+ * and it will be impossible to extract the private key from the returned object.
+ * @since v11.6.0
+ */
+ function createPublicKey(key: PublicKeyInput | string | Buffer | KeyObject | JsonWebKeyInput): KeyObject;
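+ /*
+ * A short sketch of deriving a public `KeyObject` from a private one, as described
+ * above:
+ *
+ * ```js
+ * const { generateKeyPairSync, createPublicKey } = await import('node:crypto');
+ *
+ * const { privateKey } = generateKeyPairSync('ec', { namedCurve: 'P-256' });
+ *
+ * const publicKey = createPublicKey(privateKey);
+ * console.log(publicKey.type, publicKey.asymmetricKeyType);
+ * // Prints: public ec
+ * ```
+ */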
+ /**
+ * Creates and returns a new key object containing a secret key for symmetric
+ * encryption or `Hmac`.
+ * @since v11.6.0
+ * @param encoding The string encoding when `key` is a string.
+ */
+ function createSecretKey(key: NodeJS.ArrayBufferView): KeyObject;
+ function createSecretKey(key: string, encoding: BufferEncoding): KeyObject;
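+ /*
+ * A minimal sketch of wrapping random bytes in a secret `KeyObject` and using it
+ * with `createHmac()`:
+ *
+ * ```js
+ * const { createSecretKey, createHmac, randomBytes } = await import('node:crypto');
+ *
+ * const key = createSecretKey(randomBytes(32));
+ * console.log(key.type, key.symmetricKeySize);
+ * // Prints: secret 32
+ *
+ * const mac = createHmac('sha256', key).update('I love cupcakes').digest('hex');
+ * console.log(mac.length);
+ * // Prints: 64
+ * ```
+ */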
+ /**
+ * Creates and returns a `Sign` object that uses the given `algorithm`. Use {@link getHashes} to obtain the names of the available digest algorithms.
+ * Optional `options` argument controls the `stream.Writable` behavior.
+ *
+ * In some cases, a `Sign` instance can be created using the name of a signature
+ * algorithm, such as `'RSA-SHA256'`, instead of a digest algorithm. This will use
+ * the corresponding digest algorithm. This does not work for all signature
+ * algorithms, such as `'ecdsa-with-SHA256'`, so it is best to always use digest
+ * algorithm names.
+ * @since v0.1.92
+ * @param options `stream.Writable` options
+ */
+ function createSign(algorithm: string, options?: stream.WritableOptions): Sign;
+ type DSAEncoding = "der" | "ieee-p1363";
+ interface SigningOptions {
+ /**
+ * @see crypto.constants.RSA_PKCS1_PADDING
+ */
+ padding?: number | undefined;
+ saltLength?: number | undefined;
+ dsaEncoding?: DSAEncoding | undefined;
+ }
+ interface SignPrivateKeyInput extends PrivateKeyInput, SigningOptions {}
+ interface SignKeyObjectInput extends SigningOptions {
+ key: KeyObject;
+ }
+ interface VerifyPublicKeyInput extends PublicKeyInput, SigningOptions {}
+ interface VerifyKeyObjectInput extends SigningOptions {
+ key: KeyObject;
+ }
+ interface VerifyJsonWebKeyInput extends JsonWebKeyInput, SigningOptions {}
+ type KeyLike = string | Buffer | KeyObject;
+ /**
+ * The `Sign` class is a utility for generating signatures. It can be used in one
+ * of two ways:
+ *
+ * * As a writable `stream`, where data to be signed is written and the `sign.sign()` method is used to generate and return the signature, or
+ * * Using the `sign.update()` and `sign.sign()` methods to produce the
+ * signature.
+ *
+ * The {@link createSign} method is used to create `Sign` instances. The
+ * argument is the string name of the hash function to use. `Sign` objects are not
+ * to be created directly using the `new` keyword.
+ *
+ * Example: Using `Sign` and `Verify` objects as streams:
+ *
+ * ```js
+ * const {
+ * generateKeyPairSync,
+ * createSign,
+ * createVerify,
+ * } = await import('node:crypto');
+ *
+ * const { privateKey, publicKey } = generateKeyPairSync('ec', {
+ * namedCurve: 'sect239k1',
+ * });
+ *
+ * const sign = createSign('SHA256');
+ * sign.write('some data to sign');
+ * sign.end();
+ * const signature = sign.sign(privateKey, 'hex');
+ *
+ * const verify = createVerify('SHA256');
+ * verify.write('some data to sign');
+ * verify.end();
+ * console.log(verify.verify(publicKey, signature, 'hex'));
+ * // Prints: true
+ * ```
+ *
+ * Example: Using the `sign.update()` and `verify.update()` methods:
+ *
+ * ```js
+ * const {
+ * generateKeyPairSync,
+ * createSign,
+ * createVerify,
+ * } = await import('node:crypto');
+ *
+ * const { privateKey, publicKey } = generateKeyPairSync('rsa', {
+ * modulusLength: 2048,
+ * });
+ *
+ * const sign = createSign('SHA256');
+ * sign.update('some data to sign');
+ * sign.end();
+ * const signature = sign.sign(privateKey);
+ *
+ * const verify = createVerify('SHA256');
+ * verify.update('some data to sign');
+ * verify.end();
+ * console.log(verify.verify(publicKey, signature));
+ * // Prints: true
+ * ```
+ * @since v0.1.92
+ */
+ class Sign extends stream.Writable {
+ private constructor();
+ /**
+ * Updates the `Sign` content with the given `data`, the encoding of which
+ * is given in `inputEncoding`.
+ * If `inputEncoding` is not provided, and the `data` is a string, an
+ * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or `DataView`, then `inputEncoding` is ignored.
+ *
+ * This can be called many times with new data as it is streamed.
+ * @since v0.1.92
+ * @param inputEncoding The `encoding` of the `data` string.
+ */
+ update(data: BinaryLike): this;
+ update(data: string, inputEncoding: Encoding): this;
+ /**
+ * Calculates the signature on all the data passed through using either `sign.update()` or `sign.write()`.
+ *
+ * If `privateKey` is not a `KeyObject`, this function behaves as if `privateKey` had been passed to {@link createPrivateKey}. If it is an
+ * object, the additional properties described by `SigningOptions` (`padding`, `saltLength`, and `dsaEncoding`) can also be passed.
+ *
+ * If `outputEncoding` is provided a string is returned; otherwise a `Buffer` is returned.
+ *
+ * The `Sign` object cannot be used again after the `sign.sign()` method has been
+ * called. Multiple calls to `sign.sign()` will result in an error being thrown.
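+ *
+ * An illustrative sketch of passing the additional `SigningOptions` properties
+ * together with an RSA key; the chosen padding and salt length are just one
+ * possible configuration:
+ *
+ * ```js
+ * const { constants, createSign, generateKeyPairSync } = await import('node:crypto');
+ *
+ * const { privateKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });
+ *
+ * const sign = createSign('SHA256');
+ * sign.update('some data to sign');
+ * sign.end();
+ * const signature = sign.sign({
+ * key: privateKey,
+ * padding: constants.RSA_PKCS1_PSS_PADDING,
+ * saltLength: constants.RSA_PSS_SALTLEN_DIGEST,
+ * }, 'hex');
+ * ```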
+ * @since v0.1.92
+ */
+ sign(privateKey: KeyLike | SignKeyObjectInput | SignPrivateKeyInput): Buffer;
+ sign(
+ privateKey: KeyLike | SignKeyObjectInput | SignPrivateKeyInput,
+ outputFormat: BinaryToTextEncoding,
+ ): string;
+ }
+ /**
+ * Creates and returns a `Verify` object that uses the given algorithm.
+ * Use {@link getHashes} to obtain an array of names of the available
+ * signing algorithms. Optional `options` argument controls the `stream.Writable` behavior.
+ *
+ * In some cases, a `Verify` instance can be created using the name of a signature
+ * algorithm, such as `'RSA-SHA256'`, instead of a digest algorithm. This will use
+ * the corresponding digest algorithm. This does not work for all signature
+ * algorithms, such as `'ecdsa-with-SHA256'`, so it is best to always use digest
+ * algorithm names.
+ * @since v0.1.92
+ * @param options `stream.Writable` options
+ */
+ function createVerify(algorithm: string, options?: stream.WritableOptions): Verify;
+ /**
+ * The `Verify` class is a utility for verifying signatures. It can be used in one
+ * of two ways:
+ *
+ * * As a writable `stream` where written data is used to validate against the
+ * supplied signature, or
+ * * Using the `verify.update()` and `verify.verify()` methods to verify
+ * the signature.
+ *
+ * The {@link createVerify} method is used to create `Verify` instances. `Verify` objects are not to be created directly using the `new` keyword.
+ *
+ * See `Sign` for examples.
+ * @since v0.1.92
+ */
+ class Verify extends stream.Writable {
+ private constructor();
+ /**
+ * Updates the `Verify` content with the given `data`, the encoding of which
+ * is given in `inputEncoding`.
+ * If `inputEncoding` is not provided, and the `data` is a string, an
+ * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or `DataView`, then `inputEncoding` is ignored.
+ *
+ * This can be called many times with new data as it is streamed.
+ * @since v0.1.92
+ * @param inputEncoding The `encoding` of the `data` string.
+ */
+ update(data: BinaryLike): Verify;
+ update(data: string, inputEncoding: Encoding): Verify;
+ /**
+ * Verifies the provided data using the given `object` and `signature`.
+ *
+ * If `object` is not a `KeyObject`, this function behaves as if `object` had been passed to {@link createPublicKey}. If it is an
+ * object, the additional properties described by `SigningOptions` (`padding`, `saltLength`, and `dsaEncoding`) can also be passed.
+ *
+ * The `signature` argument is the previously calculated signature for the data, in
+ * the `signatureEncoding`.
+ * If a `signatureEncoding` is specified, the `signature` is expected to be a
+ * string; otherwise `signature` is expected to be a `Buffer`, `TypedArray`, or `DataView`.
+ *
+ * The `verify` object cannot be used again after `verify.verify()` has been
+ * called. Multiple calls to `verify.verify()` will result in an error being
+ * thrown.
+ *
+ * Because public keys can be derived from private keys, a private key may
+ * be passed instead of a public key.
+ * @since v0.1.92
+ */
+ verify(
+ object: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput,
+ signature: NodeJS.ArrayBufferView,
+ ): boolean;
+ verify(
+ object: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput,
+ signature: string,
+ signature_format?: BinaryToTextEncoding,
+ ): boolean;
+ }
+ /**
+ * Creates a `DiffieHellman` key exchange object using the supplied `prime` and an
+ * optional specific `generator`.
+ *
+ * The `generator` argument can be a number, string, or `Buffer`. If `generator` is not specified, the value `2` is used.
+ *
+ * If `primeEncoding` is specified, `prime` is expected to be a string; otherwise
+ * a `Buffer`, `TypedArray`, or `DataView` is expected.
+ *
+ * If `generatorEncoding` is specified, `generator` is expected to be a string;
+ * otherwise a number, `Buffer`, `TypedArray`, or `DataView` is expected.
+ * @since v0.11.12
+ * @param primeEncoding The `encoding` of the `prime` string.
+ * @param [generator=2]
+ * @param generatorEncoding The `encoding` of the `generator` string.
+ */
+ function createDiffieHellman(primeLength: number, generator?: number): DiffieHellman;
+ function createDiffieHellman(
+ prime: ArrayBuffer | NodeJS.ArrayBufferView,
+ generator?: number | ArrayBuffer | NodeJS.ArrayBufferView,
+ ): DiffieHellman;
+ function createDiffieHellman(
+ prime: ArrayBuffer | NodeJS.ArrayBufferView,
+ generator: string,
+ generatorEncoding: BinaryToTextEncoding,
+ ): DiffieHellman;
+ function createDiffieHellman(
+ prime: string,
+ primeEncoding: BinaryToTextEncoding,
+ generator?: number | ArrayBuffer | NodeJS.ArrayBufferView,
+ ): DiffieHellman;
+ function createDiffieHellman(
+ prime: string,
+ primeEncoding: BinaryToTextEncoding,
+ generator: string,
+ generatorEncoding: BinaryToTextEncoding,
+ ): DiffieHellman;
+ /**
+ * The `DiffieHellman` class is a utility for creating Diffie-Hellman key
+ * exchanges.
+ *
+ * Instances of the `DiffieHellman` class can be created using the {@link createDiffieHellman} function.
+ *
+ * ```js
+ * import assert from 'node:assert';
+ *
+ * const {
+ * createDiffieHellman,
+ * } = await import('node:crypto');
+ *
+ * // Generate Alice's keys...
+ * const alice = createDiffieHellman(2048);
+ * const aliceKey = alice.generateKeys();
+ *
+ * // Generate Bob's keys...
+ * const bob = createDiffieHellman(alice.getPrime(), alice.getGenerator());
+ * const bobKey = bob.generateKeys();
+ *
+ * // Exchange and generate the secret...
+ * const aliceSecret = alice.computeSecret(bobKey);
+ * const bobSecret = bob.computeSecret(aliceKey);
+ *
+ * // OK
+ * assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex'));
+ * ```
+ * @since v0.5.0
+ */
+ class DiffieHellman {
+ private constructor();
+ /**
+ * Generates private and public Diffie-Hellman key values unless they have been
+ * generated or computed already, and returns
+ * the public key in the specified `encoding`. This key should be
+ * transferred to the other party.
+ * If `encoding` is provided a string is returned; otherwise a `Buffer` is returned.
+ *
+ * This function is a thin wrapper around [`DH_generate_key()`](https://www.openssl.org/docs/man3.0/man3/DH_generate_key.html). In particular,
+ * once a private key has been generated or set, calling this function only updates
+ * the public key but does not generate a new private key.
+ * @since v0.5.0
+ * @param encoding The `encoding` of the return value.
+ */
+ generateKeys(): Buffer;
+ generateKeys(encoding: BinaryToTextEncoding): string;
+ /**
+ * Computes the shared secret using `otherPublicKey` as the other
+ * party's public key and returns the computed shared secret. The supplied
+ * key is interpreted using the specified `inputEncoding`, and secret is
+ * encoded using specified `outputEncoding`.
+ * If the `inputEncoding` is not
+ * provided, `otherPublicKey` is expected to be a `Buffer`, `TypedArray`, or `DataView`.
+ *
+ * If `outputEncoding` is given a string is returned; otherwise, a `Buffer` is returned.
+ * @since v0.5.0
+ * @param inputEncoding The `encoding` of an `otherPublicKey` string.
+ * @param outputEncoding The `encoding` of the return value.
+ */
+ computeSecret(otherPublicKey: NodeJS.ArrayBufferView, inputEncoding?: null, outputEncoding?: null): Buffer;
+ computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding, outputEncoding?: null): Buffer;
+ computeSecret(
+ otherPublicKey: NodeJS.ArrayBufferView,
+ inputEncoding: null,
+ outputEncoding: BinaryToTextEncoding,
+ ): string;
+ computeSecret(
+ otherPublicKey: string,
+ inputEncoding: BinaryToTextEncoding,
+ outputEncoding: BinaryToTextEncoding,
+ ): string;
+ /**
+ * Returns the Diffie-Hellman prime in the specified `encoding`.
+ * If `encoding` is provided a string is
+ * returned; otherwise a `Buffer` is returned.
+ * @since v0.5.0
+ * @param encoding The `encoding` of the return value.
+ */
+ getPrime(): Buffer;
+ getPrime(encoding: BinaryToTextEncoding): string;
+ /**
+ * Returns the Diffie-Hellman generator in the specified `encoding`.
+ * If `encoding` is provided a string is
+ * returned; otherwise a `Buffer` is returned.
+ * @since v0.5.0
+ * @param encoding The `encoding` of the return value.
+ */
+ getGenerator(): Buffer;
+ getGenerator(encoding: BinaryToTextEncoding): string;
+ /**
+ * Returns the Diffie-Hellman public key in the specified `encoding`.
+ * If `encoding` is provided a
+ * string is returned; otherwise a `Buffer` is returned.
+ * @since v0.5.0
+ * @param encoding The `encoding` of the return value.
+ */
+ getPublicKey(): Buffer;
+ getPublicKey(encoding: BinaryToTextEncoding): string;
+ /**
+ * Returns the Diffie-Hellman private key in the specified `encoding`.
+ * If `encoding` is provided a
+ * string is returned; otherwise a `Buffer` is returned.
+ * @since v0.5.0
+ * @param encoding The `encoding` of the return value.
+ */
+ getPrivateKey(): Buffer;
+ getPrivateKey(encoding: BinaryToTextEncoding): string;
+ /**
+ * Sets the Diffie-Hellman public key. If the `encoding` argument is provided, `publicKey` is expected
+ * to be a string. If no `encoding` is provided, `publicKey` is expected
+ * to be a `Buffer`, `TypedArray`, or `DataView`.
+ * @since v0.5.0
+ * @param encoding The `encoding` of the `publicKey` string.
+ */
+ setPublicKey(publicKey: NodeJS.ArrayBufferView): void;
+ setPublicKey(publicKey: string, encoding: BufferEncoding): void;
+ /**
+ * Sets the Diffie-Hellman private key. If the `encoding` argument is provided, `privateKey` is expected
+ * to be a string. If no `encoding` is provided, `privateKey` is expected
+ * to be a `Buffer`, `TypedArray`, or `DataView`.
+ *
+ * This function does not automatically compute the associated public key. Either `diffieHellman.setPublicKey()` or `diffieHellman.generateKeys()` can be
+ * used to manually provide the public key or to automatically derive it.
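+ *
+ * An illustrative sketch; `prime`, `generator`, and `savedPrivateKey` are
+ * assumed to have been exported during an earlier exchange:
+ *
+ * ```js
+ * const { createDiffieHellman } = await import('node:crypto');
+ *
+ * const dh = createDiffieHellman(prime, generator);
+ * dh.setPrivateKey(savedPrivateKey);
+ * dh.generateKeys(); // Derives and sets the matching public key.
+ * console.log(dh.getPublicKey('hex'));
+ * ```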
+ * @since v0.5.0
+ * @param encoding The `encoding` of the `privateKey` string.
+ */
+ setPrivateKey(privateKey: NodeJS.ArrayBufferView): void;
+ setPrivateKey(privateKey: string, encoding: BufferEncoding): void;
+ /**
+ * A bit field containing any warnings and/or errors resulting from a check
+ * performed during initialization of the `DiffieHellman` object.
+ *
+ * The following values are valid for this property (as defined in `node:constants` module):
+ *
+ * * `DH_CHECK_P_NOT_SAFE_PRIME`
+ * * `DH_CHECK_P_NOT_PRIME`
+ * * `DH_UNABLE_TO_CHECK_GENERATOR`
+ * * `DH_NOT_SUITABLE_GENERATOR`
+ * @since v0.11.12
+ */
+ verifyError: number;
+ }
+ /**
+ * The `DiffieHellmanGroup` class takes a well-known modp group as its argument.
+ * It works the same as `DiffieHellman`, except that it does not allow changing its keys after creation.
+ * In other words, it does not implement `setPublicKey()` or `setPrivateKey()` methods.
+ *
+ * ```js
+ * const { createDiffieHellmanGroup } = await import('node:crypto');
+ * const dh = createDiffieHellmanGroup('modp1');
+ * ```
+ * The name (e.g. `'modp1'`) is taken from [RFC 2412](https://www.rfc-editor.org/rfc/rfc2412.txt) (modp1 and 2) and [RFC 3526](https://www.rfc-editor.org/rfc/rfc3526.txt):
+ * ```bash
+ * $ perl -ne 'print "$1\n" if /"(modp\d+)"/' src/node_crypto_groups.h
+ * modp1 # 768 bits
+ * modp2 # 1024 bits
+ * modp5 # 1536 bits
+ * modp14 # 2048 bits
+ * modp15 # etc.
+ * modp16
+ * modp17
+ * modp18
+ * ```
+ * @since v0.7.5
+ */
+ const DiffieHellmanGroup: DiffieHellmanGroupConstructor;
+ interface DiffieHellmanGroupConstructor {
+ new(name: string): DiffieHellmanGroup;
+ (name: string): DiffieHellmanGroup;
+ readonly prototype: DiffieHellmanGroup;
+ }
+ type DiffieHellmanGroup = Omit<DiffieHellman, "setPublicKey" | "setPrivateKey">;
+ /**
+ * Creates a predefined `DiffieHellmanGroup` key exchange object. The
+ * supported groups are listed in the documentation for `DiffieHellmanGroup`.
+ *
+ * The returned object mimics the interface of objects created by {@link createDiffieHellman}, but will not allow changing
+ * the keys (with `diffieHellman.setPublicKey()`, for example). The
+ * advantage of using this method is that the parties do not have to
+ * generate nor exchange a group modulus beforehand, saving both processor
+ * and communication time.
+ *
+ * Example (obtaining a shared secret):
+ *
+ * ```js
+ * const {
+ * getDiffieHellman,
+ * } = await import('node:crypto');
+ * const alice = getDiffieHellman('modp14');
+ * const bob = getDiffieHellman('modp14');
+ *
+ * alice.generateKeys();
+ * bob.generateKeys();
+ *
+ * const aliceSecret = alice.computeSecret(bob.getPublicKey(), null, 'hex');
+ * const bobSecret = bob.computeSecret(alice.getPublicKey(), null, 'hex');
+ *
+ * // aliceSecret and bobSecret should be the same
+ * console.log(aliceSecret === bobSecret);
+ * ```
+ * @since v0.7.5
+ */
+ function getDiffieHellman(groupName: string): DiffieHellmanGroup;
+ /**
+ * An alias for {@link getDiffieHellman}
+ * @since v0.9.3
+ */
+ function createDiffieHellmanGroup(name: string): DiffieHellmanGroup;
+ /**
+ * Provides an asynchronous Password-Based Key Derivation Function 2 (PBKDF2)
+ * implementation. A selected HMAC digest algorithm specified by `digest` is
+ * applied to derive a key of the requested byte length (`keylen`) from the `password`, `salt` and `iterations`.
+ *
+ * The supplied `callback` function is called with two arguments: `err` and `derivedKey`. If an error occurs while deriving the key, `err` will be set;
+ * otherwise `err` will be `null`. By default, the successfully generated `derivedKey` will be passed to the callback as a `Buffer`. An error will be
+ * thrown if any of the input arguments specify invalid values or types.
+ *
+ * The `iterations` argument must be a number set as high as possible. The
+ * higher the number of iterations, the more secure the derived key will be,
+ * but the derivation will also take longer to complete.
+ *
+ * The `salt` should be as unique as possible. It is recommended that a salt is
+ * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details.
+ *
+ * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`.
+ *
+ * ```js
+ * const {
+ * pbkdf2,
+ * } = await import('node:crypto');
+ *
+ * pbkdf2('secret', 'salt', 100000, 64, 'sha512', (err, derivedKey) => {
+ * if (err) throw err;
+ * console.log(derivedKey.toString('hex')); // '3745e48...08d59ae'
+ * });
+ * ```
+ *
+ * An array of supported digest functions can be retrieved using {@link getHashes}.
+ *
+ * This API uses libuv's threadpool, which can have surprising and
+ * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information.
+ * @since v0.5.5
+ */
+ function pbkdf2(
+ password: BinaryLike,
+ salt: BinaryLike,
+ iterations: number,
+ keylen: number,
+ digest: string,
+ callback: (err: Error | null, derivedKey: Buffer) => void,
+ ): void;
+ /**
+ * Provides a synchronous Password-Based Key Derivation Function 2 (PBKDF2)
+ * implementation. A selected HMAC digest algorithm specified by `digest` is
+ * applied to derive a key of the requested byte length (`keylen`) from the `password`, `salt` and `iterations`.
+ *
+ * If an error occurs an `Error` will be thrown, otherwise the derived key will be
+ * returned as a `Buffer`.
+ *
+ * The `iterations` argument must be a number set as high as possible. The
+ * higher the number of iterations, the more secure the derived key will be,
+ * but the derivation will also take longer to complete.
+ *
+ * The `salt` should be as unique as possible. It is recommended that a salt is
+ * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details.
+ *
+ * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`.
+ *
+ * ```js
+ * const {
+ * pbkdf2Sync,
+ * } = await import('node:crypto');
+ *
+ * const key = pbkdf2Sync('secret', 'salt', 100000, 64, 'sha512');
+ * console.log(key.toString('hex')); // '3745e48...08d59ae'
+ * ```
+ *
+ * An array of supported digest functions can be retrieved using {@link getHashes}.
+ * @since v0.9.3
+ */
+ function pbkdf2Sync(
+ password: BinaryLike,
+ salt: BinaryLike,
+ iterations: number,
+ keylen: number,
+ digest: string,
+ ): Buffer;
+ /**
+ * Generates cryptographically strong pseudorandom data. The `size` argument
+ * is a number indicating the number of bytes to generate.
+ *
+ * If a `callback` function is provided, the bytes are generated asynchronously
+ * and the `callback` function is invoked with two arguments: `err` and `buf`.
+ * If an error occurs, `err` will be an `Error` object; otherwise it is `null`. The `buf` argument is a `Buffer` containing the generated bytes.
+ *
+ * ```js
+ * // Asynchronous
+ * const {
+ * randomBytes,
+ * } = await import('node:crypto');
+ *
+ * randomBytes(256, (err, buf) => {
+ * if (err) throw err;
+ * console.log(`${buf.length} bytes of random data: ${buf.toString('hex')}`);
+ * });
+ * ```
+ *
+ * If the `callback` function is not provided, the random bytes are generated
+ * synchronously and returned as a `Buffer`. An error will be thrown if
+ * there is a problem generating the bytes.
+ *
+ * ```js
+ * // Synchronous
+ * const {
+ * randomBytes,
+ * } = await import('node:crypto');
+ *
+ * const buf = randomBytes(256);
+ * console.log(
+ * `${buf.length} bytes of random data: ${buf.toString('hex')}`);
+ * ```
+ *
+ * The `crypto.randomBytes()` method will not complete until there is
+ * sufficient entropy available.
+ * This should normally never take longer than a few milliseconds. The only time
+ * when generating the random bytes may conceivably block for a longer period of
+ * time is right after boot, when the whole system is still low on entropy.
+ *
+ * This API uses libuv's threadpool, which can have surprising and
+ * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information.
+ *
+ * The asynchronous version of `crypto.randomBytes()` is carried out in a single
+ * threadpool request. To minimize threadpool task length variation, partition
+ * large `randomBytes` requests when doing so as part of fulfilling a client
+ * request.
+ * @since v0.5.8
+ * @param size The number of bytes to generate. The `size` must not be larger than `2**31 - 1`.
+ * @return if the `callback` function is not provided.
+ */
+ function randomBytes(size: number): Buffer;
+ function randomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void;
+ function pseudoRandomBytes(size: number): Buffer;
+ function pseudoRandomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void;
+ /**
+ * Return a random integer `n` such that `min <= n < max`. This
+ * implementation avoids [modulo bias](https://en.wikipedia.org/wiki/Fisher%E2%80%93Yates_shuffle#Modulo_bias).
+ *
+ * The range (`max - min`) must be less than 2**48. `min` and `max` must
+ * be [safe integers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isSafeInteger).
+ *
+ * If the `callback` function is not provided, the random integer is
+ * generated synchronously.
+ *
+ * ```js
+ * // Asynchronous
+ * const {
+ * randomInt,
+ * } = await import('node:crypto');
+ *
+ * randomInt(3, (err, n) => {
+ * if (err) throw err;
+ * console.log(`Random number chosen from (0, 1, 2): ${n}`);
+ * });
+ * ```
+ *
+ * ```js
+ * // Synchronous
+ * const {
+ * randomInt,
+ * } = await import('node:crypto');
+ *
+ * const n = randomInt(3);
+ * console.log(`Random number chosen from (0, 1, 2): ${n}`);
+ * ```
+ *
+ * ```js
+ * // With `min` argument
+ * const {
+ * randomInt,
+ * } = await import('node:crypto');
+ *
+ * const n = randomInt(1, 7);
+ * console.log(`The dice rolled: ${n}`);
+ * ```
+ * @since v14.10.0, v12.19.0
+ * @param [min=0] Start of random range (inclusive).
+ * @param max End of random range (exclusive).
+ * @param callback `function(err, n) {}`.
+ */
+ function randomInt(max: number): number;
+ function randomInt(min: number, max: number): number;
+ function randomInt(max: number, callback: (err: Error | null, value: number) => void): void;
+ function randomInt(min: number, max: number, callback: (err: Error | null, value: number) => void): void;
+ /**
+ * Synchronous version of {@link randomFill}.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { randomFillSync } = await import('node:crypto');
+ *
+ * const buf = Buffer.alloc(10);
+ * console.log(randomFillSync(buf).toString('hex'));
+ *
+ * randomFillSync(buf, 5);
+ * console.log(buf.toString('hex'));
+ *
+ * // The above is equivalent to the following:
+ * randomFillSync(buf, 5, 5);
+ * console.log(buf.toString('hex'));
+ * ```
+ *
+ * Any `ArrayBuffer`, `TypedArray` or `DataView` instance may be passed as `buffer`.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { randomFillSync } = await import('node:crypto');
+ *
+ * const a = new Uint32Array(10);
+ * console.log(Buffer.from(randomFillSync(a).buffer,
+ * a.byteOffset, a.byteLength).toString('hex'));
+ *
+ * const b = new DataView(new ArrayBuffer(10));
+ * console.log(Buffer.from(randomFillSync(b).buffer,
+ * b.byteOffset, b.byteLength).toString('hex'));
+ *
+ * const c = new ArrayBuffer(10);
+ * console.log(Buffer.from(randomFillSync(c)).toString('hex'));
+ * ```
+ * @since v7.10.0, v6.13.0
+ * @param buffer Must be supplied. The size of the provided `buffer` must not be larger than `2**31 - 1`.
+ * @param [offset=0]
+ * @param [size=buffer.length - offset]
+ * @return The object passed as `buffer` argument.
+ */
+ function randomFillSync<T extends NodeJS.ArrayBufferView>(buffer: T, offset?: number, size?: number): T;
+ /**
+ * This function is similar to {@link randomBytes} but requires the first
+ * argument to be a `Buffer` that will be filled. It also
+ * requires that a callback is passed in.
+ *
+ * If the `callback` function is not provided, an error will be thrown.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { randomFill } = await import('node:crypto');
+ *
+ * const buf = Buffer.alloc(10);
+ * randomFill(buf, (err, buf) => {
+ * if (err) throw err;
+ * console.log(buf.toString('hex'));
+ * });
+ *
+ * randomFill(buf, 5, (err, buf) => {
+ * if (err) throw err;
+ * console.log(buf.toString('hex'));
+ * });
+ *
+ * // The above is equivalent to the following:
+ * randomFill(buf, 5, 5, (err, buf) => {
+ * if (err) throw err;
+ * console.log(buf.toString('hex'));
+ * });
+ * ```
+ *
+ * Any `ArrayBuffer`, `TypedArray`, or `DataView` instance may be passed as `buffer`.
+ *
+ * While this includes instances of `Float32Array` and `Float64Array`, this
+ * function should not be used to generate random floating-point numbers. The
+ * result may contain `+Infinity`, `-Infinity`, and `NaN`, and even if the array
+ * contains finite numbers only, they are not drawn from a uniform random
+ * distribution and have no meaningful lower or upper bounds.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { randomFill } = await import('node:crypto');
+ *
+ * const a = new Uint32Array(10);
+ * randomFill(a, (err, buf) => {
+ * if (err) throw err;
+ * console.log(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength)
+ * .toString('hex'));
+ * });
+ *
+ * const b = new DataView(new ArrayBuffer(10));
+ * randomFill(b, (err, buf) => {
+ * if (err) throw err;
+ * console.log(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength)
+ * .toString('hex'));
+ * });
+ *
+ * const c = new ArrayBuffer(10);
+ * randomFill(c, (err, buf) => {
+ * if (err) throw err;
+ * console.log(Buffer.from(buf).toString('hex'));
+ * });
+ * ```
+ *
+ * This API uses libuv's threadpool, which can have surprising and
+ * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information.
+ *
+ * The asynchronous version of `crypto.randomFill()` is carried out in a single
+ * threadpool request. To minimize threadpool task length variation, partition
+ * large `randomFill` requests when doing so as part of fulfilling a client
+ * request.
+ * @since v7.10.0, v6.13.0
+ * @param buffer Must be supplied. The size of the provided `buffer` must not be larger than `2**31 - 1`.
+ * @param [offset=0]
+ * @param [size=buffer.length - offset]
+ * @param callback `function(err, buf) {}`.
+ */
+ function randomFill<T extends NodeJS.ArrayBufferView>(
+ buffer: T,
+ callback: (err: Error | null, buf: T) => void,
+ ): void;
+ function randomFill<T extends NodeJS.ArrayBufferView>(
+ buffer: T,
+ offset: number,
+ callback: (err: Error | null, buf: T) => void,
+ ): void;
+ function randomFill<T extends NodeJS.ArrayBufferView>(
+ buffer: T,
+ offset: number,
+ size: number,
+ callback: (err: Error | null, buf: T) => void,
+ ): void;
+ interface ScryptOptions {
+ cost?: number | undefined;
+ blockSize?: number | undefined;
+ parallelization?: number | undefined;
+ N?: number | undefined;
+ r?: number | undefined;
+ p?: number | undefined;
+ maxmem?: number | undefined;
+ }
+ /**
+ * Provides an asynchronous [scrypt](https://en.wikipedia.org/wiki/Scrypt) implementation. Scrypt is a password-based
+ * key derivation function that is designed to be expensive computationally and
+ * memory-wise in order to make brute-force attacks unrewarding.
+ *
+ * The `salt` should be as unique as possible. It is recommended that a salt is
+ * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details.
+ *
+ * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`.
+ *
+ * The `callback` function is called with two arguments: `err` and `derivedKey`. `err` is an exception object when key derivation fails, otherwise `err` is `null`. `derivedKey` is passed to the
+ * callback as a `Buffer`.
+ *
+ * An exception is thrown when any of the input arguments specify invalid values
+ * or types.
+ *
+ * ```js
+ * const {
+ * scrypt,
+ * } = await import('node:crypto');
+ *
+ * // Using the factory defaults.
+ * scrypt('password', 'salt', 64, (err, derivedKey) => {
+ * if (err) throw err;
+ * console.log(derivedKey.toString('hex')); // '3745e48...08d59ae'
+ * });
+ * // Using a custom N parameter. Must be a power of two.
+ * scrypt('password', 'salt', 64, { N: 1024 }, (err, derivedKey) => {
+ * if (err) throw err;
+ * console.log(derivedKey.toString('hex')); // '3745e48...aa39b34'
+ * });
+ * ```
+ * @since v10.5.0
+ */
+ function scrypt(
+ password: BinaryLike,
+ salt: BinaryLike,
+ keylen: number,
+ callback: (err: Error | null, derivedKey: Buffer) => void,
+ ): void;
+ function scrypt(
+ password: BinaryLike,
+ salt: BinaryLike,
+ keylen: number,
+ options: ScryptOptions,
+ callback: (err: Error | null, derivedKey: Buffer) => void,
+ ): void;
+ /**
+ * Provides a synchronous [scrypt](https://en.wikipedia.org/wiki/Scrypt) implementation. Scrypt is a password-based
+ * key derivation function that is designed to be expensive computationally and
+ * memory-wise in order to make brute-force attacks unrewarding.
+ *
+ * The `salt` should be as unique as possible. It is recommended that a salt is
+ * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details.
+ *
+ * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`.
+ *
+ * An exception is thrown when key derivation fails, otherwise the derived key is
+ * returned as a `Buffer`.
+ *
+ * An exception is thrown when any of the input arguments specify invalid values
+ * or types.
+ *
+ * ```js
+ * const {
+ * scryptSync,
+ * } = await import('node:crypto');
+ * // Using the factory defaults.
+ *
+ * const key1 = scryptSync('password', 'salt', 64);
+ * console.log(key1.toString('hex')); // '3745e48...08d59ae'
+ * // Using a custom N parameter. Must be a power of two.
+ * const key2 = scryptSync('password', 'salt', 64, { N: 1024 });
+ * console.log(key2.toString('hex')); // '3745e48...aa39b34'
+ * ```
+ * @since v10.5.0
+ */
+ function scryptSync(password: BinaryLike, salt: BinaryLike, keylen: number, options?: ScryptOptions): Buffer;
+ interface RsaPublicKey {
+ key: KeyLike;
+ padding?: number | undefined;
+ }
+ interface RsaPrivateKey {
+ key: KeyLike;
+ passphrase?: string | undefined;
+ /**
+ * @default 'sha1'
+ */
+ oaepHash?: string | undefined;
+ oaepLabel?: NodeJS.TypedArray | undefined;
+ padding?: number | undefined;
+ }
+ /**
+ * Encrypts the content of `buffer` with `key` and returns a new `Buffer` with encrypted content. The returned data can be decrypted using
+ * the corresponding private key, for example using {@link privateDecrypt}.
+ *
+ * If `key` is not a `KeyObject`, this function behaves as if `key` had been passed to {@link createPublicKey}. If it is an
+ * object, the `padding` property can be passed. Otherwise, this function uses `RSA_PKCS1_OAEP_PADDING`.
+ *
+ * Because RSA public keys can be derived from private keys, a private key may
+ * be passed instead of a public key.
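+ *
+ * An illustrative round-trip sketch together with {@link privateDecrypt}:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { generateKeyPairSync, publicEncrypt, privateDecrypt } = await import('node:crypto');
+ *
+ * const { publicKey, privateKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });
+ *
+ * const encrypted = publicEncrypt(publicKey, Buffer.from('secret message'));
+ * const decrypted = privateDecrypt(privateKey, encrypted);
+ * console.log(decrypted.toString()); // Prints: secret message
+ * ```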
+ * @since v0.11.14
+ */
+ function publicEncrypt(key: RsaPublicKey | RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer;
+ /**
+ * Decrypts `buffer` with `key`. `buffer` was previously encrypted using
+ * the corresponding private key, for example using {@link privateEncrypt}.
+ *
+ * If `key` is not a `KeyObject`, this function behaves as if `key` had been passed to {@link createPublicKey}. If it is an
+ * object, the `padding` property can be passed. Otherwise, this function uses `RSA_PKCS1_PADDING`.
+ *
+ * Because RSA public keys can be derived from private keys, a private key may
+ * be passed instead of a public key.
+ * @since v1.1.0
+ */
+ function publicDecrypt(key: RsaPublicKey | RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer;
+ /**
+ * Decrypts `buffer` with `privateKey`. `buffer` was previously encrypted using
+ * the corresponding public key, for example using {@link publicEncrypt}.
+ *
+ * If `privateKey` is not a `KeyObject`, this function behaves as if `privateKey` had been passed to {@link createPrivateKey}. If it is an
+ * object, the `padding` property can be passed. Otherwise, this function uses `RSA_PKCS1_OAEP_PADDING`.
+ * @since v0.11.14
+ */
+ function privateDecrypt(privateKey: RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer;
+ /**
+ * Encrypts `buffer` with `privateKey`. The returned data can be decrypted using
+ * the corresponding public key, for example using {@link publicDecrypt}.
+ *
+ * If `privateKey` is not a `KeyObject`, this function behaves as if `privateKey` had been passed to {@link createPrivateKey}. If it is an
+ * object, the `padding` property can be passed. Otherwise, this function uses `RSA_PKCS1_PADDING`.
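+ *
+ * An illustrative sketch together with {@link publicDecrypt}:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { generateKeyPairSync, privateEncrypt, publicDecrypt } = await import('node:crypto');
+ *
+ * const { publicKey, privateKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });
+ *
+ * // Only the holder of the private key could have produced `encrypted`.
+ * const encrypted = privateEncrypt(privateKey, Buffer.from('hello'));
+ * console.log(publicDecrypt(publicKey, encrypted).toString()); // Prints: hello
+ * ```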
+ * @since v1.1.0
+ */
+ function privateEncrypt(privateKey: RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer;
+ /**
+ * ```js
+ * const {
+ * getCiphers,
+ * } = await import('node:crypto');
+ *
+ * console.log(getCiphers()); // ['aes-128-cbc', 'aes-128-ccm', ...]
+ * ```
+ * @since v0.9.3
+ * @return An array with the names of the supported cipher algorithms.
+ */
+ function getCiphers(): string[];
+ /**
+ * ```js
+ * const {
+ * getCurves,
+ * } = await import('node:crypto');
+ *
+ * console.log(getCurves()); // ['Oakley-EC2N-3', 'Oakley-EC2N-4', ...]
+ * ```
+ * @since v2.3.0
+ * @return An array with the names of the supported elliptic curves.
+ */
+ function getCurves(): string[];
+ /**
+ * @since v10.0.0
+ * @return `1` if and only if a FIPS compliant crypto provider is currently in use, `0` otherwise. A future semver-major release may change the return type of this API to a {boolean}.
+ */
+ function getFips(): 1 | 0;
+ /**
+ * Enables the FIPS compliant crypto provider in a FIPS-enabled Node.js build.
+ * Throws an error if FIPS mode is not available.
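+ *
+ * A defensive illustrative sketch:
+ *
+ * ```js
+ * const { setFips, getFips } = await import('node:crypto');
+ *
+ * try {
+ * setFips(true);
+ * console.log(getFips()); // 1
+ * } catch (err) {
+ * // This build of Node.js was not compiled with FIPS support.
+ * console.error(err.message);
+ * }
+ * ```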
+ * @since v10.0.0
+ * @param bool `true` to enable FIPS mode.
+ */
+ function setFips(bool: boolean): void;
+ /**
+ * ```js
+ * const {
+ * getHashes,
+ * } = await import('node:crypto');
+ *
+ * console.log(getHashes()); // ['DSA', 'DSA-SHA', 'DSA-SHA1', ...]
+ * ```
+ * @since v0.9.3
+ * @return An array of the names of the supported hash algorithms, such as `'RSA-SHA256'`. Hash algorithms are also called "digest" algorithms.
+ */
+ function getHashes(): string[];
+ /**
+ * The `ECDH` class is a utility for creating Elliptic Curve Diffie-Hellman (ECDH)
+ * key exchanges.
+ *
+ * Instances of the `ECDH` class can be created using the {@link createECDH} function.
+ *
+ * ```js
+ * import assert from 'node:assert';
+ *
+ * const {
+ * createECDH,
+ * } = await import('node:crypto');
+ *
+ * // Generate Alice's keys...
+ * const alice = createECDH('secp521r1');
+ * const aliceKey = alice.generateKeys();
+ *
+ * // Generate Bob's keys...
+ * const bob = createECDH('secp521r1');
+ * const bobKey = bob.generateKeys();
+ *
+ * // Exchange and generate the secret...
+ * const aliceSecret = alice.computeSecret(bobKey);
+ * const bobSecret = bob.computeSecret(aliceKey);
+ *
+ * assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex'));
+ * // OK
+ * ```
+ * @since v0.11.14
+ */
+ class ECDH {
+ private constructor();
+ /**
+ * Converts the EC Diffie-Hellman public key specified by `key` and `curve` to the
+ * format specified by `format`. The `format` argument specifies point encoding
+ * and can be `'compressed'`, `'uncompressed'` or `'hybrid'`. The supplied key is
+ * interpreted using the specified `inputEncoding`, and the returned key is encoded
+ * using the specified `outputEncoding`.
+ *
+ * Use {@link getCurves} to obtain a list of available curve names.
+ * On recent OpenSSL releases, `openssl ecparam -list_curves` will also display
+ * the name and description of each available elliptic curve.
+ *
+ * If `format` is not specified, the point will be returned in `'uncompressed'` format.
+ *
+ * If the `inputEncoding` is not provided, `key` is expected to be a `Buffer`, `TypedArray`, or `DataView`.
+ *
+ * Example (uncompressing a key):
+ *
+ * ```js
+ * const {
+ * createECDH,
+ * ECDH,
+ * } = await import('node:crypto');
+ *
+ * const ecdh = createECDH('secp256k1');
+ * ecdh.generateKeys();
+ *
+ * const compressedKey = ecdh.getPublicKey('hex', 'compressed');
+ *
+ * const uncompressedKey = ECDH.convertKey(compressedKey,
+ * 'secp256k1',
+ * 'hex',
+ * 'hex',
+ * 'uncompressed');
+ *
+ * // The converted key and the uncompressed public key should be the same
+ * console.log(uncompressedKey === ecdh.getPublicKey('hex'));
+ * ```
+ * @since v10.0.0
+ * @param inputEncoding The `encoding` of the `key` string.
+ * @param outputEncoding The `encoding` of the return value.
+ * @param [format='uncompressed']
+ */
+ static convertKey(
+ key: BinaryLike,
+ curve: string,
+ inputEncoding?: BinaryToTextEncoding,
+ outputEncoding?: "latin1" | "hex" | "base64" | "base64url",
+ format?: "uncompressed" | "compressed" | "hybrid",
+ ): Buffer | string;
+ /**
+ * Generates private and public EC Diffie-Hellman key values, and returns
+ * the public key in the specified `format` and `encoding`. This key should be
+ * transferred to the other party.
+ *
+ * The `format` argument specifies point encoding and can be `'compressed'` or `'uncompressed'`. If `format` is not specified, the point will be returned in `'uncompressed'` format.
+ *
+ * If `encoding` is provided a string is returned; otherwise a `Buffer` is returned.
+ * @since v0.11.14
+ * @param encoding The `encoding` of the return value.
+ * @param [format='uncompressed']
+ */
+ generateKeys(): Buffer;
+ generateKeys(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string;
+ /**
+ * Computes the shared secret using `otherPublicKey` as the other
+ * party's public key and returns the computed shared secret. The supplied
+ * key is interpreted using specified `inputEncoding`, and the returned secret
+ * is encoded using the specified `outputEncoding`.
+ * If the `inputEncoding` is not
+ * provided, `otherPublicKey` is expected to be a `Buffer`, `TypedArray`, or `DataView`.
+ *
+ * If `outputEncoding` is given a string will be returned; otherwise a `Buffer` is returned.
+ *
+ * `ecdh.computeSecret` will throw an `ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY` error when `otherPublicKey` lies outside of the elliptic curve. Since `otherPublicKey` is
+ * usually supplied from a remote user over an insecure network,
+ * be sure to handle this exception accordingly.
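+ *
+ * An illustrative sketch; `peerPublicKey` is assumed to have been received
+ * from a remote, untrusted party:
+ *
+ * ```js
+ * const { createECDH } = await import('node:crypto');
+ *
+ * const ecdh = createECDH('secp256k1');
+ * ecdh.generateKeys();
+ *
+ * let sharedSecret;
+ * try {
+ * sharedSecret = ecdh.computeSecret(peerPublicKey);
+ * } catch (err) {
+ * // For example ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY.
+ * console.error('Rejected invalid public key:', err.code);
+ * }
+ * ```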
+ * @since v0.11.14
+ * @param inputEncoding The `encoding` of the `otherPublicKey` string.
+ * @param outputEncoding The `encoding` of the return value.
+ */
+ computeSecret(otherPublicKey: NodeJS.ArrayBufferView): Buffer;
+ computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding): Buffer;
+ computeSecret(otherPublicKey: NodeJS.ArrayBufferView, outputEncoding: BinaryToTextEncoding): string;
+ computeSecret(
+ otherPublicKey: string,
+ inputEncoding: BinaryToTextEncoding,
+ outputEncoding: BinaryToTextEncoding,
+ ): string;
+ /**
+ * If `encoding` is specified, a string is returned; otherwise a `Buffer` is
+ * returned.
+ * @since v0.11.14
+ * @param encoding The `encoding` of the return value.
+ * @return The EC Diffie-Hellman private key in the specified `encoding`.
+ */
+ getPrivateKey(): Buffer;
+ getPrivateKey(encoding: BinaryToTextEncoding): string;
+ /**
+ * The `format` argument specifies point encoding and can be `'compressed'` or `'uncompressed'`. If `format` is not specified, the point will be returned in `'uncompressed'` format.
+ *
+ * If `encoding` is specified, a string is returned; otherwise a `Buffer` is
+ * returned.
+ * @since v0.11.14
+ * @param encoding The `encoding` of the return value.
+ * @param [format='uncompressed']
+ * @return The EC Diffie-Hellman public key in the specified `encoding` and `format`.
+ */
+ getPublicKey(encoding?: null, format?: ECDHKeyFormat): Buffer;
+ getPublicKey(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string;
+ /**
+ * Sets the EC Diffie-Hellman private key.
+ * If `encoding` is provided, `privateKey` is expected
+ * to be a string; otherwise `privateKey` is expected to be a `Buffer`, `TypedArray`, or `DataView`.
+ *
+ * If `privateKey` is not valid for the curve specified when the `ECDH` object was
+ * created, an error is thrown. Upon setting the private key, the associated
+ * public point (key) is also generated and set in the `ECDH` object.
+ * @since v0.11.14
+ * @param encoding The `encoding` of the `privateKey` string.
+ */
+ setPrivateKey(privateKey: NodeJS.ArrayBufferView): void;
+ setPrivateKey(privateKey: string, encoding: BinaryToTextEncoding): void;
+ }
+ /**
+ * Creates an Elliptic Curve Diffie-Hellman (`ECDH`) key exchange object using a
+ * predefined curve specified by the `curveName` string. Use {@link getCurves} to obtain a list of available curve names. On recent
+ * OpenSSL releases, `openssl ecparam -list_curves` will also display the name
+ * and description of each available elliptic curve.
+ * @since v0.11.14
+ */
+ function createECDH(curveName: string): ECDH;
+ /**
+ * This function compares the underlying bytes that represent the given `ArrayBuffer`, `TypedArray`, or `DataView` instances using a constant-time
+ * algorithm.
+ *
+ * This function does not leak timing information that
+ * would allow an attacker to guess one of the values. This is suitable for
+ * comparing HMAC digests or secret values like authentication cookies or [capability urls](https://www.w3.org/TR/capability-urls/).
+ *
+ * `a` and `b` must both be `Buffer`s, `TypedArray`s, or `DataView`s, and they
+ * must have the same byte length. An error is thrown if `a` and `b` have
+ * different byte lengths.
+ *
+ * If at least one of `a` and `b` is a `TypedArray` with more than one byte per
+ * entry, such as `Uint16Array`, the result will be computed using the platform
+ * byte order.
+ *
+ * **When both of the inputs are `Float32Array`s or `Float64Array`s, this function might return unexpected results due to IEEE 754**
+ * **encoding of floating-point numbers. In particular, neither `x === y` nor `Object.is(x, y)` implies that the byte representations of two floating-point**
+ * **numbers `x` and `y` are equal.**
+ *
+ * Use of `crypto.timingSafeEqual` does not guarantee that the _surrounding_ code
+ * is timing-safe. Care should be taken to ensure that the surrounding code does
+ * not introduce timing vulnerabilities.
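+ *
+ * An illustrative sketch comparing two HMAC digests of equal length:
+ *
+ * ```js
+ * const { createHmac, randomBytes, timingSafeEqual } = await import('node:crypto');
+ *
+ * const key = randomBytes(32);
+ * const a = createHmac('sha256', key).update('message').digest();
+ * const b = createHmac('sha256', key).update('message').digest();
+ *
+ * console.log(timingSafeEqual(a, b)); // Prints: true
+ * ```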
+ * @since v6.6.0
+ */
+ function timingSafeEqual(a: NodeJS.ArrayBufferView, b: NodeJS.ArrayBufferView): boolean;
+ type KeyType = "rsa" | "rsa-pss" | "dsa" | "ec" | "ed25519" | "ed448" | "x25519" | "x448";
+ type KeyFormat = "pem" | "der" | "jwk";
+ interface BasePrivateKeyEncodingOptions<T extends KeyFormat> {
+ format: T;
+ cipher?: string | undefined;
+ passphrase?: string | undefined;
+ }
+ interface KeyPairKeyObjectResult {
+ publicKey: KeyObject;
+ privateKey: KeyObject;
+ }
+ interface ED25519KeyPairKeyObjectOptions {}
+ interface ED448KeyPairKeyObjectOptions {}
+ interface X25519KeyPairKeyObjectOptions {}
+ interface X448KeyPairKeyObjectOptions {}
+ interface ECKeyPairKeyObjectOptions {
+ /**
+ * Name of the curve to use
+ */
+ namedCurve: string;
+ /**
+ * Must be `'named'` or `'explicit'`. Default: `'named'`.
+ */
+ paramEncoding?: "explicit" | "named" | undefined;
+ }
+ interface RSAKeyPairKeyObjectOptions {
+ /**
+ * Key size in bits
+ */
+ modulusLength: number;
+ /**
+ * Public exponent
+ * @default 0x10001
+ */
+ publicExponent?: number | undefined;
+ }
+ interface RSAPSSKeyPairKeyObjectOptions {
+ /**
+ * Key size in bits
+ */
+ modulusLength: number;
+ /**
+ * Public exponent
+ * @default 0x10001
+ */
+ publicExponent?: number | undefined;
+ /**
+ * Name of the message digest
+ */
+ hashAlgorithm?: string;
+ /**
+ * Name of the message digest used by MGF1
+ */
+ mgf1HashAlgorithm?: string;
+ /**
+ * Minimal salt length in bytes
+ */
+ saltLength?: number;
+ }
+ interface DSAKeyPairKeyObjectOptions {
+ /**
+ * Key size in bits
+ */
+ modulusLength: number;
+ /**
+ * Size of q in bits
+ */
+ divisorLength: number;
+ }
+ interface RSAKeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+ /**
+ * Key size in bits
+ */
+ modulusLength: number;
+ /**
+ * Public exponent
+ * @default 0x10001
+ */
+ publicExponent?: number | undefined;
+ publicKeyEncoding: {
+ type: "pkcs1" | "spki";
+ format: PubF;
+ };
+ privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+ type: "pkcs1" | "pkcs8";
+ };
+ }
+ interface RSAPSSKeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+ /**
+ * Key size in bits
+ */
+ modulusLength: number;
+ /**
+ * Public exponent
+ * @default 0x10001
+ */
+ publicExponent?: number | undefined;
+ /**
+ * Name of the message digest
+ */
+ hashAlgorithm?: string;
+ /**
+ * Name of the message digest used by MGF1
+ */
+ mgf1HashAlgorithm?: string;
+ /**
+ * Minimal salt length in bytes
+ */
+ saltLength?: number;
+ publicKeyEncoding: {
+ type: "spki";
+ format: PubF;
+ };
+ privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+ type: "pkcs8";
+ };
+ }
+ interface DSAKeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+ /**
+ * Key size in bits
+ */
+ modulusLength: number;
+ /**
+ * Size of q in bits
+ */
+ divisorLength: number;
+ publicKeyEncoding: {
+ type: "spki";
+ format: PubF;
+ };
+ privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+ type: "pkcs8";
+ };
+ }
+ interface ECKeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> extends ECKeyPairKeyObjectOptions {
+ publicKeyEncoding: {
+ type: "pkcs1" | "spki";
+ format: PubF;
+ };
+ privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+ type: "sec1" | "pkcs8";
+ };
+ }
+ interface ED25519KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+ publicKeyEncoding: {
+ type: "spki";
+ format: PubF;
+ };
+ privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+ type: "pkcs8";
+ };
+ }
+ interface ED448KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+ publicKeyEncoding: {
+ type: "spki";
+ format: PubF;
+ };
+ privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+ type: "pkcs8";
+ };
+ }
+ interface X25519KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+ publicKeyEncoding: {
+ type: "spki";
+ format: PubF;
+ };
+ privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+ type: "pkcs8";
+ };
+ }
+ interface X448KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+ publicKeyEncoding: {
+ type: "spki";
+ format: PubF;
+ };
+ privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+ type: "pkcs8";
+ };
+ }
+ interface KeyPairSyncResult<T1 extends string | Buffer, T2 extends string | Buffer> {
+ publicKey: T1;
+ privateKey: T2;
+ }
+ /**
+ * Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC,
+ * Ed25519, Ed448, X25519, X448, and DH are currently supported.
+ *
+ * If a `publicKeyEncoding` or `privateKeyEncoding` was specified, this function
+ * behaves as if `keyObject.export()` had been called on its result. Otherwise,
+ * the respective part of the key is returned as a `KeyObject`.
+ *
+ * When encoding public keys, it is recommended to use `'spki'`. When encoding
+ * private keys, it is recommended to use `'pkcs8'` with a strong passphrase,
+ * and to keep the passphrase confidential.
+ *
+ * ```js
+ * const {
+ * generateKeyPairSync,
+ * } = await import('node:crypto');
+ *
+ * const {
+ * publicKey,
+ * privateKey,
+ * } = generateKeyPairSync('rsa', {
+ * modulusLength: 4096,
+ * publicKeyEncoding: {
+ * type: 'spki',
+ * format: 'pem',
+ * },
+ * privateKeyEncoding: {
+ * type: 'pkcs8',
+ * format: 'pem',
+ * cipher: 'aes-256-cbc',
+ * passphrase: 'top secret',
+ * },
+ * });
+ * ```
+ *
+ * The return value `{ publicKey, privateKey }` represents the generated key pair.
+ * When PEM encoding was selected, the respective key will be a string, otherwise
+ * it will be a buffer containing the data encoded as DER.
+ * @since v10.12.0
+ * @param type Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`.
+ */
+ function generateKeyPairSync(
+ type: "rsa",
+ options: RSAKeyPairOptions<"pem", "pem">,
+ ): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(
+ type: "rsa",
+ options: RSAKeyPairOptions<"pem", "der">,
+ ): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(
+ type: "rsa",
+ options: RSAKeyPairOptions<"der", "pem">,
+ ): KeyPairSyncResult<Buffer, string>;
+ function generateKeyPairSync(
+ type: "rsa",
+ options: RSAKeyPairOptions<"der", "der">,
+ ): KeyPairSyncResult<Buffer, Buffer>;
+ function generateKeyPairSync(type: "rsa", options: RSAKeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+ function generateKeyPairSync(
+ type: "rsa-pss",
+ options: RSAPSSKeyPairOptions<"pem", "pem">,
+ ): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(
+ type: "rsa-pss",
+ options: RSAPSSKeyPairOptions<"pem", "der">,
+ ): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(
+ type: "rsa-pss",
+ options: RSAPSSKeyPairOptions<"der", "pem">,
+ ): KeyPairSyncResult<Buffer, string>;
+ function generateKeyPairSync(
+ type: "rsa-pss",
+ options: RSAPSSKeyPairOptions<"der", "der">,
+ ): KeyPairSyncResult<Buffer, Buffer>;
+ function generateKeyPairSync(type: "rsa-pss", options: RSAPSSKeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+ function generateKeyPairSync(
+ type: "dsa",
+ options: DSAKeyPairOptions<"pem", "pem">,
+ ): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(
+ type: "dsa",
+ options: DSAKeyPairOptions<"pem", "der">,
+ ): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(
+ type: "dsa",
+ options: DSAKeyPairOptions<"der", "pem">,
+ ): KeyPairSyncResult<Buffer, string>;
+ function generateKeyPairSync(
+ type: "dsa",
+ options: DSAKeyPairOptions<"der", "der">,
+ ): KeyPairSyncResult<Buffer, Buffer>;
+ function generateKeyPairSync(type: "dsa", options: DSAKeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+ function generateKeyPairSync(
+ type: "ec",
+ options: ECKeyPairOptions<"pem", "pem">,
+ ): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(
+ type: "ec",
+ options: ECKeyPairOptions<"pem", "der">,
+ ): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(
+ type: "ec",
+ options: ECKeyPairOptions<"der", "pem">,
+ ): KeyPairSyncResult<Buffer, string>;
+ function generateKeyPairSync(
+ type: "ec",
+ options: ECKeyPairOptions<"der", "der">,
+ ): KeyPairSyncResult<Buffer, Buffer>;
+ function generateKeyPairSync(type: "ec", options: ECKeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+ function generateKeyPairSync(
+ type: "ed25519",
+ options: ED25519KeyPairOptions<"pem", "pem">,
+ ): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(
+ type: "ed25519",
+ options: ED25519KeyPairOptions<"pem", "der">,
+ ): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(
+ type: "ed25519",
+ options: ED25519KeyPairOptions<"der", "pem">,
+ ): KeyPairSyncResult<Buffer, string>;
+ function generateKeyPairSync(
+ type: "ed25519",
+ options: ED25519KeyPairOptions<"der", "der">,
+ ): KeyPairSyncResult<Buffer, Buffer>;
+ function generateKeyPairSync(type: "ed25519", options?: ED25519KeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+ function generateKeyPairSync(
+ type: "ed448",
+ options: ED448KeyPairOptions<"pem", "pem">,
+ ): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(
+ type: "ed448",
+ options: ED448KeyPairOptions<"pem", "der">,
+ ): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(
+ type: "ed448",
+ options: ED448KeyPairOptions<"der", "pem">,
+ ): KeyPairSyncResult<Buffer, string>;
+ function generateKeyPairSync(
+ type: "ed448",
+ options: ED448KeyPairOptions<"der", "der">,
+ ): KeyPairSyncResult<Buffer, Buffer>;
+ function generateKeyPairSync(type: "ed448", options?: ED448KeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+ function generateKeyPairSync(
+ type: "x25519",
+ options: X25519KeyPairOptions<"pem", "pem">,
+ ): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(
+ type: "x25519",
+ options: X25519KeyPairOptions<"pem", "der">,
+ ): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(
+ type: "x25519",
+ options: X25519KeyPairOptions<"der", "pem">,
+ ): KeyPairSyncResult<Buffer, string>;
+ function generateKeyPairSync(
+ type: "x25519",
+ options: X25519KeyPairOptions<"der", "der">,
+ ): KeyPairSyncResult<Buffer, Buffer>;
+ function generateKeyPairSync(type: "x25519", options?: X25519KeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+ function generateKeyPairSync(
+ type: "x448",
+ options: X448KeyPairOptions<"pem", "pem">,
+ ): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(
+ type: "x448",
+ options: X448KeyPairOptions<"pem", "der">,
+ ): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(
+ type: "x448",
+ options: X448KeyPairOptions<"der", "pem">,
+ ): KeyPairSyncResult<Buffer, string>;
+ function generateKeyPairSync(
+ type: "x448",
+ options: X448KeyPairOptions<"der", "der">,
+ ): KeyPairSyncResult<Buffer, Buffer>;
+ function generateKeyPairSync(type: "x448", options?: X448KeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+ /**
+ * Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC,
+ * Ed25519, Ed448, X25519, X448, and DH are currently supported.
+ *
+ * If a `publicKeyEncoding` or `privateKeyEncoding` was specified, this function
+ * behaves as if `keyObject.export()` had been called on its result. Otherwise,
+ * the respective part of the key is returned as a `KeyObject`.
+ *
+ * It is recommended to encode public keys as `'spki'` and private keys as `'pkcs8'` with encryption for long-term storage:
+ *
+ * ```js
+ * const {
+ * generateKeyPair,
+ * } = await import('node:crypto');
+ *
+ * generateKeyPair('rsa', {
+ * modulusLength: 4096,
+ * publicKeyEncoding: {
+ * type: 'spki',
+ * format: 'pem',
+ * },
+ * privateKeyEncoding: {
+ * type: 'pkcs8',
+ * format: 'pem',
+ * cipher: 'aes-256-cbc',
+ * passphrase: 'top secret',
+ * },
+ * }, (err, publicKey, privateKey) => {
+ * // Handle errors and use the generated key pair.
+ * });
+ * ```
+ *
+ * On completion, `callback` will be called with `err` set to `undefined` and `publicKey` / `privateKey` representing the generated key pair.
+ *
+ * If this method is invoked as its `util.promisify()`ed version, it returns
+ * a `Promise` for an `Object` with `publicKey` and `privateKey` properties.
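+ *
+ * An illustrative sketch of the promisified form:
+ *
+ * ```js
+ * const { promisify } = await import('node:util');
+ * const { generateKeyPair } = await import('node:crypto');
+ *
+ * const generateKeyPairAsync = promisify(generateKeyPair);
+ * const { publicKey, privateKey } = await generateKeyPairAsync('rsa', {
+ * modulusLength: 2048,
+ * });
+ * console.log(publicKey.type); // 'public'
+ * ```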
+ * @since v10.12.0
+ * @param type Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`.
+ */
+ function generateKeyPair(
+ type: "rsa",
+ options: RSAKeyPairOptions<"pem", "pem">,
+ callback: (err: Error | null, publicKey: string, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "rsa",
+ options: RSAKeyPairOptions<"pem", "der">,
+ callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "rsa",
+ options: RSAKeyPairOptions<"der", "pem">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "rsa",
+ options: RSAKeyPairOptions<"der", "der">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "rsa",
+ options: RSAKeyPairKeyObjectOptions,
+ callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void,
+ ): void;
+ function generateKeyPair(
+ type: "rsa-pss",
+ options: RSAPSSKeyPairOptions<"pem", "pem">,
+ callback: (err: Error | null, publicKey: string, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "rsa-pss",
+ options: RSAPSSKeyPairOptions<"pem", "der">,
+ callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "rsa-pss",
+ options: RSAPSSKeyPairOptions<"der", "pem">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "rsa-pss",
+ options: RSAPSSKeyPairOptions<"der", "der">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "rsa-pss",
+ options: RSAPSSKeyPairKeyObjectOptions,
+ callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void,
+ ): void;
+ function generateKeyPair(
+ type: "dsa",
+ options: DSAKeyPairOptions<"pem", "pem">,
+ callback: (err: Error | null, publicKey: string, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "dsa",
+ options: DSAKeyPairOptions<"pem", "der">,
+ callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "dsa",
+ options: DSAKeyPairOptions<"der", "pem">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "dsa",
+ options: DSAKeyPairOptions<"der", "der">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "dsa",
+ options: DSAKeyPairKeyObjectOptions,
+ callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ec",
+ options: ECKeyPairOptions<"pem", "pem">,
+ callback: (err: Error | null, publicKey: string, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ec",
+ options: ECKeyPairOptions<"pem", "der">,
+ callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ec",
+ options: ECKeyPairOptions<"der", "pem">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ec",
+ options: ECKeyPairOptions<"der", "der">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ec",
+ options: ECKeyPairKeyObjectOptions,
+ callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ed25519",
+ options: ED25519KeyPairOptions<"pem", "pem">,
+ callback: (err: Error | null, publicKey: string, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ed25519",
+ options: ED25519KeyPairOptions<"pem", "der">,
+ callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ed25519",
+ options: ED25519KeyPairOptions<"der", "pem">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ed25519",
+ options: ED25519KeyPairOptions<"der", "der">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ed25519",
+ options: ED25519KeyPairKeyObjectOptions | undefined,
+ callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ed448",
+ options: ED448KeyPairOptions<"pem", "pem">,
+ callback: (err: Error | null, publicKey: string, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ed448",
+ options: ED448KeyPairOptions<"pem", "der">,
+ callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ed448",
+ options: ED448KeyPairOptions<"der", "pem">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ed448",
+ options: ED448KeyPairOptions<"der", "der">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "ed448",
+ options: ED448KeyPairKeyObjectOptions | undefined,
+ callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void,
+ ): void;
+ function generateKeyPair(
+ type: "x25519",
+ options: X25519KeyPairOptions<"pem", "pem">,
+ callback: (err: Error | null, publicKey: string, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "x25519",
+ options: X25519KeyPairOptions<"pem", "der">,
+ callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "x25519",
+ options: X25519KeyPairOptions<"der", "pem">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "x25519",
+ options: X25519KeyPairOptions<"der", "der">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "x25519",
+ options: X25519KeyPairKeyObjectOptions | undefined,
+ callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void,
+ ): void;
+ function generateKeyPair(
+ type: "x448",
+ options: X448KeyPairOptions<"pem", "pem">,
+ callback: (err: Error | null, publicKey: string, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "x448",
+ options: X448KeyPairOptions<"pem", "der">,
+ callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "x448",
+ options: X448KeyPairOptions<"der", "pem">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void,
+ ): void;
+ function generateKeyPair(
+ type: "x448",
+ options: X448KeyPairOptions<"der", "der">,
+ callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void,
+ ): void;
+ function generateKeyPair(
+ type: "x448",
+ options: X448KeyPairKeyObjectOptions | undefined,
+ callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void,
+ ): void;
+ namespace generateKeyPair {
+ function __promisify__(
+ type: "rsa",
+ options: RSAKeyPairOptions<"pem", "pem">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "rsa",
+ options: RSAKeyPairOptions<"pem", "der">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(
+ type: "rsa",
+ options: RSAKeyPairOptions<"der", "pem">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "rsa",
+ options: RSAKeyPairOptions<"der", "der">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(type: "rsa", options: RSAKeyPairKeyObjectOptions): Promise;
+ function __promisify__(
+ type: "rsa-pss",
+ options: RSAPSSKeyPairOptions<"pem", "pem">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "rsa-pss",
+ options: RSAPSSKeyPairOptions<"pem", "der">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(
+ type: "rsa-pss",
+ options: RSAPSSKeyPairOptions<"der", "pem">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "rsa-pss",
+ options: RSAPSSKeyPairOptions<"der", "der">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(
+ type: "rsa-pss",
+ options: RSAPSSKeyPairKeyObjectOptions,
+ ): Promise<KeyPairKeyObjectResult>;
+ function __promisify__(
+ type: "dsa",
+ options: DSAKeyPairOptions<"pem", "pem">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "dsa",
+ options: DSAKeyPairOptions<"pem", "der">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(
+ type: "dsa",
+ options: DSAKeyPairOptions<"der", "pem">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "dsa",
+ options: DSAKeyPairOptions<"der", "der">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(type: "dsa", options: DSAKeyPairKeyObjectOptions): Promise;
+ function __promisify__(
+ type: "ec",
+ options: ECKeyPairOptions<"pem", "pem">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "ec",
+ options: ECKeyPairOptions<"pem", "der">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(
+ type: "ec",
+ options: ECKeyPairOptions<"der", "pem">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "ec",
+ options: ECKeyPairOptions<"der", "der">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(type: "ec", options: ECKeyPairKeyObjectOptions): Promise;
+ function __promisify__(
+ type: "ed25519",
+ options: ED25519KeyPairOptions<"pem", "pem">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "ed25519",
+ options: ED25519KeyPairOptions<"pem", "der">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(
+ type: "ed25519",
+ options: ED25519KeyPairOptions<"der", "pem">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "ed25519",
+ options: ED25519KeyPairOptions<"der", "der">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(
+ type: "ed25519",
+ options?: ED25519KeyPairKeyObjectOptions,
+ ): Promise<KeyPairKeyObjectResult>;
+ function __promisify__(
+ type: "ed448",
+ options: ED448KeyPairOptions<"pem", "pem">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "ed448",
+ options: ED448KeyPairOptions<"pem", "der">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(
+ type: "ed448",
+ options: ED448KeyPairOptions<"der", "pem">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "ed448",
+ options: ED448KeyPairOptions<"der", "der">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(type: "ed448", options?: ED448KeyPairKeyObjectOptions): Promise;
+ function __promisify__(
+ type: "x25519",
+ options: X25519KeyPairOptions<"pem", "pem">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "x25519",
+ options: X25519KeyPairOptions<"pem", "der">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(
+ type: "x25519",
+ options: X25519KeyPairOptions<"der", "pem">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "x25519",
+ options: X25519KeyPairOptions<"der", "der">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(
+ type: "x25519",
+ options?: X25519KeyPairKeyObjectOptions,
+ ): Promise<KeyPairKeyObjectResult>;
+ function __promisify__(
+ type: "x448",
+ options: X448KeyPairOptions<"pem", "pem">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "x448",
+ options: X448KeyPairOptions<"pem", "der">,
+ ): Promise<{
+ publicKey: string;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(
+ type: "x448",
+ options: X448KeyPairOptions<"der", "pem">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: string;
+ }>;
+ function __promisify__(
+ type: "x448",
+ options: X448KeyPairOptions<"der", "der">,
+ ): Promise<{
+ publicKey: Buffer;
+ privateKey: Buffer;
+ }>;
+ function __promisify__(type: "x448", options?: X448KeyPairKeyObjectOptions): Promise;
+ }
+ /**
+ * Calculates and returns the signature for `data` using the given private key and
+ * algorithm. If `algorithm` is `null` or `undefined`, then the algorithm is
+ * dependent upon the key type (especially Ed25519 and Ed448).
+ *
+ * If `key` is not a `KeyObject`, this function behaves as if `key` had been
+ * passed to {@link createPrivateKey}. If it is an object, the additional
+ * properties accepted by `SignKeyObjectInput` or `SignPrivateKeyInput` can be passed.
+ *
+ * If the `callback` function is provided this function uses libuv's threadpool.
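+ *
+ * As a brief sketch (the Ed25519 key pair below is generated on the spot purely for illustration):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { generateKeyPairSync, sign } = await import('node:crypto');
+ *
+ * const { privateKey } = generateKeyPairSync('ed25519');
+ * // For Ed25519/Ed448 the algorithm must be null or undefined.
+ * const signature = sign(null, Buffer.from('some data'), privateKey);
+ * ```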
+ * @since v12.0.0
+ */
+ function sign(
+ algorithm: string | null | undefined,
+ data: NodeJS.ArrayBufferView,
+ key: KeyLike | SignKeyObjectInput | SignPrivateKeyInput,
+ ): Buffer;
+ function sign(
+ algorithm: string | null | undefined,
+ data: NodeJS.ArrayBufferView,
+ key: KeyLike | SignKeyObjectInput | SignPrivateKeyInput,
+ callback: (error: Error | null, data: Buffer) => void,
+ ): void;
+ /**
+ * Verifies the given signature for `data` using the given key and algorithm. If `algorithm` is `null` or `undefined`, then the algorithm is dependent upon the
+ * key type (especially Ed25519 and Ed448).
+ *
+ * If `key` is not a `KeyObject`, this function behaves as if `key` had been
+ * passed to {@link createPublicKey}. If it is an object, the additional
+ * properties accepted by `VerifyKeyObjectInput`, `VerifyPublicKeyInput`, or `VerifyJsonWebKeyInput` can be passed.
+ *
+ * The `signature` argument is the previously calculated signature for the `data`.
+ *
+ * Because public keys can be derived from private keys, a private key or a public
+ * key may be passed for `key`.
+ *
+ * If the `callback` function is provided this function uses libuv's threadpool.
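+ *
+ * A matching sketch, reusing an illustrative Ed25519 key pair:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { generateKeyPairSync, sign, verify } = await import('node:crypto');
+ *
+ * const { publicKey, privateKey } = generateKeyPairSync('ed25519');
+ * const data = Buffer.from('some data');
+ * const signature = sign(null, data, privateKey);
+ * console.log(verify(null, data, publicKey, signature)); // true
+ * ```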
+ * @since v12.0.0
+ */
+ function verify(
+ algorithm: string | null | undefined,
+ data: NodeJS.ArrayBufferView,
+ key: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput,
+ signature: NodeJS.ArrayBufferView,
+ ): boolean;
+ function verify(
+ algorithm: string | null | undefined,
+ data: NodeJS.ArrayBufferView,
+ key: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput,
+ signature: NodeJS.ArrayBufferView,
+ callback: (error: Error | null, result: boolean) => void,
+ ): void;
+ /**
+ * Computes the Diffie-Hellman secret based on a `privateKey` and a `publicKey`.
+ * Both keys must have the same `asymmetricKeyType`, which must be one of `'dh'` (for Diffie-Hellman), `'ec'` (for ECDH), `'x448'`, or `'x25519'` (for ECDH-ES).
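+ *
+ * A minimal sketch using two freshly generated, illustrative X25519 key pairs:
+ *
+ * ```js
+ * const { generateKeyPairSync, diffieHellman } = await import('node:crypto');
+ *
+ * const alice = generateKeyPairSync('x25519');
+ * const bob = generateKeyPairSync('x25519');
+ *
+ * const aliceSecret = diffieHellman({ privateKey: alice.privateKey, publicKey: bob.publicKey });
+ * const bobSecret = diffieHellman({ privateKey: bob.privateKey, publicKey: alice.publicKey });
+ * // Both sides arrive at the same shared secret.
+ * console.log(aliceSecret.equals(bobSecret)); // true
+ * ```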
+ * @since v13.9.0, v12.17.0
+ */
+ function diffieHellman(options: { privateKey: KeyObject; publicKey: KeyObject }): Buffer;
+ type CipherMode = "cbc" | "ccm" | "cfb" | "ctr" | "ecb" | "gcm" | "ocb" | "ofb" | "stream" | "wrap" | "xts";
+ interface CipherInfoOptions {
+ /**
+ * A test key length.
+ */
+ keyLength?: number | undefined;
+ /**
+ * A test IV length.
+ */
+ ivLength?: number | undefined;
+ }
+ interface CipherInfo {
+ /**
+ * The name of the cipher.
+ */
+ name: string;
+ /**
+ * The nid of the cipher.
+ */
+ nid: number;
+ /**
+ * The block size of the cipher in bytes.
+ * This property is omitted when mode is 'stream'.
+ */
+ blockSize?: number | undefined;
+ /**
+ * The expected or default initialization vector length in bytes.
+ * This property is omitted if the cipher does not use an initialization vector.
+ */
+ ivLength?: number | undefined;
+ /**
+ * The expected or default key length in bytes.
+ */
+ keyLength: number;
+ /**
+ * The cipher mode.
+ */
+ mode: CipherMode;
+ }
+ /**
+ * Returns information about a given cipher.
+ *
+ * Some ciphers accept variable length keys and initialization vectors. By default,
+ * the `crypto.getCipherInfo()` method will return the default values for these
+ * ciphers. To test if a given key length or iv length is acceptable for given
+ * cipher, use the `keyLength` and `ivLength` options. If the given values are
+ * unacceptable, `undefined` will be returned.
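+ *
+ * For example (the cipher names below are just commonly available choices):
+ *
+ * ```js
+ * const { getCipherInfo } = await import('node:crypto');
+ *
+ * // e.g. { name: 'aes-256-gcm', mode: 'gcm', keyLength: 32, ivLength: 12, ... }
+ * console.log(getCipherInfo('aes-256-gcm'));
+ * // A 64-byte key is not acceptable for aes-128-cbc, so undefined is returned.
+ * console.log(getCipherInfo('aes-128-cbc', { keyLength: 64 })); // undefined
+ * ```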
+ * @since v15.0.0
+ * @param nameOrNid The name or nid of the cipher to query.
+ */
+ function getCipherInfo(nameOrNid: string | number, options?: CipherInfoOptions): CipherInfo | undefined;
+ /**
+ * HKDF is a simple key derivation function defined in RFC 5869. The given `ikm`, `salt` and `info` are used with the `digest` to derive a key of `keylen` bytes.
+ *
+ * The supplied `callback` function is called with two arguments: `err` and `derivedKey`. If an error occurs while deriving the key, `err` will be set;
+ * otherwise `err` will be `null`. The successfully generated `derivedKey` will
+ * be passed to the callback as an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). An error will be thrown if any
+ * of the input arguments specify invalid values or types.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const {
+ * hkdf,
+ * } = await import('node:crypto');
+ *
+ * hkdf('sha512', 'key', 'salt', 'info', 64, (err, derivedKey) => {
+ * if (err) throw err;
+ * console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653'
+ * });
+ * ```
+ * @since v15.0.0
+ * @param digest The digest algorithm to use.
+ * @param ikm The input keying material. Must be provided but can be zero-length.
+ * @param salt The salt value. Must be provided but can be zero-length.
+ * @param info Additional info value. Must be provided but can be zero-length, and cannot be more than 1024 bytes.
+ * @param keylen The length of the key to generate. Must be greater than 0. The maximum allowable value is `255` times the number of bytes produced by the selected digest function (e.g. `sha512`
+ * generates 64-byte hashes, making the maximum HKDF output 16320 bytes).
+ */
+ function hkdf(
+ digest: string,
+ ikm: BinaryLike | KeyObject,
+ salt: BinaryLike,
+ info: BinaryLike,
+ keylen: number,
+ callback: (err: Error | null, derivedKey: ArrayBuffer) => void,
+ ): void;
+ /**
+ * Provides a synchronous HKDF key derivation function as defined in RFC 5869. The
+ * given `ikm`, `salt` and `info` are used with the `digest` to derive a key of `keylen` bytes.
+ *
+ * The successfully generated `derivedKey` will be returned as an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer).
+ *
+ * An error will be thrown if any of the input arguments specify invalid values or
+ * types, or if the derived key cannot be generated.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const {
+ * hkdfSync,
+ * } = await import('node:crypto');
+ *
+ * const derivedKey = hkdfSync('sha512', 'key', 'salt', 'info', 64);
+ * console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653'
+ * ```
+ * @since v15.0.0
+ * @param digest The digest algorithm to use.
+ * @param ikm The input keying material. Must be provided but can be zero-length.
+ * @param salt The salt value. Must be provided but can be zero-length.
+ * @param info Additional info value. Must be provided but can be zero-length, and cannot be more than 1024 bytes.
+ * @param keylen The length of the key to generate. Must be greater than 0. The maximum allowable value is `255` times the number of bytes produced by the selected digest function (e.g. `sha512`
+ * generates 64-byte hashes, making the maximum HKDF output 16320 bytes).
+ */
+ function hkdfSync(
+ digest: string,
+ ikm: BinaryLike | KeyObject,
+ salt: BinaryLike,
+ info: BinaryLike,
+ keylen: number,
+ ): ArrayBuffer;
+ interface SecureHeapUsage {
+ /**
+ * The total allocated secure heap size as specified using the `--secure-heap=n` command-line flag.
+ */
+ total: number;
+ /**
+ * The minimum allocation from the secure heap as specified using the `--secure-heap-min` command-line flag.
+ */
+ min: number;
+ /**
+ * The total number of bytes currently allocated from the secure heap.
+ */
+ used: number;
+ /**
+ * The calculated ratio of `used` to `total` allocated bytes.
+ */
+ utilization: number;
+ }
+ /**
+ * @since v15.6.0
+ */
+ function secureHeapUsed(): SecureHeapUsage;
+ interface RandomUUIDOptions {
+ /**
+ * By default, to improve performance,
+ * Node.js will pre-emptively generate and persistently cache enough
+ * random data to generate up to 128 random UUIDs. To generate a UUID
+ * without using the cache, set `disableEntropyCache` to `true`.
+ *
+ * @default `false`
+ */
+ disableEntropyCache?: boolean | undefined;
+ }
+ type UUID = `${string}-${string}-${string}-${string}-${string}`;
+ /**
+ * Generates a random [RFC 4122](https://www.rfc-editor.org/rfc/rfc4122.txt) version 4 UUID. The UUID is generated using a
+ * cryptographic pseudorandom number generator.
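+ *
+ * For example:
+ *
+ * ```js
+ * const { randomUUID } = await import('node:crypto');
+ *
+ * console.log(randomUUID()); // e.g. '36b8f84d-df4e-4d49-b662-bcde71a8764f'
+ * // Bypass the pre-generated entropy cache for this call.
+ * console.log(randomUUID({ disableEntropyCache: true }));
+ * ```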
+ * @since v15.6.0, v14.17.0
+ */
+ function randomUUID(options?: RandomUUIDOptions): UUID;
+ interface X509CheckOptions {
+ /**
+ * @default 'always'
+ */
+ subject?: "always" | "default" | "never";
+ /**
+ * @default true
+ */
+ wildcards?: boolean;
+ /**
+ * @default true
+ */
+ partialWildcards?: boolean;
+ /**
+ * @default false
+ */
+ multiLabelWildcards?: boolean;
+ /**
+ * @default false
+ */
+ singleLabelSubdomains?: boolean;
+ }
+ /**
+ * Encapsulates an X509 certificate and provides read-only access to
+ * its information.
+ *
+ * ```js
+ * const { X509Certificate } = await import('node:crypto');
+ *
+ * const x509 = new X509Certificate('{... pem encoded cert ...}');
+ *
+ * console.log(x509.subject);
+ * ```
+ * @since v15.6.0
+ */
+ class X509Certificate {
+ /**
+ * Will be `true` if this is a Certificate Authority (CA) certificate.
+ * @since v15.6.0
+ */
+ readonly ca: boolean;
+ /**
+ * The SHA-1 fingerprint of this certificate.
+ *
+ * Because SHA-1 is cryptographically broken and because the security of SHA-1 is
+ * significantly worse than that of algorithms that are commonly used to sign
+ * certificates, consider using `x509.fingerprint256` instead.
+ * @since v15.6.0
+ */
+ readonly fingerprint: string;
+ /**
+ * The SHA-256 fingerprint of this certificate.
+ * @since v15.6.0
+ */
+ readonly fingerprint256: string;
+ /**
+ * The SHA-512 fingerprint of this certificate.
+ *
+ * Because computing the SHA-256 fingerprint is usually faster and because it is
+ * only half the size of the SHA-512 fingerprint, `x509.fingerprint256` may be
+ * a better choice. While SHA-512 presumably provides a higher level of security in
+ * general, the security of SHA-256 matches that of most algorithms that are
+ * commonly used to sign certificates.
+ * @since v17.2.0, v16.14.0
+ */
+ readonly fingerprint512: string;
+ /**
+ * The complete subject of this certificate.
+ * @since v15.6.0
+ */
+ readonly subject: string;
+ /**
+ * The subject alternative name specified for this certificate.
+ *
+ * This is a comma-separated list of subject alternative names. Each entry begins
+ * with a string identifying the kind of the subject alternative name followed by
+ * a colon and the value associated with the entry.
+ *
+ * Earlier versions of Node.js incorrectly assumed that it is safe to split this
+ * property at the two-character sequence `', '` (see [CVE-2021-44532](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-44532)). However,
+ * both malicious and legitimate certificates can contain subject alternative names
+ * that include this sequence when represented as a string.
+ *
+ * After the prefix denoting the type of the entry, the remainder of each entry
+ * might be enclosed in quotes to indicate that the value is a JSON string literal.
+ * For backward compatibility, Node.js only uses JSON string literals within this
+ * property when necessary to avoid ambiguity. Third-party code should be prepared
+ * to handle both possible entry formats.
+ * @since v15.6.0
+ */
+ readonly subjectAltName: string | undefined;
+ /**
+ * A textual representation of the certificate's authority information access
+ * extension.
+ *
+ * This is a line feed separated list of access descriptions. Each line begins with
+ * the access method and the kind of the access location, followed by a colon and
+ * the value associated with the access location.
+ *
+ * After the prefix denoting the access method and the kind of the access location,
+ * the remainder of each line might be enclosed in quotes to indicate that the
+ * value is a JSON string literal. For backward compatibility, Node.js only uses
+ * JSON string literals within this property when necessary to avoid ambiguity.
+ * Third-party code should be prepared to handle both possible entry formats.
+ * @since v15.6.0
+ */
+ readonly infoAccess: string | undefined;
+ /**
+ * An array detailing the key usages for this certificate.
+ * @since v15.6.0
+ */
+ readonly keyUsage: string[];
+ /**
+ * The issuer identification included in this certificate.
+ * @since v15.6.0
+ */
+ readonly issuer: string;
+ /**
+ * The issuer certificate or `undefined` if the issuer certificate is not
+ * available.
+ * @since v15.9.0
+ */
+ readonly issuerCertificate?: X509Certificate | undefined;
+ /**
+ * The public key `KeyObject` for this certificate.
+ * @since v15.6.0
+ */
+ readonly publicKey: KeyObject;
+ /**
+ * A `Buffer` containing the DER encoding of this certificate.
+ * @since v15.6.0
+ */
+ readonly raw: Buffer;
+ /**
+ * The serial number of this certificate.
+ *
+ * Serial numbers are assigned by certificate authorities and do not uniquely
+ * identify certificates. Consider using `x509.fingerprint256` as a unique
+ * identifier instead.
+ * @since v15.6.0
+ */
+ readonly serialNumber: string;
+ /**
+ * The date/time from which this certificate is considered valid.
+ * @since v15.6.0
+ */
+ readonly validFrom: string;
+ /**
+ * The date/time until which this certificate is considered valid.
+ * @since v15.6.0
+ */
+ readonly validTo: string;
+ constructor(buffer: BinaryLike);
+ /**
+ * Checks whether the certificate matches the given email address.
+ *
+ * If the `'subject'` option is undefined or set to `'default'`, the certificate
+ * subject is only considered if the subject alternative name extension either does
+ * not exist or does not contain any email addresses.
+ *
+ * If the `'subject'` option is set to `'always'` and if the subject alternative
+ * name extension either does not exist or does not contain a matching email
+ * address, the certificate subject is considered.
+ *
+ * If the `'subject'` option is set to `'never'`, the certificate subject is never
+ * considered, even if the certificate contains no subject alternative names.
+ * @since v15.6.0
+ * @return Returns `email` if the certificate matches, `undefined` if it does not.
+ */
+ checkEmail(email: string, options?: Pick<X509CheckOptions, "subject">): string | undefined;
+ /**
+ * Checks whether the certificate matches the given host name.
+ *
+ * If the certificate matches the given host name, the matching subject name is
+ * returned. The returned name might be an exact match (e.g., `foo.example.com`)
+ * or it might contain wildcards (e.g., `*.example.com`). Because host name
+ * comparisons are case-insensitive, the returned subject name might also differ
+ * from the given `name` in capitalization.
+ *
+ * If the `'subject'` option is undefined or set to `'default'`, the certificate
+ * subject is only considered if the subject alternative name extension either does
+ * not exist or does not contain any DNS names. This behavior is consistent with [RFC 2818](https://www.rfc-editor.org/rfc/rfc2818.txt) ("HTTP Over TLS").
+ *
+ * If the `'subject'` option is set to `'always'` and if the subject alternative
+ * name extension either does not exist or does not contain a matching DNS name,
+ * the certificate subject is considered.
+ *
+ * If the `'subject'` option is set to `'never'`, the certificate subject is never
+ * considered, even if the certificate contains no subject alternative names.
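+ *
+ * A rough sketch, where `pemCert` is a hypothetical PEM-encoded certificate string:
+ *
+ * ```js
+ * const { X509Certificate } = await import('node:crypto');
+ *
+ * const x509 = new X509Certificate(pemCert);
+ * // Returns the matching subject name (possibly a wildcard), or undefined.
+ * const match = x509.checkHost('www.example.com', { subject: 'default' });
+ * ```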
+ * @since v15.6.0
+ * @return Returns a subject name that matches `name`, or `undefined` if no subject name matches `name`.
+ */
+ checkHost(name: string, options?: X509CheckOptions): string | undefined;
+ /**
+ * Checks whether the certificate matches the given IP address (IPv4 or IPv6).
+ *
+ * Only [RFC 5280](https://www.rfc-editor.org/rfc/rfc5280.txt) `iPAddress` subject alternative names are considered, and they
+ * must match the given `ip` address exactly. Other subject alternative names as
+ * well as the subject field of the certificate are ignored.
+ * @since v15.6.0
+ * @return Returns `ip` if the certificate matches, `undefined` if it does not.
+ */
+ checkIP(ip: string): string | undefined;
+ /**
+ * Checks whether this certificate was issued by the given `otherCert`.
+ * @since v15.6.0
+ */
+ checkIssued(otherCert: X509Certificate): boolean;
+ /**
+ * Checks whether the public key for this certificate is consistent with
+ * the given private key.
+ * @since v15.6.0
+ * @param privateKey A private key.
+ */
+ checkPrivateKey(privateKey: KeyObject): boolean;
+ /**
+ * There is no standard JSON encoding for X509 certificates. The `toJSON()` method returns a string containing the PEM encoded
+ * certificate.
+ * @since v15.6.0
+ */
+ toJSON(): string;
+ /**
+ * Returns information about this certificate using the legacy `certificate object` encoding.
+ * @since v15.6.0
+ */
+ toLegacyObject(): PeerCertificate;
+ /**
+ * Returns the PEM-encoded certificate.
+ * @since v15.6.0
+ */
+ toString(): string;
+ /**
+ * Verifies that this certificate was signed by the given public key.
+ * Does not perform any other validation checks on the certificate.
+ * @since v15.6.0
+ * @param publicKey A public key.
+ */
+ verify(publicKey: KeyObject): boolean;
+ }
+ type LargeNumberLike = NodeJS.ArrayBufferView | SharedArrayBuffer | ArrayBuffer | bigint;
+ interface GeneratePrimeOptions {
+ add?: LargeNumberLike | undefined;
+ rem?: LargeNumberLike | undefined;
+ /**
+ * @default false
+ */
+ safe?: boolean | undefined;
+ bigint?: boolean | undefined;
+ }
+ interface GeneratePrimeOptionsBigInt extends GeneratePrimeOptions {
+ bigint: true;
+ }
+ interface GeneratePrimeOptionsArrayBuffer extends GeneratePrimeOptions {
+ bigint?: false | undefined;
+ }
+ /**
+ * Generates a pseudorandom prime of `size` bits.
+ *
+ * If `options.safe` is `true`, the prime will be a safe prime -- that is, `(prime - 1) / 2` will also be a prime.
+ *
+ * The `options.add` and `options.rem` parameters can be used to enforce additional
+ * requirements, e.g., for Diffie-Hellman:
+ *
+ * * If `options.add` and `options.rem` are both set, the prime will satisfy the
+ * condition that `prime % add = rem`.
+ * * If only `options.add` is set and `options.safe` is not `true`, the prime will
+ * satisfy the condition that `prime % add = 1`.
+ * * If only `options.add` is set and `options.safe` is set to `true`, the prime
+ * will instead satisfy the condition that `prime % add = 3`. This is necessary
+ * because `prime % add = 1` for `options.add > 2` would contradict the condition
+ * enforced by `options.safe`.
+ * * `options.rem` is ignored if `options.add` is not given.
+ *
+ * Both `options.add` and `options.rem` must be encoded as big-endian sequences
+ * if given as an `ArrayBuffer`, `SharedArrayBuffer`, `TypedArray`, `Buffer`, or `DataView`.
+ *
+ * By default, the prime is encoded as a big-endian sequence of octets
+ * in an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). If the `bigint` option is `true`, then a
+ * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) is provided.
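+ *
+ * For example (the 512-bit size is arbitrary and kept small so the example runs quickly):
+ *
+ * ```js
+ * const { generatePrime } = await import('node:crypto');
+ *
+ * generatePrime(512, { safe: true, bigint: true }, (err, prime) => {
+ *   if (err) throw err;
+ *   console.log(prime); // a 512-bit safe prime as a bigint
+ * });
+ * ```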
+ * @since v15.8.0
+ * @param size The size (in bits) of the prime to generate.
+ */
+ function generatePrime(size: number, callback: (err: Error | null, prime: ArrayBuffer) => void): void;
+ function generatePrime(
+ size: number,
+ options: GeneratePrimeOptionsBigInt,
+ callback: (err: Error | null, prime: bigint) => void,
+ ): void;
+ function generatePrime(
+ size: number,
+ options: GeneratePrimeOptionsArrayBuffer,
+ callback: (err: Error | null, prime: ArrayBuffer) => void,
+ ): void;
+ function generatePrime(
+ size: number,
+ options: GeneratePrimeOptions,
+ callback: (err: Error | null, prime: ArrayBuffer | bigint) => void,
+ ): void;
+ /**
+ * Generates a pseudorandom prime of `size` bits.
+ *
+ * If `options.safe` is `true`, the prime will be a safe prime -- that is, `(prime - 1) / 2` will also be a prime.
+ *
+ * The `options.add` and `options.rem` parameters can be used to enforce additional
+ * requirements, e.g., for Diffie-Hellman:
+ *
+ * * If `options.add` and `options.rem` are both set, the prime will satisfy the
+ * condition that `prime % add = rem`.
+ * * If only `options.add` is set and `options.safe` is not `true`, the prime will
+ * satisfy the condition that `prime % add = 1`.
+ * * If only `options.add` is set and `options.safe` is set to `true`, the prime
+ * will instead satisfy the condition that `prime % add = 3`. This is necessary
+ * because `prime % add = 1` for `options.add > 2` would contradict the condition
+ * enforced by `options.safe`.
+ * * `options.rem` is ignored if `options.add` is not given.
+ *
+ * Both `options.add` and `options.rem` must be encoded as big-endian sequences
+ * if given as an `ArrayBuffer`, `SharedArrayBuffer`, `TypedArray`, `Buffer`, or `DataView`.
+ *
+ * By default, the prime is encoded as a big-endian sequence of octets
+ * in an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). If the `bigint` option is `true`, then a
+ * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) is provided.
+ * @since v15.8.0
+ * @param size The size (in bits) of the prime to generate.
+ */
+ function generatePrimeSync(size: number): ArrayBuffer;
+ function generatePrimeSync(size: number, options: GeneratePrimeOptionsBigInt): bigint;
+ function generatePrimeSync(size: number, options: GeneratePrimeOptionsArrayBuffer): ArrayBuffer;
+ function generatePrimeSync(size: number, options: GeneratePrimeOptions): ArrayBuffer | bigint;
+ interface CheckPrimeOptions {
+ /**
+ * The number of Miller-Rabin probabilistic primality iterations to perform.
+ * When the value is 0 (zero), a number of checks is used that yields a false positive rate of at most `2**-64` for random input.
+ * Care must be used when selecting a number of checks.
+ * Refer to the OpenSSL documentation for the BN_is_prime_ex function nchecks options for more details.
+ *
+ * @default 0
+ */
+ checks?: number | undefined;
+ }
+ /**
+ * Checks the primality of the `candidate`.
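+ *
+ * For example:
+ *
+ * ```js
+ * const { checkPrime, checkPrimeSync } = await import('node:crypto');
+ *
+ * checkPrime(2n ** 89n - 1n, (err, result) => {
+ *   if (err) throw err;
+ *   console.log(result); // true: 2n ** 89n - 1n is a Mersenne prime
+ * });
+ *
+ * console.log(checkPrimeSync(24n)); // false
+ * ```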
+ * @since v15.8.0
+ * @param candidate A possible prime encoded as a sequence of big endian octets of arbitrary length.
+ */
+ function checkPrime(value: LargeNumberLike, callback: (err: Error | null, result: boolean) => void): void;
+ function checkPrime(
+ value: LargeNumberLike,
+ options: CheckPrimeOptions,
+ callback: (err: Error | null, result: boolean) => void,
+ ): void;
+ /**
+ * Checks the primality of the `candidate`.
+ * @since v15.8.0
+ * @param candidate A possible prime encoded as a sequence of big endian octets of arbitrary length.
+ * @return `true` if the candidate is a prime with an error probability less than `0.25 ** options.checks`.
+ */
+ function checkPrimeSync(candidate: LargeNumberLike, options?: CheckPrimeOptions): boolean;
+ /**
+ * Load and set the `engine` for some or all OpenSSL functions (selected by flags).
+ *
+ * `engine` could be either an id or a path to the engine's shared library.
+ *
+ * The optional `flags` argument uses `ENGINE_METHOD_ALL` by default. The `flags` is a bit field taking one of, or a mix of, the following flags (defined in `crypto.constants`):
+ *
+ * * `crypto.constants.ENGINE_METHOD_RSA`
+ * * `crypto.constants.ENGINE_METHOD_DSA`
+ * * `crypto.constants.ENGINE_METHOD_DH`
+ * * `crypto.constants.ENGINE_METHOD_RAND`
+ * * `crypto.constants.ENGINE_METHOD_EC`
+ * * `crypto.constants.ENGINE_METHOD_CIPHERS`
+ * * `crypto.constants.ENGINE_METHOD_DIGESTS`
+ * * `crypto.constants.ENGINE_METHOD_PKEY_METHS`
+ * * `crypto.constants.ENGINE_METHOD_PKEY_ASN1_METHS`
+ * * `crypto.constants.ENGINE_METHOD_ALL`
+ * * `crypto.constants.ENGINE_METHOD_NONE`
+ * @since v0.11.11
+ * @param flags
+ */
+ function setEngine(engine: string, flags?: number): void;
+ /**
+ * A convenient alias for {@link webcrypto.getRandomValues}. This
+ * implementation is not compliant with the Web Crypto spec, to write
+ * web-compatible code use {@link webcrypto.getRandomValues} instead.
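+ *
+ * For example:
+ *
+ * ```js
+ * const { getRandomValues } = await import('node:crypto');
+ *
+ * // Fills the given typed array in place and returns it.
+ * const buf = getRandomValues(new Uint32Array(4));
+ * console.log(buf);
+ * ```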
+ * @since v17.4.0
+ * @return Returns `typedArray`.
+ */
+ function getRandomValues<T extends webcrypto.BufferSource>(typedArray: T): T;
+ /**
+ * A convenient alias for `crypto.webcrypto.subtle`.
+ * @since v17.4.0
+ */
+ const subtle: webcrypto.SubtleCrypto;
+ /**
+ * An implementation of the Web Crypto API standard.
+ *
+ * See the {@link https://nodejs.org/docs/latest/api/webcrypto.html Web Crypto API documentation} for details.
+ * @since v15.0.0
+ */
+ const webcrypto: webcrypto.Crypto;
+ namespace webcrypto {
+ type BufferSource = ArrayBufferView | ArrayBuffer;
+ type KeyFormat = "jwk" | "pkcs8" | "raw" | "spki";
+ type KeyType = "private" | "public" | "secret";
+ type KeyUsage =
+ | "decrypt"
+ | "deriveBits"
+ | "deriveKey"
+ | "encrypt"
+ | "sign"
+ | "unwrapKey"
+ | "verify"
+ | "wrapKey";
+ type AlgorithmIdentifier = Algorithm | string;
+ type HashAlgorithmIdentifier = AlgorithmIdentifier;
+ type NamedCurve = string;
+ type BigInteger = Uint8Array;
+ interface AesCbcParams extends Algorithm {
+ iv: BufferSource;
+ }
+ interface AesCtrParams extends Algorithm {
+ counter: BufferSource;
+ length: number;
+ }
+ interface AesDerivedKeyParams extends Algorithm {
+ length: number;
+ }
+ interface AesGcmParams extends Algorithm {
+ additionalData?: BufferSource;
+ iv: BufferSource;
+ tagLength?: number;
+ }
+ interface AesKeyAlgorithm extends KeyAlgorithm {
+ length: number;
+ }
+ interface AesKeyGenParams extends Algorithm {
+ length: number;
+ }
+ interface Algorithm {
+ name: string;
+ }
+ interface EcKeyAlgorithm extends KeyAlgorithm {
+ namedCurve: NamedCurve;
+ }
+ interface EcKeyGenParams extends Algorithm {
+ namedCurve: NamedCurve;
+ }
+ interface EcKeyImportParams extends Algorithm {
+ namedCurve: NamedCurve;
+ }
+ interface EcdhKeyDeriveParams extends Algorithm {
+ public: CryptoKey;
+ }
+ interface EcdsaParams extends Algorithm {
+ hash: HashAlgorithmIdentifier;
+ }
+ interface Ed448Params extends Algorithm {
+ context?: BufferSource;
+ }
+ interface HkdfParams extends Algorithm {
+ hash: HashAlgorithmIdentifier;
+ info: BufferSource;
+ salt: BufferSource;
+ }
+ interface HmacImportParams extends Algorithm {
+ hash: HashAlgorithmIdentifier;
+ length?: number;
+ }
+ interface HmacKeyAlgorithm extends KeyAlgorithm {
+ hash: KeyAlgorithm;
+ length: number;
+ }
+ interface HmacKeyGenParams extends Algorithm {
+ hash: HashAlgorithmIdentifier;
+ length?: number;
+ }
+ interface JsonWebKey {
+ alg?: string;
+ crv?: string;
+ d?: string;
+ dp?: string;
+ dq?: string;
+ e?: string;
+ ext?: boolean;
+ k?: string;
+ key_ops?: string[];
+ kty?: string;
+ n?: string;
+ oth?: RsaOtherPrimesInfo[];
+ p?: string;
+ q?: string;
+ qi?: string;
+ use?: string;
+ x?: string;
+ y?: string;
+ }
+ interface KeyAlgorithm {
+ name: string;
+ }
+ interface Pbkdf2Params extends Algorithm {
+ hash: HashAlgorithmIdentifier;
+ iterations: number;
+ salt: BufferSource;
+ }
+ interface RsaHashedImportParams extends Algorithm {
+ hash: HashAlgorithmIdentifier;
+ }
+ interface RsaHashedKeyAlgorithm extends RsaKeyAlgorithm {
+ hash: KeyAlgorithm;
+ }
+ interface RsaHashedKeyGenParams extends RsaKeyGenParams {
+ hash: HashAlgorithmIdentifier;
+ }
+ interface RsaKeyAlgorithm extends KeyAlgorithm {
+ modulusLength: number;
+ publicExponent: BigInteger;
+ }
+ interface RsaKeyGenParams extends Algorithm {
+ modulusLength: number;
+ publicExponent: BigInteger;
+ }
+ interface RsaOaepParams extends Algorithm {
+ label?: BufferSource;
+ }
+ interface RsaOtherPrimesInfo {
+ d?: string;
+ r?: string;
+ t?: string;
+ }
+ interface RsaPssParams extends Algorithm {
+ saltLength: number;
+ }
+ /**
+ * Calling `require('node:crypto').webcrypto` returns an instance of the `Crypto` class.
+ * `Crypto` is a singleton that provides access to the remainder of the crypto API.
+ * @since v15.0.0
+ */
+ interface Crypto {
+ /**
+ * Provides access to the `SubtleCrypto` API.
+ * @since v15.0.0
+ */
+ readonly subtle: SubtleCrypto;
+ /**
+ * Generates cryptographically strong random values.
+ * The given `typedArray` is filled with random values, and a reference to `typedArray` is returned.
+ *
+ * The given `typedArray` must be an integer-based instance of {@link NodeJS.TypedArray}, i.e. `Float32Array` and `Float64Array` are not accepted.
+ *
+ * An error will be thrown if the given `typedArray` is larger than 65,536 bytes.
+ * @since v15.0.0
+ */
+ getRandomValues<T extends Exclude<NodeJS.TypedArray, Float32Array | Float64Array>>(typedArray: T): T;
+ /**
+ * Generates a random {@link https://www.rfc-editor.org/rfc/rfc4122.txt RFC 4122} version 4 UUID.
+ * The UUID is generated using a cryptographic pseudorandom number generator.
+ * @since v16.7.0
+ */
+ randomUUID(): UUID;
+ CryptoKey: CryptoKeyConstructor;
+ }
+ // This constructor throws ILLEGAL_CONSTRUCTOR so it should not be newable.
+ interface CryptoKeyConstructor {
+ /** Illegal constructor */
+ (_: { readonly _: unique symbol }): never; // Allows instanceof to work but not be callable by the user.
+ readonly length: 0;
+ readonly name: "CryptoKey";
+ readonly prototype: CryptoKey;
+ }
+ /**
+ * @since v15.0.0
+ */
+ interface CryptoKey {
+ /**
+ * An object detailing the algorithm for which the key can be used along with additional algorithm-specific parameters.
+ * @since v15.0.0
+ */
+ readonly algorithm: KeyAlgorithm;
+ /**
+ * When `true`, the {@link CryptoKey} can be extracted using either `subtleCrypto.exportKey()` or `subtleCrypto.wrapKey()`.
+ * @since v15.0.0
+ */
+ readonly extractable: boolean;
+ /**
+ * A string identifying whether the key is a symmetric (`'secret'`) or asymmetric (`'private'` or `'public'`) key.
+ * @since v15.0.0
+ */
+ readonly type: KeyType;
+ /**
+ * An array of strings identifying the operations for which the key may be used.
+ *
+ * The possible usages are:
+ * - `'encrypt'` - The key may be used to encrypt data.
+ * - `'decrypt'` - The key may be used to decrypt data.
+ * - `'sign'` - The key may be used to generate digital signatures.
+ * - `'verify'` - The key may be used to verify digital signatures.
+ * - `'deriveKey'` - The key may be used to derive a new key.
+ * - `'deriveBits'` - The key may be used to derive bits.
+ * - `'wrapKey'` - The key may be used to wrap another key.
+ * - `'unwrapKey'` - The key may be used to unwrap another key.
+ *
+ * Valid key usages depend on the key algorithm (identified by `cryptokey.algorithm.name`).
+ * @since v15.0.0
+ */
+ readonly usages: KeyUsage[];
+ }
+ /**
+ * The `CryptoKeyPair` is a simple dictionary object with `publicKey` and `privateKey` properties, representing an asymmetric key pair.
+ * @since v15.0.0
+ */
+ interface CryptoKeyPair {
+ /**
+ * A {@link CryptoKey} whose type will be `'private'`.
+ * @since v15.0.0
+ */
+ privateKey: CryptoKey;
+ /**
+ * A {@link CryptoKey} whose type will be `'public'`.
+ * @since v15.0.0
+ */
+ publicKey: CryptoKey;
+ }
+ /**
+ * @since v15.0.0
+ */
+ interface SubtleCrypto {
+ /**
+ * Using the method and parameters specified in `algorithm` and the keying material provided by `key`,
+ * `subtle.decrypt()` attempts to decipher the provided `data`. If successful,
+ * the returned promise will be resolved with an `<ArrayBuffer>` containing the plaintext result.
+ *
+ * The algorithms currently supported include:
+ *
+ * - `'RSA-OAEP'`
+ * - `'AES-CTR'`
+ * - `'AES-CBC'`
+ * - `'AES-GCM'`
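+ *
+ * A rough round-trip sketch (it also uses `subtle.generateKey()` and `subtle.encrypt()`, which are documented separately):
+ *
+ * ```js
+ * const { webcrypto } = await import('node:crypto');
+ * const { subtle } = webcrypto;
+ *
+ * const key = await subtle.generateKey({ name: 'AES-GCM', length: 256 }, true, ['encrypt', 'decrypt']);
+ * const iv = webcrypto.getRandomValues(new Uint8Array(12));
+ * const ciphertext = await subtle.encrypt({ name: 'AES-GCM', iv }, key, new TextEncoder().encode('secret'));
+ * const plaintext = await subtle.decrypt({ name: 'AES-GCM', iv }, key, ciphertext);
+ * console.log(new TextDecoder().decode(plaintext)); // 'secret'
+ * ```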
+ * @since v15.0.0
+ */
+ decrypt(
+ algorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams,
+ key: CryptoKey,
+ data: BufferSource,
+ ): Promise<ArrayBuffer>;
+ /**
+ * Using the method and parameters specified in `algorithm` and the keying material provided by `baseKey`,
+ * `subtle.deriveBits()` attempts to generate `length` bits.
+ * The Node.js implementation requires that when `length` is a number it must be multiple of `8`.
+ * When `length` is `null` the maximum number of bits for a given algorithm is generated. This is allowed
+ * for the `'ECDH'`, `'X25519'`, and `'X448'` algorithms.
+ * If successful, the returned promise will be resolved with an `<ArrayBuffer>` containing the generated data.
+ *
+ * The algorithms currently supported include:
+ *
+ * - `'ECDH'`
+ * - `'X25519'`
+ * - `'X448'`
+ * - `'HKDF'`
+ * - `'PBKDF2'`
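+ *
+ * As an illustrative ECDH sketch (the P-256 curve and freshly generated key pairs below are arbitrary):
+ *
+ * ```js
+ * const { webcrypto } = await import('node:crypto');
+ * const { subtle } = webcrypto;
+ *
+ * const alice = await subtle.generateKey({ name: 'ECDH', namedCurve: 'P-256' }, false, ['deriveBits']);
+ * const bob = await subtle.generateKey({ name: 'ECDH', namedCurve: 'P-256' }, false, ['deriveBits']);
+ *
+ * // A null length derives the maximum number of bits for the algorithm.
+ * const sharedBits = await subtle.deriveBits(
+ *   { name: 'ECDH', public: bob.publicKey },
+ *   alice.privateKey,
+ *   null,
+ * );
+ * ```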
+ * @since v15.0.0
+ */
+ deriveBits(algorithm: EcdhKeyDeriveParams, baseKey: CryptoKey, length: number | null): Promise<ArrayBuffer>;
+ deriveBits(
+ algorithm: AlgorithmIdentifier | HkdfParams | Pbkdf2Params,
+ baseKey: CryptoKey,
+ length: number,
+ ): Promise<ArrayBuffer>;
+ /**
+ * Using the method and parameters specified in `algorithm`, and the keying material provided by `baseKey`,
+ * `subtle.deriveKey()` attempts to generate a new `<CryptoKey>` based on the method and parameters in `derivedKeyAlgorithm`.
+ *
+ * Calling `subtle.deriveKey()` is equivalent to calling `subtle.deriveBits()` to generate raw keying material,
+ * then passing the result into the `subtle.importKey()` method using the `derivedKeyAlgorithm`, `extractable`, and `keyUsages` parameters as input.
+ *
+ * The algorithms currently supported include:
+ *
+ * - `'ECDH'`
+ * - `'X25519'`
+ * - `'X448'`
+ * - `'HKDF'`
+ * - `'PBKDF2'`
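+ *
+ * A rough PBKDF2-to-AES sketch (the password, salt, and iteration count are placeholders; `subtle.importKey()` is documented separately):
+ *
+ * ```js
+ * const { webcrypto } = await import('node:crypto');
+ * const { subtle } = webcrypto;
+ *
+ * const passwordKey = await subtle.importKey(
+ *   'raw',
+ *   new TextEncoder().encode('correct horse battery staple'),
+ *   'PBKDF2',
+ *   false,
+ *   ['deriveKey'],
+ * );
+ * const salt = webcrypto.getRandomValues(new Uint8Array(16));
+ * const aesKey = await subtle.deriveKey(
+ *   { name: 'PBKDF2', hash: 'SHA-256', salt, iterations: 100_000 },
+ *   passwordKey,
+ *   { name: 'AES-GCM', length: 256 },
+ *   false,
+ *   ['encrypt', 'decrypt'],
+ * );
+ * ```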
+ * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}.
+ * @since v15.0.0
+ */
+ deriveKey(
+ algorithm: AlgorithmIdentifier | EcdhKeyDeriveParams | HkdfParams | Pbkdf2Params,
+ baseKey: CryptoKey,
+ derivedKeyAlgorithm:
+ | AlgorithmIdentifier
+ | AesDerivedKeyParams
+ | HmacImportParams
+ | HkdfParams
+ | Pbkdf2Params,
+ extractable: boolean,
+ keyUsages: readonly KeyUsage[],
+ ): Promise<CryptoKey>;
+ /**
+ * Using the method identified by `algorithm`, `subtle.digest()` attempts to generate a digest of `data`.
+ * If successful, the returned promise is resolved with an `<ArrayBuffer>` containing the computed digest.
+ *
+ * If `algorithm` is provided as a `<string>`, it must be one of:
+ *
+ * - `'SHA-1'`
+ * - `'SHA-256'`
+ * - `'SHA-384'`
+ * - `'SHA-512'`
+ *
+ * If `algorithm` is provided as an `