diff --git a/package-lock.json b/package-lock.json
index 97c4ab4..d39a1e1 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -135,6 +135,7 @@
"resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.3.tgz",
"integrity": "sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@floating-ui/utils": "^0.2.10"
}
@@ -144,6 +145,7 @@
"resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.4.tgz",
"integrity": "sha512-OOchDgh4F2CchOX94cRVqhvy7b3AFb+/rQXyswmzmGakRfkMgoWVjfnLWkRirfLEfuD4ysVW16eXzwt3jHIzKA==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@floating-ui/core": "^1.7.3",
"@floating-ui/utils": "^0.2.10"
@@ -154,6 +156,7 @@
"resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.6.tgz",
"integrity": "sha512-4JX6rEatQEvlmgU80wZyq9RT96HZJa88q8hp0pBd+LrczeDI4o6uA2M+uvxngVHo4Ihr8uibXxH6+70zhAFrVw==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@floating-ui/dom": "^1.7.4"
},
@@ -166,7 +169,8 @@
"version": "0.2.10",
"resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.10.tgz",
"integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==",
- "license": "MIT"
+ "license": "MIT",
+ "peer": true
},
"node_modules/@img/sharp-darwin-arm64": {
"version": "0.33.5",
@@ -1273,6 +1277,7 @@
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz",
"integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"loose-envify": "^1.1.0"
}
@@ -1834,6 +1839,7 @@
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz",
"integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"loose-envify": "^1.1.0"
}
@@ -1976,13 +1982,15 @@
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz",
"integrity": "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==",
- "license": "MIT"
+ "license": "MIT",
+ "peer": true
},
"node_modules/@radix-ui/react-arrow": {
"version": "1.1.7",
"resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.7.tgz",
"integrity": "sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@radix-ui/react-primitive": "2.1.3"
},
@@ -2006,6 +2014,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.2.tgz",
"integrity": "sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==",
"license": "MIT",
+ "peer": true,
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
@@ -2021,6 +2030,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz",
"integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==",
"license": "MIT",
+ "peer": true,
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
@@ -2036,6 +2046,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.11.tgz",
"integrity": "sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@radix-ui/primitive": "1.1.3",
"@radix-ui/react-compose-refs": "1.1.2",
@@ -2063,6 +2074,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.3.tgz",
"integrity": "sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==",
"license": "MIT",
+ "peer": true,
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
@@ -2078,6 +2090,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.1.7.tgz",
"integrity": "sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@radix-ui/react-compose-refs": "1.1.2",
"@radix-ui/react-primitive": "2.1.3",
@@ -2103,6 +2116,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.1.tgz",
"integrity": "sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@radix-ui/react-use-layout-effect": "1.1.1"
},
@@ -2121,6 +2135,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.2.8.tgz",
"integrity": "sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@floating-ui/react-dom": "^2.0.0",
"@radix-ui/react-arrow": "1.1.7",
@@ -2153,6 +2168,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.1.9.tgz",
"integrity": "sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@radix-ui/react-primitive": "2.1.3",
"@radix-ui/react-use-layout-effect": "1.1.1"
@@ -2177,6 +2193,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz",
"integrity": "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@radix-ui/react-compose-refs": "1.1.2",
"@radix-ui/react-use-layout-effect": "1.1.1"
@@ -2201,6 +2218,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz",
"integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@radix-ui/react-slot": "1.2.3"
},
@@ -2224,6 +2242,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz",
"integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@radix-ui/react-compose-refs": "1.1.2"
},
@@ -2242,6 +2261,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.1.tgz",
"integrity": "sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg==",
"license": "MIT",
+ "peer": true,
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
@@ -2257,6 +2277,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.2.2.tgz",
"integrity": "sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@radix-ui/react-use-effect-event": "0.0.2",
"@radix-ui/react-use-layout-effect": "1.1.1"
@@ -2276,6 +2297,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-effect-event/-/react-use-effect-event-0.0.2.tgz",
"integrity": "sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@radix-ui/react-use-layout-effect": "1.1.1"
},
@@ -2294,6 +2316,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.1.1.tgz",
"integrity": "sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@radix-ui/react-use-callback-ref": "1.1.1"
},
@@ -2312,6 +2335,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.1.tgz",
"integrity": "sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==",
"license": "MIT",
+ "peer": true,
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
@@ -2327,6 +2351,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.1.1.tgz",
"integrity": "sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@radix-ui/rect": "1.1.1"
},
@@ -2345,6 +2370,7 @@
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.1.1.tgz",
"integrity": "sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@radix-ui/react-use-layout-effect": "1.1.1"
},
@@ -2362,7 +2388,8 @@
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.1.1.tgz",
"integrity": "sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw==",
- "license": "MIT"
+ "license": "MIT",
+ "peer": true
},
"node_modules/@shikijs/core": {
"version": "3.21.0",
@@ -3017,7 +3044,6 @@
"resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.3.tgz",
"integrity": "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==",
"license": "MIT",
- "peer": true,
"dependencies": {
"undici-types": "~7.16.0"
}
@@ -3102,7 +3128,6 @@
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.2.tgz",
"integrity": "sha512-nc0Axzp/0FILLEVsm4fNwLCwMttvhEI263QtVPQcbpfZZ3ts0hLsZGOpE6czNlid7CJ9MlyH8reXkpsf3YUY4w==",
"license": "MIT",
- "peer": true,
"bin": {
"acorn": "bin/acorn"
},
@@ -3167,7 +3192,6 @@
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"license": "MIT",
- "peer": true,
"dependencies": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
@@ -3294,6 +3318,7 @@
"resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.6.tgz",
"integrity": "sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"tslib": "^2.0.0"
},
@@ -3305,7 +3330,8 @@
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
- "license": "0BSD"
+ "license": "0BSD",
+ "peer": true
},
"node_modules/arkregex": {
"version": "0.0.3",
@@ -4398,7 +4424,8 @@
"version": "3.2.3",
"resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz",
"integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==",
- "license": "MIT"
+ "license": "MIT",
+ "peer": true
},
"node_modules/data-uri-to-buffer": {
"version": "6.0.2",
@@ -4669,7 +4696,8 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz",
"integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==",
- "license": "MIT"
+ "license": "MIT",
+ "peer": true
},
"node_modules/detect-port": {
"version": "1.5.1",
@@ -4702,8 +4730,7 @@
"version": "0.0.1312386",
"resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1312386.tgz",
"integrity": "sha512-DPnhUXvmvKT2dFA/j7B+riVLUt9Q6RKJlcppojL5CoRywJJKLDYnRlw0gTFKfgDPHP5E04UoB71SxoJlVZy8FA==",
- "license": "BSD-3-Clause",
- "peer": true
+ "license": "BSD-3-Clause"
},
"node_modules/didyoumean": {
"version": "1.2.2",
@@ -5815,6 +5842,7 @@
"resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz",
"integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==",
"license": "MIT",
+ "peer": true,
"engines": {
"node": ">=6"
}
@@ -7329,7 +7357,6 @@
"resolved": "https://registry.npmjs.org/jsep/-/jsep-1.4.0.tgz",
"integrity": "sha512-B7qPcEVE3NVkmSJbaYxvv4cHkVW7DQsZz13pUMrfS8z8Q/BuShN+gcTXrUlPiGqM2/t/EEaI030bpxMqY8gMlw==",
"license": "MIT",
- "peer": true,
"engines": {
"node": ">= 10.16.0"
}
@@ -7485,6 +7512,7 @@
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
"integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"js-tokens": "^3.0.0 || ^4.0.0"
},
@@ -9376,7 +9404,6 @@
}
],
"license": "MIT",
- "peer": true,
"dependencies": {
"nanoid": "^3.3.11",
"picocolors": "^1.1.1",
@@ -9752,7 +9779,6 @@
"resolved": "https://registry.npmjs.org/react/-/react-19.2.3.tgz",
"integrity": "sha512-Ku/hhYbVjOQnXDZFv2+RibmLFGwFdeeKHFcOTlrt7xplBnya5OGn/hIRDsqDiSUcfORsDC7MPxwork8jBwsIWA==",
"license": "MIT",
- "peer": true,
"engines": {
"node": ">=0.10.0"
}
@@ -9775,6 +9801,7 @@
      "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz",
      "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==",
-      "license": "MIT"
+      "license": "MIT",
+      "peer": true
},
"node_modules/react-reconciler": {
"version": "0.32.0",
@@ -9796,6 +9824,7 @@
"resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.7.2.tgz",
"integrity": "sha512-Iqb9NjCCTt6Hf+vOdNIZGdTiH1QSqr27H/Ek9sv/a97gfueI/5h1s3yRi1nngzMUaOOToin5dI1dXKdXiF+u0Q==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"react-remove-scroll-bar": "^2.3.7",
"react-style-singleton": "^2.2.3",
@@ -9821,6 +9850,7 @@
"resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.8.tgz",
"integrity": "sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"react-style-singleton": "^2.2.2",
"tslib": "^2.0.0"
@@ -9842,19 +9872,22 @@
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
- "license": "0BSD"
+ "license": "0BSD",
+ "peer": true
},
"node_modules/react-remove-scroll/node_modules/tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
- "license": "0BSD"
+ "license": "0BSD",
+ "peer": true
},
"node_modules/react-style-singleton": {
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.3.tgz",
"integrity": "sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"get-nonce": "^1.0.0",
"tslib": "^2.0.0"
@@ -9876,7 +9909,8 @@
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
- "license": "0BSD"
+ "license": "0BSD",
+ "peer": true
},
"node_modules/read-cache": {
"version": "1.0.0",
@@ -11433,7 +11467,6 @@
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
"license": "MIT",
- "peer": true,
"engines": {
"node": ">=12"
},
@@ -11687,7 +11720,6 @@
"resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
"integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
"license": "MIT",
- "peer": true,
"dependencies": {
"@types/unist": "^3.0.0",
"bail": "^2.0.0",
@@ -11914,6 +11946,7 @@
"resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.3.tgz",
"integrity": "sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"tslib": "^2.0.0"
},
@@ -11934,13 +11967,15 @@
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
- "license": "0BSD"
+ "license": "0BSD",
+ "peer": true
},
"node_modules/use-sidecar": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.3.tgz",
"integrity": "sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"detect-node-es": "^1.1.0",
"tslib": "^2.0.0"
@@ -11962,7 +11997,8 @@
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
- "license": "0BSD"
+ "license": "0BSD",
+ "peer": true
},
"node_modules/util-deprecate": {
"version": "1.0.2",
@@ -12460,7 +12496,6 @@
"resolved": "https://registry.npmjs.org/zod/-/zod-3.21.4.tgz",
"integrity": "sha512-m46AKbrzKVzOzs/DZgVnG5H55N1sv1M8qZU3A8RIKbs3mrACDNeIOeilDymVb2HdmP8uwshOCF4uJ8uM9rCqJw==",
"license": "MIT",
- "peer": true,
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}
diff --git a/sdk/go/index.mdx b/sdk/go/index.mdx
index 288bb24..43803e2 100644
--- a/sdk/go/index.mdx
+++ b/sdk/go/index.mdx
@@ -4,3 +4,506 @@ sidebarTitle: Go
description: Integrate the Golang SDK in your application.
icon: golang
---
+
+The Edgee Go SDK provides a lightweight, type-safe interface to interact with the Edgee AI Gateway. It supports OpenAI-compatible chat completions, function calling, and streaming.
+
+## Installation
+
+```bash
+go get github.com/edgee-cloud/go-sdk
+```
+
+## Quick Start
+
+```go
+package main
+
+import (
+ "fmt"
+ "log"
+ "github.com/edgee-cloud/go-sdk/edgee"
+)
+
+func main() {
+ client, err := edgee.NewClient(nil)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ response, err := client.ChatCompletion("gpt-4o", "What is the capital of France?")
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ fmt.Println(response.Text())
+ // "The capital of France is Paris."
+}
+```
+
+## Configuration
+
+The SDK can be configured in multiple ways:
+
+### Using Environment Variables
+
+```go
+// Set EDGEE_API_KEY environment variable
+client, err := edgee.NewClient(nil)
+```
+
+### Using Constructor Parameters
+
+```go
+// String API key
+client, err := edgee.NewClient("your-api-key")
+
+// Configuration struct
+client, err := edgee.NewClient(&edgee.Config{
+ APIKey: "your-api-key",
+ BaseURL: "https://api.edgee.ai", // optional, defaults to https://api.edgee.ai
+})
+
+// Map configuration
+client, err := edgee.NewClient(map[string]interface{}{
+ "api_key": "your-api-key",
+ "base_url": "https://api.edgee.ai",
+})
+```
+
+## Usage Examples
+
+### Simple String Input
+
+The simplest way to send a request is with a string input:
+
+```go
+response, err := client.ChatCompletion("gpt-4o", "Explain quantum computing in simple terms.")
+if err != nil {
+ log.Fatal(err)
+}
+
+fmt.Println(response.Text())
+```
+
+### Full Message Array
+
+For more control, use a full message array:
+
+```go
+response, err := client.ChatCompletion("gpt-4o", map[string]interface{}{
+ "messages": []map[string]string{
+ {"role": "system", "content": "You are a helpful assistant."},
+ {"role": "user", "content": "Hello!"},
+ },
+})
+if err != nil {
+ log.Fatal(err)
+}
+
+fmt.Println(response.Text())
+```
+
+### Using InputObject
+
+For better type safety, use the `InputObject` struct:
+
+```go
+response, err := client.ChatCompletion("gpt-4o", edgee.InputObject{
+ Messages: []edgee.Message{
+ {Role: "system", Content: "You are a helpful assistant."},
+ {Role: "user", Content: "Hello!"},
+ },
+})
+if err != nil {
+ log.Fatal(err)
+}
+
+fmt.Println(response.Text())
+```
+
+### Function Calling (Tools)
+
+The SDK supports OpenAI-compatible function calling:
+
+```go
+response, err := client.ChatCompletion("gpt-4o", map[string]interface{}{
+ "messages": []map[string]string{
+ {"role": "user", "content": "What is the weather in Paris?"},
+ },
+ "tools": []map[string]interface{}{
+ {
+ "type": "function",
+ "function": map[string]interface{}{
+ "name": "get_weather",
+ "description": "Get the current weather for a location",
+ "parameters": map[string]interface{}{
+ "type": "object",
+ "properties": map[string]interface{}{
+ "location": map[string]string{
+ "type": "string",
+ "description": "City name",
+ },
+ },
+ "required": []string{"location"},
+ },
+ },
+ },
+ },
+ "tool_choice": "auto", // or "none", or map[string]interface{}{"type": "function", "function": map[string]string{"name": "get_weather"}}
+})
+
+// Check if the model wants to call a function
+if toolCalls := response.ToolCalls(); len(toolCalls) > 0 {
+ toolCall := toolCalls[0]
+ fmt.Printf("Function: %s\n", toolCall.Function.Name)
+ fmt.Printf("Arguments: %s\n", toolCall.Function.Arguments)
+}
+```
+
+### Tool Response Handling
+
+After receiving a tool call, you can send the function result back:
+
+```go
+import "encoding/json"
+
+// First request - model requests a tool call
+response1, err := client.ChatCompletion("gpt-4o", map[string]interface{}{
+ "messages": []map[string]string{
+ {"role": "user", "content": "What is the weather in Paris?"},
+ },
+ "tools": []map[string]interface{}{...}, // tool definitions
+})
+
+// Execute the function and send the result
+toolCall := response1.ToolCalls()[0]
+var args map[string]interface{}
+json.Unmarshal([]byte(toolCall.Function.Arguments), &args)
+functionResult := getWeather(args)
+
+// Second request - include tool response
+resultJSON, _ := json.Marshal(functionResult)
+toolCallID := toolCall.ID
+
+response2, err := client.ChatCompletion("gpt-4o", edgee.InputObject{
+ Messages: []edgee.Message{
+ {Role: "user", Content: "What is the weather in Paris?"},
+ *response1.MessageContent(), // Include the assistant's message
+ {
+ Role: "tool",
+ ToolCallID: &toolCallID,
+ Content: string(resultJSON),
+ },
+ },
+})
+
+fmt.Println(response2.Text())
+```
+
+## Streaming
+
+The SDK supports streaming responses for real-time output. Use streaming when you want to display tokens as they're generated.
+
+Use `Stream()` to access full chunk metadata:
+
+```go
+// Stream full chunks with metadata
+chunkChan, errChan := client.Stream("gpt-4o", "Explain quantum computing")
+
+for {
+ select {
+ case chunk, ok := <-chunkChan:
+ if !ok {
+ // Stream finished
+ return
+ }
+
+ // First chunk contains the role
+ if role := chunk.Role(); role != "" {
+ fmt.Printf("Role: %s\n", role)
+ }
+
+ // Content chunks
+ if text := chunk.Text(); text != "" {
+ fmt.Print(text)
+ }
+
+ // Last chunk contains finish reason
+ if finishReason := chunk.FinishReason(); finishReason != "" {
+ fmt.Printf("\nFinish reason: %s\n", finishReason)
+ }
+
+ case err := <-errChan:
+ if err != nil {
+ log.Fatal(err)
+ }
+ }
+}
+```
+
+### Streaming with Messages
+
+Streaming works with full message arrays too:
+
+```go
+chunkChan, errChan := client.Stream("gpt-4o", edgee.InputObject{
+ Messages: []edgee.Message{
+ {Role: "system", Content: "You are a helpful assistant."},
+ {Role: "user", Content: "Write a poem about coding"},
+ },
+})
+
+for {
+ select {
+ case chunk, ok := <-chunkChan:
+ if !ok {
+ return
+ }
+ if text := chunk.Text(); text != "" {
+ fmt.Print(text)
+ }
+ case err := <-errChan:
+ if err != nil {
+ log.Fatal(err)
+ }
+ }
+}
+```
+
+### Using Send() with stream Parameter
+
+You can also use the `Send()` method with `stream=true`:
+
+```go
+// Returns streaming channels
+result, err := client.Send("gpt-4o", "Tell me a story", true)
+if err != nil {
+ log.Fatal(err)
+}
+
+// Type assertion to get the channels
+streamResult := result.(struct {
+ ChunkChan <-chan *edgee.StreamChunk
+ ErrChan <-chan error
+})
+
+for {
+ select {
+ case chunk, ok := <-streamResult.ChunkChan:
+ if !ok {
+ return
+ }
+ if text := chunk.Text(); text != "" {
+ fmt.Print(text)
+ }
+ case err := <-streamResult.ErrChan:
+ if err != nil {
+ log.Fatal(err)
+ }
+ }
+}
+```
+
+### Streaming Response Types
+
+Streaming uses different response types:
+
+```go
+// StreamChunk - returned via channels from Stream()
+type StreamChunk struct {
+ ID string
+ Object string
+ Created int64
+ Model string
+ Choices []ChatCompletionChoice
+}
+
+// Convenience methods
+func (c *StreamChunk) Text() string // Get content from first choice
+func (c *StreamChunk) Role() string // Get role from first choice
+func (c *StreamChunk) FinishReason() string // Get finish_reason from first choice
+
+type ChatCompletionChoice struct {
+ Index int
+ Delta *ChatCompletionDelta
+ FinishReason *string
+}
+
+type ChatCompletionDelta struct {
+ Role *string
+ Content *string
+ ToolCalls []ToolCall
+}
+```
+
+### Convenience Methods
+
+Both `SendResponse` and `StreamChunk` have convenience methods for easier access:
+
+```go
+// Non-streaming response
+response, _ := client.ChatCompletion("gpt-4o", "Hello")
+fmt.Println(response.Text()) // Instead of response.Choices[0].Message.Content
+fmt.Println(response.FinishReason()) // Instead of *response.Choices[0].FinishReason
+fmt.Println(response.ToolCalls()) // Instead of response.Choices[0].Message.ToolCalls
+
+// Streaming response
+for chunk := range chunkChan {
+ fmt.Println(chunk.Text()) // Instead of *chunk.Choices[0].Delta.Content
+ fmt.Println(chunk.Role()) // Instead of *chunk.Choices[0].Delta.Role
+ fmt.Println(chunk.FinishReason()) // Instead of *chunk.Choices[0].FinishReason
+}
+```
+
+## Response Structure
+
+The `ChatCompletion` method returns a `SendResponse` object:
+
+```go
+type SendResponse struct {
+ ID string
+ Object string
+ Created int64
+ Model string
+ Choices []ChatCompletionChoice
+ Usage *Usage
+}
+
+type ChatCompletionChoice struct {
+ Index int
+ Message *Message
+ FinishReason *string
+}
+
+type Message struct {
+ Role string
+ Content string
+ Name *string
+ ToolCalls []ToolCall
+ ToolCallID *string
+}
+
+type Usage struct {
+ PromptTokens int
+ CompletionTokens int
+ TotalTokens int
+}
+```
+
+### Accessing Response Data
+
+```go
+response, err := client.ChatCompletion("gpt-4o", "Hello!")
+if err != nil {
+ log.Fatal(err)
+}
+
+// Get the first choice's content
+content := response.Text()
+
+// Check finish reason
+finishReason := response.FinishReason() // "stop", "length", "tool_calls", etc.
+
+// Access token usage
+if response.Usage != nil {
+ fmt.Printf("Tokens used: %d\n", response.Usage.TotalTokens)
+ fmt.Printf("Prompt tokens: %d\n", response.Usage.PromptTokens)
+ fmt.Printf("Completion tokens: %d\n", response.Usage.CompletionTokens)
+}
+```
+
+## Type Definitions
+
+The SDK exports Go types for all request and response objects:
+
+```go
+import "github.com/edgee-cloud/go-sdk/edgee"
+
+// Main types
+type Client struct { ... }
+type Config struct { ... }
+type InputObject struct { ... }
+type Message struct { ... }
+type Tool struct { ... }
+type FunctionDefinition struct { ... }
+type ToolCall struct { ... }
+type SendResponse struct { ... }
+type StreamChunk struct { ... }
+type Usage struct { ... }
+```
+
+### Message Types
+
+```go
+type Message struct {
+ Role string `json:"role"`
+ Content string `json:"content,omitempty"`
+ Name *string `json:"name,omitempty"`
+ ToolCalls []ToolCall `json:"tool_calls,omitempty"`
+ ToolCallID *string `json:"tool_call_id,omitempty"`
+}
+```
+
+### Tool Types
+
+```go
+type FunctionDefinition struct {
+ Name string `json:"name"`
+ Description *string `json:"description,omitempty"`
+ Parameters map[string]interface{} `json:"parameters,omitempty"`
+}
+
+type Tool struct {
+ Type string `json:"type"`
+ Function FunctionDefinition `json:"function"`
+}
+
+type ToolCall struct {
+ ID string `json:"id"`
+ Type string `json:"type"`
+ Function FunctionCall `json:"function"`
+}
+
+type FunctionCall struct {
+ Name string `json:"name"`
+ Arguments string `json:"arguments"`
+}
+```
+
+## Error Handling
+
+The SDK returns errors for common issues:
+
+```go
+import "github.com/edgee-cloud/go-sdk/edgee"
+
+// Configuration error
+client, err := edgee.NewClient(nil)
+if err != nil {
+ log.Fatalf("Configuration error: %v", err)
+}
+
+// Request error
+response, err := client.ChatCompletion("gpt-4o", "Hello!")
+if err != nil {
+ log.Fatalf("Request failed: %v", err)
+ // Handle API errors, network errors, etc.
+}
+```
+
+## What's Next?
+
+<CardGroup cols={2}>
+  <Card title="API Reference" href="/api-reference">
+    Explore the full REST API documentation.
+  </Card>
+  <Card title="Models" href="/models">
+    Browse 200+ models available through Edgee.
+  </Card>
+  <Card title="AI Gateway" href="/features">
+    Learn about intelligent routing, observability, and privacy controls.
+  </Card>
+  <Card title="Quickstart" href="/quickstart">
+    Get started with Edgee in minutes.
+  </Card>
+</CardGroup>
diff --git a/sdk/index.mdx b/sdk/index.mdx
index dc195fe..6cf4ecf 100644
--- a/sdk/index.mdx
+++ b/sdk/index.mdx
@@ -77,22 +77,28 @@ Choose your language and get started in minutes:
- ```bash
- cargo add edgee
+ Add to `Cargo.toml`:
+ ```toml
+ [dependencies]
+ edgee = "0.1"
+ tokio = { version = "1", features = ["full"] }
```
-
+
```rust
use edgee::Edgee;
-
- let api_key = std::env::var("EDGEE_API_KEY").expect("EDGEE_API_KEY not set");
- let edgee = Edgee::new(api_key);
-
- let response = edgee.send(edgee::SendRequest {
- model: "gpt-4o".to_string(),
- input: "What is the capital of France?".to_string(),
- });
-
- println!("{}", response.output_text);
+
+ #[tokio::main]
+  async fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let client = Edgee::from_env()?;
+
+ let response = client.send(
+ "gpt-4o",
+ "What is the capital of France?"
+ ).await?;
+
+ println!("{}", response.text().unwrap_or(""));
+ Ok(())
+ }
```
@@ -129,13 +135,13 @@ To learn more about the SDKs, see the individual SDK pages:
Simple API with async/await support.
-
- Memory-safe Rust SDK for systems programming.
- Zero-cost abstractions with full type safety.
+ Modern async Rust SDK with compile-time safety and streaming support.
+ Zero-cost abstractions with strong typing and memory safety.
+
+ Explore the full REST API documentation.
+
+
+ Browse 200+ models available through Edgee.
+
+
+ Learn about intelligent routing, observability, and privacy controls.
+
+
+ Get started with Edgee in minutes.
+
+
diff --git a/sdk/rust/index.mdx b/sdk/rust/index.mdx
index 76f9f72..7b46a6b 100644
--- a/sdk/rust/index.mdx
+++ b/sdk/rust/index.mdx
@@ -4,3 +4,550 @@ sidebarTitle: Rust
description: Integrate the Rust SDK in your application.
icon: rust
---
+
+The Edgee Rust SDK provides a modern, type-safe, async interface to interact with the Edgee AI Gateway. Built with Rust's powerful type system and async/await capabilities, it offers compile-time safety, zero-cost abstractions, and excellent performance.
+
+## Installation
+
+Add the SDK to your `Cargo.toml`:
+
+```toml
+[dependencies]
+edgee = "0.1"
+tokio = { version = "1", features = ["full"] }
+```
+
+## Quick Start
+
+```rust
+use edgee::Edgee;
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let client = Edgee::from_env()?;
+
+ let response = client.send("gpt-4o", "What is the capital of France?").await?;
+
+ println!("{}", response.text().unwrap_or(""));
+ // "The capital of France is Paris."
+
+ Ok(())
+}
+```
+
+## Configuration
+
+The SDK supports multiple configuration methods:
+
+### Using Environment Variables
+
+```rust
+use edgee::Edgee;
+
+// Reads EDGEE_API_KEY and optionally EDGEE_BASE_URL
+let client = Edgee::from_env()?;
+```
+
+Set environment variables:
+```bash
+export EDGEE_API_KEY="your-api-key"
+export EDGEE_BASE_URL="https://api.edgee.ai" # optional
+```
+
+### Using API Key
+
+```rust
+use edgee::Edgee;
+
+// Creates client with default base URL
+let client = Edgee::with_api_key("your-api-key");
+```
+
+### Using Configuration Object
+
+```rust
+use edgee::{Edgee, EdgeeConfig};
+
+let config = EdgeeConfig::new("your-api-key")
+ .with_base_url("https://api.edgee.ai");
+
+let client = Edgee::new(config);
+```
+
+## Usage Examples
+
+### Simple String Input
+
+The simplest way to send a request:
+
+```rust
+let response = client
+ .send("gpt-4o", "Explain quantum computing in simple terms.")
+ .await?;
+
+println!("{}", response.text().unwrap_or(""));
+```
+
+### Multi-turn Conversation
+
+Use the `Message` constructors for type-safe message creation:
+
+```rust
+use edgee::Message;
+
+let messages = vec![
+ Message::system("You are a helpful assistant."),
+ Message::user("Hello!"),
+];
+
+let response = client.send("gpt-4o", messages).await?;
+println!("{}", response.text().unwrap_or(""));
+```
+
+### Using InputObject
+
+For complex requests with tools and configuration:
+
+```rust
+use edgee::{Message, InputObject};
+
+let input = InputObject::new(vec![
+ Message::system("You are a helpful assistant."),
+ Message::user("What's the weather like?"),
+]);
+
+let response = client.send("gpt-4o", input).await?;
+```
+
+### Function Calling (Tools)
+
+The SDK supports OpenAI-compatible function calling with strong typing:
+
+```rust
+use edgee::{Edgee, Message, InputObject, Tool, FunctionDefinition, JsonSchema};
+use std::collections::HashMap;
+
+let client = Edgee::from_env()?;
+
+// Define a function
+let function = FunctionDefinition {
+ name: "get_weather".to_string(),
+ description: Some("Get the current weather for a location".to_string()),
+ parameters: JsonSchema {
+ schema_type: "object".to_string(),
+ properties: Some({
+ let mut props = HashMap::new();
+ props.insert("location".to_string(), serde_json::json!({
+ "type": "string",
+ "description": "City name"
+ }));
+ props
+ }),
+ required: Some(vec!["location".to_string()]),
+ description: None,
+ },
+};
+
+// Send request with tools
+let input = InputObject::new(vec![
+ Message::user("What is the weather in Paris?")
+])
+.with_tools(vec![Tool::function(function)]);
+
+let response = client.send("gpt-4o", input).await?;
+
+// Check if the model wants to call a function
+if let Some(tool_calls) = response.tool_calls() {
+ for call in tool_calls {
+ println!("Function: {}", call.function.name);
+ println!("Arguments: {}", call.function.arguments);
+ }
+}
+```
+
+### Tool Response Handling
+
+After receiving a tool call, send the function result back:
+
+```rust
+use serde_json;
+
+// First request - model requests a tool call
+let input = InputObject::new(vec![
+ Message::user("What is the weather in Paris?")
+])
+.with_tools(vec![/* tool definitions */]);
+
+let response1 = client.send("gpt-4o", input).await?;
+
+// Execute the function
+if let Some(tool_calls) = response1.tool_calls() {
+ let tool_call = &tool_calls[0];
+
+ // Parse arguments and execute function
+ let args: serde_json::Value = serde_json::from_str(&tool_call.function.arguments)?;
+ let result = get_weather(&args["location"].as_str().unwrap());
+
+ // Second request - include tool response
+ let mut messages = vec![
+ Message::user("What is the weather in Paris?")
+ ];
+
+ // Add assistant's message with tool calls
+ if let Some(first_choice) = response1.choices.first() {
+ messages.push(first_choice.message.clone());
+ }
+
+ // Add tool response
+ messages.push(Message::tool(tool_call.id.clone(), serde_json::to_string(&result)?));
+
+ let response2 = client.send("gpt-4o", messages).await?;
+ println!("{}", response2.text().unwrap_or(""));
+}
+```
+
+## Streaming
+
+The SDK supports streaming responses using Rust's `Stream` trait for real-time output:
+
+```rust
+use tokio_stream::StreamExt;
+
+let mut stream = client
+ .stream("gpt-4o", "Explain quantum computing")
+ .await?;
+
+while let Some(result) = stream.next().await {
+ match result {
+ Ok(chunk) => {
+ // First chunk contains the role
+ if let Some(role) = chunk.role() {
+ println!("Role: {:?}", role);
+ }
+
+ // Content chunks
+ if let Some(text) = chunk.text() {
+ print!("{}", text);
+ std::io::Write::flush(&mut std::io::stdout())?;
+ }
+
+ // Last chunk contains finish reason
+ if let Some(reason) = chunk.finish_reason() {
+ println!("\nFinish reason: {}", reason);
+ }
+ }
+ Err(e) => eprintln!("Stream error: {}", e),
+ }
+}
+```
+
+### Streaming with Messages
+
+Streaming works with message arrays too:
+
+```rust
+use edgee::Message;
+use tokio_stream::StreamExt;
+
+let messages = vec![
+ Message::system("You are a helpful assistant."),
+ Message::user("Write a poem about coding"),
+];
+
+let mut stream = client.stream("gpt-4o", messages).await?;
+
+while let Some(result) = stream.next().await {
+ if let Ok(chunk) = result {
+ if let Some(text) = chunk.text() {
+ print!("{}", text);
+ }
+ }
+}
+```
+
+### Collecting Full Response from Stream
+
+You can collect the entire streamed response:
+
+```rust
+use tokio_stream::StreamExt;
+
+let mut stream = client.stream("gpt-4o", "Tell me a story").await?;
+let mut full_text = String::new();
+
+while let Some(result) = stream.next().await {
+ if let Ok(chunk) = result {
+ if let Some(text) = chunk.text() {
+ full_text.push_str(text);
+ }
+ }
+}
+
+println!("Full response: {}", full_text);
+```
+
+## Response Structure
+
+### Non-Streaming Response
+
+The `send` method returns a `SendResponse`:
+
+```rust
+pub struct SendResponse {
+ pub id: String,
+ pub object: String,
+ pub created: u64,
+ pub model: String,
+    pub choices: Vec<Choice>,
+    pub usage: Option<Usage>,
+}
+
+pub struct Choice {
+ pub index: u32,
+ pub message: Message,
+    pub finish_reason: Option<String>,
+}
+
+pub struct Usage {
+ pub prompt_tokens: u32,
+ pub completion_tokens: u32,
+ pub total_tokens: u32,
+}
+```
+
+### Accessing Response Data
+
+The SDK provides convenience methods:
+
+```rust
+let response = client.send("gpt-4o", "Hello!").await?;
+
+// Get the first choice's content
+let content = response.text(); // Returns Option<&str>
+
+// Check finish reason
+let finish_reason = response.finish_reason(); // 'stop', 'length', 'tool_calls', etc.
+
+// Access tool calls
+if let Some(tool_calls) = response.tool_calls() {
+ // Process tool calls
+}
+
+// Access token usage
+if let Some(usage) = &response.usage {
+ println!("Tokens used: {}", usage.total_tokens);
+ println!("Prompt tokens: {}", usage.prompt_tokens);
+ println!("Completion tokens: {}", usage.completion_tokens);
+}
+```
+
+### Streaming Response
+
+Streaming returns `StreamChunk` objects:
+
+```rust
+pub struct StreamChunk {
+ pub id: String,
+ pub object: String,
+ pub created: u64,
+ pub model: String,
+    pub choices: Vec<StreamChoice>,
+}
+
+pub struct StreamChoice {
+ pub index: u32,
+ pub delta: StreamDelta,
+    pub finish_reason: Option<String>,
+}
+
+pub struct StreamDelta {
+    pub role: Option<Role>,
+    pub content: Option<String>,
+    pub tool_calls: Option<Vec<ToolCall>>,
+}
+```
+
+## Type System
+
+The SDK uses Rust's type system for safety and clarity:
+
+### Role Enum
+
+```rust
+pub enum Role {
+ System,
+ User,
+ Assistant,
+ Tool,
+}
+```
+
+### Message Constructors
+
+```rust
+// System message
+Message::system("You are a helpful assistant")
+
+// User message
+Message::user("Hello, how are you?")
+
+// Assistant message
+Message::assistant("I'm doing well, thank you!")
+
+// Tool response message
+Message::tool("tool-call-id", "function result")
+```
+
+### Tool Types
+
+```rust
+pub struct FunctionDefinition {
+ pub name: String,
+    pub description: Option<String>,
+ pub parameters: JsonSchema,
+}
+
+pub struct Tool {
+ pub tool_type: String,
+ pub function: FunctionDefinition,
+}
+
+pub struct ToolCall {
+ pub id: String,
+ pub call_type: String,
+ pub function: FunctionCall,
+}
+```
+
+## Error Handling
+
+The SDK uses `Result` for explicit error handling with custom error types:
+
+```rust
+use edgee::{Edgee, Error};
+
+match client.send("gpt-4o", "Hello").await {
+ Ok(response) => {
+ println!("{}", response.text().unwrap_or(""));
+ }
+ Err(Error::Api { status, message }) => {
+ eprintln!("API error {}: {}", status, message);
+ }
+ Err(Error::MissingApiKey) => {
+ eprintln!("API key not found");
+ }
+ Err(Error::Http(e)) => {
+ eprintln!("HTTP error: {}", e);
+ }
+ Err(Error::Json(e)) => {
+ eprintln!("JSON error: {}", e);
+ }
+ Err(e) => {
+ eprintln!("Error: {}", e);
+ }
+}
+```
+
+### Error Types
+
+```rust
+pub enum Error {
+ Http(reqwest::Error), // HTTP request failed
+ Json(serde_json::Error), // JSON serialization failed
+ MissingApiKey, // API key not provided
+ Api { status: u16, message: String }, // API returned an error
+ Stream(String), // Streaming error
+ InvalidConfig(String), // Invalid configuration
+}
+```
+
+## Advanced Features
+
+### Concurrent Requests
+
+Use tokio's concurrency features for parallel requests:
+
+```rust
+use tokio;
+
+let (response1, response2) = tokio::join!(
+ client.send("gpt-4o", "Question 1"),
+ client.send("gpt-4o", "Question 2"),
+);
+
+println!("Response 1: {}", response1?.text().unwrap_or(""));
+println!("Response 2: {}", response2?.text().unwrap_or(""));
+```
+
+### Flexible Input with Into Trait
+
+The SDK accepts multiple input types through the `Into` trait:
+
+```rust
+// &str
+client.send("gpt-4o", "Hello").await?;
+
+// String
+client.send("gpt-4o", String::from("Hello")).await?;
+
+// Vec<Message>
+client.send("gpt-4o", vec![Message::user("Hello")]).await?;
+
+// InputObject
+client.send("gpt-4o", input_object).await?;
+```
+
+## Why Choose Rust SDK?
+
+### Type Safety
+- **Compile-time guarantees**: Catch errors before runtime
+- **Strong typing**: No string typos for roles, clear structure
+- **Option types**: Explicit handling of optional fields
+
+### Performance
+- **Zero-cost abstractions**: High-level API with no runtime overhead
+- **Async/await**: Non-blocking I/O for better concurrency
+- **Memory efficiency**: No garbage collection, predictable performance
+
+### Safety
+- **Ownership**: Prevents use-after-free and data races
+- **Error handling**: Explicit `Result` types
+- **Thread safety**: Safe concurrent operations
+
+### Developer Experience
+- **Rich IDE support**: Autocomplete, inline documentation
+- **Refactoring**: Compiler-assisted code changes
+- **Pattern matching**: Expressive error handling
+
+## Examples
+
+See the [examples directory](https://github.com/edgee-cloud/edgee/tree/main/rust-sdk/examples) for complete working examples:
+
+- **simple.rs**: Basic usage patterns
+- **streaming.rs**: Streaming responses
+- **tools.rs**: Function calling with tool execution
+
+Run examples:
+```bash
+export EDGEE_API_KEY="your-api-key"
+cargo run --example simple
+cargo run --example streaming
+cargo run --example tools
+```
+
+## What's Next?
+
+
+
+ Explore the full REST API documentation.
+
+
+ Browse 200+ models available through Edgee.
+
+
+ Learn about intelligent routing, observability, and privacy controls.
+
+
+ Get started with Edgee in minutes.
+
+
diff --git a/sdk/typescript/index.mdx b/sdk/typescript/index.mdx
index 488c01c..54e8b2a 100644
--- a/sdk/typescript/index.mdx
+++ b/sdk/typescript/index.mdx
@@ -165,6 +165,92 @@ const response2 = await edgee.send({
console.log(response2.choices[0].message.content);
```
+## Streaming
+
+The SDK supports streaming responses for real-time output. Use streaming when you want to display tokens as they're generated.
+
+Use `stream()` to access full chunk metadata:
+
+```typescript
+// Stream full chunks with metadata
+for await (const chunk of edgee.stream('gpt-4o', 'Explain quantum computing')) {
+ // First chunk contains the role
+ if (chunk.role) {
+ console.log('Role:', chunk.role);
+ }
+
+ // Content chunks
+ if (chunk.text) {
+ process.stdout.write(chunk.text);
+ }
+
+ // Last chunk contains finish reason
+ if (chunk.finishReason) {
+ console.log('\nFinish reason:', chunk.finishReason);
+ }
+}
+```
+
+### Streaming with Messages
+
+Streaming works with full message arrays too:
+
+```typescript
+for await (const chunk of edgee.stream('gpt-4o', {
+ messages: [
+ { role: 'system', content: 'You are a helpful assistant.' },
+ { role: 'user', content: 'Write a poem about coding' },
+ ],
+})) {
+ if (chunk.text) {
+ process.stdout.write(chunk.text);
+ }
+}
+```
+
+### Streaming Response Types
+
+Streaming uses different response types:
+
+```typescript
+// StreamChunk - returned by stream()
+interface StreamChunk {
+ choices: {
+ index: number;
+ delta: {
+ role?: string;
+ content?: string;
+ tool_calls?: ToolCall[];
+ };
+ finish_reason?: string | null;
+ }[];
+
+ // Convenience properties
+ text: string | null; // Get content from first choice
+ role: string | null; // Get role from first choice
+ finishReason: string | null; // Get finish_reason from first choice
+}
+```
+
+### Convenience Properties
+
+Both `SendResponse` and `StreamChunk` have convenience properties for easier access:
+
+```typescript
+// Non-streaming response
+const response = await edgee.send({ model: 'gpt-4o', input: 'Hello' });
+console.log(response.text); // Instead of response.choices[0].message.content
+console.log(response.finishReason); // Instead of response.choices[0].finish_reason
+console.log(response.toolCalls); // Instead of response.choices[0].message.tool_calls
+
+// Streaming response
+for await (const chunk of edgee.stream('gpt-4o', 'Hello')) {
+ console.log(chunk.text); // Instead of chunk.choices[0]?.delta?.content
+ console.log(chunk.role); // Instead of chunk.choices[0]?.delta?.role
+ console.log(chunk.finishReason); // Instead of chunk.choices[0]?.finish_reason
+}
+```
+
## Response Structure
The `send` method returns a `SendResponse` object:
@@ -221,6 +307,7 @@ import Edgee, {
type ToolChoice,
type SendOptions,
type SendResponse,
+ type StreamChunk,
type EdgeeConfig,
} from 'edgee';
```