diff --git a/.github/workflows/hql_tests.yml b/.github/workflows/hql_tests.yml index a922719e5..2c7421e1f 100644 --- a/.github/workflows/hql_tests.yml +++ b/.github/workflows/hql_tests.yml @@ -2,50 +2,50 @@ name: HQL Tests on: pull_request: - branches: [ main, dev ] + branches: [main, dev] jobs: hql-tests: - runs-on: ubuntu-latest # 8 vCPUs, 32 GB RAM + runs-on: ubuntu-latest # 8 vCPUs, 32 GB RAM strategy: matrix: - batch: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] - + batch: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + permissions: contents: read issues: write - + steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Rust - uses: actions-rs/toolchain@v1 - with: - profile: minimal - toolchain: stable - target: x86_64-unknown-linux-gnu - override: true - - - name: Cache cargo registry - uses: actions/cache@v3 - continue-on-error: true - with: - path: | - ~/.cargo/registry - ~/.cargo/git - target - key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') || 'fallback' }} - restore-keys: | - ${{ runner.os }}-cargo- - - - name: Make run.sh executable - run: chmod +x ./hql-tests/run.sh - - - name: Run HQL tests - working-directory: ./hql-tests - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_OWNER: ${{ github.repository_owner }} - GITHUB_REPO: ${{ github.event.repository.name }} - run: ./run.sh batch 10 ${{ matrix.batch }} + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Rust + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + target: x86_64-unknown-linux-gnu + override: true + + - name: Cache cargo registry + uses: actions/cache@v3 + continue-on-error: true + with: + path: | + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') || 'fallback' }} + restore-keys: | + ${{ runner.os }}-cargo- + + - name: Make run.sh executable + run: chmod +x ./hql-tests/run.sh + + - name: Run HQL tests + working-directory: ./hql-tests + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_OWNER: ${{ github.repository_owner }} + GITHUB_REPO: ${{ github.event.repository.name }} + run: ./run.sh batch 10 ${{ matrix.batch }} diff --git a/.github/workflows/s3_push.yml b/.github/workflows/s3_push.yml index 22f818d22..de1fd50a1 100644 --- a/.github/workflows/s3_push.yml +++ b/.github/workflows/s3_push.yml @@ -26,21 +26,21 @@ jobs: role-to-assume: arn:aws:iam::${{ vars.AWS_ACCOUNT_ID }}:role/GitHubActionsS3Role aws-region: us-east-1 - - name: Upload specified files and directories to S3 + - name: Create and upload template.tar.gz run: | - # Sync directories - aws s3 sync helix-cli/ s3://helix-repo/template/helix-cli/ --exclude "target/*" - aws s3 sync helix-container/ s3://helix-repo/template/helix-container/ --exclude "target/*" - aws s3 sync helix-macros/ s3://helix-repo/template/helix-macros/ --exclude "target/*" - aws s3 sync metrics/ s3://helix-repo/template/metrics/ --exclude "target/*" + # Create tarball excluding .git and target directories + # Write to /tmp to avoid modifying the source directory during archiving + tar -czvf /tmp/template.tar.gz \ + --exclude='.git' \ + --exclude='target' \ + . 
- # Upload root-level Cargo files - aws s3 cp Cargo.lock s3://helix-repo/template/Cargo.lock - aws s3 cp Cargo.toml s3://helix-repo/template/Cargo.toml + # Upload to S3 + aws s3 cp /tmp/template.tar.gz s3://helix-repo/template.tar.gz - name: Upload completion notification if: success() run: | - echo "Successfully uploaded all files to S3 bucket: helix-repo" + echo "Successfully uploaded template.tar.gz to S3 bucket: helix-repo" echo "Upload triggered by: ${{ github.event_name }}" echo "Reference: ${{ github.ref }}" \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index bee20f869..cbf66a43b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -300,6 +300,19 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "bcrypt" +version = "0.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abaf6da45c74385272ddf00e1ac074c7d8a6c1a1dda376902bd6a427522a8b2c" +dependencies = [ + "base64", + "blowfish", + "getrandom 0.3.1", + "subtle", + "zeroize", +] + [[package]] name = "bincode" version = "1.3.3" @@ -342,6 +355,16 @@ dependencies = [ "generic-array", ] +[[package]] +name = "blowfish" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e412e2cd0f2b2d93e02543ceae7917b3c70331573df19ee046bcbc35e45e87d7" +dependencies = [ + "byteorder", + "cipher", +] + [[package]] name = "brotli" version = "7.0.0" @@ -433,6 +456,12 @@ dependencies = [ "shlex", ] +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + [[package]] name = "cfg-if" version = "1.0.0" @@ -496,11 +525,21 @@ dependencies = [ "half", ] +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + [[package]] name = "clap" -version = "4.5.47" +version = "4.5.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eac00902d9d136acd712710d71823fb8ac8004ca445a89e73a41d45aa712931" +checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8" dependencies = [ "clap_builder", "clap_derive", @@ -508,9 +547,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.47" +version = "4.5.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ad9bbf750e73b5884fb8a211a9424a1906c1e156724260fdae972f31d70e1d6" +checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00" dependencies = [ "anstream", "anstyle", @@ -520,9 +559,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.47" +version = "4.5.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c" +checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" dependencies = [ "heck", "proc-macro2", @@ -569,6 +608,16 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + [[package]] name = "comfy-table" version = "7.1.4" @@ -608,6 +657,19 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "console" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b430743a6eb14e9764d4260d4c0d8123087d504eeb9c48f2b2a5e810dd369df4" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "unicode-width", + "windows-sys 0.61.1", +] + [[package]] name = "core-foundation" version = "0.9.4" @@ -850,7 +912,7 @@ dependencies = [ "libc", "option-ext", "redox_users 0.5.2", - "windows-sys 0.59.0", + "windows-sys 0.61.1", ] [[package]] @@ -940,6 +1002,17 @@ version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca81e6b4777c89fd810c25a4be2b1bd93ea034fbe58e6a75216a34c6b82c539b" +[[package]] +name = "eventsource-stream" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74fef4569247a5f429d9156b9d0a2599914385dd189c539334c625d8099d90ab" +dependencies = [ + "futures-core", + "nom", + "pin-project-lite", +] + [[package]] name = "eyre" version = "0.6.12" @@ -1119,6 +1192,12 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + [[package]] name = "futures-util" version = "0.3.31" @@ -1314,7 +1393,7 @@ dependencies = [ [[package]] name = "helix-cli" -version = "2.1.4" +version = "2.1.5" dependencies = [ "async-trait", "chrono", @@ -1328,10 +1407,12 @@ dependencies = [ "heed3", "helix-db", "helix-metrics", + "indicatif 0.18.3", "iota", "open", "regex", "reqwest", + "reqwest-eventsource", "self_update", "serde", "serde_json", @@ -1340,6 +1421,7 @@ dependencies = [ "tokio-tungstenite", "toml", "uuid", + "webbrowser", ] [[package]] @@ -1367,10 +1449,11 @@ dependencies = [ [[package]] name = "helix-db" -version = "1.1.4" +version = "1.1.5" dependencies = [ "async-trait", "axum", + "bcrypt", "bincode", "bumpalo", "bytemuck", @@ -1396,9 +1479,7 @@ dependencies = [ "rayon", "reqwest", "serde", - "sha_256", "sonic-rs", - "subtle", "tempfile", "thiserror 2.0.12", "tokio", @@ -1797,13 +1878,35 @@ version = "0.17.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235" dependencies = [ - "console", + "console 0.15.11", "number_prefix", "portable-atomic", "unicode-width", "web-time", ] +[[package]] +name = "indicatif" +version = "0.18.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9375e112e4b463ec1b1c6c011953545c65a30164fbab5b581df32b3abf0dcb88" +dependencies = [ + "console 0.16.1", + "portable-atomic", + "unicode-width", + "unit-prefix", + "web-time", +] + +[[package]] +name = "inout" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +dependencies = [ + "generic-array", +] + [[package]] name = "inventory" version = "0.3.19" @@ -1863,7 +1966,7 @@ checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" dependencies = [ "hermit-abi", "libc", - "windows-sys 0.59.0", + "windows-sys 
0.61.1", ] [[package]] @@ -1906,6 +2009,28 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" +[[package]] +name = "jni" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" +dependencies = [ + "cesu8", + "cfg-if", + "combine", + "jni-sys", + "log", + "thiserror 1.0.69", + "walkdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + [[package]] name = "jobserver" version = "0.1.32" @@ -2126,6 +2251,12 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + [[package]] name = "miniz_oxide" version = "0.8.5" @@ -2192,6 +2323,22 @@ dependencies = [ "tempfile", ] +[[package]] +name = "ndk-context" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b" + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + [[package]] name = "now" version = "0.1.3" @@ -2270,6 +2417,31 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" +[[package]] +name = "objc2" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c2599ce0ec54857b29ce62166b0ed9b4f6f1a70ccc9a71165b6154caca8c05" +dependencies = [ + "objc2-encode", +] + +[[package]] +name = "objc2-encode" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33" + +[[package]] +name = "objc2-foundation" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3e0adef53c21f888deb4fa59fc59f7eb17404926ee8a6f59f5df0fd7f9f3272" +dependencies = [ + "bitflags", + "objc2", +] + [[package]] name = "object" version = "0.36.7" @@ -3578,16 +3750,34 @@ dependencies = [ "tokio", "tokio-native-tls", "tokio-rustls", + "tokio-util", "tower", "tower-http", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", + "wasm-streams", "web-sys", "webpki-roots 1.0.2", ] +[[package]] +name = "reqwest-eventsource" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "632c55746dbb44275691640e7b40c907c16a2dc1a5842aa98aaec90da6ec6bde" +dependencies = [ + "eventsource-stream", + "futures-core", + "futures-timer", + "mime", + "nom", + "pin-project-lite", + "reqwest", + "thiserror 1.0.69", +] + [[package]] name = "ring" version = "0.17.14" @@ -3833,7 +4023,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d832c086ece0dacc29fb2947bb4219b8f6e12fe9e40b7108f9e57c4224e47b5c" dependencies = [ "hyper", - "indicatif", + 
"indicatif 0.17.11", "log", "quick-xml", "regex", @@ -3947,12 +4137,6 @@ dependencies = [ "digest", ] -[[package]] -name = "sha_256" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327b330d8a640167a79f91ff3f2b31c36e626587f3cdb5ec970aa795af6b65e0" - [[package]] name = "sharded-slab" version = "0.1.7" @@ -4283,15 +4467,15 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.20.0" +version = "3.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" +checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" dependencies = [ "fastrand", "getrandom 0.3.1", "once_cell", "rustix 1.0.5", - "windows-sys 0.59.0", + "windows-sys 0.61.1", ] [[package]] @@ -4486,9 +4670,9 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "489a59b6730eda1b0171fcfda8b121f4bee2b35cba8645ca35c5f7ba3eb736c1" +checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857" dependencies = [ "futures-util", "log", @@ -4677,9 +4861,9 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "tungstenite" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadc29d668c91fcc564941132e17b28a7ceb2f3ebf0b9dae3e03fd7a6748eb0d" +checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" dependencies = [ "bytes", "data-encoding", @@ -4755,6 +4939,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" +[[package]] +name = "unit-prefix" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81e544489bf3d8ef66c953931f56617f423cd4b5494be343d9b9d3dda037b9a3" + [[package]] name = "untrusted" version = "0.9.0" @@ -4961,6 +5151,19 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "web-sys" version = "0.3.77" @@ -4982,6 +5185,22 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webbrowser" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00f1243ef785213e3a32fa0396093424a3a6ea566f9948497e5a2309261a4c97" +dependencies = [ + "core-foundation 0.10.0", + "jni", + "log", + "ndk-context", + "objc2", + "objc2-foundation", + "url", + "web-sys", +] + [[package]] name = "webpki-roots" version = "0.26.11" @@ -5022,7 +5241,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.1", ] [[package]] @@ -5212,6 +5431,15 @@ dependencies = [ "windows-link 0.1.3", ] +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + [[package]] name = 
"windows-sys" version = "0.48.0" @@ -5239,6 +5467,30 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-sys" +version = "0.61.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f109e41dd4a3c848907eb83d5a42ea98b3769495597450cf6d153507b166f0f" +dependencies = [ + "windows-link 0.2.0", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + [[package]] name = "windows-targets" version = "0.48.5" @@ -5279,6 +5531,12 @@ dependencies = [ "windows-link 0.1.3", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -5291,6 +5549,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + [[package]] name = "windows_aarch64_msvc" version = "0.48.5" @@ -5303,6 +5567,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + [[package]] name = "windows_i686_gnu" version = "0.48.5" @@ -5321,6 +5591,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + [[package]] name = "windows_i686_msvc" version = "0.48.5" @@ -5333,6 +5609,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + [[package]] name = "windows_x86_64_gnu" version = "0.48.5" @@ -5345,6 +5627,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" @@ -5357,6 +5645,12 @@ version = "0.52.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + [[package]] name = "windows_x86_64_msvc" version = "0.48.5" diff --git a/helix-cli/Cargo.toml b/helix-cli/Cargo.toml index a063daef9..7e072c955 100644 --- a/helix-cli/Cargo.toml +++ b/helix-cli/Cargo.toml @@ -1,13 +1,13 @@ [package] name = "helix-cli" -version = "2.1.4" +version = "2.1.5" edition = "2024" [dependencies] helix-metrics = { path = "../metrics" } helix-db = { path = "../helix-db" } -clap = { version = "4.5.47", features = ["derive"] } -serde = { version = "1.0.219", features = ["derive"] } +clap = { version = "4.5.53", features = ["derive"] } +serde = { version = "1.0.228", features = ["derive"] } tokio = { version = "1.47.1", features = ["full"] } eyre = "0.6.12" toml = "0.9.5" @@ -22,14 +22,17 @@ uuid = { version = "1.18.1", features = ["v4", "v6", "fast-rng"] } chrono = "0.4.42" flume = "0.11.1" dotenvy = "0.15.7" -tokio-tungstenite = "0.27.0" +tokio-tungstenite = "0.28.0" futures-util = "0.3.31" regex = "1.11.2" +reqwest-eventsource = "0.6" +indicatif = "0.18.3" +webbrowser = "1.0" heed3 = "0.22.0" open = "5.3" [dev-dependencies] -tempfile = "3.14.0" +tempfile = "3.23.0" [lib] name = "helix_cli" diff --git a/helix-cli/src/commands/add.rs b/helix-cli/src/commands/add.rs index 0e12d2259..31ea82427 100644 --- a/helix-cli/src/commands/add.rs +++ b/helix-cli/src/commands/add.rs @@ -66,14 +66,9 @@ async fn run_add_inner( // Add Helix cloud instance let helix_manager = HelixManager::new(&project_context); - // Create cloud instance configuration + // Create cloud instance configuration (without cluster_id yet) let cloud_config = helix_manager - .create_instance_config(&instance_name, region) - .await?; - - // Initialize the cloud cluster - helix_manager - .init_cluster(&instance_name, &cloud_config) + .create_instance_config(&instance_name, region.clone()) .await?; // Insert into project configuration @@ -82,7 +77,51 @@ async fn run_add_inner( CloudConfig::Helix(cloud_config.clone()), ); + // Save config first + let config_path = project_context.root.join("helix.toml"); + project_context.config.save_to_file(&config_path)?; + print_status("CLOUD", "Helix cloud instance configuration added"); + + // Prompt user to create cluster now + println!(); + println!("\nWould you like to create the cluster now?"); + println!("This will open Stripe for payment and provision your cluster."); + println!(); + print!("Create cluster now? 
[Y/n]: "); + use std::io::{self, Write}; + io::stdout().flush()?; + + let mut input = String::new(); + io::stdin().read_line(&mut input)?; + let input = input.trim().to_lowercase(); + + if input.is_empty() || input == "y" || input == "yes" { + // Run create-cluster flow + crate::commands::create_cluster::run(&instance_name, region).await?; + + // create_cluster::run() already saved the updated config with the real cluster_id + // Return early to avoid overwriting it with the stale in-memory config + print_success(&format!( + "Instance '{instance_name}' added to Helix project" + )); + + print_instructions( + "Next steps:", + &[ + &format!("Run 'helix build {instance_name}' to compile your project for this instance"), + &format!("Run 'helix push {instance_name}' to start the '{instance_name}' instance"), + ], + ); + + return Ok(()); + } else { + println!(); + print_status( + "INFO", + &format!("Cluster creation skipped. Run 'helix create-cluster {}' when ready.", instance_name) + ); + } } CloudDeploymentTypeCommand::Ecr { .. } => { // Add ECR instance diff --git a/helix-cli/src/commands/auth.rs b/helix-cli/src/commands/auth.rs index 5d9ef9827..79a6497b0 100644 --- a/helix-cli/src/commands/auth.rs +++ b/helix-cli/src/commands/auth.rs @@ -2,23 +2,15 @@ use crate::{ AuthAction, commands::integrations::helix::CLOUD_AUTHORITY, metrics_sender::{load_metrics_config, save_metrics_config}, + sse_client::{SseClient, SseEvent}, utils::{print_info, print_line, print_status, print_success, print_warning}, }; use color_eyre::owo_colors::OwoColorize; -use eyre::{OptionExt, Result}; -use futures_util::StreamExt; -use serde::Deserialize; +use eyre::{OptionExt, Result, eyre}; use std::{ fs::{self, File}, path::PathBuf, }; -use tokio_tungstenite::{ - connect_async, - tungstenite::{ - Message, - protocol::{CloseFrame, frame::coding::CloseCode}, - }, -}; pub async fn run(action: AuthAction) -> Result<()> { match action { @@ -175,45 +167,55 @@ impl Credentials { } } -#[derive(Deserialize)] -struct UserCodeMsg { - user_code: String, - verification_uri: String, -} - -#[derive(Deserialize)] -struct ApiKeyMsg { - user_id: String, - key: String, -} pub async fn github_login() -> Result<(String, String)> { - let url = format!("ws://{}/login", *CLOUD_AUTHORITY); - let (mut ws_stream, _) = connect_async(url).await?; - - let init_msg: UserCodeMsg = match ws_stream.next().await { - Some(Ok(Message::Text(payload))) => serde_json::from_str(&payload)?, - Some(Ok(m)) => return Err(eyre::eyre!("Unexpected message: {m:?}")), - Some(Err(e)) => return Err(e.into()), - None => return Err(eyre::eyre!("Connection Closed Unexpectedly")), - }; - - println!( - "To Login please go \x1b]8;;{}\x1b\\here\x1b]8;;\x1b\\({}),\nand enter the code: {}", - init_msg.verification_uri, - init_msg.verification_uri, - init_msg.user_code.bold() - ); - - let msg: ApiKeyMsg = match ws_stream.next().await { - Some(Ok(Message::Text(payload))) => serde_json::from_str(&payload)?, - Some(Ok(Message::Close(Some(CloseFrame { - code: CloseCode::Error, - reason, - })))) => return Err(eyre::eyre!("Error: {reason}")), - Some(Ok(m)) => return Err(eyre::eyre!("Unexpected message: {m:?}")), - Some(Err(e)) => return Err(e.into()), - None => return Err(eyre::eyre!("Connection Closed Unexpectedly")), - }; + let url = format!("https://{}/github-login", *CLOUD_AUTHORITY); + let client = SseClient::new(url).post(); + + let mut api_key: Option = None; + let mut user_id: Option = None; + + client + .connect(|event| { + match event { + SseEvent::UserVerification { + user_code, 
diff --git a/helix-cli/src/commands/create_cluster.rs b/helix-cli/src/commands/create_cluster.rs
new file mode 100644
index 000000000..40c417c74
--- /dev/null
+++ b/helix-cli/src/commands/create_cluster.rs
@@ -0,0 +1,183 @@
+use crate::{
+    commands::integrations::helix::CLOUD_AUTHORITY,
+    config::{CloudInstanceConfig, DbConfig},
+    project::ProjectContext,
+    sse_client::SseEvent,
+    utils::{print_error, print_info, print_status, print_success},
+};
+use eyre::{OptionExt, Result, eyre};
+
+/// Create a new cluster in Helix Cloud
+pub async fn run(instance_name: &str, region: Option<String>) -> Result<()> {
+    print_status("CREATE", &format!("Creating cluster: {}", instance_name));
+
+    // Load project context
+    let project = ProjectContext::find_and_load(None)?;
+
+    // Check if this instance already exists and has a real cluster
+    if let Some(existing_config) = project.config.cloud.get(instance_name) {
+        if let crate::config::CloudConfig::Helix(config) = existing_config {
+            // If cluster already has a real ID (not placeholder), error out
+            if config.cluster_id != "YOUR_CLUSTER_ID" {
+                return Err(eyre!(
+                    "Instance '{}' already has a cluster (ID: {}). Cannot create a new cluster for this instance.",
+                    instance_name,
+                    config.cluster_id
+                ));
+            }
+            // Otherwise, proceed to create the cluster and update the config
+        } else {
+            return Err(eyre!(
+                "Instance '{}' exists but is not a Helix Cloud instance.",
+                instance_name
+            ));
+        }
+    }
+
+    // Get credentials
+    let home = dirs::home_dir().ok_or_eyre("Cannot find home directory")?;
+    let cred_path = home.join(".helix").join("credentials");
+
+    if !cred_path.exists() {
+        print_error("Not logged in. Please run 'helix auth login' first.");
+        return Err(eyre!("Not authenticated"));
+    }
+
+    let credentials = crate::commands::auth::Credentials::read_from_file(&cred_path);
+
+    if !credentials.is_authenticated() {
+        print_error("Invalid credentials. Please run 'helix auth login' again.");
+        return Err(eyre!("Invalid credentials"));
+    }
+
+    // Get or default region
+    let region = region.unwrap_or_else(|| "us-east-1".to_string());
+
+    // Connect to SSE stream for cluster creation
+    // The server will send CheckoutRequired, PaymentConfirmed, CreatingProject, ProjectCreated events
+    print_status("INITIATING", "Starting cluster creation...");
+
+    let create_url = format!("https://{}/create-cluster", *CLOUD_AUTHORITY);
+    let client = reqwest::Client::new();
+
+    use reqwest_eventsource::RequestBuilderExt;
+    let mut event_source = client
+        .post(&create_url)
+        .header("x-api-key", &credentials.helix_admin_key)
+        .header("Content-Type", "application/json")
+        .eventsource()?;
+
+    let mut final_cluster_id: Option<String> = None;
+    let mut checkout_opened = false;
+
+    use futures_util::StreamExt;
+    while let Some(event) = event_source.next().await {
+        match event {
+            Ok(reqwest_eventsource::Event::Open) => {
+                // Connection opened
+            }
+            Ok(reqwest_eventsource::Event::Message(message)) => {
+                let sse_event: SseEvent = match serde_json::from_str(&message.data) {
+                    Ok(event) => event,
+                    Err(e) => {
+                        print_error(&format!(
+                            "Failed to parse event: {} | Raw data: {}",
+                            e, message.data
+                        ));
+                        continue;
+                    }
+                };
+
+                match sse_event {
+                    SseEvent::CheckoutRequired { url } => {
+                        if !checkout_opened {
+                            print_info("Opening Stripe checkout in your browser...");
+                            print_info(&format!("If the browser doesn't open, visit: {}", url));
+
+                            if let Err(e) = webbrowser::open(&url) {
+                                print_error(&format!("Failed to open browser: {}", e));
+                                print_info(&format!("Please manually open: {}", url));
+                            }
+
+                            checkout_opened = true;
+                            print_status("WAITING", "Waiting for payment confirmation...");
+                        }
+                    }
+                    SseEvent::PaymentConfirmed => {
+                        print_success("Payment confirmed!");
+                    }
+                    SseEvent::CreatingProject => {
+                        print_status("CREATING", "Creating cluster...");
+                    }
+                    SseEvent::ProjectCreated { cluster_id } => {
+                        final_cluster_id = Some(cluster_id);
+                        print_success("Cluster created successfully!");
+                        event_source.close();
+                        break;
+                    }
+                    SseEvent::Error { error } => {
+                        print_error(&format!("Error: {}", error));
+                        event_source.close();
+                        return Err(eyre!("Cluster creation failed: {}", error));
+                    }
+                    _ => {
+                        // Ignore other event types
+                    }
+                }
+            }
+            Err(err) => {
+                print_error(&format!("Stream error: {}", err));
+                return Err(eyre!("Cluster creation stream error: {}", err));
+            }
+        }
+    }
+
+    let cluster_id =
+        final_cluster_id.ok_or_eyre("Cluster creation completed but no cluster_id received")?;
+
+    // Save cluster configuration to helix.toml
+    // If instance already exists, preserve its existing settings and just update cluster_id
+    let config = if let Some(crate::config::CloudConfig::Helix(existing)) =
+        project.config.cloud.get(instance_name)
+    {
+        CloudInstanceConfig {
+            cluster_id: cluster_id.clone(),
+            region: existing.region.clone().or(Some(region.clone())),
+            build_mode: existing.build_mode,
+            env_vars: existing.env_vars.clone(),
+            db_config: existing.db_config.clone(),
+        }
+    } else {
+        CloudInstanceConfig {
+            cluster_id: cluster_id.clone(),
+            region: Some(region.clone()),
+            build_mode: crate::config::BuildMode::Release,
+            env_vars: std::collections::HashMap::new(),
+            db_config: DbConfig::default(),
+        }
+    };
+
+    // Update helix.toml
+    let mut helix_config = project.config.clone();
+    helix_config.cloud.insert(
+        instance_name.to_string(),
+        crate::config::CloudConfig::Helix(config),
+    );
+
+    let config_path = project.root.join("helix.toml");
+    let toml_string = toml::to_string_pretty(&helix_config)?;
+    std::fs::write(&config_path, toml_string)?;
+
+    print_success(&format!(
+        "Cluster '{}' created successfully! (ID: {})",
+        instance_name, cluster_id
+    ));
+    print_info(&format!("Region: {}", region));
+    print_info("Configuration saved to helix.toml");
+    print_info(&format!(
+        "You can now deploy with: helix push {}",
+        instance_name
+    ));
+
+    Ok(())
+}
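Review note: create_cluster.rs above and push.rs later in this diff both compare against the literal placeholder "YOUR_CLUSTER_ID". A typed alternative — hypothetical, not part of this PR — would centralize the sentinel so the check cannot drift between call sites:

// Sketch of a typed cluster-id (illustrative only; the PR uses the raw string).
#[derive(Clone, Debug, PartialEq)]
enum ClusterId {
    /// Config entry exists but the cluster has not been provisioned yet.
    Placeholder,
    /// Real cluster id returned by the cloud backend.
    Assigned(String),
}

impl ClusterId {
    const SENTINEL: &'static str = "YOUR_CLUSTER_ID";

    /// Parse the raw string stored in helix.toml.
    fn from_raw(raw: &str) -> Self {
        if raw == Self::SENTINEL {
            ClusterId::Placeholder
        } else {
            ClusterId::Assigned(raw.to_string())
        }
    }
}

fn main() {
    assert_eq!(ClusterId::from_raw("YOUR_CLUSTER_ID"), ClusterId::Placeholder);
    assert_eq!(
        ClusterId::from_raw("cl_123"),
        ClusterId::Assigned("cl_123".into())
    );
}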
diff --git a/helix-cli/src/commands/dashboard.rs b/helix-cli/src/commands/dashboard.rs
index 4234668ee..22eba97c7 100644
--- a/helix-cli/src/commands/dashboard.rs
+++ b/helix-cli/src/commands/dashboard.rs
@@ -1,12 +1,15 @@
 //! Dashboard management for Helix projects
 
+use crate::DashboardAction;
 use crate::commands::auth::Credentials;
 use crate::commands::integrations::helix::CLOUD_AUTHORITY;
 use crate::config::{ContainerRuntime, InstanceInfo};
 use crate::docker::DockerManager;
 use crate::project::ProjectContext;
-use crate::utils::{print_field, print_header, print_info, print_newline, print_status, print_success, print_warning};
-use crate::DashboardAction;
+use crate::utils::{
+    print_field, print_header, print_info, print_newline, print_status, print_success,
+    print_warning,
+};
 use eyre::{Result, eyre};
 use std::process::Command;
 
@@ -269,24 +272,28 @@ fn load_cloud_credentials() -> Result<Credentials> {
 
 fn get_cloud_url(instance_config: &InstanceInfo) -> Result<String> {
     match instance_config {
-        InstanceInfo::Helix(config) => {
-            Ok(format!("http://{}/clusters/{}", *CLOUD_AUTHORITY, config.cluster_id))
-        }
-        InstanceInfo::FlyIo(_) => {
-            Err(eyre!("Fly.io instances are not yet supported for the dashboard"))
-        }
-        InstanceInfo::Ecr(_) => {
-            Err(eyre!("ECR instances are not yet supported for the dashboard"))
-        }
-        InstanceInfo::Local(_) => {
-            Err(eyre!("Local instances should not call get_cloud_url"))
-        }
+        InstanceInfo::Helix(config) => Ok(format!(
+            "https://{}/clusters/{}",
+            *CLOUD_AUTHORITY, config.cluster_id
+        )),
+        InstanceInfo::FlyIo(_) => Err(eyre!(
+            "Fly.io instances are not yet supported for the dashboard"
+        )),
+        InstanceInfo::Ecr(_) => Err(eyre!(
+            "ECR instances are not yet supported for the dashboard"
+        )),
+        InstanceInfo::Local(_) => Err(eyre!("Local instances should not call get_cloud_url")),
     }
 }
 
 fn is_dashboard_running(runtime: ContainerRuntime) -> Result<bool> {
     let output = Command::new(runtime.binary())
-        .args(["ps", "-q", "-f", &format!("name={DASHBOARD_CONTAINER_NAME}")])
+        .args([
+            "ps",
+            "-q",
+            "-f",
+            &format!("name={DASHBOARD_CONTAINER_NAME}"),
+        ])
         .output()
         .map_err(|e| eyre!("Failed to check dashboard status: {e}"))?;
@@ -466,7 +473,7 @@ fn status() -> Result<()> {
             "inspect",
             DASHBOARD_CONTAINER_NAME,
             "--format",
-            "{{range .Config.Env}}{{println .}}{{end}}"
+            "{{range .Config.Env}}{{println .}}{{end}}",
         ])
         .output();
diff --git a/helix-cli/src/commands/init.rs b/helix-cli/src/commands/init.rs
index 76b283afa..ba866a24d 100644
--- a/helix-cli/src/commands/init.rs
+++ b/helix-cli/src/commands/init.rs
@@ -1,5 +1,5 @@
-use crate::cleanup::CleanupTracker;
 use crate::CloudDeploymentTypeCommand;
+use crate::cleanup::CleanupTracker;
 use crate::commands::integrations::ecr::{EcrAuthType, EcrManager};
 use crate::commands::integrations::fly::{FlyAuthType, FlyManager, VmSize};
 use crate::commands::integrations::helix::HelixManager;
@@ -33,11 +33,12 @@ pub async fn run(
 
     // If there was an error, perform cleanup
     if let Err(ref e) = result
-        && cleanup_tracker.has_tracked_resources() {
-            eprintln!("Init failed, performing cleanup: {}", e);
-            let summary = cleanup_tracker.cleanup();
-            summary.log_summary();
-        }
+        && cleanup_tracker.has_tracked_resources()
+    {
+        eprintln!("Init failed, performing cleanup: {}", e);
+        let summary = cleanup_tracker.cleanup();
+        summary.log_summary();
+    }
 
     result
 }
@@ -106,17 +107,12 @@ async fn run_init_inner(
             // Create Helix manager
             let helix_manager = HelixManager::new(&project_context);
 
-            // Create cloud instance configuration
+            // Create cloud instance configuration (without cluster_id yet)
             let cloud_config = helix_manager
-                .create_instance_config(project_name, region)
+                .create_instance_config(project_name, region.clone())
                 .await?;
-
-            // Initialize the cloud cluster
-            helix_manager
-                .init_cluster(project_name, &cloud_config)
-                .await?;
-
-            // Insert into config
+            // Insert into config first
             config.cloud.insert(
                 project_name.to_string(),
                 CloudConfig::Helix(cloud_config.clone()),
@@ -125,8 +121,36 @@ async fn run_init_inner(
             // Backup config before saving
             cleanup_tracker.backup_config(&config, config_path.clone());
 
-            // save config
+            // Save config
             config.save_to_file(&config_path)?;
+
+            // Prompt user to create cluster now
+            println!();
+            print_status("CLUSTER", "Helix Cloud instance configuration saved");
+            println!("\nWould you like to create the cluster now?");
+            println!("This will open Stripe for payment and provision your cluster.");
+            println!();
+            print!("Create cluster now? [Y/n]: ");
+            use std::io::{self, Write};
+            io::stdout().flush()?;
+
+            let mut input = String::new();
+            io::stdin().read_line(&mut input)?;
+            let input = input.trim().to_lowercase();
+
+            if input.is_empty() || input == "y" || input == "yes" {
+                // Run create-cluster flow
+                crate::commands::create_cluster::run(project_name, region).await?;
+            } else {
+                println!();
+                print_status(
+                    "INFO",
+                    &format!(
+                        "Cluster creation skipped. Run 'helix create-cluster {}' when ready.",
+                        project_name
+                    ),
+                );
+            }
         }
         CloudDeploymentTypeCommand::Ecr { .. } => {
             let cwd = env::current_dir()?;
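Review note: add.rs and init.rs now carry the same inline "Create cluster now? [Y/n]" prompt logic. A shared helper along these lines — hypothetical, not in this PR — would keep the default-yes parsing in one place:

// Illustrative sketch only: a prompt helper mirroring the duplicated logic.
use std::io::{self, Write};

fn prompt_yes_no(question: &str) -> io::Result<bool> {
    print!("{question} [Y/n]: ");
    io::stdout().flush()?;

    let mut input = String::new();
    io::stdin().read_line(&mut input)?;
    let input = input.trim().to_lowercase();

    // Empty input falls through to the default answer: yes.
    Ok(input.is_empty() || input == "y" || input == "yes")
}

fn main() -> io::Result<()> {
    if prompt_yes_no("Create cluster now?")? {
        println!("creating cluster...");
    } else {
        println!("skipped");
    }
    Ok(())
}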
diff --git a/helix-cli/src/commands/integrations/helix.rs b/helix-cli/src/commands/integrations/helix.rs
index ca4bee189..96e2bcb68 100644
--- a/helix-cli/src/commands/integrations/helix.rs
+++ b/helix-cli/src/commands/integrations/helix.rs
@@ -1,20 +1,28 @@
 use crate::commands::auth::Credentials;
 use crate::config::{BuildMode, CloudInstanceConfig, DbConfig, InstanceInfo};
 use crate::project::ProjectContext;
+use crate::sse_client::{SseEvent, SseProgressHandler};
 use crate::utils::helixc_utils::{collect_hx_files, generate_content};
-use crate::utils::{print_error_with_hint, print_status, print_success};
+use crate::utils::{print_error, print_error_with_hint, print_status, print_success};
 use eyre::{OptionExt, Result, eyre};
 use helix_db::helix_engine::traversal_core::config::Config;
-use helix_db::utils::styled_string::StyledString;
+use reqwest_eventsource::RequestBuilderExt;
 use serde_json::json;
+use std::collections::HashMap;
 use std::env;
 use std::path::PathBuf;
 use std::sync::LazyLock;
 // use uuid::Uuid;
 
-const DEFAULT_CLOUD_AUTHORITY: &str = "ec2-184-72-27-116.us-west-1.compute.amazonaws.com:3000";
+const DEFAULT_CLOUD_AUTHORITY: &str = "cloud.helix-db.com";
 pub static CLOUD_AUTHORITY: LazyLock<String> = LazyLock::new(|| {
-    std::env::var("CLOUD_AUTHORITY").unwrap_or(DEFAULT_CLOUD_AUTHORITY.to_string())
+    std::env::var("CLOUD_AUTHORITY").unwrap_or_else(|_| {
+        if cfg!(debug_assertions) {
+            "localhost:3000".to_string()
+        } else {
+            DEFAULT_CLOUD_AUTHORITY.to_string()
+        }
+    })
 });
 
 pub struct HelixManager<'a> {
@@ -71,10 +79,12 @@ impl<'a> HelixManager<'a> {
             cluster_id,
             region,
             build_mode: BuildMode::Release,
+            env_vars: HashMap::new(),
             db_config: DbConfig::default(),
         })
     }
 
+    #[allow(dead_code)]
     pub async fn init_cluster(
         &self,
         instance_name: &str,
@@ -164,25 +174,23 @@ impl<'a> HelixManager<'a> {
         // get credentials - already validated by check_auth()
         let credentials = Credentials::read_from_file(&self.credentials_path()?);
 
-        // read config.hx.json
-        let config_path = path.join("config.hx.json");
+        // Optionally load config from helix.toml or legacy config.hx.json
+        let helix_toml_path = path.join("helix.toml");
+        let config_hx_path = path.join("config.hx.json");
         let schema_path = path.join("schema.hx");
 
-        // Use from_files if schema.hx exists (backward compatibility), otherwise use from_file
-        let config = if schema_path.exists() {
-            match Config::from_files(config_path, schema_path) {
-                Ok(config) => config,
-                Err(e) => {
-                    return Err(eyre!("Error: failed to load config: {e}"));
-                }
+        let _config: Option<Config> = if helix_toml_path.exists() {
+            // v2 format: helix.toml (config is already loaded in self.project)
+            None
+        } else if config_hx_path.exists() {
+            // v1 backward compatibility: config.hx.json
+            if schema_path.exists() {
+                Config::from_files(config_hx_path, schema_path).ok()
+            } else {
+                Config::from_file(config_hx_path).ok()
             }
         } else {
-            match Config::from_file(config_path) {
-                Ok(config) => config,
-                Err(e) => {
-                    return Err(eyre!("Error: failed to load config: {e}"));
-                }
-            }
+            None
         };
 
         // get cluster information from helix.toml
@@ -193,41 +201,248 @@ impl<'a> HelixManager<'a> {
             }
         };
 
-        // upload queries to central server
+        // Separate schema from query files
+        let mut schema_content = String::new();
+        let mut queries_map: HashMap<String, String> = HashMap::new();
+
+        for file in &content.files {
+            if file.name.ends_with("schema.hx") {
+                schema_content = file.content.clone();
+            } else {
+                queries_map.insert(file.name.clone(), file.content.clone());
+            }
+        }
+
+        // Prepare deployment payload
         let payload = json!({
-            "user_id": credentials.user_id,
-            "queries": content.files,
-            "cluster_id": cluster_info.cluster_id,
-            "version": "0.1.0",
-            "helix_config": config.to_json()
+            "schema": schema_content,
+            "queries": queries_map,
+            "env_vars": cluster_info.env_vars,
+            "instance_name": cluster_name
         });
 
-        let client = reqwest::Client::new();
-        let cloud_url = format!("http://{}/clusters/deploy-queries", *CLOUD_AUTHORITY);
+        // Initiate deployment with SSE streaming
+        let client = reqwest::Client::new();
+        let deploy_url = format!("https://{}/deploy", *CLOUD_AUTHORITY);
 
-        match client
-            .post(cloud_url)
-            .header("x-api-key", &credentials.helix_admin_key) // used to verify user
-            .header("x-cluster-id", &cluster_info.cluster_id) // used to verify instance with user
+        let mut event_source = client
+            .post(&deploy_url)
+            .header("x-api-key", &credentials.helix_admin_key)
+            .header("x-cluster-id", &cluster_info.cluster_id)
             .header("Content-Type", "application/json")
-            .body(serde_json::to_string(&payload).unwrap())
-            .send()
-            .await
-        {
-            Ok(response) => {
-                if response.status().is_success() {
-                    println!("{}", "Queries uploaded to remote db".green().bold());
-                } else {
-                    return Err(eyre!("Error uploading queries to remote db"));
+            .json(&payload)
+            .eventsource()?;
+
+        let progress = SseProgressHandler::new("Deploying queries...");
+        let mut deployment_success = false;
+
+        // Process SSE events
+        use futures_util::StreamExt;
+
+        while let Some(event) = event_source.next().await {
+            match event {
+                Ok(reqwest_eventsource::Event::Open) => {
+                    // Connection opened
+                }
+                Ok(reqwest_eventsource::Event::Message(message)) => {
+                    // Parse the SSE event
+                    let sse_event: SseEvent = match serde_json::from_str(&message.data) {
+                        Ok(event) => event,
+                        Err(e) => {
+                            progress.println(&format!("Failed to parse event: {}", e));
+                            continue;
+                        }
+                    };
+
+                    match sse_event {
+                        SseEvent::Progress {
+                            percentage,
+                            message,
+                        } => {
+                            progress.set_progress(percentage);
+                            if let Some(msg) = message {
+                                progress.set_message(&msg);
+                            }
+                        }
+                        SseEvent::Log { message, .. } => {
+                            progress.println(&message);
+                        }
+                        SseEvent::StatusTransition { to, message, .. } => {
+                            let msg = message.unwrap_or_else(|| format!("Status: {}", to));
+                            progress.println(&msg);
+                        }
+                        SseEvent::Success { .. } => {
+                            deployment_success = true;
+                            progress.finish("Deployment completed successfully!");
+                            event_source.close();
+                            break;
+                        }
+                        SseEvent::Error { error } => {
+                            progress.finish_error(&format!("Error: {}", error));
+                            event_source.close();
+                            return Err(eyre!("Deployment failed: {}", error));
+                        }
+                        // Deploy-specific events
+                        SseEvent::ValidatingQueries => {
+                            progress.set_message("Validating queries...");
+                        }
+                        SseEvent::Building {
+                            estimated_percentage,
+                        } => {
+                            progress.set_progress(estimated_percentage as f64);
+                            progress.set_message("Building...");
+                        }
+                        SseEvent::Deploying => {
+                            progress.set_message("Deploying to infrastructure...");
+                        }
+                        SseEvent::Deployed { url, auth_key } => {
+                            deployment_success = true;
+                            progress.finish("Deployment completed!");
+                            print_success(&format!("Deployed to: {}", url));
+                            print_status("AUTH_KEY", &format!("Your auth key: {}", auth_key));
+
+                            // Prompt user for .env handling
+                            println!();
+                            println!("Would you like to save connection details to a .env file?");
+                            println!("  1. Add to .env in project root (Recommended)");
+                            println!("  2. Don't add");
+                            println!("  3. Specify custom path");
+                            print!("\nChoice [1]: ");
+
+                            use std::io::{self, Write};
+                            io::stdout().flush().ok();
+
+                            let mut input = String::new();
+                            if io::stdin().read_line(&mut input).is_ok() {
+                                let choice = input.trim();
+                                match choice {
+                                    "1" | "" => {
+                                        let env_path = self.project.root.join(".env");
+                                        let comment = format!(
+                                            "# HelixDB Cloud URL for instance: {}",
+                                            cluster_name
+                                        );
+                                        if let Err(e) = crate::utils::add_env_var_with_comment(
+                                            &env_path,
+                                            "HELIX_CLOUD_URL",
+                                            &url,
+                                            Some(&comment),
+                                        ) {
+                                            print_error(&format!("Failed to write .env: {}", e));
+                                        }
+                                        match crate::utils::add_env_var_to_file(
+                                            &env_path,
+                                            "HELIX_API_KEY",
+                                            &auth_key,
+                                        ) {
+                                            Ok(_) => print_success(&format!(
+                                                "Added HELIX_CLOUD_URL and HELIX_API_KEY to {}",
+                                                env_path.display()
+                                            )),
+                                            Err(e) => {
+                                                print_error(&format!("Failed to write .env: {}", e))
+                                            }
+                                        }
+                                    }
+                                    "2" => {
+                                        print_status("INFO", "Skipped saving to .env");
+                                    }
+                                    "3" => {
+                                        print!("Enter path: ");
+                                        io::stdout().flush().ok();
+                                        let mut path_input = String::new();
+                                        if io::stdin().read_line(&mut path_input).is_ok() {
+                                            let custom_path = PathBuf::from(path_input.trim());
+                                            let comment = format!(
+                                                "# HelixDB Cloud URL for instance: {}",
+                                                cluster_name
+                                            );
+                                            if let Err(e) = crate::utils::add_env_var_with_comment(
+                                                &custom_path,
+                                                "HELIX_CLOUD_URL",
+                                                &url,
+                                                Some(&comment),
+                                            ) {
+                                                print_error(&format!("Failed to write .env: {}", e));
+                                            }
+                                            match crate::utils::add_env_var_to_file(
+                                                &custom_path,
+                                                "HELIX_API_KEY",
+                                                &auth_key,
+                                            ) {
+                                                Ok(_) => print_success(&format!(
+                                                    "Added HELIX_CLOUD_URL and HELIX_API_KEY to {}",
+                                                    custom_path.display()
+                                                )),
+                                                Err(e) => print_error(&format!(
+                                                    "Failed to write .env: {}",
+                                                    e
+                                                )),
+                                            }
+                                        }
+                                    }
+                                    _ => {
+                                        print_status(
+                                            "INFO",
+                                            "Invalid choice, skipped saving to .env",
+                                        );
+                                    }
+                                }
+                            }
+
+                            event_source.close();
+                            break;
+                        }
+                        SseEvent::Redeployed { url } => {
+                            deployment_success = true;
+                            progress.finish("Redeployment completed!");
+                            print_success(&format!("Redeployed to: {}", url));
+                            event_source.close();
+                            break;
+                        }
+                        SseEvent::BadRequest { error } => {
+                            progress.finish_error(&format!("Bad request: {}", error));
+                            event_source.close();
+                            return Err(eyre!("Bad request: {}", error));
+                        }
+                        SseEvent::QueryValidationError { error } => {
+                            progress.finish_error(&format!("Query validation failed: {}", error));
+                            event_source.close();
+                            return Err(eyre!("Query validation error: {}", error));
+                        }
+                        _ => {
+                            // Ignore other event types
+                        }
+                    }
+                }
+                Err(err) => {
+                    progress.finish_error(&format!("Stream error: {}", err));
+                    return Err(eyre!("Deployment stream error: {}", err));
                 }
             }
-            Err(e) => {
-                return Err(eyre!("Error uploading queries to remote db: {e}"));
-            }
-        };
+        }
+
+        if !deployment_success {
+            return Err(eyre!("Deployment did not complete successfully"));
+        }
 
+        print_success("Queries deployed successfully");
         Ok(())
     }
+
+    #[allow(dead_code)]
+    pub(crate) async fn redeploy(&self, path: Option<PathBuf>, cluster_name: String) -> Result<()> {
+        // Redeploy is similar to deploy but may have different backend handling
+        // For now, we'll use the same implementation with a different status message
+        print_status(
+            "REDEPLOY",
+            &format!("Redeploying to cluster: {}", cluster_name),
+        );
+
+        // Call deploy with the same logic
+        // In the future, this could use a different endpoint or add a "redeploy" flag
+        self.deploy(path, cluster_name).await
+    }
 }
 
 /// Returns the path or the current working directory if no path is provided
diff --git a/helix-cli/src/commands/mod.rs b/helix-cli/src/commands/mod.rs
index 1e60680ab..7f02573ab 100644
--- a/helix-cli/src/commands/mod.rs
+++ b/helix-cli/src/commands/mod.rs
@@ -4,6 +4,7 @@ pub mod backup;
 pub mod build;
 pub mod check;
 pub mod compile;
+pub mod create_cluster;
 pub mod dashboard;
 pub mod delete;
 pub mod init;
diff --git a/helix-cli/src/commands/push.rs b/helix-cli/src/commands/push.rs
index 8d0318e6b..d72a25d64 100644
--- a/helix-cli/src/commands/push.rs
+++ b/helix-cli/src/commands/push.rs
@@ -134,6 +134,13 @@ async fn push_cloud_instance(
         .cluster_id()
         .ok_or_else(|| eyre::eyre!("Cloud instance '{instance_name}' must have a cluster_id"))?;
 
+    // Check if cluster has been created
+    if cluster_id == "YOUR_CLUSTER_ID" {
+        return Err(eyre::eyre!(
+            "Cluster for instance '{instance_name}' has not been created yet.\nRun 'helix create-cluster {instance_name}' to create the cluster first."
+        ));
+    }
+
     let metrics_data = if instance_config.should_build_docker_image() {
         // Build happens, get metrics data from build
         crate::commands::build::run(instance_name.to_string(), metrics_sender).await?
@@ -142,12 +149,7 @@ async fn push_cloud_instance(
         parse_queries_for_metrics(project)?
     };
 
-    // TODO: Implement cloud deployment
-    // This would involve:
-    // 1. Reading compiled queries from the container directory
-    // 2. Uploading them to the cloud cluster
-    // 3. Triggering deployment on the cloud
-
+    // Deploy to cloud
     let config = project.config.cloud.get(instance_name).unwrap();
     let mut deploy_spinner = Spinner::new("DEPLOY", "Deploying instance...");
     deploy_spinner.start();
@@ -173,7 +175,7 @@ async fn push_cloud_instance(
             .await?;
         }
         CloudConfig::Helix(_config) => {
-            deploy_spinner.update("Deploying to Helix...");
+            deploy_spinner.stop(); // Stop spinner before helix.deploy() starts its own progress
             let helix = HelixManager::new(project);
             helix.deploy(None, instance_name.to_string()).await?;
         }
diff --git a/helix-cli/src/config.rs b/helix-cli/src/config.rs
index c04681ff2..f6a5e8860 100644
--- a/helix-cli/src/config.rs
+++ b/helix-cli/src/config.rs
@@ -132,6 +132,8 @@ pub struct CloudInstanceConfig {
     pub region: Option<String>,
     #[serde(default = "default_dev_build_mode")]
     pub build_mode: BuildMode,
+    #[serde(default)]
+    pub env_vars: HashMap<String, String>,
     #[serde(flatten)]
     pub db_config: DbConfig,
 }
diff --git a/helix-cli/src/lib.rs b/helix-cli/src/lib.rs
index 7523891c3..ee82b73ce 100644
--- a/helix-cli/src/lib.rs
+++ b/helix-cli/src/lib.rs
@@ -9,6 +9,7 @@ pub mod errors;
 pub mod github_issue;
 pub mod metrics_sender;
 pub mod project;
+pub mod sse_client;
 pub mod update;
 pub mod utils;
diff --git a/helix-cli/src/main.rs b/helix-cli/src/main.rs
index 8b2686d4a..37f9a7e73 100644
--- a/helix-cli/src/main.rs
+++ b/helix-cli/src/main.rs
@@ -1,8 +1,7 @@
 use clap::{Parser, Subcommand};
 use eyre::Result;
-use std::path::PathBuf;
 use helix_cli::{AuthAction, CloudDeploymentTypeCommand, DashboardAction, MetricsAction};
-
+use std::path::PathBuf;
 
 mod cleanup;
 mod commands;
@@ -12,6 +11,7 @@ mod errors;
 mod github_issue;
 mod metrics_sender;
 mod project;
+mod sse_client;
 mod update;
 mod utils;
 
@@ -48,6 +48,16 @@ enum Commands {
         cloud: CloudDeploymentTypeCommand,
     },
 
+    /// Create a new Helix Cloud cluster
+    CreateCluster {
+        /// Instance name
+        instance: String,
+
+        /// Region for cluster (defaults to us-east-1)
+        #[clap(short, long)]
+        region: Option<String>,
+    },
+
     /// Validate project configuration and queries
     Check {
         /// Instance to check (defaults to all instances)
@@ -201,6 +211,9 @@ async fn main() -> Result<()> {
             cloud,
         } => commands::init::run(path, template, queries_path, cloud).await,
         Commands::Add { cloud } => commands::add::run(cloud).await,
+        Commands::CreateCluster { instance, region } => {
+            commands::create_cluster::run(&instance, region).await
+        }
        Commands::Check { instance } => commands::check::run(instance, &metrics_sender).await,
         Commands::Compile { output, path } => commands::compile::run(output, path).await,
         Commands::Build { instance } => commands::build::run(instance, &metrics_sender)
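For reference, this is how the new CreateCluster subcommand parses with clap's derive API. The types are re-declared here, trimmed to a single variant, so the example is self-contained; the real CLI has many more subcommands:

// Sketch: clap derives the kebab-case name "create-cluster" from the variant.
use clap::{Parser, Subcommand};

#[derive(Parser)]
struct HelixCli {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand)]
enum Commands {
    /// Create a new Helix Cloud cluster
    CreateCluster {
        /// Instance name
        instance: String,
        /// Region for cluster (defaults to us-east-1)
        #[clap(short, long)]
        region: Option<String>,
    },
}

fn main() {
    // Equivalent to: helix create-cluster prod --region us-west-2
    let cli = HelixCli::parse_from(["helix", "create-cluster", "prod", "--region", "us-west-2"]);
    let Commands::CreateCluster { instance, region } = cli.command;
    assert_eq!(instance, "prod");
    assert_eq!(region.as_deref(), Some("us-west-2"));
}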
reqwest::Client::builder().timeout(self.timeout).build()?; + + let mut request = if self.use_post { + client.post(&self.url) + } else { + client.get(&self.url) + }; + for (key, value) in &self.headers { + request = request.header(key, value); + } + + let mut event_source = request.eventsource()?; + + while let Some(event) = event_source.next().await { + match event { + Ok(Event::Open) => { + // Connection opened + } + Ok(Event::Message(message)) => { + // Parse the SSE event + let sse_event: SseEvent = serde_json::from_str(&message.data) + .map_err(|e| eyre!("Failed to parse SSE event: {}", e))?; + + // Call handler - if it returns false, stop processing + if !handler(sse_event)? { + event_source.close(); + break; + } + } + Err(err) => { + event_source.close(); + return Err(eyre!("SSE stream error: {}", err)); + } + } + } + + Ok(()) + } +} + +/// Progress bar handler for SSE events with real-time progress +pub struct SseProgressHandler { + progress_bar: ProgressBar, +} + +impl SseProgressHandler { + /// Create a new progress handler with a message + pub fn new(message: &str) -> Self { + let progress_bar = ProgressBar::new(100); + progress_bar.set_style( + ProgressStyle::default_bar() + .template("{msg}\n{bar:40.cyan/blue} {pos}%") + .expect("Invalid progress bar template") + .progress_chars("=>-"), + ); + progress_bar.set_message(message.to_string()); + + Self { progress_bar } + } + + /// Update progress percentage + pub fn set_progress(&self, percentage: f64) { + self.progress_bar.set_position(percentage as u64); + } + + /// Update progress message + pub fn set_message(&self, message: &str) { + self.progress_bar.set_message(message.to_string()); + } + + /// Print a log message below the progress bar + pub fn println(&self, message: &str) { + self.progress_bar.println(message); + } + + /// Finish the progress bar with a message + pub fn finish(&self, message: &str) { + self.progress_bar.finish_with_message(message.to_string()); + } + + /// Finish with error + pub fn finish_error(&self, message: &str) { + self.progress_bar.abandon_with_message(message.to_string()); + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_sse_event_deserialization() { + // Test UserVerification (externally-tagged format with snake_case) + let json = r#"{ + "user_verification": { + "user_code": "ABC-123", + "verification_uri": "https://github.com/login/device" + } + }"#; + let event: SseEvent = serde_json::from_str(json).unwrap(); + match event { + SseEvent::UserVerification { user_code, .. } => { + assert_eq!(user_code, "ABC-123"); + } + _ => panic!("Wrong event type"), + } + + // Test Progress (externally-tagged format with snake_case) + let json = r#"{ + "progress": { + "percentage": 45.5, + "message": "Building..." + } + }"#; + let event: SseEvent = serde_json::from_str(json).unwrap(); + match event { + SseEvent::Progress { percentage, .. 
} => { + assert_eq!(percentage, 45.5); + } + _ => panic!("Wrong event type"), + } + } +} diff --git a/helix-cli/src/utils.rs b/helix-cli/src/utils.rs index 54086acd4..21d16743a 100644 --- a/helix-cli/src/utils.rs +++ b/helix-cli/src/utils.rs @@ -218,6 +218,55 @@ pub fn print_confirm(message: &str) -> std::io::Result<bool> { Ok(response.to_lowercase() == "y" || response.to_lowercase() == "yes") } +/// Add or update an environment variable in a .env file +pub fn add_env_var_to_file(file_path: &Path, key: &str, value: &str) -> std::io::Result<()> { + add_env_var_with_comment(file_path, key, value, None) +} + +/// Add or update an environment variable in a .env file with an optional comment +pub fn add_env_var_with_comment( + file_path: &Path, + key: &str, + value: &str, + comment: Option<&str>, +) -> std::io::Result<()> { + let mut content = if file_path.exists() { + fs::read_to_string(file_path)? + } else { + String::new() + }; + + let key_prefix = format!("{}=", key); + if content.lines().any(|line| line.starts_with(&key_prefix)) { + // Replace existing key (preserve any existing comment above it) + content = content + .lines() + .map(|line| { + if line.starts_with(&key_prefix) { + format!("{}={}", key, value) + } else { + line.to_string() + } + }) + .collect::<Vec<String>>() + .join("\n"); + if !content.ends_with('\n') { + content.push('\n'); + } + } else { + // Append new key with optional comment + if !content.is_empty() && !content.ends_with('\n') { + content.push('\n'); + } + if let Some(cmt) = comment { + content.push_str(&format!("{}\n", cmt)); + } + content.push_str(&format!("{}={}\n", key, value)); + } + + fs::write(file_path, content) +} + #[derive(Default)] #[allow(unused)] pub enum Template { @@ -466,7 +515,8 @@ impl Spinner { if let Some(handle) = self.handle.take() { handle.abort(); } - print!("\r"); + // Clear the line completely + print!("\r\x1b[K"); std::io::Write::flush(&mut std::io::stdout()).unwrap(); } /// function that updates the message @@ -482,3 +532,176 @@ impl Drop for Spinner { self.stop(); } } + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use tempfile::tempdir; + + #[test] + fn test_add_env_var_creates_new_file() { + let dir = tempdir().unwrap(); + let env_path = dir.path().join(".env"); + + add_env_var_to_file(&env_path, "HELIX_API_KEY", "test-key-123").unwrap(); + + let content = fs::read_to_string(&env_path).unwrap(); + assert_eq!(content, "HELIX_API_KEY=test-key-123\n"); + } + + #[test] + fn test_add_env_var_appends_to_existing_file() { + let dir = tempdir().unwrap(); + let env_path = dir.path().join(".env"); + + // Create existing .env file + fs::write(&env_path, "EXISTING_VAR=value\n").unwrap(); + + add_env_var_to_file(&env_path, "HELIX_API_KEY", "test-key-123").unwrap(); + + let content = fs::read_to_string(&env_path).unwrap(); + assert_eq!(content, "EXISTING_VAR=value\nHELIX_API_KEY=test-key-123\n"); + } + + #[test] + fn test_add_env_var_appends_newline_if_missing() { + let dir = tempdir().unwrap(); + let env_path = dir.path().join(".env"); + + // Create existing .env file without trailing newline + fs::write(&env_path, "EXISTING_VAR=value").unwrap(); + + add_env_var_to_file(&env_path, "HELIX_API_KEY", "test-key-123").unwrap(); + + let content = fs::read_to_string(&env_path).unwrap(); + assert_eq!(content, "EXISTING_VAR=value\nHELIX_API_KEY=test-key-123\n"); + } + + #[test] + fn test_add_env_var_updates_existing_key() { + let dir = tempdir().unwrap(); + let env_path = dir.path().join(".env"); + + // Create existing .env file with the key already present +
fs::write(&env_path, "OTHER_VAR=foo\nHELIX_API_KEY=old-key\nANOTHER_VAR=bar\n").unwrap(); + + add_env_var_to_file(&env_path, "HELIX_API_KEY", "new-key-456").unwrap(); + + let content = fs::read_to_string(&env_path).unwrap(); + assert_eq!(content, "OTHER_VAR=foo\nHELIX_API_KEY=new-key-456\nANOTHER_VAR=bar\n"); + } + + #[test] + fn test_add_env_var_handles_empty_file() { + let dir = tempdir().unwrap(); + let env_path = dir.path().join(".env"); + + // Create empty .env file + fs::write(&env_path, "").unwrap(); + + add_env_var_to_file(&env_path, "HELIX_API_KEY", "test-key-123").unwrap(); + + let content = fs::read_to_string(&env_path).unwrap(); + assert_eq!(content, "HELIX_API_KEY=test-key-123\n"); + } + + #[test] + fn test_add_env_var_preserves_other_variables() { + let dir = tempdir().unwrap(); + let env_path = dir.path().join(".env"); + + // Create .env with multiple variables + fs::write(&env_path, "VAR1=value1\nVAR2=value2\nVAR3=value3\n").unwrap(); + + add_env_var_to_file(&env_path, "HELIX_API_KEY", "my-key").unwrap(); + + let content = fs::read_to_string(&env_path).unwrap(); + assert!(content.contains("VAR1=value1")); + assert!(content.contains("VAR2=value2")); + assert!(content.contains("VAR3=value3")); + assert!(content.contains("HELIX_API_KEY=my-key")); + } + + #[test] + fn test_add_env_var_with_comment_creates_file_with_comment() { + let dir = tempdir().unwrap(); + let env_path = dir.path().join(".env"); + + add_env_var_with_comment( + &env_path, + "HELIX_CLOUD_URL", + "https://example.com", + Some("# HelixDB Cloud URL for instance: test"), + ) + .unwrap(); + + let content = fs::read_to_string(&env_path).unwrap(); + assert_eq!( + content, + "# HelixDB Cloud URL for instance: test\nHELIX_CLOUD_URL=https://example.com\n" + ); + } + + #[test] + fn test_add_env_var_with_comment_appends_with_comment() { + let dir = tempdir().unwrap(); + let env_path = dir.path().join(".env"); + + // Create existing .env file + fs::write(&env_path, "EXISTING_VAR=value\n").unwrap(); + + add_env_var_with_comment( + &env_path, + "HELIX_CLOUD_URL", + "https://example.com", + Some("# HelixDB Cloud URL for instance: test"), + ) + .unwrap(); + + let content = fs::read_to_string(&env_path).unwrap(); + assert_eq!( + content, + "EXISTING_VAR=value\n# HelixDB Cloud URL for instance: test\nHELIX_CLOUD_URL=https://example.com\n" + ); + } + + #[test] + fn test_add_env_var_with_comment_updates_without_duplicate_comment() { + let dir = tempdir().unwrap(); + let env_path = dir.path().join(".env"); + + // Create existing .env file with key and comment + fs::write( + &env_path, + "# HelixDB Cloud URL for instance: old\nHELIX_CLOUD_URL=https://old.com\n", + ) + .unwrap(); + + add_env_var_with_comment( + &env_path, + "HELIX_CLOUD_URL", + "https://new.com", + Some("# HelixDB Cloud URL for instance: new"), + ) + .unwrap(); + + let content = fs::read_to_string(&env_path).unwrap(); + // Should update value but preserve existing comment (not add duplicate) + assert_eq!( + content, + "# HelixDB Cloud URL for instance: old\nHELIX_CLOUD_URL=https://new.com\n" + ); + } + + #[test] + fn test_add_env_var_with_no_comment() { + let dir = tempdir().unwrap(); + let env_path = dir.path().join(".env"); + + add_env_var_with_comment(&env_path, "HELIX_API_KEY", "test-key", None).unwrap(); + + let content = fs::read_to_string(&env_path).unwrap(); + assert_eq!(content, "HELIX_API_KEY=test-key\n"); + } +} diff --git a/helix-db/Cargo.toml b/helix-db/Cargo.toml index f0b8f4141..23b561d6d 100644 --- a/helix-db/Cargo.toml +++ b/helix-db/Cargo.toml @@ 
-1,6 +1,6 @@ [package] name = "helix-db" -version = "1.1.4" +version = "1.1.5" edition = "2024" description = "HelixDB is a powerful, open-source, graph-vector database built in Rust for intelligent data storage for RAG and AI." license = "AGPL-3.0" @@ -57,8 +57,7 @@ polars = { version = "0.46.0", features = [ "lazy", "json", ], optional = true } -subtle = "2.6.1" -sha_256 = "=0.1.1" +bcrypt = "0.17" [dev-dependencies] rand = "0.9.0" @@ -88,4 +87,4 @@ production = ["api-key","server"] [[test]] name = "capacity_optimization_benches" -path = "benches/capacity_optimization_benches.rs" \ No newline at end of file +path = "benches/capacity_optimization_benches.rs" diff --git a/helix-db/src/helix_gateway/builtin/all_nodes_and_edges.rs b/helix-db/src/helix_gateway/builtin/all_nodes_and_edges.rs index 9a8c618b0..c776b1f1b 100644 --- a/helix-db/src/helix_gateway/builtin/all_nodes_and_edges.rs +++ b/helix-db/src/helix_gateway/builtin/all_nodes_and_edges.rs @@ -41,7 +41,7 @@ pub async fn nodes_edges_handler( let mut req = protocol::request::Request { name: "nodes_edges".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: axum::body::Bytes::new(), in_fmt: protocol::Format::default(), out_fmt: protocol::Format::default(), @@ -149,10 +149,11 @@ fn get_all_nodes_edges_json( let node = Node::from_bincode_bytes(id, value, arena)?; json_node["label"] = json!(node.label); if let Some(props) = node.properties - && let Some(prop_value) = props.get(prop) { - json_node["label"] = sonic_rs::to_value(&prop_value.inner_stringify()) - .unwrap_or_else(|_| sonic_rs::Value::from("")); - } + && let Some(prop_value) = props.get(prop) + { + json_node["label"] = sonic_rs::to_value(&prop_value.inner_stringify()) + .unwrap_or_else(|_| sonic_rs::Value::from("")); + } } nodes.push(json_node); } @@ -190,9 +191,6 @@ inventory::submit! 
{ #[cfg(test)] mod tests { use super::*; - use std::sync::Arc; - use tempfile::TempDir; - use axum::body::Bytes; use crate::{ helix_engine::{ storage_core::version_info::VersionInfo, @@ -201,16 +199,16 @@ mod tests { config::Config, ops::{ g::G, - source::{ - add_e::AddEAdapter, - add_n::AddNAdapter, - }, + source::{add_e::AddEAdapter, add_n::AddNAdapter}, }, }, }, - protocol::{request::Request, request::RequestType, Format}, helixc::generator::traversal_steps::EdgeType, + protocol::{Format, request::Request, request::RequestType}, }; + use axum::body::Bytes; + use std::sync::Arc; + use tempfile::TempDir; fn setup_test_engine() -> (HelixGraphEngine, TempDir) { let temp_dir = TempDir::new().unwrap(); @@ -230,7 +228,7 @@ mod tests { let request = Request { name: "nodes_edges".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, @@ -239,7 +237,6 @@ mod tests { let input = HandlerInput { graph: Arc::new(engine), request, - }; let result = nodes_edges_inner(input); @@ -266,7 +263,9 @@ mod tests { let props1 = vec![("name", Value::String("Alice".to_string()))]; let props_map1 = ImmutablePropertiesMap::new( props1.len(), - props1.iter().map(|(k, v)| (arena.alloc_str(k) as &str, v.clone())), + props1 + .iter() + .map(|(k, v)| (arena.alloc_str(k) as &str, v.clone())), &arena, ); @@ -277,7 +276,9 @@ mod tests { let props2 = vec![("name", Value::String("Bob".to_string()))]; let props_map2 = ImmutablePropertiesMap::new( props2.len(), - props2.iter().map(|(k, v)| (arena.alloc_str(k) as &str, v.clone())), + props2 + .iter() + .map(|(k, v)| (arena.alloc_str(k) as &str, v.clone())), &arena, ); @@ -286,7 +287,13 @@ mod tests { .collect_to_obj()?; let _edge = G::new_mut(&engine.storage, &arena, &mut txn) - .add_edge(arena.alloc_str("knows"), None, node1.id(), node2.id(), false) + .add_edge( + arena.alloc_str("knows"), + None, + node1.id(), + node2.id(), + false, + ) .collect_to_obj()?; txn.commit().unwrap(); @@ -294,7 +301,7 @@ mod tests { let request = Request { name: "nodes_edges".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, @@ -303,7 +310,6 @@ mod tests { let input = HandlerInput { graph: Arc::new(engine), request, - }; let result = nodes_edges_inner(input); @@ -330,7 +336,9 @@ mod tests { let props = vec![("index", Value::I64(i))]; let props_map = ImmutablePropertiesMap::new( props.len(), - props.iter().map(|(k, v)| (arena.alloc_str(k) as &str, v.clone())), + props + .iter() + .map(|(k, v)| (arena.alloc_str(k) as &str, v.clone())), &arena, ); @@ -343,7 +351,13 @@ mod tests { // Add some edges to satisfy the nodes_edges_to_json method for i in 0..5 { let _edge = G::new_mut(&engine.storage, &arena, &mut txn) - .add_edge(arena.alloc_str("connects"), None, nodes[i].id(), nodes[i+1].id(), false) + .add_edge( + arena.alloc_str("connects"), + None, + nodes[i].id(), + nodes[i + 1].id(), + false, + ) .collect_to_obj()?; } @@ -353,7 +367,7 @@ mod tests { let request = Request { name: "nodes_edges".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(params_json), in_fmt: Format::Json, out_fmt: Format::Json, @@ -362,7 +376,6 @@ mod tests { let input = HandlerInput { graph: Arc::new(engine), request, - }; let result = nodes_edges_inner(input); @@ -385,7 +398,9 @@ mod tests { let props = vec![("name", Value::String("Test".to_string()))]; let props_map = 
ImmutablePropertiesMap::new( props.len(), - props.iter().map(|(k, v)| (arena.alloc_str(k) as &str, v.clone())), + props + .iter() + .map(|(k, v)| (arena.alloc_str(k) as &str, v.clone())), &arena, ); @@ -399,7 +414,7 @@ mod tests { let request = Request { name: "nodes_edges".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(params_json), in_fmt: Format::Json, out_fmt: Format::Json, @@ -408,7 +423,6 @@ mod tests { let input = HandlerInput { graph: Arc::new(engine), request, - }; let result = nodes_edges_inner(input); @@ -422,7 +436,7 @@ mod tests { let request = Request { name: "nodes_edges".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, @@ -431,7 +445,6 @@ mod tests { let input = HandlerInput { graph: Arc::new(engine), request, - }; let result = nodes_edges_inner(input); diff --git a/helix-db/src/helix_gateway/builtin/node_by_id.rs b/helix-db/src/helix_gateway/builtin/node_by_id.rs index 8731a6c31..3cb4d1ea8 100644 --- a/helix-db/src/helix_gateway/builtin/node_by_id.rs +++ b/helix-db/src/helix_gateway/builtin/node_by_id.rs @@ -29,7 +29,7 @@ pub async fn node_details_handler( let mut req = protocol::request::Request { name: "node_details".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: axum::body::Bytes::new(), in_fmt: protocol::Format::default(), out_fmt: protocol::Format::default(), @@ -128,25 +128,22 @@ inventory::submit! { #[cfg(test)] mod tests { use super::*; - use std::sync::Arc; - use tempfile::TempDir; - use axum::body::Bytes; use crate::{ helix_engine::{ storage_core::version_info::VersionInfo, traversal_core::{ HelixGraphEngine, HelixGraphEngineOpts, config::Config, - ops::{ - g::G, - source::add_n::AddNAdapter, - }, + ops::{g::G, source::add_n::AddNAdapter}, }, }, - protocol::{request::Request, request::RequestType, Format, value::Value}, helix_gateway::router::router::HandlerInput, + protocol::{Format, request::Request, request::RequestType, value::Value}, utils::id::ID, }; + use axum::body::Bytes; + use std::sync::Arc; + use tempfile::TempDir; fn setup_test_engine() -> (HelixGraphEngine, TempDir) { let temp_dir = TempDir::new().unwrap(); @@ -171,7 +168,9 @@ mod tests { let props = vec![("name", Value::String("Alice".to_string()))]; let props_map = ImmutablePropertiesMap::new( props.len(), - props.iter().map(|(k, v)| (arena.alloc_str(k) as &str, v.clone())), + props + .iter() + .map(|(k, v)| (arena.alloc_str(k) as &str, v.clone())), &arena, ); @@ -187,7 +186,7 @@ mod tests { let request = Request { name: "node_details".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(params_json), in_fmt: Format::Json, out_fmt: Format::Json, @@ -196,7 +195,6 @@ mod tests { let input = HandlerInput { graph: Arc::new(engine), request, - }; let result = node_details_inner(input); @@ -218,7 +216,7 @@ mod tests { let request = Request { name: "node_details".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(params_json), in_fmt: Format::Json, out_fmt: Format::Json, @@ -227,7 +225,6 @@ mod tests { let input = HandlerInput { graph: Arc::new(engine), request, - }; let result = node_details_inner(input); @@ -247,7 +244,7 @@ mod tests { let request = Request { name: "node_details".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(params_json), in_fmt: Format::Json, 
out_fmt: Format::Json, @@ -256,7 +253,6 @@ mod tests { let input = HandlerInput { graph: Arc::new(engine), request, - }; let result = node_details_inner(input); @@ -270,7 +266,7 @@ mod tests { let request = Request { name: "node_details".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, @@ -279,7 +275,6 @@ mod tests { let input = HandlerInput { graph: Arc::new(engine), request, - }; let result = node_details_inner(input); @@ -300,7 +295,9 @@ mod tests { ]; let props_map = ImmutablePropertiesMap::new( props.len(), - props.iter().map(|(k, v)| (arena.alloc_str(k) as &str, v.clone())), + props + .iter() + .map(|(k, v)| (arena.alloc_str(k) as &str, v.clone())), &arena, ); @@ -316,7 +313,7 @@ mod tests { let request = Request { name: "node_details".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(params_json), in_fmt: Format::Json, out_fmt: Format::Json, @@ -325,7 +322,6 @@ mod tests { let input = HandlerInput { graph: Arc::new(engine), request, - }; let result = node_details_inner(input); diff --git a/helix-db/src/helix_gateway/builtin/node_connections.rs b/helix-db/src/helix_gateway/builtin/node_connections.rs index ba7cee439..937592574 100644 --- a/helix-db/src/helix_gateway/builtin/node_connections.rs +++ b/helix-db/src/helix_gateway/builtin/node_connections.rs @@ -32,7 +32,7 @@ pub async fn node_connections_handler( let mut req = protocol::request::Request { name: "node_connections".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: axum::body::Bytes::new(), in_fmt: protocol::Format::default(), out_fmt: protocol::Format::default(), @@ -275,7 +275,7 @@ mod tests { let request = Request { name: "node_connections".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(params_json), in_fmt: Format::Json, out_fmt: Format::Json, @@ -328,7 +328,7 @@ mod tests { let request = Request { name: "node_connections".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(params_json), in_fmt: Format::Json, out_fmt: Format::Json, @@ -366,7 +366,7 @@ mod tests { let request = Request { name: "node_connections".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(params_json), in_fmt: Format::Json, out_fmt: Format::Json, @@ -397,7 +397,7 @@ mod tests { let request = Request { name: "node_connections".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(params_json), in_fmt: Format::Json, out_fmt: Format::Json, @@ -419,7 +419,7 @@ mod tests { let request = Request { name: "node_connections".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, diff --git a/helix-db/src/helix_gateway/builtin/nodes_by_label.rs b/helix-db/src/helix_gateway/builtin/nodes_by_label.rs index f4fb773e1..91060e69e 100644 --- a/helix-db/src/helix_gateway/builtin/nodes_by_label.rs +++ b/helix-db/src/helix_gateway/builtin/nodes_by_label.rs @@ -30,7 +30,7 @@ pub async fn nodes_by_label_handler( let mut req = protocol::request::Request { name: "nodes_by_label".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: axum::body::Bytes::new(), in_fmt: protocol::Format::default(), out_fmt: protocol::Format::default(), @@ -214,7 +214,7 @@ mod tests { let 
request = Request { name: "nodes_by_label".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(params_json), in_fmt: Format::Json, out_fmt: Format::Json, @@ -264,7 +264,7 @@ mod tests { let request = Request { name: "nodes_by_label".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(params_json), in_fmt: Format::Json, out_fmt: Format::Json, @@ -293,7 +293,7 @@ mod tests { let request = Request { name: "nodes_by_label".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(params_json), in_fmt: Format::Json, out_fmt: Format::Json, @@ -319,7 +319,7 @@ mod tests { let request = Request { name: "nodes_by_label".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, @@ -355,7 +355,7 @@ mod tests { let request = Request { name: "nodes_by_label".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(params_json), in_fmt: Format::Json, out_fmt: Format::Json, diff --git a/helix-db/src/helix_gateway/gateway.rs b/helix-db/src/helix_gateway/gateway.rs index bde679279..b06070c74 100644 --- a/helix-db/src/helix_gateway/gateway.rs +++ b/helix-db/src/helix_gateway/gateway.rs @@ -175,7 +175,7 @@ async fn post_handler( #[cfg(feature = "api-key")] { use crate::helix_gateway::key_verification::verify_key; - if let Err(e) = verify_key(&req.api_key_hash.unwrap()) { + if let Err(e) = verify_key(req.api_key.as_ref().unwrap()) { info!(?e, "Invalid API key"); helix_metrics::log_event( helix_metrics::events::EventType::InvalidApiKey, diff --git a/helix-db/src/helix_gateway/key_verification.rs b/helix-db/src/helix_gateway/key_verification.rs index a22a02ea5..315828e2d 100644 --- a/helix-db/src/helix_gateway/key_verification.rs +++ b/helix-db/src/helix_gateway/key_verification.rs @@ -1,20 +1,25 @@ use crate::protocol::HelixError; -use subtle::ConstantTimeEq; +use std::sync::LazyLock; -/// API KEY HASH -const API_KEY_HASH: &[u8] = env!("HELIX_API_KEY").as_bytes(); +/// API KEY HASH (bcrypt hash read from HELIX_API_KEY env var on startup) +static API_KEY: LazyLock<String> = + LazyLock::new(|| std::env::var("HELIX_API_KEY").unwrap_or_default()); -pub(crate) fn verify_key(key: &[u8]) -> Result<(), HelixError> { - assert_eq!(API_KEY_HASH.len(), 32, "API key must be 32 bytes"); - if API_KEY_HASH.ct_eq(key).into() { - Ok(()) - } else { - Err(HelixError::InvalidApiKey) +#[inline(always)] +pub(crate) fn verify_key(key: &str) -> Result<(), HelixError> { + if API_KEY.is_empty() { + return Err(HelixError::InvalidApiKey); + } + match bcrypt::verify(key, &*API_KEY) { + Ok(true) => Ok(()), + Ok(false) => Err(HelixError::InvalidApiKey), + Err(_) => Err(HelixError::InvalidApiKey), } } #[cfg(test)] mod tests { + #[allow(unused_imports)] use super::*; // ============================================================================ @@ -22,88 +27,59 @@ mod tests { // ============================================================================ #[test] - fn test_verify_key_success() { - // The API key is set at compile time via env!("HELIX_API_KEY") - let result = verify_key(API_KEY_HASH); - assert!(result.is_ok()); - } - - #[test] - fn test_verify_key_wrong_key() { - let wrong_key = [0u8; 32]; // All zeros - let result = verify_key(&wrong_key); - assert!(result.is_err()); + fn test_bcrypt_verify_correct_key() { + // Generate a bcrypt hash for testing + let test_key =
"test-api-key-12345"; + let hash = bcrypt::hash(test_key, bcrypt::DEFAULT_COST).unwrap(); - if let Err(e) = result { - assert!(matches!(e, HelixError::InvalidApiKey)); - assert_eq!(e.to_string(), "Invalid API key"); - } + // Verify that bcrypt::verify works correctly + assert!(bcrypt::verify(test_key, &hash).unwrap()); } #[test] - fn test_verify_key_partial_match() { - // Create a key that matches the first half but not the second - let mut partial_key = [0u8; 32]; - partial_key[..16].copy_from_slice(&API_KEY_HASH[..16]); - // Rest stays as zeros + fn test_bcrypt_verify_wrong_key() { + let test_key = "test-api-key-12345"; + let wrong_key = "wrong-api-key"; + let hash = bcrypt::hash(test_key, bcrypt::DEFAULT_COST).unwrap(); - let result = verify_key(&partial_key); - assert!(result.is_err()); - assert!(matches!(result.unwrap_err(), HelixError::InvalidApiKey)); + // Verify that wrong key fails + assert!(!bcrypt::verify(wrong_key, &hash).unwrap()); } #[test] - fn test_verify_key_off_by_one() { - // Create a key that differs by just one bit in the last byte - let mut almost_correct = API_KEY_HASH.to_vec(); - almost_correct[31] ^= 1; // Flip the least significant bit + fn test_bcrypt_verify_empty_key() { + let test_key = "test-api-key-12345"; + let hash = bcrypt::hash(test_key, bcrypt::DEFAULT_COST).unwrap(); - let result = verify_key(&almost_correct); - assert!(result.is_err()); - assert!(matches!(result.unwrap_err(), HelixError::InvalidApiKey)); - } - - #[test] - fn test_verify_key_empty() { - let empty_key = []; - let result = verify_key(&empty_key); - assert!(result.is_err()); + // Empty key should not verify + assert!(!bcrypt::verify("", &hash).unwrap()); } #[test] - fn test_verify_key_wrong_length_short() { - let short_key = [0u8; 16]; - let result = verify_key(&short_key); - assert!(result.is_err()); - } + fn test_bcrypt_verify_similar_key() { + let test_key = "test-api-key-12345"; + let similar_key = "test-api-key-12346"; // Off by one character + let hash = bcrypt::hash(test_key, bcrypt::DEFAULT_COST).unwrap(); - #[test] - fn test_verify_key_wrong_length_long() { - let long_key = [0u8; 64]; - let result = verify_key(&long_key); - assert!(result.is_err()); + // Similar key should not verify + assert!(!bcrypt::verify(similar_key, &hash).unwrap()); } #[test] - fn test_verify_key_is_constant_time() { - // This test verifies that the comparison is constant-time - // by ensuring the function doesn't panic with different inputs - let key1 = [0u8; 32]; - let key2 = [255u8; 32]; - - // Both should fail but should take similar time - // (We can't easily test timing in unit tests, but we verify they both fail) - assert!(verify_key(&key1).is_err()); - assert!(verify_key(&key2).is_err()); + fn test_bcrypt_hash_format() { + let test_key = "test-api-key"; + let hash = bcrypt::hash(test_key, bcrypt::DEFAULT_COST).unwrap(); + + // bcrypt hashes start with $2b$ (or $2a$ or $2y$) + assert!(hash.starts_with("$2")); + // bcrypt hashes are 60 characters long + assert_eq!(hash.len(), 60); } - // ============================================================================ - // API Key Length Tests - // ============================================================================ - #[test] - fn test_api_key_length() { - // Verify the compile-time API key is exactly 32 bytes - assert_eq!(API_KEY_HASH.len(), 32); + fn test_verify_key_invalid_hash_format() { + // If the stored hash is invalid, verify should fail gracefully + let result = bcrypt::verify("any-key", "not-a-valid-bcrypt-hash"); + 
assert!(result.is_err()); } } diff --git a/helix-db/src/helix_gateway/tests/gateway_tests.rs b/helix-db/src/helix_gateway/tests/gateway_tests.rs index fd0aa359c..237e278bd 100644 --- a/helix-db/src/helix_gateway/tests/gateway_tests.rs +++ b/helix-db/src/helix_gateway/tests/gateway_tests.rs @@ -336,27 +336,15 @@ fn test_gateway_opts_default_workers_per_core() { #[cfg(feature = "api-key")] mod api_key_tests { - use super::*; use crate::helix_gateway::key_verification::verify_key; - use crate::protocol::{HelixError, request::Request}; + use crate::protocol::request::Request; + use crate::protocol::{Format, HelixError}; use axum::body::Bytes; - use crate::protocol::Format; #[test] - fn test_verify_key_integration_success() { - // The HELIX_API_KEY env var is the expected SHA-256 hash (32 bytes) - // In production, clients send their raw key in the x-api-key header, - // which gets SHA-256 hashed in request.rs and compared here - let expected_hash = env!("HELIX_API_KEY").as_bytes(); - - let result = verify_key(expected_hash); - assert!(result.is_ok()); - } - - #[test] - fn test_verify_key_integration_wrong_key() { - let wrong_hash = [0u8; 32]; - let result = verify_key(&wrong_hash); + fn test_verify_key_wrong_key() { + let wrong_key = "wrong-api-key"; + let result = verify_key(wrong_key); assert!(result.is_err()); if let Err(e) = result { @@ -365,80 +353,67 @@ mod api_key_tests { } #[test] - fn test_verify_key_integration_all_ones() { - let wrong_hash = [255u8; 32]; - let result = verify_key(&wrong_hash); + fn test_verify_key_empty_key() { + let empty_key = ""; + let result = verify_key(empty_key); assert!(result.is_err()); } #[test] - fn test_request_with_valid_api_key_hash() { - // The stored hash is what we expect to receive - let expected_hash_bytes = env!("HELIX_API_KEY").as_bytes(); - let mut hash_array = [0u8; 32]; - hash_array.copy_from_slice(expected_hash_bytes); + fn test_request_with_api_key() { + let api_key = "test-api-key".to_string(); let request = Request { name: "test_query".to_string(), req_type: crate::protocol::request::RequestType::Query, - api_key_hash: Some(hash_array), + api_key: Some(api_key.clone()), body: Bytes::from("{}"), in_fmt: Format::Json, out_fmt: Format::Json, }; - // Verify the key in the request would pass validation - assert!(request.api_key_hash.is_some()); - let result = verify_key(&request.api_key_hash.unwrap()); - assert!(result.is_ok()); + assert!(request.api_key.is_some()); + assert_eq!(request.api_key.unwrap(), api_key); } #[test] - fn test_request_with_invalid_api_key_hash() { - let wrong_hash = [123u8; 32]; - + fn test_request_without_api_key() { let request = Request { name: "test_query".to_string(), req_type: crate::protocol::request::RequestType::Query, - api_key_hash: Some(wrong_hash), + api_key: None, body: Bytes::from("{}"), in_fmt: Format::Json, out_fmt: Format::Json, }; - // Verify the key in the request would fail validation - assert!(request.api_key_hash.is_some()); - let result = verify_key(&request.api_key_hash.unwrap()); - assert!(result.is_err()); + assert!(request.api_key.is_none()); } #[test] fn test_api_key_hash_consistency() { - // Test that the stored hash is always the same - let hash1 = env!("HELIX_API_KEY").as_bytes(); - let hash2 = env!("HELIX_API_KEY").as_bytes(); + // Test that the stored bcrypt hash is always the same + let hash1 = env!("HELIX_API_KEY"); + let hash2 = env!("HELIX_API_KEY"); assert_eq!(hash1, hash2); } #[test] - fn test_client_key_hashing() { - // Test that hashing different client keys produces different 
hashes - // This simulates what happens in request.rs when processing x-api-key header - let mut hasher1 = sha_256::Sha256::new(); - let hash1 = hasher1.digest(b"client_key_1"); - - let mut hasher2 = sha_256::Sha256::new(); - let hash2 = hasher2.digest(b"client_key_2"); + fn test_bcrypt_verification_works() { + // Test that bcrypt verification works correctly + let test_key = "test-api-key-12345"; + let hash = bcrypt::hash(test_key, bcrypt::DEFAULT_COST).unwrap(); - assert_ne!(hash1, hash2); + assert!(bcrypt::verify(test_key, &hash).unwrap()); + assert!(!bcrypt::verify("wrong-key", &hash).unwrap()); } #[test] fn test_verify_key_error_type() { - let wrong_hash = [0u8; 32]; - let result = verify_key(&wrong_hash); + let wrong_key = "definitely-wrong-key"; + let result = verify_key(wrong_key); assert!(result.is_err()); match result { @@ -451,8 +426,8 @@ mod api_key_tests { #[test] fn test_verify_key_error_message() { - let wrong_hash = [0u8; 32]; - let result = verify_key(&wrong_hash); + let wrong_key = "wrong-key"; + let result = verify_key(wrong_key); if let Err(e) = result { assert_eq!(e.to_string(), "Invalid API key"); @@ -463,8 +438,8 @@ mod api_key_tests { fn test_verify_key_error_http_status() { use axum::response::IntoResponse; - let wrong_hash = [0u8; 32]; - let result = verify_key(&wrong_hash); + let wrong_key = "wrong-key"; + let result = verify_key(wrong_key); if let Err(e) = result { let response = e.into_response(); diff --git a/helix-db/src/helix_gateway/tests/mcp_tests.rs b/helix-db/src/helix_gateway/tests/mcp_tests.rs index 0907475f8..27a65b685 100644 --- a/helix-db/src/helix_gateway/tests/mcp_tests.rs +++ b/helix-db/src/helix_gateway/tests/mcp_tests.rs @@ -175,7 +175,7 @@ mod mcp_tests { name: "collect".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -326,7 +326,7 @@ mod mcp_tests { name: "out_step".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -386,7 +386,7 @@ mod mcp_tests { name: "in_step".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -444,7 +444,7 @@ mod mcp_tests { name: "out_e_step".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -503,7 +503,7 @@ mod mcp_tests { name: "in_e_step".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -549,7 +549,7 @@ mod mcp_tests { name: "n_from_type".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -604,7 +604,7 @@ mod mcp_tests { name: "e_from_type".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -675,7 +675,7 @@ mod mcp_tests { name: "filter_items".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -745,7 +745,7 @@ mod mcp_tests { name: "order_by".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -1256,7 +1256,7 @@ mod mcp_tests { 
name: "search_keyword".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -1294,7 +1294,7 @@ mod mcp_tests { name: "search_keyword".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -1336,7 +1336,7 @@ mod mcp_tests { name: "search_keyword".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -1407,7 +1407,7 @@ mod mcp_tests { name: "search_vector".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -1443,7 +1443,7 @@ mod mcp_tests { name: "search_vector".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -1486,7 +1486,7 @@ mod mcp_tests { name: "search_vector".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -1523,7 +1523,7 @@ mod mcp_tests { name: "search_vector_text".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -1560,7 +1560,7 @@ mod mcp_tests { name: "search_vector_text".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; @@ -1603,7 +1603,7 @@ mod mcp_tests { name: "search_vector_text".to_string(), req_type: RequestType::MCP, body: request_body, - api_key_hash: None, + api_key: None, in_fmt: Format::Json, out_fmt: Format::Json, }; diff --git a/helix-db/src/helix_gateway/tests/router_tests.rs b/helix-db/src/helix_gateway/tests/router_tests.rs index d459644e1..a31e951df 100644 --- a/helix-db/src/helix_gateway/tests/router_tests.rs +++ b/helix-db/src/helix_gateway/tests/router_tests.rs @@ -141,7 +141,7 @@ fn test_handler_invocation_success() { request: Request { name: "test".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, @@ -166,7 +166,7 @@ fn test_handler_invocation_error() { request: Request { name: "error".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, @@ -190,7 +190,7 @@ fn test_handler_invocation_echo() { request: Request { name: "test_path".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, @@ -221,7 +221,7 @@ fn test_handler_input_creation() { request: Request { name: "test".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, @@ -241,7 +241,7 @@ fn test_handler_input_with_body() { request: Request { name: "query".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from(body_data.clone()), in_fmt: Format::Json, out_fmt: Format::Json, diff --git a/helix-db/src/helix_gateway/tests/worker_pool_concurrency_tests.rs b/helix-db/src/helix_gateway/tests/worker_pool_concurrency_tests.rs index e4379e486..d6cec63ce 100644 --- 
a/helix-db/src/helix_gateway/tests/worker_pool_concurrency_tests.rs +++ b/helix-db/src/helix_gateway/tests/worker_pool_concurrency_tests.rs @@ -1,3 +1,4 @@ +use crate::helix_engine::traversal_core::HelixGraphEngine; /// Concurrency-specific tests for WorkerPool /// /// This test suite focuses on concurrent behavior and race conditions in the WorkerPool. @@ -16,20 +17,15 @@ /// - Worker fairness under load /// - No coordination between workers accessing shared graph /// - No deadlocks or livelocks under high concurrency - use crate::helix_engine::traversal_core::HelixGraphEngineOpts; use crate::helix_engine::traversal_core::config::Config; -use crate::helix_engine::{traversal_core::HelixGraphEngine}; use crate::helix_gateway::worker_pool::WorkerPool; -use crate::helix_gateway::{ - gateway::CoreSetter, - router::router::HelixRouter, -}; +use crate::helix_gateway::{gateway::CoreSetter, router::router::HelixRouter}; use crate::protocol::Format; use crate::protocol::{Request, request::RequestType}; use axum::body::Bytes; -use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; +use std::sync::atomic::{AtomicUsize, Ordering}; use std::time::Duration; use tempfile::TempDir; use tokio::time::timeout; @@ -49,14 +45,17 @@ fn create_request(name: &str) -> Request { Request { name: name.to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, } } -fn create_test_pool(num_cores: usize, threads_per_core: usize) -> (WorkerPool, Arc<HelixGraphEngine>, TempDir) { +fn create_test_pool( + num_cores: usize, + threads_per_core: usize, +) -> (WorkerPool, Arc<HelixGraphEngine>, TempDir) { let (graph, temp_dir) = create_test_graph(); let router = Arc::new(HelixRouter::new(None, None)); let rt = Arc::new( @@ -106,8 +105,11 @@ async fn test_concurrent_requests_high_load() { } // All should complete (no panics or hangs) - assert_eq!(completed, num_concurrent, - "All requests should complete, got {}/{}", completed, num_concurrent); + assert_eq!( + completed, num_concurrent, + "All requests should complete, got {}/{}", + completed, num_concurrent + ); println!("High load test: {} requests completed", num_concurrent); } @@ -223,7 +225,10 @@ async fn test_parity_mechanism_both_workers() { // All should complete if parity mechanism allows all workers to participate assert_eq!(completed, num_requests); - println!("Parity test: {} requests completed across even/odd workers", completed); + println!( + "Parity test: {} requests completed across even/odd workers", + completed + ); } #[tokio::test] @@ -326,7 +331,10 @@ async fn test_concurrent_different_request_types() { let expected = 25 * request_types.len(); assert_eq!(completed, expected); - println!("Different request types: {}/{} completed", completed, expected); + println!( + "Different request types: {}/{} completed", + completed, expected + ); } #[tokio::test] @@ -396,6 +404,9 @@ async fn test_worker_distribution_fairness() { println!("Fairness test: 100 requests completed in {:?}", elapsed); // Basic sanity: should complete in reasonable time - assert!(elapsed < Duration::from_secs(10), - "Requests took {:?}, may indicate poor distribution", elapsed); + assert!( + elapsed < Duration::from_secs(10), + "Requests took {:?}, may indicate poor distribution", + elapsed + ); } diff --git a/helix-db/src/helix_gateway/tests/worker_pool_tests.rs b/helix-db/src/helix_gateway/tests/worker_pool_tests.rs index cca337961..42a804ee7 100644 --- a/helix-db/src/helix_gateway/tests/worker_pool_tests.rs +++
b/helix-db/src/helix_gateway/tests/worker_pool_tests.rs @@ -42,7 +42,7 @@ fn create_test_request(name: &str, req_type: RequestType) -> Request { Request { name: name.to_string(), req_type, - api_key_hash: None, + api_key: None, body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, @@ -588,7 +588,7 @@ async fn test_request_with_body_data() { body: Bytes::from(vec![1, 2, 3, 4]), in_fmt: Format::Json, out_fmt: Format::Json, - api_key_hash: None, + api_key: None, }; let result = pool.process(request).await; @@ -648,7 +648,7 @@ async fn test_request_format_json() { body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, - api_key_hash: None, + api_key: None, }; let result = pool.process(request).await; @@ -1143,7 +1143,7 @@ async fn test_request_with_large_body() { body: Bytes::from(large_body), in_fmt: Format::Json, out_fmt: Format::Json, - api_key_hash: None, + api_key: None, }; let result = pool.process(request).await; @@ -1246,7 +1246,7 @@ async fn test_request_type_query_explicit() { body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, - api_key_hash: None, + api_key: None, }; let result = pool.process(request).await; diff --git a/helix-db/src/protocol/request.rs b/helix-db/src/protocol/request.rs index 3fe3c314d..1fa3dc7a1 100644 --- a/helix-db/src/protocol/request.rs +++ b/helix-db/src/protocol/request.rs @@ -16,7 +16,7 @@ pub type ReqMsg = (Request, RetChan); pub struct Request { pub name: String, pub req_type: RequestType, - pub api_key_hash: Option<[u8; 32]>, + pub api_key: Option<String>, /// This contains the input parameters serialized with in_fmt pub body: Bytes, pub in_fmt: Format, @@ -65,21 +65,17 @@ where None => Format::default(), }; - let api_key_hash = { + let api_key = { #[cfg(feature = "api-key")] match headers.get("x-api-key") { Some(v) => match v.to_str() { - Ok(s) => { - let mut hasher = sha_256::Sha256::new(); - let hash = hasher.digest(s.as_bytes()); - Some(hash) - } + Ok(s) => Some(s.to_string()), Err(_) => return Err(StatusCode::BAD_REQUEST), }, None => return Err(StatusCode::BAD_REQUEST), } #[cfg(not(feature = "api-key"))] - None::<[u8; 32]> + None::<String> }; let out_fmt = match headers.get(ACCEPT) { @@ -100,7 +96,7 @@ where let out = Request { name, req_type, - api_key_hash, + api_key, body, in_fmt, out_fmt, @@ -124,7 +120,7 @@ mod tests { let request = Request { name: "test_query".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: body.clone(), in_fmt: Format::Json, out_fmt: Format::Json, @@ -141,7 +137,7 @@ mod tests { let request = Request { name: "original".to_string(), req_type: RequestType::MCP, - api_key_hash: None, + api_key: None, body: body.clone(), in_fmt: Format::Json, out_fmt: Format::Json, @@ -157,7 +153,7 @@ mod tests { let request = Request { name: "debug_test".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from("test"), in_fmt: Format::Json, out_fmt: Format::Json, @@ -218,7 +214,7 @@ mod tests { let request = Request { name: "empty_body".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::new(), in_fmt: Format::Json, out_fmt: Format::Json, @@ -235,7 +231,7 @@ mod tests { let request = Request { name: "large_body".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: body.clone(), in_fmt: Format::Json, out_fmt: Format::Json, @@ -249,7 +245,7 @@ mod tests { let request = Request { name: "test_世界_query".to_string(), req_type: RequestType::Query, - api_key_hash: None, +
api_key: None, body: Bytes::from("test"), in_fmt: Format::Json, out_fmt: Format::Json, @@ -259,53 +255,36 @@ mod tests { } // ============================================================================ - // API Key Hash Tests + // API Key Tests // ============================================================================ #[cfg(feature = "api-key")] #[test] - fn test_request_with_api_key_hash() { - let hash = [42u8; 32]; + fn test_request_with_api_key() { + let key = "my-secret-api-key".to_string(); let request = Request { name: "secure_query".to_string(), req_type: RequestType::Query, - api_key_hash: Some(hash), + api_key: Some(key.clone()), body: Bytes::from("test"), in_fmt: Format::Json, out_fmt: Format::Json, }; - assert!(request.api_key_hash.is_some()); - assert_eq!(request.api_key_hash.unwrap(), hash); + assert!(request.api_key.is_some()); + assert_eq!(request.api_key.unwrap(), key); } #[cfg(feature = "api-key")] #[test] - fn test_api_key_hash_length() { - // Verify that API key hashes are always 32 bytes (SHA-256) - let hash = [0u8; 32]; - let request = Request { - name: "test".to_string(), - req_type: RequestType::Query, - api_key_hash: Some(hash), - body: Bytes::from("test"), - in_fmt: Format::Json, - out_fmt: Format::Json, - }; - - assert_eq!(request.api_key_hash.unwrap().len(), 32); - } - - #[cfg(feature = "api-key")] - #[test] - fn test_api_key_hash_different_values() { - let hash1 = [1u8; 32]; - let hash2 = [2u8; 32]; + fn test_api_key_different_values() { + let key1 = "api-key-1".to_string(); + let key2 = "api-key-2".to_string(); let request1 = Request { name: "test1".to_string(), req_type: RequestType::Query, - api_key_hash: Some(hash1), + api_key: Some(key1.clone()), body: Bytes::from("test"), in_fmt: Format::Json, out_fmt: Format::Json, @@ -314,43 +293,43 @@ mod tests { let request2 = Request { name: "test2".to_string(), req_type: RequestType::Query, - api_key_hash: Some(hash2), + api_key: Some(key2.clone()), body: Bytes::from("test"), in_fmt: Format::Json, out_fmt: Format::Json, }; - assert_ne!(request1.api_key_hash.unwrap(), request2.api_key_hash.unwrap()); + assert_ne!(request1.api_key.unwrap(), request2.api_key.unwrap()); } #[test] - fn test_request_without_api_key_hash() { + fn test_request_without_api_key() { let request = Request { name: "unsecured_query".to_string(), req_type: RequestType::Query, - api_key_hash: None, + api_key: None, body: Bytes::from("test"), in_fmt: Format::Json, out_fmt: Format::Json, }; - assert!(request.api_key_hash.is_none()); + assert!(request.api_key.is_none()); } #[cfg(feature = "api-key")] #[test] - fn test_api_key_hash_clone() { - let hash = [99u8; 32]; + fn test_api_key_clone() { + let key = "test-api-key".to_string(); let request = Request { name: "test".to_string(), req_type: RequestType::Query, - api_key_hash: Some(hash), + api_key: Some(key), body: Bytes::from("test"), in_fmt: Format::Json, out_fmt: Format::Json, }; let cloned = request.clone(); - assert_eq!(cloned.api_key_hash, request.api_key_hash); + assert_eq!(cloned.api_key, request.api_key); } } diff --git a/hql-tests/tests/add_n/helix.toml b/hql-tests/tests/add_n/helix.toml index 2e3447042..5c7ba317c 100644 --- a/hql-tests/tests/add_n/helix.toml +++ b/hql-tests/tests/add_n/helix.toml @@ -1,9 +1,13 @@ [project] name = "add_n" queries = "." 
+container_runtime = "docker" [local.dev] port = 6969 build_mode = "debug" -[cloud] +[cloud.helix-production.helix] +cluster_id = "1f0ca9da-5f56-6a04-873b-010203040506" +region = "us-east-1" +build_mode = "release" diff --git a/hql-tests/tests/user_test_1/helix.toml b/hql-tests/tests/user_test_1/helix.toml index 321dd0bc9..fcc620a45 100644 --- a/hql-tests/tests/user_test_1/helix.toml +++ b/hql-tests/tests/user_test_1/helix.toml @@ -1,9 +1,17 @@ [project] -name = "where_filter" -queries = "." +name = "my-project" +queries = "./" +container_runtime = "docker" + +# Testing out helix db with default schema +[local.test] +port = 7070 +build_mode = "debug" +bm25 = true [local.dev] port = 6969 build_mode = "debug" +bm25 = true -[cloud] +[cloud] \ No newline at end of file diff --git a/hql-tests/tests/user_test_1/queries.hx b/hql-tests/tests/user_test_1/queries.hx index 3d1bbda15..411cc5395 100644 --- a/hql-tests/tests/user_test_1/queries.hx +++ b/hql-tests/tests/user_test_1/queries.hx @@ -1,340 +1,30 @@ -QUERY CreateUsers (name:String, email:String, password:String ) => - user <- AddN<User>({ - name: name, - email: email, - password: password, - }) - RETURN user - - - -QUERY GetUserByEmail(email: String) => - user <- N<User>::WHERE(_::{email}::EQ(email)) +// queries.hx +// Start writing your queries here. +// +// You can use the schema to help you write your queries. +// +// Queries take the form: +// QUERY {query name}({input name}: {input type}) => +// {variable} <- {traversal} +// RETURN {variable} +// +// Example: +// QUERY GetUserFriends(user_id: String) => +// friends <- N<User>(user_id)::Out<Knows> +// RETURN friends +// +// +// For more information on how to write queries, +// see the documentation at https://docs.helix-db.com +// or checkout our GitHub at https://github.com/HelixDB/helix-db + +QUERY createUser(name: String, email: String) => + user <- AddN<User>({ + name: name, + email: email, + }) RETURN user - -QUERY GetAppByID(app_id: ID) => - app <- N<App>(app_id) - branches <- app::Out<App_Has_Branch> - - RETURN app - - - -QUERY GetAppsByUserId(user_id: ID) => - user <- N<User>(user_id) - apps <- user::Out<User_Has_Access_To> - RETURN apps::{ - access_modified_at: _::InE<User_Has_Access_To>::{modified_at}::RANGE(0, 1), - name, - description, - created_at, - favorite, - archived, - id, - prod_db: _::Out<AppHasProdDb>::RANGE(0, 1)::{ - id, - connection: _::Out<DbHasDbConnection>::RANGE(0, 1)::{ - id, - host, - port, - username, - password, - name, - ssl - } - } - } - - - - -QUERY UpdateAppName (app_id: ID, name: String) => - app <- N<App>(app_id)::UPDATE({name: name}) - RETURN NONE - -QUERY UpdateAppArchived (app_id: ID, archived: Boolean) => - app <- N<App>(app_id)::UPDATE({archived: archived}) - RETURN NONE - -QUERY UpdateAppFavorite (app_id: ID, favorite_update: Boolean) => - app <- N<App>(app_id)::UPDATE({favorite: favorite_update}) - RETURN NONE - -QUERY UpdateAppDescription (app_id: ID, description: String) => - app <- N<App>(app_id)::UPDATE({description: description}) - RETURN NONE - -QUERY CreateFullAppWithPages (user_id: ID, app_name: String, app_description: String, created_at: Date, favorite: Boolean,archived: Boolean) => - user <- N<User>(user_id) - - // Create the main app - app <- AddN<App>({ - name: app_name, - description: app_description, - created_at: created_at, - favorite: favorite, - archived: archived, - }) - - dev_branch <- AddN<Branch>({ - name: "Development" - }) - - prod_db <- AddN<Database> - -app_db_edge <- AddE<AppHasProdDb>({ - created_at: created_at - })::From(app)::To(prod_db) - - prod_db_connection <- AddN<DbConnection>({ - host: "TestHost", - port: "TestPort", - username: "TestUsername", - password: "TestPassword", - name: "TestName", - ssl: "Prefer", -
}) -prod_db_connection_edge <- AddE<DbHasDbConnection>({ - created_at: created_at - })::From(prod_db)::To(prod_db_connection) - -test_db <- AddN<Database> - - test_db_connection <- AddN<DbConnection>({ -host: "TestHost", - port: "TestPort", - username: "TestUsername", - password: "TestPassword", - name: "TestName", - ssl: "Prefer", - }) - -test_db_connection_edge <- AddE<DbHasDbConnection>({ - created_at: created_at - })::From(test_db)::To(test_db_connection) - - staging_branch <- AddN<Branch>({ - name: "Staging" - }) - - test_db_stage_branch_edge <- AddE<BranchHasDbConnection>({ - created_at: created_at - })::From(staging_branch)::To(test_db) -test_db_dev_branch_edge <- AddE<BranchHasDbConnection>({ - created_at: created_at - })::From(dev_branch)::To(test_db) - - frontend_dev <- AddN<Frontend> - backend_dev <- AddN<Backend> - - frontend_staging <- AddN<Frontend> - backend_staging <- AddN<Backend> - - root_element <- AddN<Element>({ - element_id: "root_element", - name: "root_element" - }) - - root_element_404 <- AddN<Element>({ - element_id: "root_element", - name: "root_element" - }) - - root_element_reset <- AddN<Element>({ - element_id: "root_element", - name: "root_element" - }) - - index_page <- AddN<Page>({ - name: "index" - }) - - not_found_page <- AddN<Page>({ - name: "Page not found" - }) - - reset_password_page <- AddN<Page>({ - name: "Reset Password" - }) - - main_folder <- AddN<PageFolder>({ - name: "Unsorted" - }) - backend_dev_root_folder <- AddN<FolderTree>({ - name: "API", - description: "Main folder where your endpoints and functions exist." - }) - backend_staging_root_folder <- AddN<FolderTree>({ - name: "API", - description: "Main folder where your endpoints and functions exist." - }) - backend_dev_root_folder_edge <- AddE<Backend_Has_Root_Folder>({ - created_at: created_at, - })::From(backend_dev)::To(backend_dev_root_folder) -backend_staging_root_folder_edge <- AddE<Backend_Has_Root_Folder>({ - created_at: created_at, - })::From(backend_staging)::To(backend_staging_root_folder) - - user_app_edge <- AddE<User_Has_Access_To>({ - created_at: created_at, - modified_at: created_at - })::From(user)::To(app) - - app_dev_branch_edge <- AddE<App_Has_Branch>({ - created_at: created_at - })::From(app)::To(dev_branch) - - app_staging_branch_edge <- AddE<App_Has_Branch>({ - created_at: created_at - })::From(app)::To(staging_branch) - - dev_branch_frontend_edge <- AddE<Branch_Has_Frontend>({ - created_at: created_at - })::From(dev_branch)::To(frontend_dev) - - dev_branch_backend_edge <- AddE<Branch_Has_Backend>({ - created_at: created_at - })::From(dev_branch)::To(backend_dev) - - staging_branch_frontend_edge <- AddE<Branch_Has_Frontend>({ - created_at: created_at - })::From(staging_branch)::To(frontend_staging) - - staging_branch_backend_edge <- AddE<Branch_Has_Backend>({ - created_at: created_at - })::From(staging_branch)::To(backend_staging) - - index_page_element_edge <- AddE<Page_Has_Root_Element>({ - assigned_at: created_at - })::From(index_page)::To(root_element) - - not_found_page_element_edge <- AddE<Page_Has_Root_Element>({ - assigned_at: created_at - })::From(not_found_page)::To(root_element_404) - - reset_page_element_edge <- AddE<Page_Has_Root_Element>({ - assigned_at: created_at - })::From(reset_password_page)::To(root_element_reset) - - folder_index_edge <- AddE<PageFolder_Contains_Page>({ - assigned_at: created_at - })::From(main_folder)::To(index_page) - - folder_404_edge <- AddE<PageFolder_Contains_Page>({ - assigned_at: created_at - })::From(main_folder)::To(not_found_page) - - folder_reset_edge <- AddE<PageFolder_Contains_Page>({ - assigned_at: created_at - })::From(main_folder)::To(reset_password_page) - - frontend_index_edge <- AddE<Frontend_Has_Page>({ - assigned_at: created_at - })::From(frontend_dev)::To(index_page) - - frontend_404_edge <- AddE<Frontend_Has_Page>({ - assigned_at: created_at - })::From(frontend_dev)::To(not_found_page) - - frontend_reset_edge <- AddE<Frontend_Has_Page>({ - assigned_at: created_at - })::From(frontend_dev)::To(reset_password_page) - - frontend_folder_edge <- AddE<Frontend_Contains_PageFolder>({ - assigned_at: created_at -
})::From(frontend_dev)::To(main_folder) - - -RETURN { - app: { - branches: [ - { - name: dev_branch::{name}, - frontend: { - page_folders: [ - { - name: main_folder::{name}, - pages: [index_page, not_found_page, reset_password_page] - } - ], - id:frontend_dev::{id} - }, - test_db: { - id: test_db::{id}, - connection: { - id: test_db_connection::{id}, - host: test_db_connection::{host}, - port: test_db_connection::{port}, - username: test_db_connection::{username}, - password:test_db_connection::{password}, - name: test_db_connection::{name}, - ssl:test_db_connection::{ssl} - }, - }, - backend: { - id: backend_dev::{id}, - root_folder: { - id:backend_dev_root_folder::{id}, - name: backend_dev_root_folder::{name}, - description: backend_dev_root_folder::{description} - }, - } - }, - { - name: staging_branch::{name}, -frontend: { - page_folders: [ - { - name: main_folder::{name}, - pages: [index_page, not_found_page, reset_password_page] - } - ], - id:frontend_dev::{id} - },test_db: { - id: test_db::{id}, - connection: { - id: test_db_connection::{id}, - host: test_db_connection::{host}, - port: test_db_connection::{port}, - username: test_db_connection::{username}, - password:test_db_connection::{password}, - name: test_db_connection::{name}, - ssl:test_db_connection::{ssl} - }, - }, - backend: { - id: backend_staging::{id}, - root_folder: { - id:backend_staging_root_folder::{id}, - name: backend_staging_root_folder::{name}, - description: backend_staging_root_folder::{description} - }, - } - } - ], - name: app::{name}, - description: app::{description}, - favorite: app::{favorite}, - archived: app::{archived}, - id:app::{id}, -prod_db: { - id: prod_db::{id}, - connection: { - id: prod_db_connection::{id}, - host: prod_db_connection::{host}, - port: prod_db_connection::{port}, - username: prod_db_connection::{username}, - password:prod_db_connection::{password}, - name: prod_db_connection::{name}, - ssl:prod_db_connection::{ssl} - } - } - } -} - \ No newline at end of file + +QUERY getUserByEmail(email: String) => + user <- N<User>({email: email}) + RETURN user \ No newline at end of file diff --git a/hql-tests/tests/user_test_1/schema.hx b/hql-tests/tests/user_test_1/schema.hx index d79daef9c..664dca9b6 100644 --- a/hql-tests/tests/user_test_1/schema.hx +++ b/hql-tests/tests/user_test_1/schema.hx @@ -1,163 +1,44 @@ +// schema.hx +// Start building your schema here. +// +// The schema is used to ensure a level of type safety in your queries. +// +// The schema is made up of Node types, denoted by N::, +// and Edge types, denoted by E:: +// +// Under the Node types you can define fields that +// will be stored in the database. +// +// Under the Edge types you can define what type of node +// the edge will connect to and from, and also the +// properties that you want to store on the edge.
+// +// Example: +// +// N::User { +// Name: String, +// Label: String, +// Age: I64, +// IsAdmin: Boolean, +// } +// +// E::Knows { +// From: User, +// To: User, +// Properties: { +// Since: I64, +// } +// } + N::User { - name: String, - email: String, - password: String, -} - -N::App { - name: String DEFAULT "Some", - description: String DEFAULT "", - created_at: Date, - favorite: Boolean, - archived: Boolean, -} - -N::Database { - -} - -E::AppHasProdDb { - From: App, - To: Database, - Properties: { - created_at: Date, - } - } -N::DbConnection { - host: String, - port: String, - username: String, - password: String, - name: String, - ssl: String, -} - -E::DbHasDbConnection { - From: Database, - To: DbConnection, - Properties: { - created_at: Date, - } - } - - -N::Branch { - name: String, -} -E::BranchHasDbConnection { - From: Branch, - To: Database, - Properties: { - created_at: Date, - } - } -N::FolderItem { - - } -N::Frontend { -} - -N::Backend { -} - -N::Element { - element_id: String, - name: String, -} - -N::PageFolder { - name: String, -} - -N::Page { - name: String, -} - -N::FolderTree { - name: String, - description: String, -} - -E::Backend_Has_Root_Folder { - From: Backend, - To: FolderTree, - Properties: { - created_at: Date, - } -} - -E::FolderItem_Has_FolderTree { - From: FolderItem, - To: FolderTree, - Properties: { - created_at: Date, - } - } -E::App_Has_Branch { - From: App, - To: Branch, - Properties: { - created_at: Date, - } -} - -E::Branch_Has_Frontend { - From: Branch, - To: Frontend, - Properties: { - created_at: Date, - } -} - -E::Branch_Has_Backend { - From: Branch, - To: Backend, - Properties: { - created_at: Date, - } -} - -E::Frontend_Contains_PageFolder { - From: Frontend, - To: PageFolder, - Properties: { - created_at: Date, - assigned_at: Date, - } -} - -E::Page_Has_Root_Element { - From: Page, - To: Element, - Properties: { - created_at: Date, - assigned_at: Date, - } -} - -E::Frontend_Has_Page { - From: Frontend, - To: Page, - Properties: { - created_at: Date, - assigned_at: Date, - } -} - -E::PageFolder_Contains_Page { - From: PageFolder, - To: Page, - Properties: { - created_at: Date, - assigned_at: Date, - } -} - -E::User_Has_Access_To { - From: User, - To: App, - Properties: { - created_at: Date, - modified_at: Date, - } + name: String, + INDEX email: String, + created_at: Date DEFAULT NOW +} + +N::Post { + title: String, + content: String, + published: Boolean DEFAULT true, + created_at: Date DEFAULT NOW } \ No newline at end of file
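The handler contract in sse_client.rs is worth spelling out: SseClient::connect takes an FnMut(SseEvent) -> Result<bool>, where Ok(true) keeps consuming the stream and Ok(false) closes it. Below is a minimal sketch of driving a deploy stream with SseProgressHandler; the endpoint URL and the choice of events handled are illustrative, not the CLI's actual command wiring.

use eyre::Result;

// Sketch only: SseClient, SseEvent and SseProgressHandler are the types
// added in helix-cli/src/sse_client.rs; the URL is a placeholder.
async fn watch_deploy(url: String) -> Result<()> {
    let progress = SseProgressHandler::new("Deploying...");
    SseClient::new(url)
        .post() // stream over POST, as the deploy flow does
        .connect(|event| match event {
            SseEvent::Progress { percentage, message } => {
                progress.set_progress(percentage);
                if let Some(msg) = message {
                    progress.set_message(&msg);
                }
                Ok(true) // keep listening
            }
            SseEvent::Log { message, .. } => {
                progress.println(&message);
                Ok(true)
            }
            SseEvent::Deployed { url, .. } => {
                progress.finish(&format!("Deployed at {url}"));
                Ok(false) // terminal event: stop the stream
            }
            SseEvent::Error { error } => {
                progress.finish_error(&error);
                Ok(false)
            }
            _ => Ok(true),
        })
        .await
}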
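The key-verification rewrite changes the trust model as well as the algorithm: the server previously embedded a 32-byte SHA-256 digest at compile time and compared it in constant time, whereas it now reads a bcrypt hash from HELIX_API_KEY at startup, and clients keep sending the raw key in the x-api-key header. A sketch of how an operator might generate that hash follows (the key string is illustrative). Note that bcrypt verification at DEFAULT_COST deliberately takes tens to hundreds of milliseconds, so every authenticated request now pays that cost where the old byte comparison was essentially free.

// Sketch: produce the bcrypt hash that verify_key() checks raw client
// keys against. bcrypt::hash/verify come from the bcrypt crate added
// in helix-db/Cargo.toml.
fn main() {
    let raw_key = "example-cluster-key"; // illustrative; clients send this raw value
    let hash = bcrypt::hash(raw_key, bcrypt::DEFAULT_COST).expect("bcrypt hashing failed");
    // Export this on the server, e.g. HELIX_API_KEY='$2b$12$...'
    println!("HELIX_API_KEY={hash}");
    // Round-trip check mirroring what verify_key() does per request.
    assert!(bcrypt::verify(raw_key, &hash).unwrap());
}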
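Finally, the .env helpers in helix-cli/src/utils.rs encode one non-obvious rule that the tests pin down: updating an existing key rewrites the value in place and keeps whatever comment already sits above it, while a brand-new key is appended together with the optional comment. A short usage sketch, with the path and values illustrative:

use std::path::Path;

// Sketch: persist a deployed instance's URL the way the CLI might,
// using the helpers added above.
fn record_cloud_url() -> std::io::Result<()> {
    let env_file = Path::new(".env");
    // First write appends the comment plus the key...
    add_env_var_with_comment(
        env_file,
        "HELIX_CLOUD_URL",
        "https://example.helix.cloud",
        Some("# HelixDB Cloud URL for instance: dev"),
    )?;
    // ...a later write updates the value in place and keeps the old comment.
    add_env_var_to_file(env_file, "HELIX_CLOUD_URL", "https://example2.helix.cloud")
}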