mirror of
https://github.com/siteboon/claudecodeui.git
synced 2026-05-16 01:12:46 +00:00
Compare commits
72 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5e7c4c5f8c | ||
|
|
3f71d4932b | ||
|
|
80561ee9e9 | ||
|
|
658421c1c4 | ||
|
|
881465aa71 | ||
|
|
9f2afebc66 | ||
|
|
df3d5de8c1 | ||
|
|
b44c93d884 | ||
|
|
a1c6d667a4 | ||
|
|
0753c04783 | ||
|
|
e1275e6d3c | ||
|
|
ccb8b83692 | ||
|
|
641731b3ef | ||
|
|
d4bdc667cc | ||
|
|
ce724e6e3f | ||
|
|
b4a39c7297 | ||
|
|
44edf94f3a | ||
|
|
f6200e3e95 | ||
|
|
fa5a23897c | ||
|
|
c5e55adc89 | ||
|
|
09dd407648 | ||
|
|
89b754d186 | ||
|
|
86b6545c35 | ||
|
|
49dd3cfb23 | ||
|
|
457ca0daab | ||
|
|
09dcea05fb | ||
|
|
3969135bd4 | ||
|
|
25820ed995 | ||
|
|
fc3504eaed | ||
|
|
ec0ff974cb | ||
|
|
c471b5d3fa | ||
|
|
5758bee8a0 | ||
|
|
7763e60fb3 | ||
|
|
25b00b58de | ||
|
|
6a13e1773b | ||
|
|
6102b74455 | ||
|
|
9ef1ab533d | ||
|
|
e9c7a5041c | ||
|
|
289520814c | ||
|
|
09486016e6 | ||
|
|
4c106a5083 | ||
|
|
63e996bb77 | ||
|
|
fbad3a90f8 | ||
|
|
96463df8da | ||
|
|
31f28a2c18 | ||
|
|
8ff5f35c05 | ||
|
|
641304242d | ||
|
|
c3599cd2c4 | ||
|
|
9b11c034d9 | ||
|
|
b6d19201b6 | ||
|
|
4a569725da | ||
|
|
6ce3306947 | ||
|
|
d0dd007d0f | ||
|
|
13e97e2c71 | ||
|
|
c7a5baf147 | ||
|
|
e2459cb0f8 | ||
|
|
9552577e94 | ||
|
|
590dd42649 | ||
|
|
2207d05c1c | ||
|
|
a8dab0edcf | ||
|
|
e61f8a543d | ||
|
|
388134c7a5 | ||
|
|
ef51de259e | ||
|
|
1628868470 | ||
|
|
8f1042cf25 | ||
|
|
051a6b1e74 | ||
|
|
f1063fd339 | ||
|
|
27cd12432b | ||
|
|
004135ef01 | ||
|
|
b54cdf8168 | ||
|
|
42a131389a | ||
|
|
ebd1c0db92 |
51
.github/workflows/docker.yml
vendored
Normal file
51
.github/workflows/docker.yml
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
name: Docker
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
extra_tag:
|
||||
description: 'Additional tag to push alongside the template tag (e.g. v1.2.3, leave empty for none)'
|
||||
required: false
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
template: [claude-code, codex, gemini]
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Compute tags
|
||||
id: tags
|
||||
run: |
|
||||
TAGS="docker.io/cloudcliai/sandbox:${{ matrix.template }}"
|
||||
if [ -n "${{ inputs.extra_tag }}" ]; then
|
||||
TAGS="$TAGS,docker.io/cloudcliai/sandbox:${{ matrix.template }}-${{ inputs.extra_tag }}"
|
||||
fi
|
||||
echo "tags=$TAGS" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ./docker
|
||||
file: ./docker/${{ matrix.template }}/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.tags.outputs.tags }}
|
||||
cache-from: type=gha,scope=${{ matrix.template }}
|
||||
cache-to: type=gha,mode=max,scope=${{ matrix.template }}
|
||||
50
.github/workflows/release.yml
vendored
Normal file
50
.github/workflows/release.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
name: Release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
increment:
|
||||
description: 'Version bump: patch, minor, major, or explicit (e.g. 1.27.0)'
|
||||
required: true
|
||||
default: 'patch'
|
||||
type: string
|
||||
release_name:
|
||||
description: 'Custom release name (optional, defaults to "CloudCLI UI vX.Y.Z")'
|
||||
required: false
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
id-token: write
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.RELEASE_PAT }}
|
||||
|
||||
- uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 22
|
||||
registry-url: https://registry.npmjs.org
|
||||
|
||||
- name: git config
|
||||
run: |
|
||||
git config user.name "${GITHUB_ACTOR}"
|
||||
git config user.email "${GITHUB_ACTOR}@users.noreply.github.com"
|
||||
|
||||
- run: npm ci
|
||||
|
||||
- name: Release
|
||||
run: |
|
||||
ARGS="--ci --increment=${{ inputs.increment }}"
|
||||
if [ -n "${{ inputs.release_name }}" ]; then
|
||||
ARGS="$ARGS --github.releaseName=\"${{ inputs.release_name }}\""
|
||||
fi
|
||||
npx release-it $ARGS
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.RELEASE_PAT }}
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -8,6 +8,7 @@ lerna-debug.log*
|
||||
|
||||
# Build outputs
|
||||
dist/
|
||||
dist-server/
|
||||
dist-ssr/
|
||||
build/
|
||||
out/
|
||||
@@ -136,6 +137,8 @@ tasks/
|
||||
!src/i18n/locales/ja/tasks.json
|
||||
!src/i18n/locales/ru/tasks.json
|
||||
!src/i18n/locales/de/tasks.json
|
||||
!src/i18n/locales/tr/tasks.json
|
||||
!src/i18n/locales/it/tasks.json
|
||||
|
||||
# Git worktrees
|
||||
.worktrees/
|
||||
.worktrees/
|
||||
|
||||
@@ -6,7 +6,8 @@
|
||||
"requireCleanWorkingDir": true
|
||||
},
|
||||
"npm": {
|
||||
"publish": true
|
||||
"publish": true,
|
||||
"publishArgs": ["--access public"]
|
||||
},
|
||||
"github": {
|
||||
"release": true,
|
||||
|
||||
155
CHANGELOG.md
155
CHANGELOG.md
@@ -3,6 +3,161 @@
|
||||
All notable changes to CloudCLI UI will be documented in this file.
|
||||
|
||||
|
||||
## [1.31.5](https://github.com/siteboon/claudecodeui/compare/v1.31.4...v1.31.5) (2026-04-30)
|
||||
|
||||
### New Features
|
||||
|
||||
* add auto mode to claude code ([3f71d49](https://github.com/siteboon/claudecodeui/commit/3f71d4932b05dfedcdf816e2a3d7d0cd69c4f566))
|
||||
|
||||
## [1.31.4](https://github.com/siteboon/claudecodeui/compare/v1.31.3...v1.31.4) (2026-04-30)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* bump codex sdk to latest version ([658421c](https://github.com/siteboon/claudecodeui/commit/658421c1c44ec4eb58b69ec7b1844a9fba11a3f3))
|
||||
|
||||
## [1.31.3](https://github.com/siteboon/claudecodeui/compare/v1.31.2...v1.31.3) (2026-04-30)
|
||||
|
||||
## [1.31.2](https://github.com/siteboon/claudecodeui/compare/v1.31.0...v1.31.2) (2026-04-30)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* migrations for new sqlite schema ([0753c04](https://github.com/siteboon/claudecodeui/commit/0753c047837dab17b86ae4453027e30b465870f8))
|
||||
|
||||
## [1.31.0](https://github.com/siteboon/claudecodeui/compare/v1.30.0...v1.31.0) (2026-04-30)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **/status:** use CLAUDE_MODELS.DEFAULT instead of stale 'claude-sonnet-4.5' fallback ([#723](https://github.com/siteboon/claudecodeui/issues/723)) ([b4a39c7](https://github.com/siteboon/claudecodeui/commit/b4a39c729710a6294c62eb742e99e05f3e3914e9))
|
||||
|
||||
## [1.30.0](https://github.com/siteboon/claudecodeui/compare/v1.29.5...v1.30.0) (2026-04-21)
|
||||
|
||||
### New Features
|
||||
|
||||
* **i18n:** add Italian language support ([#677](https://github.com/siteboon/claudecodeui/issues/677)) ([86b6545](https://github.com/siteboon/claudecodeui/commit/86b6545c3505475ac2de0cec75cc8f86ab22aceb))
|
||||
* **i18n:** add Turkish (tr) language support ([#678](https://github.com/siteboon/claudecodeui/issues/678)) ([89b754d](https://github.com/siteboon/claudecodeui/commit/89b754d186b68f3df8aa439a2d535644406066f0)), closes [#384](https://github.com/siteboon/claudecodeui/issues/384) [#514](https://github.com/siteboon/claudecodeui/issues/514) [#525](https://github.com/siteboon/claudecodeui/issues/525) [#534](https://github.com/siteboon/claudecodeui/issues/534)
|
||||
* introduce opus 4.7 ([#682](https://github.com/siteboon/claudecodeui/issues/682)) ([c5e55ad](https://github.com/siteboon/claudecodeui/commit/c5e55adc89d0316675f90a927aa40d115958ae9f))
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* iOS scrolling main chat area ([3969135](https://github.com/siteboon/claudecodeui/commit/3969135bd427fbf48f29bb3dbfedb47791ca78dc))
|
||||
* migrate PlanDisplay raw params from native details to Collapsible primitive ([fc3504e](https://github.com/siteboon/claudecodeui/commit/fc3504eaed8ca7ed9214838d148ea385b8352c31))
|
||||
* precise Claude SDK denial message detection in deriveToolStatus ([09dcea0](https://github.com/siteboon/claudecodeui/commit/09dcea05fbc8c208d931aa1f08618f0e8087392f))
|
||||
* reduce size of permission mode button tap target and provider selector on mobile ([457ca0d](https://github.com/siteboon/claudecodeui/commit/457ca0daabcaa8397f4375ee8aa2671336b648ff))
|
||||
* small mobile respnosive fixes ([25820ed](https://github.com/siteboon/claudecodeui/commit/25820ed995c1b813b1f9ed073097b08eb1d902ec))
|
||||
* small mobile respnosive fixes ([c471b5d](https://github.com/siteboon/claudecodeui/commit/c471b5d3fa6ce1968adb4cf87a15ac0e18febd20))
|
||||
|
||||
### Refactoring
|
||||
|
||||
* add primitives, plan mode display, and new session model selector ([7763e60](https://github.com/siteboon/claudecodeui/commit/7763e60fb32e34742058c055c57664a503a34d1d))
|
||||
* chat composer new design ([5758bee](https://github.com/siteboon/claudecodeui/commit/5758bee8a038ed50073dba882108617959dda82c))
|
||||
* queue primitive, tool status badges, and tool display cleanup ([ec0ff97](https://github.com/siteboon/claudecodeui/commit/ec0ff974cba213a1100b2a071b8ba533e812fe82))
|
||||
|
||||
### Maintenance
|
||||
|
||||
* add docker sandbox action ([fa5a238](https://github.com/siteboon/claudecodeui/commit/fa5a23897c086bcacf1cf5d926c650f98a0f2222))
|
||||
|
||||
## [1.29.5](https://github.com/siteboon/claudecodeui/compare/v1.29.4...v1.29.5) (2026-04-16)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* update node-pty to latest version ([6a13e17](https://github.com/siteboon/claudecodeui/commit/6a13e1773b145049ade512aa6e5cac21c2e5c4de))
|
||||
|
||||
## [1.29.4](https://github.com/siteboon/claudecodeui/compare/v1.29.3...v1.29.4) (2026-04-16)
|
||||
|
||||
### New Features
|
||||
|
||||
* deleting from sidebar will now ask whether to remove all data as well ([e9c7a50](https://github.com/siteboon/claudecodeui/commit/e9c7a5041c31a6f7b2032f06abe19c52d3d4cd8c))
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* pass pathToClaudeCodeExecutable to SDK when CLAUDE_CLI_PATH is set ([4c106a5](https://github.com/siteboon/claudecodeui/commit/4c106a5083d90989bbeedaefdbb68f5b3fa6fd58)), closes [#468](https://github.com/siteboon/claudecodeui/issues/468)
|
||||
|
||||
### Refactoring
|
||||
|
||||
* remove the sqlite3 dependency ([2895208](https://github.com/siteboon/claudecodeui/commit/289520814cf3ca36403056739ef22021f78c6033))
|
||||
* **server:** extract URL detection and color utils from index.js ([#657](https://github.com/siteboon/claudecodeui/issues/657)) ([63e996b](https://github.com/siteboon/claudecodeui/commit/63e996bb77cfa97b1f55f6bdccc50161a75a3eee))
|
||||
|
||||
### Maintenance
|
||||
|
||||
* upgrade commit lint to 20.5.0 ([0948601](https://github.com/siteboon/claudecodeui/commit/09486016e67d97358c228ebc6eb4502ccb0012e4))
|
||||
|
||||
## [1.29.3](https://github.com/siteboon/claudecodeui/compare/v1.29.2...v1.29.3) (2026-04-15)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **version-upgrade-modal:** implement reload countdown and update UI messages ([#655](https://github.com/siteboon/claudecodeui/issues/655)) ([6413042](https://github.com/siteboon/claudecodeui/commit/641304242d7705b54aab65faa4a7673438c92c60))
|
||||
|
||||
### Maintenance
|
||||
|
||||
* remove unused route (migrated to providers already) ([31f28a2](https://github.com/siteboon/claudecodeui/commit/31f28a2c183f6ead50941027632d7ab64b7bb2d4))
|
||||
|
||||
## [1.29.2](https://github.com/siteboon/claudecodeui/compare/v1.29.1...v1.29.2) (2026-04-14)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **sandbox:** use backgrounded sbx run to keep sandbox alive ([9b11c03](https://github.com/siteboon/claudecodeui/commit/9b11c034d9a19710a23b56c62dcf07c21a17bd97))
|
||||
|
||||
## [1.29.1](https://github.com/siteboon/claudecodeui/compare/v1.29.0...v1.29.1) (2026-04-14)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add latest tag to docker npx command and change the detach mode to work without spawn ([4a56972](https://github.com/siteboon/claudecodeui/commit/4a569725dae320a505753359d8edfd8ca79f0fd7))
|
||||
|
||||
## [1.29.0](https://github.com/siteboon/claudecodeui/compare/v1.28.1...v1.29.0) (2026-04-14)
|
||||
|
||||
### New Features
|
||||
|
||||
* adding docker sandbox environments ([13e97e2](https://github.com/siteboon/claudecodeui/commit/13e97e2c71254de7a60afb5495b21064c4bc4241))
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **thinking-mode:** fix dropdown positioning ([#646](https://github.com/siteboon/claudecodeui/issues/646)) ([c7a5baf](https://github.com/siteboon/claudecodeui/commit/c7a5baf1479404bd40e23aa58bd9f677df9a04c6))
|
||||
|
||||
### Maintenance
|
||||
|
||||
* update release flow node version ([e2459cb](https://github.com/siteboon/claudecodeui/commit/e2459cb0f8b35f54827778a7b444e6c3ca326506))
|
||||
|
||||
## [1.28.1](https://github.com/siteboon/claudecodeui/compare/v1.28.0...v1.28.1) (2026-04-10)
|
||||
|
||||
### New Features
|
||||
|
||||
* add branding, community links, GitHub star badge, and About settings tab ([2207d05](https://github.com/siteboon/claudecodeui/commit/2207d05c1ca229214aa9c2e2c9f4d0827d421574))
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* corrupted binary downloads ([#634](https://github.com/siteboon/claudecodeui/issues/634)) ([e61f8a5](https://github.com/siteboon/claudecodeui/commit/e61f8a543d63fe7c24a04b3d2186085a06dcbcdb))
|
||||
* **ui:** remove mobile bottom nav, unify processing indicator, and improve tooltip behavior on mobile ([#632](https://github.com/siteboon/claudecodeui/issues/632)) ([a8dab0e](https://github.com/siteboon/claudecodeui/commit/a8dab0edcf949ae610820bae9500c433781f7c73))
|
||||
|
||||
### Refactoring
|
||||
|
||||
* remove unused whispher transcribe logic ([#637](https://github.com/siteboon/claudecodeui/issues/637)) ([590dd42](https://github.com/siteboon/claudecodeui/commit/590dd42649424ab990353fcf59ce0965036d3d25))
|
||||
|
||||
## [1.28.0](https://github.com/siteboon/claudecodeui/compare/v1.27.1...v1.28.0) (2026-04-03)
|
||||
|
||||
### New Features
|
||||
|
||||
* adding session resume in the api ([8f1042c](https://github.com/siteboon/claudecodeui/commit/8f1042cf256be282f009adcceeb55ab2dddf3fba))
|
||||
* moving new session button higher ([1628868](https://github.com/siteboon/claudecodeui/commit/16288684702dec894cf054291ca3d545ddb8214b))
|
||||
|
||||
### Maintenance
|
||||
|
||||
* changing package name to @cloudcli-ai/cloudcli ([ef51de2](https://github.com/siteboon/claudecodeui/commit/ef51de259ea2b963bc15f058b084e11220bc216a))
|
||||
|
||||
## [1.27.1](https://github.com/siteboon/claudecodeui/compare/v1.26.3...v1.27.1) (2026-03-29)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* prevent split on undefined([#491](https://github.com/siteboon/claudecodeui/issues/491)) ([#563](https://github.com/siteboon/claudecodeui/issues/563)) ([b54cdf8](https://github.com/siteboon/claudecodeui/commit/b54cdf8168fc224e9907796e4229ae8ed34e6885))
|
||||
|
||||
### Maintenance
|
||||
|
||||
* add release-it github action ([42a1313](https://github.com/siteboon/claudecodeui/commit/42a131389a6954df0d2c3bedd2cb6d3406c5ebc1))
|
||||
* add terminal plugin in the plugins list ([004135e](https://github.com/siteboon/claudecodeui/commit/004135ef0187023e1da29c4a7137a28a42ebf9af))
|
||||
* release tokens ([f1063fd](https://github.com/siteboon/claudecodeui/commit/f1063fd33964ccb517f5ebcdd14526ed162e1138))
|
||||
* relicense to AGPL-3.0-or-later ([27cd124](https://github.com/siteboon/claudecodeui/commit/27cd12432b7d3237981f86acd9cc99532d843d4a))
|
||||
|
||||
## [1.26.3](https://github.com/siteboon/claudecodeui/compare/v1.26.2...v1.26.3) (2026-03-22)
|
||||
|
||||
## [1.26.2](https://github.com/siteboon/claudecodeui/compare/v1.26.0...v1.26.2) (2026-03-21)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
@@ -153,4 +153,4 @@ This automatically:
|
||||
|
||||
## License
|
||||
|
||||
By contributing, you agree that your contributions will be licensed under the [GPL-3.0 License](LICENSE).
|
||||
By contributing, you agree that your contributions will be licensed under the [AGPL-3.0-or-later License](LICENSE), including the additional terms specified in Section 7 of the LICENSE file.
|
||||
13
NOTICE
Normal file
13
NOTICE
Normal file
@@ -0,0 +1,13 @@
|
||||
CloudCLI UI
|
||||
Copyright 2025-2026 Siteboon AI B.V. and contributors
|
||||
|
||||
This software is licensed under the GNU Affero General Public License v3.0
|
||||
or later (AGPL-3.0-or-later). See the LICENSE file for the full license text,
|
||||
including additional terms under Section 7.
|
||||
|
||||
Originally developed by Siteboon AI B.V. (https://github.com/siteboon/claudecodeui).
|
||||
|
||||
Contributions by Siteboon AI B.V. prior to commit 004135ef were originally
|
||||
published under GPL-3.0 and are hereby relicensed to AGPL-3.0-or-later.
|
||||
Contributions by other authors prior to that commit remain under GPL-3.0
|
||||
and are incorporated into this work as permitted by GPL-3.0 Section 13.
|
||||
19
README.de.md
19
README.de.md
@@ -15,7 +15,7 @@
|
||||
<a href="https://trendshift.io/repositories/15586" target="_blank"><img src="https://trendshift.io/api/badge/repositories/15586" alt="siteboon%2Fclaudecodeui | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
</p>
|
||||
|
||||
<div align="right"><i><a href="./README.md">English</a> · <a href="./README.ru.md">Русский</a> · <b>Deutsch</b> · <a href="./README.ko.md">한국어</a> · <a href="./README.zh-CN.md">中文</a> · <a href="./README.ja.md">日本語</a></i></div>
|
||||
<div align="right"><i><a href="./README.md">English</a> · <a href="./README.ru.md">Русский</a> · <b>Deutsch</b> · <a href="./README.ko.md">한국어</a> · <a href="./README.zh-CN.md">中文</a> · <a href="./README.ja.md">日本語</a> · <a href="./README.tr.md">Türkçe</a></i></div>
|
||||
|
||||
---
|
||||
|
||||
@@ -76,16 +76,18 @@ Der schnellste Einstieg – keine lokale Einrichtung erforderlich. Erhalte eine
|
||||
|
||||
### Self-Hosted (Open Source)
|
||||
|
||||
#### npm
|
||||
|
||||
CloudCLI UI sofort mit **npx** ausprobieren (erfordert **Node.js** v22+):
|
||||
|
||||
```bash
|
||||
npx @siteboon/claude-code-ui
|
||||
npx @cloudcli-ai/cloudcli
|
||||
```
|
||||
|
||||
Oder **global** installieren für regelmäßige Nutzung:
|
||||
|
||||
```bash
|
||||
npm install -g @siteboon/claude-code-ui
|
||||
npm install -g @cloudcli-ai/cloudcli
|
||||
cloudcli
|
||||
```
|
||||
|
||||
@@ -93,6 +95,15 @@ cloudcli
|
||||
|
||||
Die **[Dokumentation →](https://cloudcli.ai/docs)** enthält weitere Konfigurationsoptionen, PM2, Remote-Server-Einrichtung und mehr.
|
||||
|
||||
#### Docker Sandboxes (Experimentell)
|
||||
|
||||
Agents in isolierten Sandboxes mit Hypervisor-Isolation ausführen. Standardmäßig wird Claude Code gestartet. Erfordert die [`sbx` CLI](https://docs.docker.com/ai/sandboxes/get-started/).
|
||||
|
||||
```
|
||||
npx @cloudcli-ai/cloudcli@latest sandbox ~/my-project
|
||||
```
|
||||
|
||||
Unterstützt Claude Code, Codex und Gemini CLI. Weitere Details in der [Sandbox-Dokumentation](docker/).
|
||||
|
||||
---
|
||||
|
||||
@@ -104,7 +115,7 @@ CloudCLI UI ist die Open-Source-UI-Schicht, die CloudCLI Cloud antreibt. Du kann
|
||||
|---|---|---|
|
||||
| **Am besten für** | Entwickler:innen, die eine vollständige UI für lokale Agent-Sitzungen auf ihrem eigenen Rechner möchten | Teams und Entwickler:innen, die Agents in der Cloud betreiben möchten, überall erreichbar |
|
||||
| **Zugriff** | Browser via `[deineIP]:port` | Browser, jede IDE, REST API, n8n |
|
||||
| **Einrichtung** | `npx @siteboon/claude-code-ui` | Keine Einrichtung erforderlich |
|
||||
| **Einrichtung** | `npx @cloudcli-ai/cloudcli` | Keine Einrichtung erforderlich |
|
||||
| **Rechner muss laufen** | Ja | Nein |
|
||||
| **Mobiler Zugriff** | Jeder Browser im Netzwerk | Jedes Gerät, native App in Entwicklung |
|
||||
| **Verfügbare Sitzungen** | Alle Sitzungen automatisch aus `~/.claude` erkannt | Alle Sitzungen in deiner Cloud-Umgebung |
|
||||
|
||||
19
README.ja.md
19
README.ja.md
@@ -15,7 +15,7 @@
|
||||
<a href="https://trendshift.io/repositories/15586" target="_blank"><img src="https://trendshift.io/api/badge/repositories/15586" alt="siteboon%2Fclaudecodeui | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
</p>
|
||||
|
||||
<div align="right"><i><a href="./README.md">English</a> · <a href="./README.ru.md">Русский</a> · <a href="./README.de.md">Deutsch</a> · <a href="./README.ko.md">한국어</a> · <a href="./README.zh-CN.md">中文</a> · <b>日本語</b></i></div>
|
||||
<div align="right"><i><a href="./README.md">English</a> · <a href="./README.ru.md">Русский</a> · <a href="./README.de.md">Deutsch</a> · <a href="./README.ko.md">한국어</a> · <a href="./README.zh-CN.md">中文</a> · <b>日本語</b> · <a href="./README.tr.md">Türkçe</a></i></div>
|
||||
|
||||
---
|
||||
|
||||
@@ -72,16 +72,18 @@
|
||||
|
||||
### セルフホスト(オープンソース)
|
||||
|
||||
#### npm
|
||||
|
||||
**npx** で今すぐ CloudCLI UI を試せます(**Node.js** v22+ が必要):
|
||||
|
||||
```bash
|
||||
npx @siteboon/claude-code-ui
|
||||
npx @cloudcli-ai/cloudcli
|
||||
```
|
||||
|
||||
または、普段使いするなら **グローバル** にインストール:
|
||||
|
||||
```bash
|
||||
npm install -g @siteboon/claude-code-ui
|
||||
npm install -g @cloudcli-ai/cloudcli
|
||||
cloudcli
|
||||
```
|
||||
|
||||
@@ -89,6 +91,15 @@ cloudcli
|
||||
|
||||
より詳細な設定オプション、PM2、リモートサーバー設定などについては **[ドキュメントはこちら →](https://cloudcli.ai/docs)** を参照してください。
|
||||
|
||||
#### Docker Sandboxes(実験的)
|
||||
|
||||
ハイパーバイザーレベルの分離でエージェントをサンドボックスで実行します。デフォルトでは Claude Code が起動します。[`sbx` CLI](https://docs.docker.com/ai/sandboxes/get-started/) が必要です。
|
||||
|
||||
```
|
||||
npx @cloudcli-ai/cloudcli@latest sandbox ~/my-project
|
||||
```
|
||||
|
||||
Claude Code、Codex、Gemini CLI に対応。詳細は[サンドボックスのドキュメント](docker/)をご覧ください。
|
||||
|
||||
---
|
||||
|
||||
@@ -100,7 +111,7 @@ CloudCLI UI は、CloudCLI Cloud を支えるオープンソースの UI レイ
|
||||
|---|---|---|
|
||||
| **対象ユーザー** | 自分のマシン上でローカルの agent セッションに対してフル UI を使いたい開発者 | クラウド上で動く agents をどこからでも利用したいチーム/開発者 |
|
||||
| **アクセス方法** | ブラウザ(`[yourip]:port`) | ブラウザ、任意の IDE、REST API、n8n |
|
||||
| **セットアップ** | `npx @siteboon/claude-code-ui` | セットアップ不要 |
|
||||
| **セットアップ** | `npx @cloudcli-ai/cloudcli` | セットアップ不要 |
|
||||
| **マシンの稼働継続** | はい | いいえ |
|
||||
| **モバイルアクセス** | 同一ネットワーク内の任意のブラウザ | 任意のデバイス(ネイティブアプリも準備中) |
|
||||
| **利用可能なセッション** | `~/.claude` から全セッションを自動検出 | クラウド環境内の全セッション |
|
||||
|
||||
22
README.ko.md
22
README.ko.md
@@ -15,7 +15,7 @@
|
||||
<a href="https://trendshift.io/repositories/15586" target="_blank"><img src="https://trendshift.io/api/badge/repositories/15586" alt="siteboon%2Fclaudecodeui | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
</p>
|
||||
|
||||
<div align="right"><i><a href="./README.md">English</a> · <a href="./README.ru.md">Русский</a> · <a href="./README.de.md">Deutsch</a> · <b>한국어</b> · <a href="./README.zh-CN.md">中文</a> · <a href="./README.ja.md">日本語</a></i></div>
|
||||
<div align="right"><i><a href="./README.md">English</a> · <a href="./README.ru.md">Русский</a> · <a href="./README.de.md">Deutsch</a> · <b>한국어</b> · <a href="./README.zh-CN.md">中文</a> · <a href="./README.ja.md">日本語</a> · <a href="./README.tr.md">Türkçe</a></i></div>
|
||||
|
||||
---
|
||||
|
||||
@@ -72,22 +72,34 @@
|
||||
|
||||
### 셀프 호스트 (오픈 소스)
|
||||
|
||||
#### npm
|
||||
|
||||
**npx**로 즉시 CloudCLI UI를 실행하세요 (Node.js v22+ 필요):
|
||||
|
||||
```bash
|
||||
npx @siteboon/claude-code-ui
|
||||
npx @cloudcli-ai/cloudcli
|
||||
```
|
||||
|
||||
**정기적으로 사용한다면 전역 설치:**
|
||||
|
||||
```bash
|
||||
npm install -g @siteboon/claude-code-ui
|
||||
npm install -g @cloudcli-ai/cloudcli
|
||||
cloudcli
|
||||
```
|
||||
|
||||
`http://localhost:3001`을 열면 기존 세션이 자동으로 발견됩니다.
|
||||
|
||||
자세한 구성 옵션, PM2, 원격 서버 설정 등은 **[문서 →](https://cloudcli.ai/docs)**를 참고하세요
|
||||
자세한 구성 옵션, PM2, 원격 서버 설정 등은 **[문서 →](https://cloudcli.ai/docs)**를 참고하세요.
|
||||
|
||||
#### Docker Sandboxes (실험적)
|
||||
|
||||
하이퍼바이저 수준 격리로 에이전트를 샌드박스에서 실행합니다. 기본 에이전트는 Claude Code입니다. [`sbx` CLI](https://docs.docker.com/ai/sandboxes/get-started/)가 필요합니다.
|
||||
|
||||
```
|
||||
npx @cloudcli-ai/cloudcli@latest sandbox ~/my-project
|
||||
```
|
||||
|
||||
Claude Code, Codex, Gemini CLI를 지원합니다. 자세한 내용은 [샌드박스 문서](docker/)를 참고하세요.
|
||||
|
||||
---
|
||||
|
||||
@@ -99,7 +111,7 @@ CloudCLI UI는 CloudCLI Cloud를 구동하는 오픈 소스 UI 계층입니다.
|
||||
|---|---|---|
|
||||
| **적합한 대상** | 로컬 에이전트 세션을 위한 전체 UI가 필요한 개발자 | 어디서든 접근 가능한 클라우드에서 에이전트를 운영하고자 하는 팀 및 개발자 |
|
||||
| **접근 방법** | `[yourip]:port`를 통해 브라우저 접속 | 브라우저, IDE, REST API, n8n |
|
||||
| **설정** | `npx @siteboon/claude-code-ui` | 설정 불필요 |
|
||||
| **설정** | `npx @cloudcli-ai/cloudcli` | 설정 불필요 |
|
||||
| **기기 유지 필요 여부** | 예 (머신 켜둬야 함) | 아니오 |
|
||||
| **모바일 접근** | 네트워크 내 브라우저 | 모든 기기 (네이티브 앱 예정) |
|
||||
| **세션 접근** | `~/.claude`에서 자동 발견 | 클라우드 환경 내 세션 |
|
||||
|
||||
62
README.md
62
README.md
@@ -15,7 +15,7 @@
|
||||
<a href="https://trendshift.io/repositories/15586" target="_blank"><img src="https://trendshift.io/api/badge/repositories/15586" alt="siteboon%2Fclaudecodeui | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
</p>
|
||||
|
||||
<div align="right"><i><b>English</b> · <a href="./README.ru.md">Русский</a> · <a href="./README.de.md">Deutsch</a> · <a href="./README.ko.md">한국어</a> · <a href="./README.zh-CN.md">中文</a> · <a href="./README.ja.md">日本語</a></i></div>
|
||||
<div align="right"><i><b>English</b> · <a href="./README.ru.md">Русский</a> · <a href="./README.de.md">Deutsch</a> · <a href="./README.ko.md">한국어</a> · <a href="./README.zh-CN.md">中文</a> · <a href="./README.ja.md">日本語</a> · <a href="./README.tr.md">Türkçe</a></i></div>
|
||||
|
||||
---
|
||||
|
||||
@@ -76,48 +76,58 @@ The fastest way to get started — no local setup required. Get a fully managed,
|
||||
|
||||
### Self-Hosted (Open source)
|
||||
|
||||
#### npm
|
||||
|
||||
Try CloudCLI UI instantly with **npx** (requires **Node.js** v22+):
|
||||
|
||||
```
|
||||
npx @siteboon/claude-code-ui
|
||||
npx @cloudcli-ai/cloudcli
|
||||
```
|
||||
|
||||
Or install **globally** for regular use:
|
||||
|
||||
```
|
||||
npm install -g @siteboon/claude-code-ui
|
||||
npm install -g @cloudcli-ai/cloudcli
|
||||
cloudcli
|
||||
```
|
||||
|
||||
Open `http://localhost:3001` — all your existing sessions are discovered automatically.
|
||||
|
||||
Visit the **[documentation →](https://cloudcli.ai/docs)** for more full configuration options, PM2, remote server setup and more
|
||||
Visit the **[documentation →](https://cloudcli.ai/docs)** for full configuration options, PM2, remote server setup and more.
|
||||
|
||||
#### Docker Sandboxes (Experimental)
|
||||
|
||||
Run agents in isolated sandboxes with hypervisor-level isolation. Starts Claude Code by default. Requires the [`sbx` CLI](https://docs.docker.com/ai/sandboxes/get-started/).
|
||||
|
||||
```
|
||||
npx @cloudcli-ai/cloudcli@latest sandbox ~/my-project
|
||||
```
|
||||
|
||||
Supports Claude Code, Codex, and Gemini CLI. See the [sandbox docs](docker/) for setup and advanced options.
|
||||
|
||||
|
||||
---
|
||||
|
||||
## Which option is right for you?
|
||||
|
||||
CloudCLI UI is the open source UI layer that powers CloudCLI Cloud. You can self-host it on your own machine, or use CloudCLI Cloud which builds on top of it with a full managed cloud environment, team features, and deeper integrations.
|
||||
CloudCLI UI is the open source UI layer that powers CloudCLI Cloud. You can self-host it on your own machine, run it in a Docker sandbox for isolation, or use CloudCLI Cloud for a fully managed environment.
|
||||
|
||||
| | CloudCLI UI (Self-hosted) | CloudCLI Cloud |
|
||||
|---|---|---|
|
||||
| **Best for** | Developers who want a full UI for local agent sessions on their own machine | Teams and developers who want agents running in the cloud, accessible from anywhere |
|
||||
| **How you access it** | Browser via `[yourip]:port` | Browser, any IDE, REST API, n8n |
|
||||
| **Setup** | `npx @siteboon/claude-code-ui` | No setup required |
|
||||
| **Machine needs to stay on** | Yes | No |
|
||||
| **Mobile access** | Any browser on your network | Any device, native app coming |
|
||||
| **Sessions available** | All sessions auto-discovered from `~/.claude` | All sessions within your cloud environment |
|
||||
| **Agents supported** | Claude Code, Cursor CLI, Codex, Gemini CLI | Claude Code, Cursor CLI, Codex, Gemini CLI |
|
||||
| **File explorer and Git** | Yes, built into the UI | Yes, built into the UI |
|
||||
| **MCP configuration** | Managed via UI, synced with your local `~/.claude` config | Managed via UI |
|
||||
| **IDE access** | Your local IDE | Any IDE connected to your cloud environment |
|
||||
| **REST API** | Yes | Yes |
|
||||
| **n8n node** | No | Yes |
|
||||
| **Team sharing** | No | Yes |
|
||||
| **Platform cost** | Free, open source | Starts at $7/month |
|
||||
| | Self-Hosted (npm) | Self-Hosted (Docker Sandbox) *(Experimental)* | CloudCLI Cloud |
|
||||
|---|---|---|---|
|
||||
| **Best for** | Local agent sessions on your own machine | Isolated agents with web/mobile IDE | Teams who want agents in the cloud |
|
||||
| **How you access it** | Browser via `[yourip]:port` | Browser via `localhost:port` | Browser, any IDE, REST API, n8n |
|
||||
| **Setup** | `npx @cloudcli-ai/cloudcli` | `npx @cloudcli-ai/cloudcli@latest sandbox ~/project` | No setup required |
|
||||
| **Isolation** | Runs on your host | Hypervisor-level sandbox (microVM) | Full cloud isolation |
|
||||
| **Machine needs to stay on** | Yes | Yes | No |
|
||||
| **Mobile access** | Any browser on your network | Any browser on your network | Any device, native app coming |
|
||||
| **Agents supported** | Claude Code, Cursor CLI, Codex, Gemini CLI | Claude Code, Codex, Gemini CLI | Claude Code, Cursor CLI, Codex, Gemini CLI |
|
||||
| **File explorer and Git** | Yes | Yes | Yes |
|
||||
| **MCP configuration** | Synced with `~/.claude` | Managed via UI | Managed via UI |
|
||||
| **REST API** | Yes | Yes | Yes |
|
||||
| **Team sharing** | No | No | Yes |
|
||||
| **Platform cost** | Free, open source | Free, open source | Starts at $7/month |
|
||||
|
||||
> Both options use your own AI subscriptions (Claude, Cursor, etc.) — CloudCLI provides the environment, not the AI.
|
||||
> All options use your own AI subscriptions (Claude, Cursor, etc.) — CloudCLI provides the environment, not the AI.
|
||||
|
||||
---
|
||||
|
||||
@@ -154,7 +164,7 @@ CloudCLI has a plugin system that lets you add custom tabs with their own fronte
|
||||
|---|---|
|
||||
| **[Project Stats](https://github.com/cloudcli-ai/cloudcli-plugin-starter)** | Shows file counts, lines of code, file-type breakdown, largest files, and recently modified files for your current project |
|
||||
| **[Web Terminal](https://github.com/cloudcli-ai/cloudcli-plugin-terminal)** | Full xterm.js terminal with multi-tab support|
|
||||
|
||||
| **[CloudCLI Scheduler](https://github.com/grostim/cloudcli-cron)** | Create workspace-scoped scheduled prompts and execute them through a local CLI such as Codex, Claude Code, or Gemini CLI|
|
||||
### Build Your Own
|
||||
|
||||
**[Plugin Starter Template →](https://github.com/cloudcli-ai/cloudcli-plugin-starter)** — fork this repo to create your own plugin. It includes a working example with frontend rendering, live context updates, and RPC communication to a backend server.
|
||||
@@ -213,9 +223,11 @@ Yes, for self-hosted. CloudCLI UI reads from and writes to the same `~/.claude`
|
||||
|
||||
## License
|
||||
|
||||
GNU General Public License v3.0 - see [LICENSE](LICENSE) file for details.
|
||||
GNU Affero General Public License v3.0 or later (AGPL-3.0-or-later) — see [LICENSE](LICENSE) for the full text, including additional terms under Section 7.
|
||||
|
||||
This project is open source and free to use, modify, and distribute under the GPL v3 license.
|
||||
This project is open source and free to use, modify, and distribute under the AGPL-3.0-or-later license. If you modify this software and run it as a network service, you must make your modified source code available to users of that service.
|
||||
|
||||
CloudCLI UI - (https://cloudcli.ai).
|
||||
|
||||
## Acknowledgments
|
||||
|
||||
|
||||
21
README.ru.md
21
README.ru.md
@@ -15,7 +15,7 @@
|
||||
<a href="https://trendshift.io/repositories/15586" target="_blank"><img src="https://trendshift.io/api/badge/repositories/15586" alt="siteboon%2Fclaudecodeui | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
</p>
|
||||
|
||||
<div align="right"><i><a href="./README.md">English</a> · <b>Русский</b> · <a href="./README.de.md">Deutsch</a> · <a href="./README.ko.md">한국어</a> · <a href="./README.zh-CN.md">中文</a> · <a href="./README.ja.md">日本語</a></i></div>
|
||||
<div align="right"><i><a href="./README.md">English</a> · <b>Русский</b> · <a href="./README.de.md">Deutsch</a> · <a href="./README.ko.md">한국어</a> · <a href="./README.zh-CN.md">中文</a> · <a href="./README.ja.md">日本語</a> · <a href="./README.tr.md">Türkçe</a></i></div>
|
||||
|
||||
---
|
||||
|
||||
@@ -76,23 +76,34 @@
|
||||
|
||||
### Self-Hosted (Open source)
|
||||
|
||||
#### npm
|
||||
|
||||
Попробовать CloudCLI UI можно сразу через **npx** (требуется **Node.js** v22+):
|
||||
|
||||
```bash
|
||||
npx @siteboon/claude-code-ui
|
||||
npx @cloudcli-ai/cloudcli
|
||||
```
|
||||
|
||||
Или установить **глобально** для регулярного использования:
|
||||
|
||||
```bash
|
||||
npm install -g @siteboon/claude-code-ui
|
||||
npm install -g @cloudcli-ai/cloudcli
|
||||
cloudcli
|
||||
```
|
||||
|
||||
Откройте `http://localhost:3001` — все ваши существующие сессии будут обнаружены автоматически.
|
||||
|
||||
Посетите **[документацию →](https://cloudcli.ai/docs)**, чтобы узнать про дополнительные варианты конфигурации, PM2, настройку удалённого сервера и многое другое
|
||||
Посетите **[документацию →](https://cloudcli.ai/docs)**, чтобы узнать про дополнительные варианты конфигурации, PM2, настройку удалённого сервера и многое другое.
|
||||
|
||||
#### Docker Sandboxes (Экспериментально)
|
||||
|
||||
Запускайте агентов в изолированных песочницах с гипервизорной изоляцией. По умолчанию запускается Claude Code. Требуется [`sbx` CLI](https://docs.docker.com/ai/sandboxes/get-started/).
|
||||
|
||||
```
|
||||
npx @cloudcli-ai/cloudcli@latest sandbox ~/my-project
|
||||
```
|
||||
|
||||
Поддерживаются Claude Code, Codex и Gemini CLI. Подробнее в [документации sandbox](docker/).
|
||||
|
||||
---
|
||||
|
||||
@@ -104,7 +115,7 @@ CloudCLI UI — это open source UI-слой, на котором постро
|
||||
|---|---|---|
|
||||
| **Лучше всего подходит для** | Разработчиков, которым нужен полноценный UI для локальных агентских сессий на своей машине | Команд и разработчиков, которым нужны агенты в облаке с доступом откуда угодно |
|
||||
| **Как вы получаете доступ** | Браузер через `[yourip]:port` | Браузер, любая IDE, REST API, n8n |
|
||||
| **Настройка** | `npx @siteboon/claude-code-ui` | Настройка не требуется |
|
||||
| **Настройка** | `npx @cloudcli-ai/cloudcli` | Настройка не требуется |
|
||||
| **Машина должна оставаться включённой** | Да | Нет |
|
||||
| **Доступ с мобильных устройств** | Любой браузер в вашей сети | Любое устройство, нативное приложение в разработке |
|
||||
| **Доступные сессии** | Все сессии автоматически обнаруживаются из `~/.claude` | Все сессии внутри вашей облачной среды |
|
||||
|
||||
252
README.tr.md
Normal file
252
README.tr.md
Normal file
@@ -0,0 +1,252 @@
|
||||
<div align="center">
|
||||
<img src="public/logo.svg" alt="CloudCLI UI" width="64" height="64">
|
||||
<h1>Cloud CLI (Claude Code UI olarak da bilinir)</h1>
|
||||
<p><a href="https://docs.anthropic.com/en/docs/claude-code">Claude Code</a>, <a href="https://docs.cursor.com/en/cli/overview">Cursor CLI</a>, <a href="https://developers.openai.com/codex">Codex</a> ve <a href="https://geminicli.com/">Gemini-CLI</a> için masaüstü ve mobil arayüz.<br>Yerel ya da uzaktan kullanarak aktif projelerine ve oturumlarına her yerden erişebilirsin.</p>
|
||||
</div>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://cloudcli.ai">CloudCLI Cloud</a> · <a href="https://cloudcli.ai/docs">Dokümantasyon</a> · <a href="https://discord.gg/buxwujPNRE">Discord</a> · <a href="https://github.com/siteboon/claudecodeui/issues">Sorun Bildir</a> · <a href="CONTRIBUTING.md">Katkıda Bulun</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://cloudcli.ai"><img src="https://img.shields.io/badge/☁️_CloudCLI_Cloud-Hemen_Dene-0066FF?style=for-the-badge" alt="CloudCLI Cloud"></a>
|
||||
<a href="https://discord.gg/buxwujPNRE"><img src="https://img.shields.io/badge/Discord-Toplulu%C4%9Fa%20Kat%C4%B1l-5865F2?style=for-the-badge&logo=discord&logoColor=white" alt="Discord'a Katıl"></a>
|
||||
<br><br>
|
||||
<a href="https://trendshift.io/repositories/15586" target="_blank"><img src="https://trendshift.io/api/badge/repositories/15586" alt="siteboon%2Fclaudecodeui | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
</p>
|
||||
|
||||
<div align="right"><i><a href="./README.md">English</a> · <a href="./README.ru.md">Русский</a> · <a href="./README.de.md">Deutsch</a> · <a href="./README.ko.md">한국어</a> · <a href="./README.zh-CN.md">中文</a> · <a href="./README.ja.md">日本語</a> · <b>Türkçe</b></i></div>
|
||||
|
||||
---
|
||||
|
||||
## Ekran Görüntüleri
|
||||
|
||||
<div align="center">
|
||||
|
||||
<table>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<h3>Masaüstü Görünümü</h3>
|
||||
<img src="public/screenshots/desktop-main.png" alt="Masaüstü Arayüzü" width="400">
|
||||
<br>
|
||||
<em>Proje genel bakışı ve sohbeti gösteren ana arayüz</em>
|
||||
</td>
|
||||
<td align="center">
|
||||
<h3>Mobil Deneyim</h3>
|
||||
<img src="public/screenshots/mobile-chat.png" alt="Mobil Arayüz" width="250">
|
||||
<br>
|
||||
<em>Dokunma gezinmesiyle duyarlı mobil tasarım</em>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center" colspan="2">
|
||||
<h3>CLI Seçimi</h3>
|
||||
<img src="public/screenshots/cli-selection.png" alt="CLI Seçimi" width="400">
|
||||
<br>
|
||||
<em>Claude Code, Gemini, Cursor CLI ve Codex arasında seçim yap</em>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
## Özellikler
|
||||
|
||||
- **Duyarlı Tasarım** — Masaüstü, tablet ve mobilde sorunsuz çalışır; böylece ajanlarını telefondan da kullanabilirsin
|
||||
- **Etkileşimli Sohbet Arayüzü** — Ajanlarla akıcı iletişim için dahili sohbet arayüzü
|
||||
- **Entegre Shell Terminali** — Yerleşik shell özelliği üzerinden ajan CLI'larına doğrudan erişim
|
||||
- **Dosya Gezgini** — Sözdizimi vurgulama ve canlı düzenleme ile etkileşimli dosya ağacı
|
||||
- **Git Gezgini** — Değişikliklerini görüntüle, staging'e ekle ve commit'le. Dallar arası geçiş de yapabilirsin
|
||||
- **Oturum Yönetimi** — Konuşmalara devam et, birden fazla oturumu yönet ve geçmişi takip et
|
||||
- **Eklenti Sistemi** — CloudCLI'ı özel eklentilerle genişlet: yeni sekmeler, arka uç servisleri ve entegrasyonlar ekle. [Kendi eklentini yaz →](https://github.com/cloudcli-ai/cloudcli-plugin-starter)
|
||||
- **TaskMaster AI Entegrasyonu** *(İsteğe Bağlı)* — AI destekli görev planlama, PRD ayrıştırma ve iş akışı otomasyonu ile gelişmiş proje yönetimi
|
||||
- **Model Uyumluluğu** — Claude, GPT ve Gemini model aileleriyle çalışır (desteklenen tüm modeller için [`shared/modelConstants.js`](shared/modelConstants.js) dosyasına bak)
|
||||
|
||||
|
||||
## Hızlı Başlangıç
|
||||
|
||||
### CloudCLI Cloud (Önerilen)
|
||||
|
||||
Başlamanın en hızlı yolu — yerel kurulum yok. Web, mobil uygulama, API veya favori IDE'nden erişilebilen, tam yönetilen, konteyner tabanlı bir geliştirme ortamına sahip ol.
|
||||
|
||||
**[CloudCLI Cloud ile başla](https://cloudcli.ai)**
|
||||
|
||||
|
||||
### Kendin Barındır (Açık Kaynak)
|
||||
|
||||
#### npm
|
||||
|
||||
CloudCLI UI'yi **npx** ile anında dene (**Node.js** v22+ gerekir):
|
||||
|
||||
```
|
||||
npx @cloudcli-ai/cloudcli
|
||||
```
|
||||
|
||||
Veya düzenli kullanım için **genel olarak** kur:
|
||||
|
||||
```
|
||||
npm install -g @cloudcli-ai/cloudcli
|
||||
cloudcli
|
||||
```
|
||||
|
||||
`http://localhost:3001` adresini aç — mevcut tüm oturumların otomatik olarak keşfedilir.
|
||||
|
||||
Tam yapılandırma seçenekleri, PM2, uzak sunucu kurulumu ve daha fazlası için **[dokümantasyonu ziyaret et →](https://cloudcli.ai/docs)**.
|
||||
|
||||
#### Docker Sandbox'lar (Deneysel)
|
||||
|
||||
Ajanları hipervizör seviyesinde izolasyonlu sandbox'larda çalıştır. Varsayılan olarak Claude Code başlar. [`sbx` CLI](https://docs.docker.com/ai/sandboxes/get-started/) gerekir.
|
||||
|
||||
```
|
||||
npx @cloudcli-ai/cloudcli@latest sandbox ~/my-project
|
||||
```
|
||||
|
||||
Claude Code, Codex ve Gemini CLI destekler. Kurulum ve gelişmiş seçenekler için [sandbox dokümantasyonuna](docker/) bak.
|
||||
|
||||
|
||||
---
|
||||
|
||||
## Hangi seçenek sana uygun?
|
||||
|
||||
CloudCLI UI, CloudCLI Cloud'u güçlendiren açık kaynak arayüz katmanıdır. Kendi makinende barındırabilir, izolasyon için Docker sandbox'ta çalıştırabilir veya tam yönetilen ortam için CloudCLI Cloud kullanabilirsin.
|
||||
|
||||
| | Kendin Barındır (npm) | Kendin Barındır (Docker Sandbox) *(Deneysel)* | CloudCLI Cloud |
|
||||
|---|---|---|---|
|
||||
| **En iyi şunun için** | Kendi makinende yerel ajan oturumları | Web/mobil IDE ile izole ajanlar | Ajanlarını bulutta isteyen ekipler |
|
||||
| **Nasıl erişilir** | `[yourip]:port` üzerinden tarayıcıda | `localhost:port` üzerinden tarayıcıda | Tarayıcı, herhangi bir IDE, REST API, n8n |
|
||||
| **Kurulum** | `npx @cloudcli-ai/cloudcli` | `npx @cloudcli-ai/cloudcli@latest sandbox ~/project` | Kurulum gerekmez |
|
||||
| **İzolasyon** | Kendi host'unda çalışır | Hipervizör seviyesi sandbox (microVM) | Tam bulut izolasyonu |
|
||||
| **Makinenin açık kalması gerek** | Evet | Evet | Hayır |
|
||||
| **Mobil erişim** | Ağındaki herhangi bir tarayıcı | Ağındaki herhangi bir tarayıcı | Herhangi bir cihaz, native uygulama yolda |
|
||||
| **Desteklenen ajanlar** | Claude Code, Cursor CLI, Codex, Gemini CLI | Claude Code, Codex, Gemini CLI | Claude Code, Cursor CLI, Codex, Gemini CLI |
|
||||
| **Dosya gezgini ve Git** | Evet | Evet | Evet |
|
||||
| **MCP yapılandırması** | `~/.claude` ile senkron | UI üzerinden yönetilir | UI üzerinden yönetilir |
|
||||
| **REST API** | Evet | Evet | Evet |
|
||||
| **Ekip paylaşımı** | Hayır | Hayır | Evet |
|
||||
| **Platform maliyeti** | Ücretsiz, açık kaynak | Ücretsiz, açık kaynak | Aylık 7 $'dan başlar |
|
||||
|
||||
> Tüm seçenekler kendi AI aboneliklerini (Claude, Cursor, vb.) kullanır — CloudCLI AI'ı değil, ortamı sağlar.
|
||||
|
||||
---
|
||||
|
||||
## Güvenlik ve Araç Yapılandırması
|
||||
|
||||
**🔒 Önemli Uyarı**: Tüm Claude Code araçları **varsayılan olarak devre dışıdır**. Bu, potansiyel olarak zararlı işlemlerin otomatik çalışmasını önler.
|
||||
|
||||
### Araçları Etkinleştirme
|
||||
|
||||
Claude Code'un tam işlevselliğinden yararlanmak için araçları manuel olarak etkinleştirmen gerekir:
|
||||
|
||||
1. **Araç Ayarlarını Aç** — Kenar çubuğundaki dişli simgesine tıkla
|
||||
2. **Seçerek Etkinleştir** — Yalnızca ihtiyacın olan araçları aç
|
||||
3. **Ayarları Uygula** — Tercihlerin yerel olarak kaydedilir
|
||||
|
||||
<div align="center">
|
||||
|
||||

|
||||
*Araç Ayarları arayüzü — yalnızca ihtiyacın olanı etkinleştir*
|
||||
|
||||
</div>
|
||||
|
||||
**Önerilen yaklaşım**: Temel araçlarla başla ve gerektikçe daha fazlasını ekle. Bu ayarları sonra her zaman değiştirebilirsin.
|
||||
|
||||
---
|
||||
|
||||
## Eklentiler
|
||||
|
||||
CloudCLI, kendi frontend UI'sı ve isteğe bağlı Node.js arka ucu olan özel sekmeler eklemeni sağlayan bir eklenti sistemine sahiptir. Git depolarından eklentileri doğrudan **Ayarlar > Eklentiler**'den yükleyebilir veya kendi eklentini yazabilirsin.
|
||||
|
||||
### Mevcut Eklentiler
|
||||
|
||||
| Eklenti | Açıklama |
|
||||
|---|---|
|
||||
| **[Project Stats](https://github.com/cloudcli-ai/cloudcli-plugin-starter)** | Mevcut projen için dosya sayıları, kod satırları, dosya türü dağılımı, en büyük dosyalar ve son değiştirilen dosyaları gösterir |
|
||||
| **[Web Terminal](https://github.com/cloudcli-ai/cloudcli-plugin-terminal)** | Çoklu sekme destekli tam xterm.js terminali |
|
||||
|
||||
### Kendi Eklentini Yaz
|
||||
|
||||
**[Plugin Starter Şablonu →](https://github.com/cloudcli-ai/cloudcli-plugin-starter)** — kendi eklentini oluşturmak için bu repo'yu fork'la. Frontend render, canlı bağlam güncellemeleri ve arka uç sunucusuyla RPC iletişimi içeren çalışan bir örnek içerir.
|
||||
|
||||
**[Plugin Dokümantasyonu →](https://cloudcli.ai/docs/plugin-overview)** — plugin API'sı, manifest formatı, güvenlik modeli ve daha fazlası için tam rehber.
|
||||
|
||||
---
|
||||
## Sık Sorulan Sorular
|
||||
|
||||
<details>
|
||||
<summary>Bu Claude Code Remote Control'dan nasıl farklı?</summary>
|
||||
|
||||
Claude Code Remote Control, yerel terminalinde zaten çalışan bir oturuma mesaj göndermeni sağlar. Makinen açık kalmak zorunda, terminalin açık kalmak zorunda ve ağ bağlantısı olmadan yaklaşık 10 dakika sonra oturumlar zaman aşımına uğrar.
|
||||
|
||||
CloudCLI UI ve CloudCLI Cloud, Claude Code'un yanında değil içinde çalışır — MCP sunucuların, izinlerin, ayarların ve oturumların, Claude Code'un yerel olarak kullandığının birebir aynısıdır. Hiçbir şey çoğaltılmaz veya ayrı yönetilmez.
|
||||
|
||||
Pratikte bu ne demek:
|
||||
|
||||
- **Tek oturum değil, tüm oturumların** — CloudCLI UI, `~/.claude` klasöründeki her oturumu otomatik keşfeder. Remote Control yalnızca tek aktif oturumu Claude mobil uygulamasına açar.
|
||||
- **Ayarların sana ait** — UI'da değiştirdiğin MCP sunucuları, araç izinleri ve proje yapılandırması doğrudan Claude Code yapılandırmana yazılır ve anında etkili olur; tersi de geçerli.
|
||||
- **Daha fazla ajanla çalışır** — Sadece Claude Code değil; Cursor CLI, Codex ve Gemini CLI de.
|
||||
- **Sadece sohbet penceresi değil, tam UI** — dosya gezgini, Git entegrasyonu, MCP yönetimi ve shell terminali hepsi yerleşik.
|
||||
- **CloudCLI Cloud bulutta çalışır** — laptop'unu kapat, ajan çalışmaya devam eder. Beklemen gereken terminal yok, uyanık tutman gereken makine yok.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>AI aboneliği için ayrıca ödeme yapmam gerekiyor mu?</summary>
|
||||
|
||||
Evet. CloudCLI AI'yi değil, ortamı sağlar. Kendi Claude, Cursor, Codex veya Gemini aboneliğini getirirsin. CloudCLI Cloud, barındırılan ortam için aylık 7 $'dan başlar — bunun üzerine eklenir.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>CloudCLI UI'yi telefonumda kullanabilir miyim?</summary>
|
||||
|
||||
Evet. Kendin barındırdığında, sunucuyu makinende çalıştır ve ağındaki herhangi bir tarayıcıda `[yourip]:port` adresini aç. CloudCLI Cloud için, herhangi bir cihazdan aç — VPN yok, port yönlendirme yok, kurulum yok. Native bir uygulama da hazırlanıyor.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>UI'da yaptığım değişiklikler yerel Claude Code kurulumumu etkiler mi?</summary>
|
||||
|
||||
Evet, kendin barındırdığında. CloudCLI UI, Claude Code'un yerel olarak kullandığı aynı `~/.claude` yapılandırmasından okur ve ona yazar. UI üzerinden eklediğin MCP sunucuları Claude Code'da anında görünür; tersi de geçerli.
|
||||
|
||||
</details>
|
||||
|
||||
---
|
||||
|
||||
## Topluluk ve Destek
|
||||
|
||||
- **[Dokümantasyon](https://cloudcli.ai/docs)** — kurulum, yapılandırma, özellikler ve sorun giderme
|
||||
- **[Discord](https://discord.gg/buxwujPNRE)** — yardım al ve diğer kullanıcılarla tanış
|
||||
- **[GitHub Issues](https://github.com/siteboon/claudecodeui/issues)** — hata raporları ve özellik istekleri
|
||||
- **[Katkı Rehberi](CONTRIBUTING.md)** — projeye nasıl katkıda bulunulur
|
||||
|
||||
## Lisans
|
||||
|
||||
GNU Affero General Public License v3.0 veya sonrası (AGPL-3.0-or-later) — tam metin ve Bölüm 7 altındaki ek şartlar için [LICENSE](LICENSE) dosyasına bak.
|
||||
|
||||
Bu proje açık kaynaklıdır ve AGPL-3.0-or-later lisansı altında özgürce kullanılabilir, değiştirilebilir ve dağıtılabilir. Bu yazılımı değiştirir ve bir ağ servisi olarak çalıştırırsan, değiştirilmiş kaynak kodunu o servisin kullanıcılarına sunmak zorundasın.
|
||||
|
||||
CloudCLI UI — (https://cloudcli.ai).
|
||||
|
||||
## Teşekkürler
|
||||
|
||||
### Kullanılan Teknolojiler
|
||||
- **[Claude Code](https://docs.anthropic.com/en/docs/claude-code)** — Anthropic'in resmi CLI'ı
|
||||
- **[Cursor CLI](https://docs.cursor.com/en/cli/overview)** — Cursor'un resmi CLI'ı
|
||||
- **[Codex](https://developers.openai.com/codex)** — OpenAI Codex
|
||||
- **[Gemini-CLI](https://geminicli.com/)** — Google Gemini CLI
|
||||
- **[React](https://react.dev/)** — Kullanıcı arayüzü kütüphanesi
|
||||
- **[Vite](https://vitejs.dev/)** — Hızlı derleme aracı ve geliştirme sunucusu
|
||||
- **[Tailwind CSS](https://tailwindcss.com/)** — Utility-first CSS framework
|
||||
- **[CodeMirror](https://codemirror.net/)** — Gelişmiş kod editörü
|
||||
- **[TaskMaster AI](https://github.com/eyaltoledano/claude-task-master)** *(İsteğe Bağlı)* — AI destekli proje yönetimi ve görev planlama
|
||||
|
||||
|
||||
### Sponsorlar
|
||||
- [Siteboon — AI destekli web sitesi oluşturucu](https://siteboon.ai)
|
||||
---
|
||||
|
||||
<div align="center">
|
||||
<strong>Claude Code, Cursor ve Codex topluluğu için özenle yapıldı.</strong>
|
||||
</div>
|
||||
@@ -15,7 +15,7 @@
|
||||
<a href="https://trendshift.io/repositories/15586" target="_blank"><img src="https://trendshift.io/api/badge/repositories/15586" alt="siteboon%2Fclaudecodeui | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
</p>
|
||||
|
||||
<div align="right"><i><a href="./README.md">English</a> · <a href="./README.ru.md">Русский</a> · <a href="./README.de.md">Deutsch</a> · <a href="./README.ko.md">한국어</a> · <b>中文</b> · <a href="./README.ja.md">日本語</a></i></div>
|
||||
<div align="right"><i><a href="./README.md">English</a> · <a href="./README.ru.md">Русский</a> · <a href="./README.de.md">Deutsch</a> · <a href="./README.ko.md">한국어</a> · <b>中文</b> · <a href="./README.ja.md">日本語</a> · <a href="./README.tr.md">Türkçe</a></i></div>
|
||||
|
||||
---
|
||||
|
||||
@@ -72,22 +72,34 @@
|
||||
|
||||
### 自托管(开源)
|
||||
|
||||
#### npm
|
||||
|
||||
启动 CloudCLI UI,只需一行 `npx`(需要 Node.js v22+):
|
||||
|
||||
```bash
|
||||
npx @siteboon/claude-code-ui
|
||||
npx @cloudcli-ai/cloudcli
|
||||
```
|
||||
|
||||
或进行全局安装,便于日常使用:
|
||||
|
||||
```bash
|
||||
npm install -g @siteboon/claude-code-ui
|
||||
npm install -g @cloudcli-ai/cloudcli
|
||||
cloudcli
|
||||
```
|
||||
|
||||
打开 `http://localhost:3001`,系统会自动发现所有现有会话。
|
||||
|
||||
更多配置选项、PM2、远程服务器设置等,请参阅 **[文档 →](https://cloudcli.ai/docs)**
|
||||
更多配置选项、PM2、远程服务器设置等,请参阅 **[文档 →](https://cloudcli.ai/docs)**。
|
||||
|
||||
#### Docker Sandboxes(实验性)
|
||||
|
||||
在隔离的沙箱中运行代理,具有虚拟机管理程序级别的隔离。默认启动 Claude Code。需要 [`sbx` CLI](https://docs.docker.com/ai/sandboxes/get-started/)。
|
||||
|
||||
```
|
||||
npx @cloudcli-ai/cloudcli@latest sandbox ~/my-project
|
||||
```
|
||||
|
||||
支持 Claude Code、Codex 和 Gemini CLI。详情请参阅 [沙箱文档](docker/)。
|
||||
|
||||
---
|
||||
|
||||
@@ -99,7 +111,7 @@ CloudCLI UI 是 CloudCLI Cloud 的开源 UI 层。你可以在本地机器上自
|
||||
|---|---|---|
|
||||
| **适合对象** | 需要为本地代理会话提供完整 UI 的开发者 | 需要部署在云端,随时从任何地方访问代理的团队与开发者 |
|
||||
| **访问方式** | 通过 `[yourip]:port` 在浏览器中访问 | 浏览器、任意 IDE、REST API、n8n |
|
||||
| **设置** | `npx @siteboon/claude-code-ui` | 无需设置 |
|
||||
| **设置** | `npx @cloudcli-ai/cloudcli` | 无需设置 |
|
||||
| **机器需保持开机吗** | 是 | 否 |
|
||||
| **移动端访问** | 网络内任意浏览器 | 任意设备(原生应用即将推出) |
|
||||
| **可用会话** | 自动发现 `~/.claude` 中的所有会话 | 云端环境内的会话 |
|
||||
|
||||
160
docker/README.md
Normal file
160
docker/README.md
Normal file
@@ -0,0 +1,160 @@
|
||||
<!-- Docker Hub short description (100 chars max): -->
|
||||
<!-- Sandbox templates for running AI coding agents with a web & mobile IDE (Claude Code, Codex, Gemini) -->
|
||||
|
||||
# Sandboxed coding agents with a web & mobile IDE (CloudCLI)
|
||||
|
||||
[Docker Sandbox](https://docs.docker.com/ai/sandboxes/) templates that add [CloudCLI](https://cloudcli.ai) on top of Claude Code, Codex, and Gemini CLI. You get a full web and mobile IDE accessible from any browser on any device.
|
||||
|
||||
## Get started
|
||||
|
||||
### 1. Install the sbx CLI
|
||||
|
||||
Docker Sandboxes run agents in isolated microVMs. Install the `sbx` CLI:
|
||||
|
||||
- **macOS**: `brew install docker/tap/sbx`
|
||||
- **Windows**: `winget install -h Docker.sbx`
|
||||
- **Linux**: `sudo apt-get install docker-sbx`
|
||||
|
||||
Full instructions: [docs.docker.com/ai/sandboxes/get-started](https://docs.docker.com/ai/sandboxes/get-started/)
|
||||
|
||||
### 2. Store your API key
|
||||
|
||||
`sbx` manages credentials securely — your API key never enters the sandbox. Store it once:
|
||||
|
||||
```bash
|
||||
sbx login
|
||||
sbx secret set -g anthropic
|
||||
```
|
||||
|
||||
### 3. Launch Claude Code
|
||||
|
||||
```bash
|
||||
npx @cloudcli-ai/cloudcli@latest sandbox ~/my-project
|
||||
```
|
||||
|
||||
Open **http://localhost:3001**. Set a password on first visit. Start building.
|
||||
|
||||
### Using a different agent
|
||||
|
||||
Store the matching API key and pass `--agent`:
|
||||
|
||||
```bash
|
||||
# OpenAI Codex
|
||||
sbx secret set -g openai
|
||||
npx @cloudcli-ai/cloudcli@latest sandbox ~/my-project --agent codex
|
||||
|
||||
# Gemini CLI
|
||||
sbx secret set -g google
|
||||
npx @cloudcli-ai/cloudcli@latest sandbox ~/my-project --agent gemini
|
||||
```
|
||||
|
||||
### Available templates
|
||||
|
||||
| Agent | Template |
|
||||
|-------|----------|
|
||||
| **Claude Code** (default) | `docker.io/cloudcliai/sandbox:claude-code` |
|
||||
| OpenAI Codex | `docker.io/cloudcliai/sandbox:codex` |
|
||||
| Gemini CLI | `docker.io/cloudcliai/sandbox:gemini` |
|
||||
|
||||
These are used with `--template` when running `sbx` directly (see [Advanced usage](#advanced-usage)).
|
||||
|
||||
## Managing sandboxes
|
||||
|
||||
```bash
|
||||
sbx ls # List all sandboxes
|
||||
sbx stop my-project # Stop (preserves state)
|
||||
sbx start my-project # Restart a stopped sandbox
|
||||
sbx rm my-project # Remove everything
|
||||
sbx exec my-project bash # Open a shell inside the sandbox
|
||||
```
|
||||
|
||||
If you install CloudCLI globally (`npm install -g @cloudcli-ai/cloudcli`), you can also use:
|
||||
|
||||
```bash
|
||||
cloudcli sandbox ls
|
||||
cloudcli sandbox start my-project # Restart and re-launch web UI
|
||||
cloudcli sandbox logs my-project # View server logs
|
||||
```
|
||||
|
||||
## What you get
|
||||
|
||||
- **Chat** — Markdown rendering, code blocks, message history
|
||||
- **Files** — File tree with syntax-highlighted editor
|
||||
- **Git** — Diff viewer, staging, branch switching, commits
|
||||
- **Shell** — Built-in terminal emulator
|
||||
- **MCP** — Configure Model Context Protocol servers visually
|
||||
- **Mobile** — Works on tablet and phone browsers
|
||||
|
||||
Your project directory is mounted bidirectionally — edits propagate in real time, both ways.
|
||||
|
||||
## Configuration
|
||||
|
||||
Set variables at creation time with `--env`:
|
||||
|
||||
```bash
|
||||
npx @cloudcli-ai/cloudcli@latest sandbox ~/my-project --env SERVER_PORT=8080
|
||||
```
|
||||
|
||||
Or inside a running sandbox:
|
||||
|
||||
```bash
|
||||
sbx exec my-project bash -c 'echo "export SERVER_PORT=8080" >> /etc/sandbox-persistent.sh'
|
||||
```
|
||||
|
||||
Restart CloudCLI for changes to take effect:
|
||||
|
||||
```bash
|
||||
sbx exec my-project bash -c 'pkill -f "server/index.js"'
|
||||
sbx exec -d my-project cloudcli start --port 3001
|
||||
```
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `SERVER_PORT` | `3001` | Web UI port |
|
||||
| `HOST` | `0.0.0.0` | Bind address (must be `0.0.0.0` for `sbx ports`) |
|
||||
| `DATABASE_PATH` | `~/.cloudcli/auth.db` | SQLite database location |
|
||||
|
||||
## Advanced usage
|
||||
|
||||
For branch mode, multiple workspaces, memory limits, or the terminal agent experience, use `sbx` with the template:
|
||||
|
||||
```bash
|
||||
# Terminal agent + web UI
|
||||
sbx run --template docker.io/cloudcliai/sandbox:claude-code claude ~/my-project --name my-project
|
||||
sbx ports my-project --publish 3001:3001
|
||||
|
||||
# Branch mode (Git worktree isolation)
|
||||
sbx run --template docker.io/cloudcliai/sandbox:claude-code claude ~/my-project --branch my-feature
|
||||
|
||||
# Multiple workspaces
|
||||
sbx run --template docker.io/cloudcliai/sandbox:claude-code claude ~/project ~/shared-libs:ro
|
||||
|
||||
# Pass a prompt directly
|
||||
sbx run --template docker.io/cloudcliai/sandbox:claude-code claude ~/my-project -- "Fix the auth bug"
|
||||
```
|
||||
|
||||
CloudCLI auto-starts via `.bashrc` when using `sbx run`.
|
||||
|
||||
Full options in the [Docker Sandboxes usage guide](https://docs.docker.com/ai/sandboxes/usage/).
|
||||
|
||||
## Network policies
|
||||
|
||||
Sandboxes restrict outbound access by default. To reach host services from inside the sandbox:
|
||||
|
||||
```bash
|
||||
sbx policy allow network localhost:11434
|
||||
# Inside the sandbox: curl http://host.docker.internal:11434
|
||||
```
|
||||
|
||||
The web UI itself doesn't need a policy — access it via `sbx ports`.
|
||||
|
||||
## Links
|
||||
|
||||
- [CloudCLI Cloud](https://cloudcli.ai) — fully managed, no setup required
|
||||
- [Documentation](https://cloudcli.ai/docs) — full configuration guide
|
||||
- [Discord](https://discord.gg/buxwujPNRE) — community support
|
||||
- [GitHub](https://github.com/siteboon/claudecodeui) — source code and issues
|
||||
|
||||
## License
|
||||
|
||||
AGPL-3.0-or-later
|
||||
11
docker/claude-code/Dockerfile
Normal file
11
docker/claude-code/Dockerfile
Normal file
@@ -0,0 +1,11 @@
|
||||
FROM docker/sandbox-templates:claude-code
|
||||
|
||||
USER root
|
||||
COPY shared/install-cloudcli.sh /tmp/install-cloudcli.sh
|
||||
RUN chmod +x /tmp/install-cloudcli.sh && /tmp/install-cloudcli.sh
|
||||
|
||||
USER agent
|
||||
RUN npm install -g @cloudcli-ai/cloudcli && cloudcli --version
|
||||
|
||||
COPY --chown=agent:agent shared/start-cloudcli.sh /home/agent/.cloudcli-start.sh
|
||||
RUN echo '. ~/.cloudcli-start.sh' >> /home/agent/.bashrc
|
||||
11
docker/codex/Dockerfile
Normal file
11
docker/codex/Dockerfile
Normal file
@@ -0,0 +1,11 @@
|
||||
FROM docker/sandbox-templates:codex
|
||||
|
||||
USER root
|
||||
COPY shared/install-cloudcli.sh /tmp/install-cloudcli.sh
|
||||
RUN chmod +x /tmp/install-cloudcli.sh && /tmp/install-cloudcli.sh
|
||||
|
||||
USER agent
|
||||
RUN npm install -g @cloudcli-ai/cloudcli && cloudcli --version
|
||||
|
||||
COPY --chown=agent:agent shared/start-cloudcli.sh /home/agent/.cloudcli-start.sh
|
||||
RUN echo '. ~/.cloudcli-start.sh' >> /home/agent/.bashrc
|
||||
11
docker/gemini/Dockerfile
Normal file
11
docker/gemini/Dockerfile
Normal file
@@ -0,0 +1,11 @@
|
||||
FROM docker/sandbox-templates:gemini
|
||||
|
||||
USER root
|
||||
COPY shared/install-cloudcli.sh /tmp/install-cloudcli.sh
|
||||
RUN chmod +x /tmp/install-cloudcli.sh && /tmp/install-cloudcli.sh
|
||||
|
||||
USER agent
|
||||
RUN npm install -g @cloudcli-ai/cloudcli && cloudcli --version
|
||||
|
||||
COPY --chown=agent:agent shared/start-cloudcli.sh /home/agent/.cloudcli-start.sh
|
||||
RUN echo '. ~/.cloudcli-start.sh' >> /home/agent/.bashrc
|
||||
11
docker/shared/install-cloudcli.sh
Normal file
11
docker/shared/install-cloudcli.sh
Normal file
@@ -0,0 +1,11 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# Install build tools needed for native modules (node-pty, better-sqlite3, bcrypt)
|
||||
# Node.js is already provided by the sandbox base image
|
||||
apt-get update && apt-get install -y --no-install-recommends \
|
||||
build-essential python3 python3-setuptools \
|
||||
jq ripgrep sqlite3 zip unzip tree vim-tiny
|
||||
|
||||
# Clean up apt cache to reduce image size
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
18
docker/shared/start-cloudcli.sh
Normal file
18
docker/shared/start-cloudcli.sh
Normal file
@@ -0,0 +1,18 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Auto-start CloudCLI server in background if not already running.
|
||||
# This script is sourced from ~/.bashrc on sandbox shell open.
|
||||
|
||||
if ! pgrep -f "server/index.js" > /dev/null 2>&1; then
|
||||
nohup cloudcli start --port 3001 > /tmp/cloudcli-ui.log 2>&1 &
|
||||
disown
|
||||
|
||||
echo ""
|
||||
echo " CloudCLI is starting on port 3001..."
|
||||
echo ""
|
||||
echo " Forward the port from another terminal:"
|
||||
echo " sbx ports <sandbox-name> --publish 3001:3001"
|
||||
echo ""
|
||||
echo " Then open: http://localhost:3001"
|
||||
echo ""
|
||||
fi
|
||||
149
eslint.config.js
149
eslint.config.js
@@ -3,7 +3,9 @@ import tseslint from "typescript-eslint";
|
||||
import react from "eslint-plugin-react";
|
||||
import reactHooks from "eslint-plugin-react-hooks";
|
||||
import reactRefresh from "eslint-plugin-react-refresh";
|
||||
import importX from "eslint-plugin-import-x";
|
||||
import { createNodeResolver, importX } from "eslint-plugin-import-x";
|
||||
import { createTypeScriptImportResolver } from "eslint-import-resolver-typescript";
|
||||
import boundaries from "eslint-plugin-boundaries";
|
||||
import tailwindcss from "eslint-plugin-tailwindcss";
|
||||
import unusedImports from "eslint-plugin-unused-imports";
|
||||
import globals from "globals";
|
||||
@@ -82,7 +84,7 @@ export default tseslint.config(
|
||||
"sibling",
|
||||
"index",
|
||||
],
|
||||
"newlines-between": "never",
|
||||
"newlines-between": "always",
|
||||
},
|
||||
],
|
||||
|
||||
@@ -98,5 +100,148 @@ export default tseslint.config(
|
||||
"no-control-regex": "off",
|
||||
"no-useless-escape": "off",
|
||||
},
|
||||
},
|
||||
{
|
||||
files: ["server/**/*.{js,ts}"], // apply this block only to backend source files
|
||||
ignores: ["server/**/*.d.ts"], // skip generated declaration files in backend linting
|
||||
plugins: {
|
||||
boundaries, // enforce backend architecture boundaries (module-to-module contracts)
|
||||
"import-x": importX, // keep import hygiene rules (duplicates, unresolved paths, etc.)
|
||||
"unused-imports": unusedImports, // remove dead imports/variables from backend files
|
||||
},
|
||||
languageOptions: {
|
||||
parser: tseslint.parser, // parse both JS and TS syntax in backend files
|
||||
parserOptions: {
|
||||
ecmaVersion: "latest", // support modern ECMAScript syntax in backend code
|
||||
sourceType: "module", // treat backend files as ESM modules
|
||||
},
|
||||
globals: {
|
||||
...globals.node, // expose Node.js globals such as process, Buffer, and __dirname equivalents
|
||||
},
|
||||
},
|
||||
settings: {
|
||||
"boundaries/include": ["server/**/*.{js,ts}"], // only analyze dependency boundaries inside backend files
|
||||
"import/resolver": {
|
||||
// boundaries resolves imports through eslint-module-utils, which reads the classic
|
||||
// import/resolver setting instead of import-x/resolver-next.
|
||||
typescript: {
|
||||
project: ["server/tsconfig.json"], // resolve backend aliases using the canonical backend tsconfig
|
||||
alwaysTryTypes: true, // keep normal TS package/type resolution working alongside aliases
|
||||
},
|
||||
node: {
|
||||
extensions: [".mjs", ".cjs", ".js", ".json", ".node", ".ts", ".tsx"], // preserve Node-style fallback resolution for plain files
|
||||
},
|
||||
},
|
||||
"import-x/resolver-next": [
|
||||
// ESLint's import plugin does not read tsconfig path aliases on its own.
|
||||
// This resolver teaches import-x how to understand the backend-only "@/*"
|
||||
// mapping defined in server/tsconfig.json, which fixes false no-unresolved errors in editors.
|
||||
createTypeScriptImportResolver({
|
||||
project: ["server/tsconfig.json"], // point the resolver at the canonical backend tsconfig instead of the frontend one
|
||||
alwaysTryTypes: true, // keep standard TypeScript package resolution working while backend aliases are enabled
|
||||
}),
|
||||
// Keep Node-style resolution available for normal package imports and plain relative JS files.
|
||||
// The TypeScript resolver handles aliases, while the Node resolver preserves the expected fallback behavior.
|
||||
createNodeResolver({
|
||||
extensions: [".mjs", ".cjs", ".js", ".json", ".node", ".ts", ".tsx"],
|
||||
}),
|
||||
],
|
||||
"boundaries/elements": [
|
||||
{
|
||||
type: "backend-shared-type-contract", // shared backend type/interface contracts that modules may consume without creating runtime coupling
|
||||
pattern: [
|
||||
"server/shared/types.{js,ts}",
|
||||
"server/shared/interfaces.{js,ts}",
|
||||
], // keep backend modules on explicit shared contract files for erased imports only
|
||||
mode: "file", // treat each shared contract file itself as the boundary element instead of the whole folder
|
||||
},
|
||||
{
|
||||
type: "backend-shared-utils", // shared backend runtime helpers that modules may import directly
|
||||
pattern: ["server/shared/utils.{js,ts}"], // classify the shared utils file so modules can depend on it explicitly
|
||||
mode: "file",
|
||||
},
|
||||
{
|
||||
type: "backend-legacy-runtime", // legacy runtime persistence modules used while providers migrate into server/modules
|
||||
pattern: [
|
||||
"server/projects.js",
|
||||
"server/sessionManager.js",
|
||||
"server/utils/runtime-paths.js",
|
||||
], // provider history loading still resolves session data through these legacy runtime files
|
||||
mode: "file",
|
||||
},
|
||||
{
|
||||
type: "backend-module", // logical element name used by boundaries rules below
|
||||
pattern: "server/modules/*", // each direct folder in server/modules is treated as one module boundary
|
||||
mode: "folder", // classify dependencies at folder-module level (not per individual file)
|
||||
capture: ["moduleName"], // capture the module folder name for messages/debugging/template use
|
||||
},
|
||||
],
|
||||
},
|
||||
rules: {
|
||||
// --- Unused imports/vars (backend) ---
|
||||
"unused-imports/no-unused-imports": "warn", // warn when imports are not used so they can be cleaned up
|
||||
"unused-imports/no-unused-vars": "off", // keep backend signal focused on dead imports instead of local unused variables
|
||||
|
||||
// --- Import hygiene (backend) ---
|
||||
"import-x/no-duplicates": "warn", // prevent duplicate import lines from the same module
|
||||
"import-x/order": [
|
||||
"warn", // keep backend import grouping/order consistent with the frontend config
|
||||
{
|
||||
groups: [
|
||||
"builtin", // Node built-ins such as fs, path, and url come first
|
||||
"external", // third-party packages come after built-ins
|
||||
"internal", // aliased internal imports such as @/... come next
|
||||
"parent", // ../ imports come after aliased internal imports
|
||||
"sibling", // ./foo imports come after parent imports
|
||||
"index", // bare ./ imports stay last
|
||||
],
|
||||
"newlines-between": "always", // require a blank line between import groups in backend files too
|
||||
},
|
||||
],
|
||||
"import-x/no-unresolved": "error", // fail when an import path cannot be resolved
|
||||
"import-x/no-useless-path-segments": "warn", // prefer cleaner paths (remove redundant ./ and ../ segments)
|
||||
"import-x/no-absolute-path": "error", // disallow absolute filesystem imports in backend files
|
||||
|
||||
// --- General safety/style (backend) ---
|
||||
eqeqeq: ["warn", "always", { null: "ignore" }], // avoid accidental coercion while still allowing x == null checks
|
||||
|
||||
// --- Architecture boundaries (backend modules) ---
|
||||
"boundaries/dependencies": [
|
||||
"error", // treat architecture violations as lint errors
|
||||
{
|
||||
default: "allow", // allow normal imports unless a rule below explicitly disallows them
|
||||
checkInternals: false, // do not apply these cross-module rules to imports inside the same module
|
||||
rules: [
|
||||
{
|
||||
from: { type: "backend-module" }, // modules may depend on shared type/interface contracts only as erased type-only imports
|
||||
to: { type: "backend-shared-type-contract" },
|
||||
disallow: {
|
||||
dependency: { kind: ["value", "typeof"] },
|
||||
}, // block runtime imports so shared contracts stay compile-time only instead of becoming hidden shared modules
|
||||
message:
|
||||
"Backend modules may only use `import type` when importing from server/shared/types.ts or server/shared/interfaces.ts.",
|
||||
},
|
||||
{
|
||||
to: { type: "backend-module" }, // when importing anything that belongs to another backend module
|
||||
disallow: { to: { internalPath: "**" } }, // block all direct/deep imports into module internals by default
|
||||
message:
|
||||
"Cross-module imports must go through that module's barrel file (server/modules/<module>/index.ts or index.js).", // explicit error message for architecture violations
|
||||
},
|
||||
{
|
||||
to: { type: "backend-module" }, // same target scope as the disallow rule above
|
||||
allow: {
|
||||
to: {
|
||||
internalPath: [
|
||||
"index", // allow extensionless barrel imports resolved as module root index
|
||||
"index.{js,mjs,cjs,ts,tsx}", // allow explicit index.* barrel file imports
|
||||
],
|
||||
},
|
||||
}, // re-allow only public module entry points (barrel files)
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"boundaries/no-unknown": "error", // fail fast if boundaries cannot classify a dependency, which prevents silent rule bypasses
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
3753
package-lock.json
generated
3753
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
72
package.json
72
package.json
@@ -1,17 +1,17 @@
|
||||
{
|
||||
"name": "@siteboon/claude-code-ui",
|
||||
"version": "1.26.2",
|
||||
"name": "@cloudcli-ai/cloudcli",
|
||||
"version": "1.31.5",
|
||||
"description": "A web-based UI for Claude Code CLI",
|
||||
"type": "module",
|
||||
"main": "server/index.js",
|
||||
"main": "dist-server/server/index.js",
|
||||
"bin": {
|
||||
"claude-code-ui": "server/cli.js",
|
||||
"cloudcli": "server/cli.js"
|
||||
"cloudcli": "dist-server/server/cli.js"
|
||||
},
|
||||
"files": [
|
||||
"server/",
|
||||
"shared/",
|
||||
"dist/",
|
||||
"dist-server/",
|
||||
"scripts/",
|
||||
"README.md"
|
||||
],
|
||||
@@ -24,31 +24,48 @@
|
||||
"url": "https://github.com/siteboon/claudecodeui/issues"
|
||||
},
|
||||
"scripts": {
|
||||
"dev": "concurrently --kill-others \"npm run server\" \"npm run client\"",
|
||||
"server": "node server/index.js",
|
||||
"dev": "concurrently --kill-others \"npm run server:dev\" \"npm run client\"",
|
||||
"server": "node dist-server/server/index.js",
|
||||
"server:dev": "tsx --tsconfig server/tsconfig.json server/index.js",
|
||||
"server:dev-watch": "tsx watch --tsconfig server/tsconfig.json server/index.js",
|
||||
"client": "vite",
|
||||
"build": "vite build",
|
||||
"build": "npm run build:client && npm run build:server",
|
||||
"build:client": "vite build",
|
||||
"prebuild:server": "node -e \"require('node:fs').rmSync('dist-server', { recursive: true, force: true })\"",
|
||||
"build:server": "tsc -p server/tsconfig.json && tsc-alias -p server/tsconfig.json",
|
||||
"preview": "vite preview",
|
||||
"typecheck": "tsc --noEmit -p tsconfig.json",
|
||||
"lint": "eslint src/",
|
||||
"lint:fix": "eslint src/ --fix",
|
||||
"typecheck": "tsc --noEmit -p tsconfig.json && tsc --noEmit -p server/tsconfig.json",
|
||||
"lint": "eslint src/ server/",
|
||||
"lint:fix": "eslint src/ server/ --fix",
|
||||
"start": "npm run build && npm run server",
|
||||
"release": "./release.sh",
|
||||
"prepublishOnly": "npm run build",
|
||||
"postinstall": "node scripts/fix-node-pty.js",
|
||||
"prepare": "husky"
|
||||
"prepare": "husky",
|
||||
"update:platform": "./update-platform.sh"
|
||||
},
|
||||
"keywords": [
|
||||
"claude code",
|
||||
"ai",
|
||||
"claude-code",
|
||||
"claude-code-ui",
|
||||
"cloudcli",
|
||||
"codex",
|
||||
"gemini",
|
||||
"gemini-cli",
|
||||
"cursor",
|
||||
"cursor-cli",
|
||||
"anthropic",
|
||||
"openai",
|
||||
"google",
|
||||
"coding-agent",
|
||||
"web-ui",
|
||||
"ui",
|
||||
"mobile"
|
||||
"mobile IDE"
|
||||
],
|
||||
"author": "CloudCLI UI Contributors",
|
||||
"license": "GPL-3.0",
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"dependencies": {
|
||||
"@anthropic-ai/claude-agent-sdk": "^0.2.59",
|
||||
"@anthropic-ai/claude-agent-sdk": "^0.2.116",
|
||||
"@codemirror/lang-css": "^6.3.1",
|
||||
"@codemirror/lang-html": "^6.4.9",
|
||||
"@codemirror/lang-javascript": "^6.2.4",
|
||||
@@ -59,10 +76,11 @@
|
||||
"@codemirror/theme-one-dark": "^6.1.2",
|
||||
"@iarna/toml": "^2.2.5",
|
||||
"@octokit/rest": "^22.0.0",
|
||||
"@openai/codex-sdk": "^0.101.0",
|
||||
"@openai/codex-sdk": "^0.125.0",
|
||||
"@replit/codemirror-minimap": "^0.5.2",
|
||||
"@tailwindcss/typography": "^0.5.16",
|
||||
"@uiw/react-codemirror": "^4.23.13",
|
||||
"@vscode/ripgrep": "^1.17.1",
|
||||
"@xterm/addon-clipboard": "^0.1.0",
|
||||
"@xterm/addon-fit": "^0.10.0",
|
||||
"@xterm/addon-web-links": "^0.11.0",
|
||||
@@ -73,6 +91,7 @@
|
||||
"chokidar": "^4.0.3",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"cmdk": "^1.1.1",
|
||||
"cors": "^2.8.5",
|
||||
"cross-spawn": "^7.0.3",
|
||||
"express": "^4.18.2",
|
||||
@@ -87,7 +106,7 @@
|
||||
"mime-types": "^3.0.1",
|
||||
"multer": "^2.0.1",
|
||||
"node-fetch": "^2.7.0",
|
||||
"node-pty": "^1.1.0-beta34",
|
||||
"node-pty": "^1.2.0-beta.12",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-dropzone": "^14.2.3",
|
||||
@@ -100,25 +119,29 @@
|
||||
"rehype-raw": "^7.0.0",
|
||||
"remark-gfm": "^4.0.0",
|
||||
"remark-math": "^6.0.0",
|
||||
"sqlite": "^5.1.1",
|
||||
"sqlite3": "^5.1.7",
|
||||
"tailwind-merge": "^3.3.1",
|
||||
"web-push": "^3.6.7",
|
||||
"ws": "^8.14.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@commitlint/cli": "^20.4.3",
|
||||
"@commitlint/config-conventional": "^20.4.3",
|
||||
"@commitlint/cli": "^20.5.0",
|
||||
"@commitlint/config-conventional": "^20.5.0",
|
||||
"@eslint/js": "^9.39.3",
|
||||
"@release-it/conventional-changelog": "^10.0.5",
|
||||
"@types/better-sqlite3": "^7.6.13",
|
||||
"@types/cross-spawn": "^6.0.6",
|
||||
"@types/express": "^5.0.6",
|
||||
"@types/node": "^22.19.7",
|
||||
"@types/react": "^18.2.43",
|
||||
"@types/react-dom": "^18.2.17",
|
||||
"@types/ws": "^8.18.1",
|
||||
"@vitejs/plugin-react": "^4.6.0",
|
||||
"auto-changelog": "^2.5.0",
|
||||
"autoprefixer": "^10.4.16",
|
||||
"concurrently": "^8.2.2",
|
||||
"eslint": "^9.39.3",
|
||||
"eslint-import-resolver-typescript": "^4.4.4",
|
||||
"eslint-plugin-boundaries": "^6.0.2",
|
||||
"eslint-plugin-import-x": "^4.16.1",
|
||||
"eslint-plugin-react": "^7.37.5",
|
||||
"eslint-plugin-react-hooks": "^7.0.1",
|
||||
@@ -133,11 +156,14 @@
|
||||
"release-it": "^19.0.5",
|
||||
"sharp": "^0.34.2",
|
||||
"tailwindcss": "^3.4.0",
|
||||
"tsc-alias": "^1.8.16",
|
||||
"tsx": "^4.21.0",
|
||||
"typescript": "^5.9.3",
|
||||
"typescript-eslint": "^8.56.1",
|
||||
"vite": "^7.0.4"
|
||||
},
|
||||
"lint-staged": {
|
||||
"src/**/*.{ts,tsx,js,jsx}": "eslint"
|
||||
"src/**/*.{ts,tsx,js,jsx}": "eslint",
|
||||
"server/**/*.{js,ts}": "eslint"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Claude Code UI - API Documentation</title>
|
||||
<title>CloudCLI - API Documentation</title>
|
||||
<link rel="icon" type="image/svg+xml" href="/favicon.svg" />
|
||||
<link rel="icon" type="image/png" href="/favicon.png" />
|
||||
|
||||
@@ -418,7 +418,7 @@
|
||||
</svg>
|
||||
</div>
|
||||
<div class="brand-text">
|
||||
<h1>Claude Code UI</h1>
|
||||
<h1>CloudCLI</h1>
|
||||
<div class="subtitle">API Documentation</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
// Service Worker for Claude Code UI PWA
|
||||
// Service Worker for CloudCLI PWA
|
||||
// Cache only manifest (needed for PWA install). HTML and JS are never pre-cached
|
||||
// so a rebuild + refresh always picks up the latest assets.
|
||||
const CACHE_NAME = 'claude-ui-v2';
|
||||
@@ -79,7 +79,7 @@ self.addEventListener('push', event => {
|
||||
try {
|
||||
payload = event.data.json();
|
||||
} catch {
|
||||
payload = { title: 'Claude Code UI', body: event.data.text() };
|
||||
payload = { title: 'CloudCLI', body: event.data.text() };
|
||||
}
|
||||
|
||||
const options = {
|
||||
@@ -92,7 +92,7 @@ self.addEventListener('push', event => {
|
||||
};
|
||||
|
||||
event.waitUntil(
|
||||
self.registration.showNotification(payload.title || 'Claude Code UI', options)
|
||||
self.registration.showNotification(payload.title || 'CloudCLI', options)
|
||||
);
|
||||
});
|
||||
|
||||
|
||||
248
redirect-package/README.md
Normal file
248
redirect-package/README.md
Normal file
@@ -0,0 +1,248 @@
|
||||
<div align="center">
|
||||
|
||||
> ## This package has moved to [`@cloudcli-ai/cloudcli`](https://www.npmjs.com/package/@cloudcli-ai/cloudcli)
|
||||
>
|
||||
> ```bash
|
||||
> npm install -g @cloudcli-ai/cloudcli
|
||||
> ```
|
||||
>
|
||||
> This package (`@siteboon/claude-code-ui`) is now a thin wrapper that installs the new package automatically.
|
||||
> For new installations, use `@cloudcli-ai/cloudcli` directly.
|
||||
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
<div align="center">
|
||||
<img src="https://raw.githubusercontent.com/siteboon/claudecodeui/main/public/logo.svg" alt="CloudCLI UI" width="64" height="64">
|
||||
<h1>Cloud CLI (aka Claude Code UI)</h1>
|
||||
<p>A desktop and mobile UI for <a href="https://docs.anthropic.com/en/docs/claude-code">Claude Code</a>, <a href="https://docs.cursor.com/en/cli/overview">Cursor CLI</a>, <a href="https://developers.openai.com/codex">Codex</a>, and <a href="https://geminicli.com/">Gemini-CLI</a>.<br>Use it locally or remotely to view your active projects and sessions from everywhere.</p>
|
||||
</div>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://cloudcli.ai">CloudCLI Cloud</a> · <a href="https://cloudcli.ai/docs">Documentation</a> · <a href="https://discord.gg/buxwujPNRE">Discord</a> · <a href="https://github.com/siteboon/claudecodeui/issues">Bug Reports</a> · <a href="https://github.com/siteboon/claudecodeui/blob/main/CONTRIBUTING.md">Contributing</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://cloudcli.ai"><img src="https://img.shields.io/badge/☁️_CloudCLI_Cloud-Try_Now-0066FF?style=for-the-badge" alt="CloudCLI Cloud"></a>
|
||||
<a href="https://discord.gg/buxwujPNRE"><img src="https://img.shields.io/badge/Discord-Join%20Community-5865F2?style=for-the-badge&logo=discord&logoColor=white" alt="Join our Discord"></a>
|
||||
<br><br>
|
||||
<a href="https://trendshift.io/repositories/15586" target="_blank"><img src="https://trendshift.io/api/badge/repositories/15586" alt="siteboon%2Fclaudecodeui | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
</p>
|
||||
|
||||
---
|
||||
|
||||
## Screenshots
|
||||
|
||||
<div align="center">
|
||||
|
||||
<table>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<h3>Desktop View</h3>
|
||||
<img src="https://raw.githubusercontent.com/siteboon/claudecodeui/main/public/screenshots/desktop-main.png" alt="Desktop Interface" width="400">
|
||||
<br>
|
||||
<em>Main interface showing project overview and chat</em>
|
||||
</td>
|
||||
<td align="center">
|
||||
<h3>Mobile Experience</h3>
|
||||
<img src="https://raw.githubusercontent.com/siteboon/claudecodeui/main/public/screenshots/mobile-chat.png" alt="Mobile Interface" width="250">
|
||||
<br>
|
||||
<em>Responsive mobile design with touch navigation</em>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center" colspan="2">
|
||||
<h3>CLI Selection</h3>
|
||||
<img src="https://raw.githubusercontent.com/siteboon/claudecodeui/main/public/screenshots/cli-selection.png" alt="CLI Selection" width="400">
|
||||
<br>
|
||||
<em>Select between Claude Code, Gemini, Cursor CLI and Codex</em>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
## Features
|
||||
|
||||
- **Responsive Design** - Works seamlessly across desktop, tablet, and mobile so you can also use Agents from mobile
|
||||
- **Interactive Chat Interface** - Built-in chat interface for seamless communication with the Agents
|
||||
- **Integrated Shell Terminal** - Direct access to the Agents CLI through built-in shell functionality
|
||||
- **File Explorer** - Interactive file tree with syntax highlighting and live editing
|
||||
- **Git Explorer** - View, stage and commit your changes. You can also switch branches
|
||||
- **Session Management** - Resume conversations, manage multiple sessions, and track history
|
||||
- **Plugin System** - Extend CloudCLI with custom plugins — add new tabs, backend services, and integrations. [Build your own →](https://github.com/cloudcli-ai/cloudcli-plugin-starter)
|
||||
- **TaskMaster AI Integration** *(Optional)* - Advanced project management with AI-powered task planning, PRD parsing, and workflow automation
|
||||
- **Model Compatibility** - Works with Claude, GPT, and Gemini model families (see [`shared/modelConstants.js`](https://github.com/siteboon/claudecodeui/blob/main/shared/modelConstants.js) for the full list of supported models)
|
||||
|
||||
|
||||
## Quick Start
|
||||
|
||||
### CloudCLI Cloud (Recommended)
|
||||
|
||||
The fastest way to get started — no local setup required. Get a fully managed, containerized development environment accessible from the web, mobile app, API, or your favorite IDE.
|
||||
|
||||
**[Get started with CloudCLI Cloud](https://cloudcli.ai)**
|
||||
|
||||
|
||||
### Self-Hosted (Open source)
|
||||
|
||||
Try CloudCLI UI instantly with **npx** (requires **Node.js** v22+):
|
||||
|
||||
```
|
||||
npx @cloudcli-ai/cloudcli
|
||||
```
|
||||
|
||||
Or install **globally** for regular use:
|
||||
|
||||
```
|
||||
npm install -g @cloudcli-ai/cloudcli
|
||||
cloudcli
|
||||
```
|
||||
|
||||
Open `http://localhost:3001` — all your existing sessions are discovered automatically.
|
||||
|
||||
Visit the **[documentation →](https://cloudcli.ai/docs)** for more full configuration options, PM2, remote server setup and more
|
||||
|
||||
|
||||
---
|
||||
|
||||
## Which option is right for you?
|
||||
|
||||
CloudCLI UI is the open source UI layer that powers CloudCLI Cloud. You can self-host it on your own machine, or use CloudCLI Cloud which builds on top of it with a full managed cloud environment, team features, and deeper integrations.
|
||||
|
||||
| | CloudCLI UI (Self-hosted) | CloudCLI Cloud |
|
||||
|---|---|---|
|
||||
| **Best for** | Developers who want a full UI for local agent sessions on their own machine | Teams and developers who want agents running in the cloud, accessible from anywhere |
|
||||
| **How you access it** | Browser via `[yourip]:port` | Browser, any IDE, REST API, n8n |
|
||||
| **Setup** | `npx @cloudcli-ai/cloudcli` | No setup required |
|
||||
| **Machine needs to stay on** | Yes | No |
|
||||
| **Mobile access** | Any browser on your network | Any device, native app coming |
|
||||
| **Sessions available** | All sessions auto-discovered from `~/.claude` | All sessions within your cloud environment |
|
||||
| **Agents supported** | Claude Code, Cursor CLI, Codex, Gemini CLI | Claude Code, Cursor CLI, Codex, Gemini CLI |
|
||||
| **File explorer and Git** | Yes, built into the UI | Yes, built into the UI |
|
||||
| **MCP configuration** | Managed via UI, synced with your local `~/.claude` config | Managed via UI |
|
||||
| **IDE access** | Your local IDE | Any IDE connected to your cloud environment |
|
||||
| **REST API** | Yes | Yes |
|
||||
| **n8n node** | No | Yes |
|
||||
| **Team sharing** | No | Yes |
|
||||
| **Platform cost** | Free, open source | Starts at $7/month |
|
||||
|
||||
> Both options use your own AI subscriptions (Claude, Cursor, etc.) — CloudCLI provides the environment, not the AI.
|
||||
|
||||
---
|
||||
|
||||
## Security & Tools Configuration
|
||||
|
||||
**Important Notice**: All Claude Code tools are **disabled by default**. This prevents potentially harmful operations from running automatically.
|
||||
|
||||
### Enabling Tools
|
||||
|
||||
To use Claude Code's full functionality, you'll need to manually enable tools:
|
||||
|
||||
1. **Open Tools Settings** - Click the gear icon in the sidebar
|
||||
2. **Enable Selectively** - Turn on only the tools you need
|
||||
3. **Apply Settings** - Your preferences are saved locally
|
||||
|
||||
**Recommended approach**: Start with basic tools enabled and add more as needed. You can always adjust these settings later.
|
||||
|
||||
---
|
||||
|
||||
## Plugins
|
||||
|
||||
CloudCLI has a plugin system that lets you add custom tabs with their own frontend UI and optional Node.js backend. Install plugins from git repos directly in **Settings > Plugins**, or build your own.
|
||||
|
||||
### Available Plugins
|
||||
|
||||
| Plugin | Description |
|
||||
|---|---|
|
||||
| **[Project Stats](https://github.com/cloudcli-ai/cloudcli-plugin-starter)** | Shows file counts, lines of code, file-type breakdown, largest files, and recently modified files for your current project |
|
||||
| **[Web Terminal](https://github.com/cloudcli-ai/cloudcli-plugin-terminal)** | Full xterm.js terminal with multi-tab support|
|
||||
|
||||
### Build Your Own
|
||||
|
||||
**[Plugin Starter Template →](https://github.com/cloudcli-ai/cloudcli-plugin-starter)** — fork this repo to create your own plugin. It includes a working example with frontend rendering, live context updates, and RPC communication to a backend server.
|
||||
|
||||
**[Plugin Documentation →](https://cloudcli.ai/docs/plugin-overview)** — full guide to the plugin API, manifest format, security model, and more.
|
||||
|
||||
---
|
||||
## FAQ
|
||||
|
||||
<details>
|
||||
<summary>How is this different from Claude Code Remote Control?</summary>
|
||||
|
||||
Claude Code Remote Control lets you send messages to a session already running in your local terminal. Your machine has to stay on, your terminal has to stay open, and sessions time out after roughly 10 minutes without a network connection.
|
||||
|
||||
CloudCLI UI and CloudCLI Cloud extend Claude Code rather than sit alongside it — your MCP servers, permissions, settings, and sessions are the exact same ones Claude Code uses natively. Nothing is duplicated or managed separately.
|
||||
|
||||
Here's what that means in practice:
|
||||
|
||||
- **All your sessions, not just one** — CloudCLI UI auto-discovers every session from your `~/.claude` folder. Remote Control only exposes the single active session to make it available in the Claude mobile app.
|
||||
- **Your settings are your settings** — MCP servers, tool permissions, and project config you change in CloudCLI UI are written directly to your Claude Code config and take effect immediately, and vice versa.
|
||||
- **Works with more agents** — Claude Code, Cursor CLI, Codex, and Gemini CLI, not just Claude Code.
|
||||
- **Full UI, not just a chat window** — file explorer, Git integration, MCP management, and a shell terminal are all built in.
|
||||
- **CloudCLI Cloud runs in the cloud** — close your laptop, the agent keeps running. No terminal to babysit, no machine to keep awake.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Do I need to pay for an AI subscription separately?</summary>
|
||||
|
||||
Yes. CloudCLI provides the environment, not the AI. You bring your own Claude, Cursor, Codex, or Gemini subscription. CloudCLI Cloud starts at $7/month for the hosted environment on top of that.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Can I use CloudCLI UI on my phone?</summary>
|
||||
|
||||
Yes. For self-hosted, run the server on your machine and open `[yourip]:port` in any browser on your network. For CloudCLI Cloud, open it from any device — no VPN, no port forwarding, no setup. A native app is also in the works.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Will changes I make in the UI affect my local Claude Code setup?</summary>
|
||||
|
||||
Yes, for self-hosted. CloudCLI UI reads from and writes to the same `~/.claude` config that Claude Code uses natively. MCP servers you add via the UI show up in Claude Code immediately and vice versa.
|
||||
|
||||
</details>
|
||||
|
||||
---
|
||||
|
||||
## Community & Support
|
||||
|
||||
- **[Documentation](https://cloudcli.ai/docs)** — installation, configuration, features, and troubleshooting
|
||||
- **[Discord](https://discord.gg/buxwujPNRE)** — get help and connect with other users
|
||||
- **[GitHub Issues](https://github.com/siteboon/claudecodeui/issues)** — bug reports and feature requests
|
||||
- **[Contributing Guide](https://github.com/siteboon/claudecodeui/blob/main/CONTRIBUTING.md)** — how to contribute to the project
|
||||
|
||||
## License
|
||||
|
||||
GNU Affero General Public License v3.0 or later (AGPL-3.0-or-later) — see [LICENSE](https://github.com/siteboon/claudecodeui/blob/main/LICENSE) for the full text, including additional terms under Section 7.
|
||||
|
||||
This project is open source and free to use, modify, and distribute under the AGPL-3.0-or-later license. If you modify this software and run it as a network service, you must make your modified source code available to users of that service.
|
||||
|
||||
CloudCLI UI - (https://cloudcli.ai).
|
||||
|
||||
## Acknowledgments
|
||||
|
||||
### Built With
|
||||
- **[Claude Code](https://docs.anthropic.com/en/docs/claude-code)** - Anthropic's official CLI
|
||||
- **[Cursor CLI](https://docs.cursor.com/en/cli/overview)** - Cursor's official CLI
|
||||
- **[Codex](https://developers.openai.com/codex)** - OpenAI Codex
|
||||
- **[Gemini-CLI](https://geminicli.com/)** - Google Gemini CLI
|
||||
- **[React](https://react.dev/)** - User interface library
|
||||
- **[Vite](https://vitejs.dev/)** - Fast build tool and dev server
|
||||
- **[Tailwind CSS](https://tailwindcss.com/)** - Utility-first CSS framework
|
||||
- **[CodeMirror](https://codemirror.net/)** - Advanced code editor
|
||||
- **[TaskMaster AI](https://github.com/eyaltoledano/claude-task-master)** *(Optional)* - AI-powered project management and task planning
|
||||
|
||||
|
||||
### Sponsors
|
||||
- [Siteboon - AI powered website builder](https://siteboon.ai)
|
||||
---
|
||||
|
||||
<div align="center">
|
||||
<strong>Made with care for the Claude Code, Cursor and Codex community.</strong>
|
||||
</div>
|
||||
2
redirect-package/bin.js
Normal file
2
redirect-package/bin.js
Normal file
@@ -0,0 +1,2 @@
|
||||
#!/usr/bin/env node
|
||||
import('@cloudcli-ai/cloudcli/dist-server/server/cli.js');
|
||||
2
redirect-package/index.js
Normal file
2
redirect-package/index.js
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from '@cloudcli-ai/cloudcli';
|
||||
export { default } from '@cloudcli-ai/cloudcli';
|
||||
43
redirect-package/package.json
Normal file
43
redirect-package/package.json
Normal file
@@ -0,0 +1,43 @@
|
||||
{
|
||||
"name": "@siteboon/claude-code-ui",
|
||||
"version": "2.0.0",
|
||||
"description": "This package has moved to @cloudcli-ai/cloudcli",
|
||||
"type": "module",
|
||||
"main": "index.js",
|
||||
"bin": {
|
||||
"claude-code-ui": "./bin.js",
|
||||
"cloudcli": "./bin.js"
|
||||
},
|
||||
"homepage": "https://cloudcli.ai",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/siteboon/claudecodeui.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/siteboon/claudecodeui/issues"
|
||||
},
|
||||
"keywords": [
|
||||
"claude code",
|
||||
"claude-code",
|
||||
"claude-code-ui",
|
||||
"cloudcli",
|
||||
"codex",
|
||||
"gemini",
|
||||
"gemini-cli",
|
||||
"cursor",
|
||||
"cursor-cli",
|
||||
"anthropic",
|
||||
"openai",
|
||||
"google",
|
||||
"coding-agent",
|
||||
"web-ui",
|
||||
"ui",
|
||||
"mobile IDE"
|
||||
],
|
||||
"author": "CloudCLI UI Contributors",
|
||||
"dependencies": {
|
||||
"@cloudcli-ai/cloudcli": "*"
|
||||
},
|
||||
"deprecated": "This package has been renamed to @cloudcli-ai/cloudcli. Please install @cloudcli-ai/cloudcli instead.",
|
||||
"license": "AGPL-3.0-or-later"
|
||||
}
|
||||
@@ -24,15 +24,16 @@ import {
|
||||
notifyRunStopped,
|
||||
notifyUserIfEnabled
|
||||
} from './services/notification-orchestrator.js';
|
||||
import { claudeAdapter } from './providers/claude/adapter.js';
|
||||
import { createNormalizedMessage } from './providers/types.js';
|
||||
import { sessionsService } from './modules/providers/services/sessions.service.js';
|
||||
import { providerAuthService } from './modules/providers/services/provider-auth.service.js';
|
||||
import { createNormalizedMessage } from './shared/utils.js';
|
||||
|
||||
const activeSessions = new Map();
|
||||
const pendingToolApprovals = new Map();
|
||||
|
||||
const TOOL_APPROVAL_TIMEOUT_MS = parseInt(process.env.CLAUDE_TOOL_APPROVAL_TIMEOUT_MS, 10) || 55000;
|
||||
|
||||
const TOOLS_REQUIRING_INTERACTION = new Set(['AskUserQuestion']);
|
||||
const TOOLS_REQUIRING_INTERACTION = new Set(['AskUserQuestion', 'ExitPlanMode']);
|
||||
|
||||
function createRequestId() {
|
||||
if (typeof crypto.randomUUID === 'function') {
|
||||
@@ -148,6 +149,16 @@ function mapCliOptionsToSDK(options = {}) {
|
||||
|
||||
const sdkOptions = {};
|
||||
|
||||
// Forward all host env vars (e.g. ANTHROPIC_BASE_URL) to the subprocess.
|
||||
// Since SDK 0.2.113, options.env replaces process.env instead of overlaying it.
|
||||
sdkOptions.env = { ...process.env };
|
||||
|
||||
// Use CLAUDE_CLI_PATH if explicitly set, otherwise fall back to 'claude' on PATH.
|
||||
// The SDK 0.2.113+ looks for a bundled native binary optional dep by default;
|
||||
// this fallback ensures users who installed via the official installer still work
|
||||
// even when npm prune --production has removed those optional deps.
|
||||
sdkOptions.pathToClaudeCodeExecutable = process.env.CLAUDE_CLI_PATH || 'claude';
|
||||
|
||||
// Map working directory
|
||||
if (cwd) {
|
||||
sdkOptions.cwd = cwd;
|
||||
@@ -649,7 +660,7 @@ async function queryClaudeSDK(command, options = {}, ws) {
|
||||
const sid = capturedSessionId || sessionId || null;
|
||||
|
||||
// Use adapter to normalize SDK events into NormalizedMessage[]
|
||||
const normalized = claudeAdapter.normalizeMessage(transformedMessage, sid);
|
||||
const normalized = sessionsService.normalizeMessage('claude', transformedMessage, sid);
|
||||
for (const msg of normalized) {
|
||||
// Preserve parentToolUseId from SDK wrapper for subagent tool grouping
|
||||
if (transformedMessage.parentToolUseId && !msg.parentToolUseId) {
|
||||
@@ -701,8 +712,14 @@ async function queryClaudeSDK(command, options = {}, ws) {
|
||||
// Clean up temporary image files on error
|
||||
await cleanupTempFiles(tempImagePaths, tempDir);
|
||||
|
||||
// Check if Claude CLI is installed for a clearer error message
|
||||
const installed = await providerAuthService.isProviderInstalled('claude');
|
||||
const errorContent = !installed
|
||||
? 'Claude Code is not installed. Please install it first: https://docs.anthropic.com/en/docs/claude-code'
|
||||
: error.message;
|
||||
|
||||
// Send error to WebSocket
|
||||
ws.send(createNormalizedMessage({ kind: 'error', content: error.message, sessionId: capturedSessionId || sessionId || null, provider: 'claude' }));
|
||||
ws.send(createNormalizedMessage({ kind: 'error', content: errorContent, sessionId: capturedSessionId || sessionId || null, provider: 'claude' }));
|
||||
notifyRunFailed({
|
||||
userId: ws?.userId || null,
|
||||
provider: 'claude',
|
||||
@@ -710,8 +727,6 @@ async function queryClaudeSDK(command, options = {}, ws) {
|
||||
sessionName: sessionSummary,
|
||||
error
|
||||
});
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
399
server/cli.js
399
server/cli.js
@@ -1,12 +1,13 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Claude Code UI CLI
|
||||
* CloudCLI CLI
|
||||
*
|
||||
* Provides command-line utilities for managing Claude Code UI
|
||||
* Provides command-line utilities for managing CloudCLI
|
||||
*
|
||||
* Commands:
|
||||
* (no args) - Start the server (default)
|
||||
* start - Start the server
|
||||
* sandbox - Manage Docker sandbox environments
|
||||
* status - Show configuration and data locations
|
||||
* help - Show help information
|
||||
* version - Show version information
|
||||
@@ -15,11 +16,12 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import os from 'os';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { dirname } from 'path';
|
||||
import { findAppRoot, getModuleDir } from './utils/runtime-paths.js';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
const __dirname = getModuleDir(import.meta.url);
|
||||
// The CLI is compiled into dist-server/server, but it still needs to read the top-level
|
||||
// package.json and .env file. Resolving the app root once keeps those lookups stable.
|
||||
const APP_ROOT = findAppRoot(__dirname);
|
||||
|
||||
// ANSI color codes for terminal output
|
||||
const colors = {
|
||||
@@ -49,13 +51,16 @@ const c = {
|
||||
};
|
||||
|
||||
// Load package.json for version info
|
||||
const packageJsonPath = path.join(__dirname, '../package.json');
|
||||
const packageJsonPath = path.join(APP_ROOT, 'package.json');
|
||||
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
|
||||
// Match the runtime fallback in load-env.js so "cloudcli status" reports the same default
|
||||
// database location that the backend will actually use when no DATABASE_PATH is configured.
|
||||
const DEFAULT_DATABASE_PATH = path.join(os.homedir(), '.cloudcli', 'auth.db');
|
||||
|
||||
// Load environment variables from .env file if it exists
|
||||
function loadEnvFile() {
|
||||
try {
|
||||
const envPath = path.join(__dirname, '../.env');
|
||||
const envPath = path.join(APP_ROOT, '.env');
|
||||
const envFile = fs.readFileSync(envPath, 'utf8');
|
||||
envFile.split('\n').forEach(line => {
|
||||
const trimmedLine = line.trim();
|
||||
@@ -74,17 +79,17 @@ function loadEnvFile() {
|
||||
// Get the database path (same logic as db.js)
|
||||
function getDatabasePath() {
|
||||
loadEnvFile();
|
||||
return process.env.DATABASE_PATH || path.join(__dirname, 'database', 'auth.db');
|
||||
return process.env.DATABASE_PATH || DEFAULT_DATABASE_PATH;
|
||||
}
|
||||
|
||||
// Get the installation directory
|
||||
function getInstallDir() {
|
||||
return path.join(__dirname, '..');
|
||||
return APP_ROOT;
|
||||
}
|
||||
|
||||
// Show status command
|
||||
function showStatus() {
|
||||
console.log(`\n${c.bright('Claude Code UI - Status')}\n`);
|
||||
console.log(`\n${c.bright('CloudCLI UI - Status')}\n`);
|
||||
console.log(c.dim('═'.repeat(60)));
|
||||
|
||||
// Version info
|
||||
@@ -123,7 +128,7 @@ function showStatus() {
|
||||
console.log(` Status: ${projectsExists ? c.ok('[OK] Exists') : c.warn('[WARN] Not found')}`);
|
||||
|
||||
// Config file location
|
||||
const envFilePath = path.join(__dirname, '../.env');
|
||||
const envFilePath = path.join(APP_ROOT, '.env');
|
||||
const envExists = fs.existsSync(envFilePath);
|
||||
console.log(`\n${c.info('[INFO]')} Configuration File:`);
|
||||
console.log(` ${c.dim(envFilePath)}`);
|
||||
@@ -141,7 +146,7 @@ function showStatus() {
|
||||
function showHelp() {
|
||||
console.log(`
|
||||
╔═══════════════════════════════════════════════════════════════╗
|
||||
║ Claude Code UI - Command Line Tool ║
|
||||
║ CloudCLI - Command Line Tool ║
|
||||
╚═══════════════════════════════════════════════════════════════╝
|
||||
|
||||
Usage:
|
||||
@@ -149,7 +154,8 @@ Usage:
|
||||
cloudcli [command] [options]
|
||||
|
||||
Commands:
|
||||
start Start the Claude Code UI server (default)
|
||||
start Start the CloudCLI server (default)
|
||||
sandbox Manage Docker sandbox environments
|
||||
status Show configuration and data locations
|
||||
update Update to the latest version
|
||||
help Show this help information
|
||||
@@ -164,8 +170,7 @@ Options:
|
||||
Examples:
|
||||
$ cloudcli # Start with defaults
|
||||
$ cloudcli --port 8080 # Start on port 8080
|
||||
$ cloudcli -p 3000 # Short form for port
|
||||
$ cloudcli start --port 4000 # Explicit start command
|
||||
$ cloudcli sandbox ~/my-project # Run in a Docker sandbox
|
||||
$ cloudcli status # Show configuration
|
||||
|
||||
Environment Variables:
|
||||
@@ -203,7 +208,7 @@ function isNewerVersion(v1, v2) {
|
||||
async function checkForUpdates(silent = false) {
|
||||
try {
|
||||
const { execSync } = await import('child_process');
|
||||
const latestVersion = execSync('npm show @siteboon/claude-code-ui version', { encoding: 'utf8' }).trim();
|
||||
const latestVersion = execSync('npm show @cloudcli-ai/cloudcli version', { encoding: 'utf8' }).trim();
|
||||
const currentVersion = packageJson.version;
|
||||
|
||||
if (isNewerVersion(latestVersion, currentVersion)) {
|
||||
@@ -236,14 +241,361 @@ async function updatePackage() {
|
||||
}
|
||||
|
||||
console.log(`${c.info('[INFO]')} Updating from ${currentVersion} to ${latestVersion}...`);
|
||||
execSync('npm update -g @siteboon/claude-code-ui', { stdio: 'inherit' });
|
||||
execSync('npm update -g @cloudcli-ai/cloudcli', { stdio: 'inherit' });
|
||||
console.log(`${c.ok('[OK]')} Update complete! Restart cloudcli to use the new version.`);
|
||||
} catch (e) {
|
||||
console.error(`${c.error('[ERROR]')} Update failed: ${e.message}`);
|
||||
console.log(`${c.tip('[TIP]')} Try running manually: npm update -g @siteboon/claude-code-ui`);
|
||||
console.log(`${c.tip('[TIP]')} Try running manually: npm update -g @cloudcli-ai/cloudcli`);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Sandbox command ─────────────────────────────────────────
|
||||
|
||||
const SANDBOX_TEMPLATES = {
|
||||
claude: 'docker.io/cloudcliai/sandbox:claude-code',
|
||||
codex: 'docker.io/cloudcliai/sandbox:codex',
|
||||
gemini: 'docker.io/cloudcliai/sandbox:gemini',
|
||||
};
|
||||
|
||||
const SANDBOX_SECRETS = {
|
||||
claude: 'anthropic',
|
||||
codex: 'openai',
|
||||
gemini: 'google',
|
||||
};
|
||||
|
||||
function parseSandboxArgs(args) {
|
||||
const result = {
|
||||
subcommand: null,
|
||||
workspace: null,
|
||||
agent: 'claude',
|
||||
name: null,
|
||||
port: 3001,
|
||||
template: null,
|
||||
env: [],
|
||||
};
|
||||
|
||||
const subcommands = ['ls', 'stop', 'start', 'rm', 'logs', 'help'];
|
||||
|
||||
for (let i = 0; i < args.length; i++) {
|
||||
const arg = args[i];
|
||||
|
||||
if (i === 0 && subcommands.includes(arg)) {
|
||||
result.subcommand = arg;
|
||||
} else if (arg === '--agent' || arg === '-a') {
|
||||
result.agent = args[++i];
|
||||
} else if (arg === '--name' || arg === '-n') {
|
||||
result.name = args[++i];
|
||||
} else if (arg === '--port') {
|
||||
result.port = parseInt(args[++i], 10);
|
||||
} else if (arg === '--template' || arg === '-t') {
|
||||
result.template = args[++i];
|
||||
} else if (arg === '--env' || arg === '-e') {
|
||||
result.env.push(args[++i]);
|
||||
} else if (!arg.startsWith('-')) {
|
||||
if (!result.subcommand) {
|
||||
result.workspace = arg;
|
||||
} else {
|
||||
result.name = arg; // for stop/start/rm/logs <name>
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Default subcommand based on what we got
|
||||
if (!result.subcommand) {
|
||||
result.subcommand = 'create';
|
||||
}
|
||||
|
||||
// Derive name from workspace path if not set
|
||||
if (!result.name && result.workspace) {
|
||||
result.name = path.basename(path.resolve(result.workspace.replace(/^~/, os.homedir())));
|
||||
}
|
||||
|
||||
// Default template from agent
|
||||
if (!result.template) {
|
||||
result.template = SANDBOX_TEMPLATES[result.agent] || SANDBOX_TEMPLATES.claude;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function showSandboxHelp() {
|
||||
console.log(`
|
||||
${c.bright('CloudCLI Sandbox')} — Run CloudCLI inside Docker Sandboxes
|
||||
|
||||
Usage:
|
||||
cloudcli sandbox <workspace> Create and start a sandbox
|
||||
cloudcli sandbox <subcommand> [name] Manage sandboxes
|
||||
|
||||
Subcommands:
|
||||
${c.bright('(default)')} Create a sandbox and start the web UI
|
||||
${c.bright('ls')} List all sandboxes
|
||||
${c.bright('start')} Restart a stopped sandbox and re-launch the web UI
|
||||
${c.bright('stop')} Stop a sandbox (preserves state)
|
||||
${c.bright('rm')} Remove a sandbox
|
||||
${c.bright('logs')} Show CloudCLI server logs
|
||||
${c.bright('help')} Show this help
|
||||
|
||||
Options:
|
||||
-a, --agent <agent> Agent to use: claude, codex, gemini (default: claude)
|
||||
-n, --name <name> Sandbox name (default: derived from workspace folder)
|
||||
-t, --template <image> Custom template image
|
||||
-e, --env <KEY=VALUE> Set environment variable (repeatable)
|
||||
--port <port> Host port for the web UI (default: 3001)
|
||||
|
||||
Examples:
|
||||
$ cloudcli sandbox ~/my-project
|
||||
$ cloudcli sandbox ~/my-project --agent codex --port 8080
|
||||
$ cloudcli sandbox ~/my-project --env SERVER_PORT=8080 --env HOST=0.0.0.0
|
||||
$ cloudcli sandbox ls
|
||||
$ cloudcli sandbox stop my-project
|
||||
$ cloudcli sandbox start my-project
|
||||
$ cloudcli sandbox rm my-project
|
||||
|
||||
Prerequisites:
|
||||
1. Install sbx CLI: https://docs.docker.com/ai/sandboxes/get-started/
|
||||
2. Authenticate and store your API key:
|
||||
sbx login
|
||||
sbx secret set -g anthropic # for Claude
|
||||
sbx secret set -g openai # for Codex
|
||||
sbx secret set -g google # for Gemini
|
||||
|
||||
Advanced usage:
|
||||
For branch mode, multiple workspaces, memory limits, network policies,
|
||||
or passing prompts to the agent, use sbx directly with the template:
|
||||
|
||||
sbx run --template docker.io/cloudcliai/sandbox:claude-code claude ~/my-project --branch my-feature
|
||||
sbx run --template docker.io/cloudcliai/sandbox:claude-code claude ~/project ~/libs:ro --memory 8g
|
||||
|
||||
Full Docker Sandboxes docs: https://docs.docker.com/ai/sandboxes/usage/
|
||||
`);
|
||||
}
|
||||
|
||||
async function sandboxCommand(args) {
|
||||
const { execFileSync, spawn: spawnProcess } = await import('child_process');
|
||||
|
||||
// Safe execution — uses execFileSync (no shell) to prevent injection
|
||||
const sbx = (subcmd, opts = {}) => {
|
||||
const result = execFileSync('sbx', subcmd, {
|
||||
encoding: 'utf8',
|
||||
stdio: opts.inherit ? 'inherit' : 'pipe',
|
||||
});
|
||||
return result || '';
|
||||
};
|
||||
|
||||
const opts = parseSandboxArgs(args);
|
||||
|
||||
if (opts.subcommand === 'help') {
|
||||
showSandboxHelp();
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate name (alphanumeric, hyphens, underscores only)
|
||||
if (opts.name && !/^[\w-]+$/.test(opts.name)) {
|
||||
console.error(`\n${c.error('❌')} Invalid sandbox name: ${opts.name}`);
|
||||
console.log(` Names may only contain letters, numbers, hyphens, and underscores.\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Check sbx is installed
|
||||
try {
|
||||
sbx(['version']);
|
||||
} catch {
|
||||
console.error(`\n${c.error('❌')} ${c.bright('sbx')} CLI not found.\n`);
|
||||
console.log(` Install it from: ${c.info('https://docs.docker.com/ai/sandboxes/get-started/')}`);
|
||||
console.log(` Then run: ${c.bright('sbx login')}`);
|
||||
console.log(` And store your API key: ${c.bright('sbx secret set -g anthropic')}\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
switch (opts.subcommand) {
|
||||
|
||||
case 'ls':
|
||||
sbx(['ls'], { inherit: true });
|
||||
break;
|
||||
|
||||
case 'stop':
|
||||
if (!opts.name) {
|
||||
console.error(`\n${c.error('❌')} Sandbox name required: cloudcli sandbox stop <name>\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
sbx(['stop', opts.name], { inherit: true });
|
||||
break;
|
||||
|
||||
case 'rm':
|
||||
if (!opts.name) {
|
||||
console.error(`\n${c.error('❌')} Sandbox name required: cloudcli sandbox rm <name>\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
sbx(['rm', opts.name], { inherit: true });
|
||||
break;
|
||||
|
||||
case 'logs':
|
||||
if (!opts.name) {
|
||||
console.error(`\n${c.error('❌')} Sandbox name required: cloudcli sandbox logs <name>\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
try {
|
||||
sbx(['exec', opts.name, 'bash', '-c', 'cat /tmp/cloudcli-ui.log'], { inherit: true });
|
||||
} catch (e) {
|
||||
console.error(`\n${c.error('❌')} Could not read logs: ${e.message || 'Is the sandbox running?'}\n`);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'start': {
|
||||
if (!opts.name) {
|
||||
console.error(`\n${c.error('❌')} Sandbox name required: cloudcli sandbox start <name>\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
console.log(`\n${c.info('▶')} Starting sandbox ${c.bright(opts.name)}...`);
|
||||
const restartRun = spawnProcess('sbx', ['run', opts.name], {
|
||||
detached: true,
|
||||
stdio: ['ignore', 'ignore', 'ignore'],
|
||||
});
|
||||
restartRun.unref();
|
||||
await new Promise(resolve => setTimeout(resolve, 5000));
|
||||
|
||||
console.log(`${c.info('▶')} Launching CloudCLI web server...`);
|
||||
sbx(['exec', opts.name, 'bash', '-c', 'cloudcli start --port 3001 &']);
|
||||
|
||||
console.log(`${c.info('▶')} Forwarding port ${opts.port} → 3001...`);
|
||||
try {
|
||||
sbx(['ports', opts.name, '--publish', `${opts.port}:3001`]);
|
||||
} catch (e) {
|
||||
const msg = e.stdout || e.stderr || e.message || '';
|
||||
if (msg.includes('address already in use')) {
|
||||
const altPort = opts.port + 1;
|
||||
console.log(`${c.warn('⚠')} Port ${opts.port} in use, trying ${altPort}...`);
|
||||
try {
|
||||
sbx(['ports', opts.name, '--publish', `${altPort}:3001`]);
|
||||
opts.port = altPort;
|
||||
} catch {
|
||||
console.error(`${c.error('❌')} Ports ${opts.port} and ${altPort} both in use. Use --port to specify a free port.`);
|
||||
process.exit(1);
|
||||
}
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\n${c.ok('✔')} ${c.bright('CloudCLI is ready!')}`);
|
||||
console.log(` ${c.info('→')} ${c.bright(`http://localhost:${opts.port}`)}\n`);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'create': {
|
||||
if (!opts.workspace) {
|
||||
console.error(`\n${c.error('❌')} Workspace path required: cloudcli sandbox <path>\n`);
|
||||
console.log(` Example: ${c.bright('cloudcli sandbox ~/my-project')}\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const workspace = opts.workspace.startsWith('~')
|
||||
? opts.workspace.replace(/^~/, os.homedir())
|
||||
: path.resolve(opts.workspace);
|
||||
|
||||
if (!fs.existsSync(workspace)) {
|
||||
console.error(`\n${c.error('❌')} Workspace path not found: ${c.dim(workspace)}\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const secret = SANDBOX_SECRETS[opts.agent] || 'anthropic';
|
||||
|
||||
// Check if the required secret is stored
|
||||
try {
|
||||
const secretList = sbx(['secret', 'ls']);
|
||||
if (!secretList.includes(secret)) {
|
||||
console.error(`\n${c.error('❌')} No ${c.bright(secret)} API key found.\n`);
|
||||
console.log(` Run: ${c.bright(`sbx secret set -g ${secret}`)}\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
} catch { /* sbx secret ls not available, skip check */ }
|
||||
|
||||
console.log(`\n${c.bright('CloudCLI Sandbox')}`);
|
||||
console.log(c.dim('─'.repeat(50)));
|
||||
console.log(` Agent: ${c.info(opts.agent)} ${c.dim(`(${secret} credentials)`)}`);
|
||||
console.log(` Workspace: ${c.dim(workspace)}`);
|
||||
console.log(` Name: ${c.dim(opts.name)}`);
|
||||
console.log(` Template: ${c.dim(opts.template)}`);
|
||||
console.log(` Port: ${c.dim(String(opts.port))}`);
|
||||
if (opts.env.length > 0) {
|
||||
console.log(` Env: ${c.dim(opts.env.join(', '))}`);
|
||||
}
|
||||
console.log(c.dim('─'.repeat(50)));
|
||||
|
||||
// Step 1: Launch sandbox with sbx run in background.
|
||||
// sbx run creates the sandbox (or reconnects) AND holds an active session,
|
||||
// which prevents the sandbox from auto-stopping.
|
||||
console.log(`\n${c.info('▶')} Creating sandbox ${c.bright(opts.name)}...`);
|
||||
const bgRun = spawnProcess('sbx', [
|
||||
'run', '--template', opts.template, '--name', opts.name, opts.agent, workspace,
|
||||
], {
|
||||
detached: true,
|
||||
stdio: ['ignore', 'ignore', 'ignore'],
|
||||
});
|
||||
bgRun.unref();
|
||||
// Wait for sandbox to be ready
|
||||
await new Promise(resolve => setTimeout(resolve, 5000));
|
||||
|
||||
// Step 2: Inject environment variables
|
||||
if (opts.env.length > 0) {
|
||||
console.log(`${c.info('▶')} Setting environment variables...`);
|
||||
const exports = opts.env
|
||||
.filter(e => /^\w+=.+$/.test(e))
|
||||
.map(e => `export ${e}`)
|
||||
.join('\n');
|
||||
if (exports) {
|
||||
sbx(['exec', opts.name, 'bash', '-c', `echo '${exports}' >> /etc/sandbox-persistent.sh`]);
|
||||
}
|
||||
const invalid = opts.env.filter(e => !/^\w+=.+$/.test(e));
|
||||
if (invalid.length > 0) {
|
||||
console.log(`${c.warn('⚠')} Skipped invalid env vars: ${invalid.join(', ')} (expected KEY=VALUE)`);
|
||||
}
|
||||
}
|
||||
|
||||
// Step 3: Start CloudCLI inside the sandbox
|
||||
console.log(`${c.info('▶')} Launching CloudCLI web server...`);
|
||||
sbx(['exec', opts.name, 'bash', '-c', 'cloudcli start --port 3001 &']);
|
||||
|
||||
// Step 4: Forward port
|
||||
console.log(`${c.info('▶')} Forwarding port ${opts.port} → 3001...`);
|
||||
try {
|
||||
sbx(['ports', opts.name, '--publish', `${opts.port}:3001`]);
|
||||
} catch (e) {
|
||||
const msg = e.stdout || e.stderr || e.message || '';
|
||||
if (msg.includes('address already in use')) {
|
||||
const altPort = opts.port + 1;
|
||||
console.log(`${c.warn('⚠')} Port ${opts.port} in use, trying ${altPort}...`);
|
||||
try {
|
||||
sbx(['ports', opts.name, '--publish', `${altPort}:3001`]);
|
||||
opts.port = altPort;
|
||||
} catch {
|
||||
console.error(`${c.error('❌')} Ports ${opts.port} and ${altPort} both in use. Use --port to specify a free port.`);
|
||||
process.exit(1);
|
||||
}
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
// Done
|
||||
console.log(`\n${c.ok('✔')} ${c.bright('CloudCLI is ready!')}`);
|
||||
console.log(` ${c.info('→')} Open ${c.bright(`http://localhost:${opts.port}`)}`);
|
||||
console.log(`\n${c.dim(' Manage with:')}`);
|
||||
console.log(` ${c.dim('$')} sbx ls`);
|
||||
console.log(` ${c.dim('$')} sbx stop ${opts.name}`);
|
||||
console.log(` ${c.dim('$')} sbx start ${opts.name}`);
|
||||
console.log(` ${c.dim('$')} sbx rm ${opts.name}`);
|
||||
console.log(`\n${c.dim(' Or install globally:')} npm install -g @cloudcli-ai/cloudcli\n`);
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
showSandboxHelp();
|
||||
}
|
||||
}
|
||||
|
||||
// ── Server ──────────────────────────────────────────────────
|
||||
|
||||
// Start the server
|
||||
async function startServer() {
|
||||
// Check for updates silently on startup
|
||||
@@ -274,6 +626,10 @@ function parseArgs(args) {
|
||||
parsed.command = 'version';
|
||||
} else if (!arg.startsWith('-')) {
|
||||
parsed.command = arg;
|
||||
if (arg === 'sandbox') {
|
||||
parsed.remainingArgs = args.slice(i + 1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -283,7 +639,7 @@ function parseArgs(args) {
|
||||
// Main CLI handler
|
||||
async function main() {
|
||||
const args = process.argv.slice(2);
|
||||
const { command, options } = parseArgs(args);
|
||||
const { command, options, remainingArgs } = parseArgs(args);
|
||||
|
||||
// Apply CLI options to environment variables
|
||||
if (options.serverPort) {
|
||||
@@ -299,6 +655,9 @@ async function main() {
|
||||
case 'start':
|
||||
await startServer();
|
||||
break;
|
||||
case 'sandbox':
|
||||
await sandboxCommand(remainingArgs || []);
|
||||
break;
|
||||
case 'status':
|
||||
case 'info':
|
||||
showStatus();
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import { spawn } from 'child_process';
|
||||
import crossSpawn from 'cross-spawn';
|
||||
import { notifyRunFailed, notifyRunStopped } from './services/notification-orchestrator.js';
|
||||
import { cursorAdapter } from './providers/cursor/adapter.js';
|
||||
import { createNormalizedMessage } from './providers/types.js';
|
||||
import { sessionsService } from './modules/providers/services/sessions.service.js';
|
||||
import { providerAuthService } from './modules/providers/services/provider-auth.service.js';
|
||||
import { createNormalizedMessage } from './shared/utils.js';
|
||||
|
||||
// Use cross-spawn on Windows for better command execution
|
||||
const spawnFunction = process.platform === 'win32' ? crossSpawn : spawn;
|
||||
@@ -189,7 +190,7 @@ async function spawnCursor(command, options = {}, ws) {
|
||||
case 'assistant':
|
||||
// Accumulate assistant message chunks
|
||||
if (response.message && response.message.content && response.message.content.length > 0) {
|
||||
const normalized = cursorAdapter.normalizeMessage(response, capturedSessionId || sessionId || null);
|
||||
const normalized = sessionsService.normalizeMessage('cursor', response, capturedSessionId || sessionId || null);
|
||||
for (const msg of normalized) ws.send(msg);
|
||||
}
|
||||
break;
|
||||
@@ -219,7 +220,7 @@ async function spawnCursor(command, options = {}, ws) {
|
||||
}
|
||||
|
||||
// If not JSON, send as stream delta via adapter
|
||||
const normalized = cursorAdapter.normalizeMessage(line, capturedSessionId || sessionId || null);
|
||||
const normalized = sessionsService.normalizeMessage('cursor', line, capturedSessionId || sessionId || null);
|
||||
for (const msg of normalized) ws.send(msg);
|
||||
}
|
||||
};
|
||||
@@ -287,14 +288,20 @@ async function spawnCursor(command, options = {}, ws) {
|
||||
});
|
||||
|
||||
// Handle process errors
|
||||
cursorProcess.on('error', (error) => {
|
||||
cursorProcess.on('error', async (error) => {
|
||||
console.error('Cursor CLI process error:', error);
|
||||
|
||||
// Clean up process reference on error
|
||||
const finalSessionId = capturedSessionId || sessionId || processKey;
|
||||
activeCursorProcesses.delete(finalSessionId);
|
||||
|
||||
ws.send(createNormalizedMessage({ kind: 'error', content: error.message, sessionId: capturedSessionId || sessionId || null, provider: 'cursor' }));
|
||||
// Check if Cursor CLI is installed for a clearer error message
|
||||
const installed = await providerAuthService.isProviderInstalled('cursor');
|
||||
const errorContent = !installed
|
||||
? 'Cursor CLI is not installed. Please install it from https://cursor.com'
|
||||
: error.message;
|
||||
|
||||
ws.send(createNormalizedMessage({ kind: 'error', content: errorContent, sessionId: capturedSessionId || sessionId || null, provider: 'cursor' }));
|
||||
notifyTerminalState({ error });
|
||||
|
||||
settleOnce(() => reject(error));
|
||||
|
||||
@@ -1,630 +0,0 @@
|
||||
import Database from 'better-sqlite3';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import crypto from 'crypto';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { dirname } from 'path';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
|
||||
// ANSI color codes for terminal output
|
||||
const colors = {
|
||||
reset: '\x1b[0m',
|
||||
bright: '\x1b[1m',
|
||||
cyan: '\x1b[36m',
|
||||
dim: '\x1b[2m',
|
||||
};
|
||||
|
||||
const c = {
|
||||
info: (text) => `${colors.cyan}${text}${colors.reset}`,
|
||||
bright: (text) => `${colors.bright}${text}${colors.reset}`,
|
||||
dim: (text) => `${colors.dim}${text}${colors.reset}`,
|
||||
};
|
||||
|
||||
// Use DATABASE_PATH environment variable if set, otherwise use default location
|
||||
const DB_PATH = process.env.DATABASE_PATH || path.join(__dirname, 'auth.db');
|
||||
const INIT_SQL_PATH = path.join(__dirname, 'init.sql');
|
||||
|
||||
// Ensure database directory exists if custom path is provided
|
||||
if (process.env.DATABASE_PATH) {
|
||||
const dbDir = path.dirname(DB_PATH);
|
||||
try {
|
||||
if (!fs.existsSync(dbDir)) {
|
||||
fs.mkdirSync(dbDir, { recursive: true });
|
||||
console.log(`Created database directory: ${dbDir}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Failed to create database directory ${dbDir}:`, error.message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// As part of 1.19.2 we are introducing a new location for auth.db. The below handles exisitng moving legacy database from install directory to new location
|
||||
const LEGACY_DB_PATH = path.join(__dirname, 'auth.db');
|
||||
if (DB_PATH !== LEGACY_DB_PATH && !fs.existsSync(DB_PATH) && fs.existsSync(LEGACY_DB_PATH)) {
|
||||
try {
|
||||
fs.copyFileSync(LEGACY_DB_PATH, DB_PATH);
|
||||
console.log(`[MIGRATION] Copied database from ${LEGACY_DB_PATH} to ${DB_PATH}`);
|
||||
for (const suffix of ['-wal', '-shm']) {
|
||||
if (fs.existsSync(LEGACY_DB_PATH + suffix)) {
|
||||
fs.copyFileSync(LEGACY_DB_PATH + suffix, DB_PATH + suffix);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
console.warn(`[MIGRATION] Could not copy legacy database: ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Create database connection
|
||||
const db = new Database(DB_PATH);
|
||||
|
||||
// app_config must exist before any other module imports (auth.js reads the JWT secret at load time).
|
||||
// runMigrations() also creates this table, but it runs too late for existing installations
|
||||
// where auth.js is imported before initializeDatabase() is called.
|
||||
db.exec(`CREATE TABLE IF NOT EXISTS app_config (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT NOT NULL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)`);
|
||||
|
||||
// Show app installation path prominently
|
||||
const appInstallPath = path.join(__dirname, '../..');
|
||||
console.log('');
|
||||
console.log(c.dim('═'.repeat(60)));
|
||||
console.log(`${c.info('[INFO]')} App Installation: ${c.bright(appInstallPath)}`);
|
||||
console.log(`${c.info('[INFO]')} Database: ${c.dim(path.relative(appInstallPath, DB_PATH))}`);
|
||||
if (process.env.DATABASE_PATH) {
|
||||
console.log(` ${c.dim('(Using custom DATABASE_PATH from environment)')}`);
|
||||
}
|
||||
console.log(c.dim('═'.repeat(60)));
|
||||
console.log('');
|
||||
|
||||
const runMigrations = () => {
|
||||
try {
|
||||
const tableInfo = db.prepare("PRAGMA table_info(users)").all();
|
||||
const columnNames = tableInfo.map(col => col.name);
|
||||
|
||||
if (!columnNames.includes('git_name')) {
|
||||
console.log('Running migration: Adding git_name column');
|
||||
db.exec('ALTER TABLE users ADD COLUMN git_name TEXT');
|
||||
}
|
||||
|
||||
if (!columnNames.includes('git_email')) {
|
||||
console.log('Running migration: Adding git_email column');
|
||||
db.exec('ALTER TABLE users ADD COLUMN git_email TEXT');
|
||||
}
|
||||
|
||||
if (!columnNames.includes('has_completed_onboarding')) {
|
||||
console.log('Running migration: Adding has_completed_onboarding column');
|
||||
db.exec('ALTER TABLE users ADD COLUMN has_completed_onboarding BOOLEAN DEFAULT 0');
|
||||
}
|
||||
|
||||
db.exec(`
|
||||
CREATE TABLE IF NOT EXISTS user_notification_preferences (
|
||||
user_id INTEGER PRIMARY KEY,
|
||||
preferences_json TEXT NOT NULL,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
|
||||
)
|
||||
`);
|
||||
|
||||
db.exec(`
|
||||
CREATE TABLE IF NOT EXISTS vapid_keys (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_key TEXT NOT NULL,
|
||||
private_key TEXT NOT NULL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
`);
|
||||
|
||||
db.exec(`
|
||||
CREATE TABLE IF NOT EXISTS push_subscriptions (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id INTEGER NOT NULL,
|
||||
endpoint TEXT NOT NULL UNIQUE,
|
||||
keys_p256dh TEXT NOT NULL,
|
||||
keys_auth TEXT NOT NULL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
|
||||
)
|
||||
`);
|
||||
// Create app_config table if it doesn't exist (for existing installations)
|
||||
db.exec(`CREATE TABLE IF NOT EXISTS app_config (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT NOT NULL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)`);
|
||||
|
||||
// Create session_names table if it doesn't exist (for existing installations)
|
||||
db.exec(`CREATE TABLE IF NOT EXISTS session_names (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_id TEXT NOT NULL,
|
||||
provider TEXT NOT NULL DEFAULT 'claude',
|
||||
custom_name TEXT NOT NULL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
UNIQUE(session_id, provider)
|
||||
)`);
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_session_names_lookup ON session_names(session_id, provider)');
|
||||
|
||||
console.log('Database migrations completed successfully');
|
||||
} catch (error) {
|
||||
console.error('Error running migrations:', error.message);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
// Initialize database with schema
|
||||
const initializeDatabase = async () => {
|
||||
try {
|
||||
const initSQL = fs.readFileSync(INIT_SQL_PATH, 'utf8');
|
||||
db.exec(initSQL);
|
||||
console.log('Database initialized successfully');
|
||||
runMigrations();
|
||||
} catch (error) {
|
||||
console.error('Error initializing database:', error.message);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
// User database operations
|
||||
const userDb = {
|
||||
// Check if any users exist
|
||||
hasUsers: () => {
|
||||
try {
|
||||
const row = db.prepare('SELECT COUNT(*) as count FROM users').get();
|
||||
return row.count > 0;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
// Create a new user
|
||||
createUser: (username, passwordHash) => {
|
||||
try {
|
||||
const stmt = db.prepare('INSERT INTO users (username, password_hash) VALUES (?, ?)');
|
||||
const result = stmt.run(username, passwordHash);
|
||||
return { id: result.lastInsertRowid, username };
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
// Get user by username
|
||||
getUserByUsername: (username) => {
|
||||
try {
|
||||
const row = db.prepare('SELECT * FROM users WHERE username = ? AND is_active = 1').get(username);
|
||||
return row;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
// Update last login time (non-fatal — logged but not thrown)
|
||||
updateLastLogin: (userId) => {
|
||||
try {
|
||||
db.prepare('UPDATE users SET last_login = CURRENT_TIMESTAMP WHERE id = ?').run(userId);
|
||||
} catch (err) {
|
||||
console.warn('Failed to update last login:', err.message);
|
||||
}
|
||||
},
|
||||
|
||||
// Get user by ID
|
||||
getUserById: (userId) => {
|
||||
try {
|
||||
const row = db.prepare('SELECT id, username, created_at, last_login FROM users WHERE id = ? AND is_active = 1').get(userId);
|
||||
return row;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
getFirstUser: () => {
|
||||
try {
|
||||
const row = db.prepare('SELECT id, username, created_at, last_login FROM users WHERE is_active = 1 LIMIT 1').get();
|
||||
return row;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
updateGitConfig: (userId, gitName, gitEmail) => {
|
||||
try {
|
||||
const stmt = db.prepare('UPDATE users SET git_name = ?, git_email = ? WHERE id = ?');
|
||||
stmt.run(gitName, gitEmail, userId);
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
getGitConfig: (userId) => {
|
||||
try {
|
||||
const row = db.prepare('SELECT git_name, git_email FROM users WHERE id = ?').get(userId);
|
||||
return row;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
completeOnboarding: (userId) => {
|
||||
try {
|
||||
const stmt = db.prepare('UPDATE users SET has_completed_onboarding = 1 WHERE id = ?');
|
||||
stmt.run(userId);
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
hasCompletedOnboarding: (userId) => {
|
||||
try {
|
||||
const row = db.prepare('SELECT has_completed_onboarding FROM users WHERE id = ?').get(userId);
|
||||
return row?.has_completed_onboarding === 1;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// API Keys database operations
|
||||
const apiKeysDb = {
|
||||
// Generate a new API key
|
||||
generateApiKey: () => {
|
||||
return 'ck_' + crypto.randomBytes(32).toString('hex');
|
||||
},
|
||||
|
||||
// Create a new API key
|
||||
createApiKey: (userId, keyName) => {
|
||||
try {
|
||||
const apiKey = apiKeysDb.generateApiKey();
|
||||
const stmt = db.prepare('INSERT INTO api_keys (user_id, key_name, api_key) VALUES (?, ?, ?)');
|
||||
const result = stmt.run(userId, keyName, apiKey);
|
||||
return { id: result.lastInsertRowid, keyName, apiKey };
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
// Get all API keys for a user
|
||||
getApiKeys: (userId) => {
|
||||
try {
|
||||
const rows = db.prepare('SELECT id, key_name, api_key, created_at, last_used, is_active FROM api_keys WHERE user_id = ? ORDER BY created_at DESC').all(userId);
|
||||
return rows;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
// Validate API key and get user
|
||||
validateApiKey: (apiKey) => {
|
||||
try {
|
||||
const row = db.prepare(`
|
||||
SELECT u.id, u.username, ak.id as api_key_id
|
||||
FROM api_keys ak
|
||||
JOIN users u ON ak.user_id = u.id
|
||||
WHERE ak.api_key = ? AND ak.is_active = 1 AND u.is_active = 1
|
||||
`).get(apiKey);
|
||||
|
||||
if (row) {
|
||||
// Update last_used timestamp
|
||||
db.prepare('UPDATE api_keys SET last_used = CURRENT_TIMESTAMP WHERE id = ?').run(row.api_key_id);
|
||||
}
|
||||
|
||||
return row;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
// Delete an API key
|
||||
deleteApiKey: (userId, apiKeyId) => {
|
||||
try {
|
||||
const stmt = db.prepare('DELETE FROM api_keys WHERE id = ? AND user_id = ?');
|
||||
const result = stmt.run(apiKeyId, userId);
|
||||
return result.changes > 0;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
// Toggle API key active status
|
||||
toggleApiKey: (userId, apiKeyId, isActive) => {
|
||||
try {
|
||||
const stmt = db.prepare('UPDATE api_keys SET is_active = ? WHERE id = ? AND user_id = ?');
|
||||
const result = stmt.run(isActive ? 1 : 0, apiKeyId, userId);
|
||||
return result.changes > 0;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// User credentials database operations (for GitHub tokens, GitLab tokens, etc.)
|
||||
const credentialsDb = {
|
||||
// Create a new credential
|
||||
createCredential: (userId, credentialName, credentialType, credentialValue, description = null) => {
|
||||
try {
|
||||
const stmt = db.prepare('INSERT INTO user_credentials (user_id, credential_name, credential_type, credential_value, description) VALUES (?, ?, ?, ?, ?)');
|
||||
const result = stmt.run(userId, credentialName, credentialType, credentialValue, description);
|
||||
return { id: result.lastInsertRowid, credentialName, credentialType };
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
// Get all credentials for a user, optionally filtered by type
|
||||
getCredentials: (userId, credentialType = null) => {
|
||||
try {
|
||||
let query = 'SELECT id, credential_name, credential_type, description, created_at, is_active FROM user_credentials WHERE user_id = ?';
|
||||
const params = [userId];
|
||||
|
||||
if (credentialType) {
|
||||
query += ' AND credential_type = ?';
|
||||
params.push(credentialType);
|
||||
}
|
||||
|
||||
query += ' ORDER BY created_at DESC';
|
||||
|
||||
const rows = db.prepare(query).all(...params);
|
||||
return rows;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
// Get active credential value for a user by type (returns most recent active)
|
||||
getActiveCredential: (userId, credentialType) => {
|
||||
try {
|
||||
const row = db.prepare('SELECT credential_value FROM user_credentials WHERE user_id = ? AND credential_type = ? AND is_active = 1 ORDER BY created_at DESC LIMIT 1').get(userId, credentialType);
|
||||
return row?.credential_value || null;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
// Delete a credential
|
||||
deleteCredential: (userId, credentialId) => {
|
||||
try {
|
||||
const stmt = db.prepare('DELETE FROM user_credentials WHERE id = ? AND user_id = ?');
|
||||
const result = stmt.run(credentialId, userId);
|
||||
return result.changes > 0;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
// Toggle credential active status
|
||||
toggleCredential: (userId, credentialId, isActive) => {
|
||||
try {
|
||||
const stmt = db.prepare('UPDATE user_credentials SET is_active = ? WHERE id = ? AND user_id = ?');
|
||||
const result = stmt.run(isActive ? 1 : 0, credentialId, userId);
|
||||
return result.changes > 0;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const DEFAULT_NOTIFICATION_PREFERENCES = {
|
||||
channels: {
|
||||
inApp: false,
|
||||
webPush: false
|
||||
},
|
||||
events: {
|
||||
actionRequired: true,
|
||||
stop: true,
|
||||
error: true
|
||||
}
|
||||
};
|
||||
|
||||
const normalizeNotificationPreferences = (value) => {
|
||||
const source = value && typeof value === 'object' ? value : {};
|
||||
|
||||
return {
|
||||
channels: {
|
||||
inApp: source.channels?.inApp === true,
|
||||
webPush: source.channels?.webPush === true
|
||||
},
|
||||
events: {
|
||||
actionRequired: source.events?.actionRequired !== false,
|
||||
stop: source.events?.stop !== false,
|
||||
error: source.events?.error !== false
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const notificationPreferencesDb = {
|
||||
getPreferences: (userId) => {
|
||||
try {
|
||||
const row = db.prepare('SELECT preferences_json FROM user_notification_preferences WHERE user_id = ?').get(userId);
|
||||
if (!row) {
|
||||
const defaults = normalizeNotificationPreferences(DEFAULT_NOTIFICATION_PREFERENCES);
|
||||
db.prepare(
|
||||
'INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)'
|
||||
).run(userId, JSON.stringify(defaults));
|
||||
return defaults;
|
||||
}
|
||||
|
||||
let parsed;
|
||||
try {
|
||||
parsed = JSON.parse(row.preferences_json);
|
||||
} catch {
|
||||
parsed = DEFAULT_NOTIFICATION_PREFERENCES;
|
||||
}
|
||||
return normalizeNotificationPreferences(parsed);
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
updatePreferences: (userId, preferences) => {
|
||||
try {
|
||||
const normalized = normalizeNotificationPreferences(preferences);
|
||||
db.prepare(
|
||||
`INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at)
|
||||
VALUES (?, ?, CURRENT_TIMESTAMP)
|
||||
ON CONFLICT(user_id) DO UPDATE SET
|
||||
preferences_json = excluded.preferences_json,
|
||||
updated_at = CURRENT_TIMESTAMP`
|
||||
).run(userId, JSON.stringify(normalized));
|
||||
return normalized;
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const pushSubscriptionsDb = {
|
||||
saveSubscription: (userId, endpoint, keysP256dh, keysAuth) => {
|
||||
try {
|
||||
db.prepare(
|
||||
`INSERT INTO push_subscriptions (user_id, endpoint, keys_p256dh, keys_auth)
|
||||
VALUES (?, ?, ?, ?)
|
||||
ON CONFLICT(endpoint) DO UPDATE SET
|
||||
user_id = excluded.user_id,
|
||||
keys_p256dh = excluded.keys_p256dh,
|
||||
keys_auth = excluded.keys_auth`
|
||||
).run(userId, endpoint, keysP256dh, keysAuth);
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
getSubscriptions: (userId) => {
|
||||
try {
|
||||
return db.prepare('SELECT endpoint, keys_p256dh, keys_auth FROM push_subscriptions WHERE user_id = ?').all(userId);
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
removeSubscription: (endpoint) => {
|
||||
try {
|
||||
db.prepare('DELETE FROM push_subscriptions WHERE endpoint = ?').run(endpoint);
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
removeAllForUser: (userId) => {
|
||||
try {
|
||||
db.prepare('DELETE FROM push_subscriptions WHERE user_id = ?').run(userId);
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Session custom names database operations
|
||||
const sessionNamesDb = {
|
||||
// Set (insert or update) a custom session name
|
||||
setName: (sessionId, provider, customName) => {
|
||||
db.prepare(`
|
||||
INSERT INTO session_names (session_id, provider, custom_name)
|
||||
VALUES (?, ?, ?)
|
||||
ON CONFLICT(session_id, provider)
|
||||
DO UPDATE SET custom_name = excluded.custom_name, updated_at = CURRENT_TIMESTAMP
|
||||
`).run(sessionId, provider, customName);
|
||||
},
|
||||
|
||||
// Get a single custom session name
|
||||
getName: (sessionId, provider) => {
|
||||
const row = db.prepare(
|
||||
'SELECT custom_name FROM session_names WHERE session_id = ? AND provider = ?'
|
||||
).get(sessionId, provider);
|
||||
return row?.custom_name || null;
|
||||
},
|
||||
|
||||
// Batch lookup — returns Map<sessionId, customName>
|
||||
getNames: (sessionIds, provider) => {
|
||||
if (!sessionIds.length) return new Map();
|
||||
const placeholders = sessionIds.map(() => '?').join(',');
|
||||
const rows = db.prepare(
|
||||
`SELECT session_id, custom_name FROM session_names
|
||||
WHERE session_id IN (${placeholders}) AND provider = ?`
|
||||
).all(...sessionIds, provider);
|
||||
return new Map(rows.map(r => [r.session_id, r.custom_name]));
|
||||
},
|
||||
|
||||
// Delete a custom session name
|
||||
deleteName: (sessionId, provider) => {
|
||||
return db.prepare(
|
||||
'DELETE FROM session_names WHERE session_id = ? AND provider = ?'
|
||||
).run(sessionId, provider).changes > 0;
|
||||
},
|
||||
};
|
||||
|
||||
// Apply custom session names from the database (overrides CLI-generated summaries)
|
||||
function applyCustomSessionNames(sessions, provider) {
|
||||
if (!sessions?.length) return;
|
||||
try {
|
||||
const ids = sessions.map(s => s.id);
|
||||
const customNames = sessionNamesDb.getNames(ids, provider);
|
||||
for (const session of sessions) {
|
||||
const custom = customNames.get(session.id);
|
||||
if (custom) session.summary = custom;
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(`[DB] Failed to apply custom session names for ${provider}:`, error.message);
|
||||
}
|
||||
}
|
||||
|
||||
// App config database operations
|
||||
const appConfigDb = {
|
||||
get: (key) => {
|
||||
try {
|
||||
const row = db.prepare('SELECT value FROM app_config WHERE key = ?').get(key);
|
||||
return row?.value || null;
|
||||
} catch (err) {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
|
||||
set: (key, value) => {
|
||||
db.prepare(
|
||||
'INSERT INTO app_config (key, value) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET value = excluded.value'
|
||||
).run(key, value);
|
||||
},
|
||||
|
||||
getOrCreateJwtSecret: () => {
|
||||
let secret = appConfigDb.get('jwt_secret');
|
||||
if (!secret) {
|
||||
secret = crypto.randomBytes(64).toString('hex');
|
||||
appConfigDb.set('jwt_secret', secret);
|
||||
}
|
||||
return secret;
|
||||
}
|
||||
};
|
||||
|
||||
// Backward compatibility - keep old names pointing to new system
|
||||
const githubTokensDb = {
|
||||
createGithubToken: (userId, tokenName, githubToken, description = null) => {
|
||||
return credentialsDb.createCredential(userId, tokenName, 'github_token', githubToken, description);
|
||||
},
|
||||
getGithubTokens: (userId) => {
|
||||
return credentialsDb.getCredentials(userId, 'github_token');
|
||||
},
|
||||
getActiveGithubToken: (userId) => {
|
||||
return credentialsDb.getActiveCredential(userId, 'github_token');
|
||||
},
|
||||
deleteGithubToken: (userId, tokenId) => {
|
||||
return credentialsDb.deleteCredential(userId, tokenId);
|
||||
},
|
||||
toggleGithubToken: (userId, tokenId, isActive) => {
|
||||
return credentialsDb.toggleCredential(userId, tokenId, isActive);
|
||||
}
|
||||
};
|
||||
|
||||
export {
|
||||
db,
|
||||
initializeDatabase,
|
||||
userDb,
|
||||
apiKeysDb,
|
||||
credentialsDb,
|
||||
notificationPreferencesDb,
|
||||
pushSubscriptionsDb,
|
||||
sessionNamesDb,
|
||||
applyCustomSessionNames,
|
||||
appConfigDb,
|
||||
githubTokensDb // Backward compatibility
|
||||
};
|
||||
@@ -9,7 +9,8 @@ import os from 'os';
|
||||
import sessionManager from './sessionManager.js';
|
||||
import GeminiResponseHandler from './gemini-response-handler.js';
|
||||
import { notifyRunFailed, notifyRunStopped } from './services/notification-orchestrator.js';
|
||||
import { createNormalizedMessage } from './providers/types.js';
|
||||
import { providerAuthService } from './modules/providers/services/provider-auth.service.js';
|
||||
import { createNormalizedMessage } from './shared/utils.js';
|
||||
|
||||
let activeGeminiProcesses = new Map(); // Track active processes by session ID
|
||||
|
||||
@@ -380,6 +381,15 @@ async function spawnGemini(command, options = {}, ws) {
|
||||
notifyTerminalState({ code });
|
||||
resolve();
|
||||
} else {
|
||||
// code 127 = shell "command not found" — check installation
|
||||
if (code === 127) {
|
||||
const installed = await providerAuthService.isProviderInstalled('gemini');
|
||||
if (!installed) {
|
||||
const socketSessionId = typeof ws.getSessionId === 'function' ? ws.getSessionId() : finalSessionId;
|
||||
ws.send(createNormalizedMessage({ kind: 'error', content: 'Gemini CLI is not installed. Please install it first: https://github.com/google-gemini/gemini-cli', sessionId: socketSessionId, provider: 'gemini' }));
|
||||
}
|
||||
}
|
||||
|
||||
notifyTerminalState({
|
||||
code,
|
||||
error: code === null ? 'Gemini CLI process was terminated or timed out' : null
|
||||
@@ -389,13 +399,19 @@ async function spawnGemini(command, options = {}, ws) {
|
||||
});
|
||||
|
||||
// Handle process errors
|
||||
geminiProcess.on('error', (error) => {
|
||||
geminiProcess.on('error', async (error) => {
|
||||
// Clean up process reference on error
|
||||
const finalSessionId = capturedSessionId || sessionId || processKey;
|
||||
activeGeminiProcesses.delete(finalSessionId);
|
||||
|
||||
// Check if Gemini CLI is installed for a clearer error message
|
||||
const installed = await providerAuthService.isProviderInstalled('gemini');
|
||||
const errorContent = !installed
|
||||
? 'Gemini CLI is not installed. Please install it first: https://github.com/google-gemini/gemini-cli'
|
||||
: error.message;
|
||||
|
||||
const errorSessionId = typeof ws.getSessionId === 'function' ? ws.getSessionId() : finalSessionId;
|
||||
ws.send(createNormalizedMessage({ kind: 'error', content: error.message, sessionId: errorSessionId, provider: 'gemini' }));
|
||||
ws.send(createNormalizedMessage({ kind: 'error', content: errorContent, sessionId: errorSessionId, provider: 'gemini' }));
|
||||
notifyTerminalState({ error });
|
||||
|
||||
reject(error);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// Gemini Response Handler - JSON Stream processing
|
||||
import { geminiAdapter } from './providers/gemini/adapter.js';
|
||||
import { sessionsService } from './modules/providers/services/sessions.service.js';
|
||||
|
||||
class GeminiResponseHandler {
|
||||
constructor(ws, options = {}) {
|
||||
@@ -56,7 +56,7 @@ class GeminiResponseHandler {
|
||||
}
|
||||
|
||||
// Normalize via adapter and send all resulting messages
|
||||
const normalized = geminiAdapter.normalizeMessage(event, sid);
|
||||
const normalized = sessionsService.normalizeMessage('gemini', event, sid);
|
||||
for (const msg of normalized) {
|
||||
this.ws.send(msg);
|
||||
}
|
||||
|
||||
1472
server/index.js
1472
server/index.js
File diff suppressed because it is too large
Load Diff
@@ -2,14 +2,15 @@
|
||||
import fs from 'fs';
|
||||
import os from 'os';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { dirname } from 'path';
|
||||
import { findAppRoot, getModuleDir } from './utils/runtime-paths.js';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
const __dirname = getModuleDir(import.meta.url);
|
||||
// Resolve the repo/app root via the nearest /server folder so this file keeps finding the
|
||||
// same top-level .env file from both /server/load-env.js and /dist-server/server/load-env.js.
|
||||
const APP_ROOT = findAppRoot(__dirname);
|
||||
|
||||
try {
|
||||
const envPath = path.join(__dirname, '../.env');
|
||||
const envPath = path.join(APP_ROOT, '.env');
|
||||
const envFile = fs.readFileSync(envPath, 'utf8');
|
||||
envFile.split('\n').forEach(line => {
|
||||
const trimmedLine = line.trim();
|
||||
@@ -24,6 +25,10 @@ try {
|
||||
console.log('No .env file found or error reading it:', e.message);
|
||||
}
|
||||
|
||||
// Keep the default database in a stable user-level location so rebuilding dist-server
|
||||
// never changes where the backend stores auth.db when DATABASE_PATH is not set explicitly.
|
||||
const DEFAULT_DATABASE_PATH = path.join(os.homedir(), '.cloudcli', 'auth.db');
|
||||
|
||||
if (!process.env.DATABASE_PATH) {
|
||||
process.env.DATABASE_PATH = path.join(os.homedir(), '.cloudcli', 'auth.db');
|
||||
process.env.DATABASE_PATH = DEFAULT_DATABASE_PATH;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import jwt from 'jsonwebtoken';
|
||||
import { userDb, appConfigDb } from '../database/db.js';
|
||||
import { userDb, appConfigDb } from '../modules/database/index.js';
|
||||
import { IS_PLATFORM } from '../constants/config.js';
|
||||
|
||||
// Use env var if set, otherwise auto-generate a unique secret per installation
|
||||
|
||||
143
server/modules/database/connection.ts
Normal file
143
server/modules/database/connection.ts
Normal file
@@ -0,0 +1,143 @@
|
||||
/**
|
||||
* Database connection management.
|
||||
*
|
||||
* Owns the single SQLite connection used across all repositories.
|
||||
* Handles path resolution, directory creation, legacy database migration,
|
||||
* and eager app_config bootstrap so the auth middleware can read the
|
||||
* JWT secret before the full schema is applied.
|
||||
*
|
||||
* Consumers should never create their own Database instance — they use
|
||||
* `getConnection()` to obtain the shared singleton.
|
||||
*/
|
||||
|
||||
import Database from 'better-sqlite3';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
import { APP_CONFIG_TABLE_SCHEMA_SQL } from '@/modules/database/schema.js';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Path resolution
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Resolves the database file path from environment or falls back
|
||||
* to the legacy location inside the server/database/ folder.
|
||||
*
|
||||
* Priority:
|
||||
* 1. DATABASE_PATH environment variable (set by cli.js or load-env-vars.js)
|
||||
* 2. Legacy path: server/database/auth.db
|
||||
*/
|
||||
function resolveDatabasePath(): string {
|
||||
// process.env.DATABASE_PATH is set by load-env-vars.js to either the .env value or a default(~/.cloudcli/auth.db) in the user's home directory.
|
||||
return process.env.DATABASE_PATH || resolveLegacyDatabasePath();
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves the legacy database path (always inside server/database/).
|
||||
* Used for the one-time migration to the new external location.
|
||||
*/
|
||||
function resolveLegacyDatabasePath(): string {
|
||||
const serverDir = path.resolve(__dirname, '..', '..', '..');
|
||||
return path.join(serverDir, 'database', 'auth.db');
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Directory & migration helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function ensureDatabaseDirectory(dbPath: string): void {
|
||||
const dir = path.dirname(dbPath);
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
console.log('Created database directory:', dir);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* If the database was moved to an external location (e.g. ~/.cloudcli/)
|
||||
* but the user still has a legacy auth.db inside the install directory,
|
||||
* copy it to the new location as a one-time migration.
|
||||
*/
|
||||
function migrateLegacyDatabase(targetPath: string): void {
|
||||
const legacyPath = resolveLegacyDatabasePath();
|
||||
|
||||
if (targetPath === legacyPath) return;
|
||||
if (fs.existsSync(targetPath)) return;
|
||||
if (!fs.existsSync(legacyPath)) return;
|
||||
|
||||
try {
|
||||
fs.copyFileSync(legacyPath, targetPath);
|
||||
console.log('Migrated legacy database', { from: legacyPath, to: targetPath });
|
||||
|
||||
|
||||
// copy the write-ahead log and shared memory files (auth.db-wal, auth.db-shm) if they exist, to preserve any uncommitted transactions
|
||||
for (const suffix of ['-wal', '-shm']) {
|
||||
const src = legacyPath + suffix;
|
||||
if (fs.existsSync(src)) {
|
||||
fs.copyFileSync(src, targetPath + suffix);
|
||||
}
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.error('Could not migrate legacy database', { error: err.message });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Singleton connection
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
let instance: Database.Database | null = null;
|
||||
|
||||
/**
|
||||
* Returns the shared database connection, creating it on first call.
|
||||
*
|
||||
* The first invocation:
|
||||
* 1. Resolves the target database path
|
||||
* 2. Ensures the parent directory exists
|
||||
* 3. Migrates from the legacy install-directory path if needed
|
||||
* 4. Opens the SQLite connection
|
||||
* 5. Eagerly creates the app_config table (auth reads JWT secret at import time)
|
||||
* 6. Logs the database location
|
||||
*/
|
||||
export function getConnection(): Database.Database {
|
||||
if (instance) return instance;
|
||||
|
||||
const dbPath = resolveDatabasePath();
|
||||
|
||||
ensureDatabaseDirectory(dbPath);
|
||||
migrateLegacyDatabase(dbPath);
|
||||
|
||||
instance = new Database(dbPath);
|
||||
|
||||
// app_config must exist immediately — the auth middleware reads
|
||||
// the JWT secret at module-load time, before initializeDatabase() runs.
|
||||
instance.exec(APP_CONFIG_TABLE_SCHEMA_SQL);
|
||||
|
||||
return instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the resolved database file path without opening a connection.
|
||||
* Useful for diagnostics and CLI status commands.
|
||||
*/
|
||||
export function getDatabasePath(): string {
|
||||
return resolveDatabasePath();
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the database connection and clears the singleton.
|
||||
* Primarily used for graceful shutdown or testing.
|
||||
*/
|
||||
export function closeConnection(): void {
|
||||
if (instance) {
|
||||
instance.close();
|
||||
instance = null;
|
||||
console.log('Database connection closed');
|
||||
}
|
||||
}
|
||||
12
server/modules/database/index.ts
Normal file
12
server/modules/database/index.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
export { initializeDatabase } from '@/modules/database/init-db.js';
|
||||
export { apiKeysDb } from '@/modules/database/repositories/api-keys.js';
|
||||
export { appConfigDb } from '@/modules/database/repositories/app-config.js';
|
||||
export { credentialsDb } from '@/modules/database/repositories/credentials.js';
|
||||
export { githubTokensDb } from '@/modules/database/repositories/github-tokens.js';
|
||||
export { notificationPreferencesDb } from '@/modules/database/repositories/notification-preferences.js';
|
||||
export { projectsDb } from '@/modules/database/repositories/projects.db.js';
|
||||
export { pushSubscriptionsDb } from '@/modules/database/repositories/push-subscriptions.js';
|
||||
export { scanStateDb } from '@/modules/database/repositories/scan-state.db.js';
|
||||
export { sessionsDb } from '@/modules/database/repositories/sessions.db.js';
|
||||
export { userDb } from '@/modules/database/repositories/users.js';
|
||||
export { vapidKeysDb } from '@/modules/database/repositories/vapid-keys.js';
|
||||
17
server/modules/database/init-db.ts
Normal file
17
server/modules/database/init-db.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { getConnection } from "@/modules/database/connection.js";
|
||||
import { runMigrations } from "@/modules/database/migrations.js";
|
||||
import { INIT_SCHEMA_SQL } from "@/modules/database/schema.js";
|
||||
|
||||
// Initialize database with schema
|
||||
export const initializeDatabase = async () => {
|
||||
try {
|
||||
const db = getConnection();
|
||||
db.exec(INIT_SCHEMA_SQL);
|
||||
console.log('Database schema applied');
|
||||
runMigrations(db);
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
console.log('Database initialization failed', { error: message });
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
443
server/modules/database/migrations.ts
Normal file
443
server/modules/database/migrations.ts
Normal file
@@ -0,0 +1,443 @@
|
||||
import { Database } from 'better-sqlite3';
|
||||
|
||||
import {
|
||||
APP_CONFIG_TABLE_SCHEMA_SQL,
|
||||
LAST_SCANNED_AT_SQL,
|
||||
PROJECTS_TABLE_SCHEMA_SQL,
|
||||
PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL,
|
||||
SESSIONS_TABLE_SCHEMA_SQL,
|
||||
USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL,
|
||||
VAPID_KEYS_TABLE_SCHEMA_SQL,
|
||||
} from '@/modules/database/schema.js';
|
||||
|
||||
const SQLITE_UUID_SQL = `
|
||||
lower(hex(randomblob(4))) || '-' ||
|
||||
lower(hex(randomblob(2))) || '-' ||
|
||||
lower(hex(randomblob(2))) || '-' ||
|
||||
lower(hex(randomblob(2))) || '-' ||
|
||||
lower(hex(randomblob(6)))
|
||||
`;
|
||||
|
||||
type TableInfoRow = {
|
||||
name: string;
|
||||
pk: number;
|
||||
};
|
||||
|
||||
const addColumnToTableIfNotExists = (
|
||||
db: Database,
|
||||
tableName: string,
|
||||
columnNames: string[],
|
||||
columnName: string,
|
||||
columnType: string
|
||||
) => {
|
||||
if (!columnNames.includes(columnName)) {
|
||||
console.log(`Running migration: Adding ${columnName} column to ${tableName} table`);
|
||||
db.exec(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${columnType}`);
|
||||
}
|
||||
};
|
||||
|
||||
const tableExists = (db: Database, tableName: string): boolean =>
|
||||
Boolean(
|
||||
db
|
||||
.prepare("SELECT name FROM sqlite_master WHERE type = 'table' AND name = ?")
|
||||
.get(tableName)
|
||||
);
|
||||
|
||||
const getTableInfo = (db: Database, tableName: string): TableInfoRow[] =>
|
||||
db.prepare(`PRAGMA table_info(${tableName})`).all() as TableInfoRow[];
|
||||
|
||||
const migrateLegacySessionNames = (db: Database): void => {
|
||||
const hasLegacySessionNamesTable = tableExists(db, 'session_names');
|
||||
const hasSessionsTable = tableExists(db, 'sessions');
|
||||
|
||||
if (!hasLegacySessionNamesTable) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (hasSessionsTable) {
|
||||
console.log('Running migration: Merging session_names into sessions');
|
||||
db.exec(`
|
||||
INSERT INTO sessions (session_id, provider, custom_name, created_at, updated_at)
|
||||
SELECT
|
||||
session_id,
|
||||
COALESCE(provider, 'claude'),
|
||||
custom_name,
|
||||
COALESCE(created_at, CURRENT_TIMESTAMP),
|
||||
COALESCE(updated_at, CURRENT_TIMESTAMP)
|
||||
FROM session_names
|
||||
WHERE true
|
||||
ON CONFLICT(session_id) DO UPDATE SET
|
||||
provider = excluded.provider,
|
||||
custom_name = COALESCE(excluded.custom_name, sessions.custom_name),
|
||||
created_at = COALESCE(sessions.created_at, excluded.created_at),
|
||||
updated_at = COALESCE(excluded.updated_at, sessions.updated_at)
|
||||
`);
|
||||
db.exec('DROP TABLE session_names');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('Running migration: Renaming session_names table to sessions');
|
||||
db.exec('ALTER TABLE session_names RENAME TO sessions');
|
||||
};
|
||||
|
||||
const migrateLegacyWorkspaceTableIntoProjects = (db: Database): void => {
|
||||
db.exec(PROJECTS_TABLE_SCHEMA_SQL);
|
||||
|
||||
if (!tableExists(db, 'workspace_original_paths')) {
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('Running migration: Migrating workspace_original_paths data into projects');
|
||||
db.exec(`
|
||||
INSERT INTO projects (project_id, project_path, custom_project_name, isStarred, isArchived)
|
||||
SELECT
|
||||
CASE
|
||||
WHEN workspace_id IS NULL OR trim(workspace_id) = ''
|
||||
THEN ${SQLITE_UUID_SQL}
|
||||
ELSE workspace_id
|
||||
END,
|
||||
workspace_path,
|
||||
custom_workspace_name,
|
||||
COALESCE(isStarred, 0),
|
||||
0
|
||||
FROM workspace_original_paths
|
||||
WHERE workspace_path IS NOT NULL AND trim(workspace_path) <> ''
|
||||
ON CONFLICT(project_path) DO UPDATE SET
|
||||
custom_project_name = COALESCE(projects.custom_project_name, excluded.custom_project_name),
|
||||
isStarred = COALESCE(projects.isStarred, excluded.isStarred)
|
||||
`);
|
||||
};
|
||||
|
||||
/**
 * Ensures the `projects` table uses `project_id` as its primary key.
 *
 * Three paths:
 *  1. Table missing — create it fresh from the canonical schema.
 *  2. Already keyed by project_id — add any missing columns and backfill
 *     blank ids with generated UUIDs; no rebuild needed.
 *  3. Legacy shape — rebuild the table inside a transaction, mapping
 *     legacy workspace_* column names where present, deduplicating by
 *     project_path (lowest rowid wins) and by project_id (later
 *     duplicates receive a fresh UUID).
 */
const rebuildProjectsTableWithPrimaryKeySchema = (db: Database): void => {
  const hasProjectsTable = tableExists(db, 'projects');
  if (!hasProjectsTable) {
    db.exec(PROJECTS_TABLE_SCHEMA_SQL);
    return;
  }

  const projectsTableInfo = getTableInfo(db, 'projects');
  const columnNames = projectsTableInfo.map((column) => column.name);
  const hasProjectIdPrimaryKey = projectsTableInfo.some(
    (column) => column.name === 'project_id' && column.pk === 1,
  );

  if (hasProjectIdPrimaryKey) {
    // Schema is already correct; just patch in later-added columns and make
    // sure every row has a non-blank project_id.
    addColumnToTableIfNotExists(db, 'projects', columnNames, 'custom_project_name', 'TEXT DEFAULT NULL');
    addColumnToTableIfNotExists(db, 'projects', columnNames, 'isStarred', 'BOOLEAN DEFAULT 0');
    addColumnToTableIfNotExists(db, 'projects', columnNames, 'isArchived', 'BOOLEAN DEFAULT 0');
    db.exec(`
      UPDATE projects
      SET project_id = ${SQLITE_UUID_SQL}
      WHERE project_id IS NULL OR trim(project_id) = ''
    `);
    return;
  }

  console.log('Running migration: Rebuilding projects table to enforce project_id primary key');

  // The legacy table may still use workspace_* column names; pick whichever
  // source column exists, falling back to NULL / literal defaults.
  const projectPathExpression = columnNames.includes('project_path')
    ? 'project_path'
    : columnNames.includes('workspace_path')
      ? 'workspace_path'
      : 'NULL';

  const customProjectNameExpression = columnNames.includes('custom_project_name')
    ? 'custom_project_name'
    : columnNames.includes('custom_workspace_name')
      ? 'custom_workspace_name'
      : 'NULL';

  const isStarredExpression = columnNames.includes('isStarred') ? 'COALESCE(isStarred, 0)' : '0';

  const isArchivedExpression = columnNames.includes('isArchived') ? 'COALESCE(isArchived, 0)' : '0';

  // Keep any usable existing id; generate a UUID for blank/missing ones.
  const projectIdExpression = columnNames.includes('project_id')
    ? `CASE
        WHEN project_id IS NULL OR trim(project_id) = ''
          THEN ${SQLITE_UUID_SQL}
        ELSE project_id
      END`
    : SQLITE_UUID_SQL;

  // FKs referencing projects must be disabled while the table is swapped out.
  db.exec('PRAGMA foreign_keys = OFF');
  try {
    db.exec('BEGIN TRANSACTION');
    db.exec('DROP TABLE IF EXISTS projects__new');
    db.exec(`
      CREATE TABLE projects__new (
        project_id TEXT PRIMARY KEY NOT NULL,
        project_path TEXT NOT NULL UNIQUE,
        custom_project_name TEXT DEFAULT NULL,
        isStarred BOOLEAN DEFAULT 0,
        isArchived BOOLEAN DEFAULT 0
      )
    `);
    // deduped_paths keeps the first row (by rowid) per project_path;
    // prepared_rows then re-uniquifies any duplicated candidate ids.
    db.exec(`
      WITH source_rows AS (
        SELECT
          ${projectPathExpression} AS project_path,
          ${customProjectNameExpression} AS custom_project_name,
          ${isStarredExpression} AS isStarred,
          ${isArchivedExpression} AS isArchived,
          ${projectIdExpression} AS candidate_project_id,
          rowid AS source_rowid
        FROM projects
        WHERE ${projectPathExpression} IS NOT NULL AND trim(${projectPathExpression}) <> ''
      ),
      deduped_paths AS (
        SELECT
          project_path,
          custom_project_name,
          isStarred,
          isArchived,
          candidate_project_id,
          source_rowid,
          ROW_NUMBER() OVER (PARTITION BY project_path ORDER BY source_rowid) AS project_path_rank
        FROM source_rows
      ),
      prepared_rows AS (
        SELECT
          CASE
            WHEN ROW_NUMBER() OVER (PARTITION BY candidate_project_id ORDER BY source_rowid) = 1
              THEN candidate_project_id
            ELSE ${SQLITE_UUID_SQL}
          END AS project_id,
          project_path,
          custom_project_name,
          isStarred,
          isArchived
        FROM deduped_paths
        WHERE project_path_rank = 1
      )
      INSERT INTO projects__new (
        project_id,
        project_path,
        custom_project_name,
        isStarred,
        isArchived
      )
      SELECT
        project_id,
        project_path,
        custom_project_name,
        isStarred,
        isArchived
      FROM prepared_rows
    `);
    db.exec('DROP TABLE projects');
    db.exec('ALTER TABLE projects__new RENAME TO projects');
    db.exec('COMMIT');
  } catch (migrationError) {
    db.exec('ROLLBACK');
    throw migrationError;
  } finally {
    db.exec('PRAGMA foreign_keys = ON');
  }
};
|
||||
|
||||
/**
 * Ensures the `sessions` table matches the project-based schema:
 * sole primary key `session_id`, a `provider` column, and a nullable
 * `project_path` foreign key into `projects`.
 *
 * If the table already has that shape, only missing timestamp/jsonl columns
 * are added and NULL timestamps are backfilled. Otherwise the table is
 * rebuilt inside a transaction, deduplicating session_id by keeping the
 * most recently updated row.
 */
const rebuildSessionsTableWithProjectSchema = (db: Database): void => {
  const hasSessions = tableExists(db, 'sessions');
  if (!hasSessions) {
    db.exec(SESSIONS_TABLE_SCHEMA_SQL);
    return;
  }

  const sessionsTableInfo = getTableInfo(db, 'sessions');
  const columnNames = sessionsTableInfo.map((column) => column.name);
  // PRAGMA table_info pk values are 1-based positions within the PK;
  // sort to recover composite-key column order.
  const primaryKeyColumns = sessionsTableInfo
    .filter((column) => column.pk > 0)
    .sort((a, b) => a.pk - b.pk)
    .map((column) => column.name);

  // Rebuild when the shape differs from: PK = (session_id) only, with
  // project_path and provider columns present.
  const shouldRebuild =
    !columnNames.includes('project_path') ||
    primaryKeyColumns.length !== 1 ||
    primaryKeyColumns[0] !== 'session_id' ||
    !columnNames.includes('provider');

  if (!shouldRebuild) {
    // Fast path: patch in optional columns and backfill NULL timestamps.
    addColumnToTableIfNotExists(db, 'sessions', columnNames, 'jsonl_path', 'TEXT');
    addColumnToTableIfNotExists(db, 'sessions', columnNames, 'created_at', 'DATETIME');
    addColumnToTableIfNotExists(db, 'sessions', columnNames, 'updated_at', 'DATETIME');
    db.exec('UPDATE sessions SET created_at = COALESCE(created_at, CURRENT_TIMESTAMP)');
    db.exec('UPDATE sessions SET updated_at = COALESCE(updated_at, CURRENT_TIMESTAMP)');
    return;
  }

  console.log('Running migration: Rebuilding sessions table to project-based schema');

  // Source expressions tolerate legacy column names / missing columns.
  const projectPathExpression = columnNames.includes('project_path')
    ? 'project_path'
    : columnNames.includes('workspace_path')
      ? 'workspace_path'
      : 'NULL';

  const providerExpression = columnNames.includes('provider')
    ? "COALESCE(provider, 'claude')"
    : "'claude'";

  const customNameExpression = columnNames.includes('custom_name')
    ? 'custom_name'
    : 'NULL';

  const jsonlPathExpression = columnNames.includes('jsonl_path')
    ? 'jsonl_path'
    : 'NULL';

  const createdAtExpression = columnNames.includes('created_at')
    ? 'COALESCE(created_at, CURRENT_TIMESTAMP)'
    : 'CURRENT_TIMESTAMP';

  const updatedAtExpression = columnNames.includes('updated_at')
    ? 'COALESCE(updated_at, CURRENT_TIMESTAMP)'
    : 'CURRENT_TIMESTAMP';

  // FKs must be off while the referenced/referencing table is swapped.
  db.exec('PRAGMA foreign_keys = OFF');
  try {
    db.exec('BEGIN TRANSACTION');
    db.exec('DROP TABLE IF EXISTS sessions__new');
    db.exec(`
      CREATE TABLE sessions__new (
        session_id TEXT NOT NULL,
        provider TEXT NOT NULL DEFAULT 'claude',
        custom_name TEXT,
        project_path TEXT,
        jsonl_path TEXT,
        created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
        updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY (session_id),
        FOREIGN KEY (project_path) REFERENCES projects(project_path)
          ON DELETE SET NULL
          ON UPDATE CASCADE
      )
    `);
    // Keep, per session_id, the row with the newest updated_at/created_at
    // (falling back to highest rowid to break ties deterministically).
    db.exec(`
      WITH source_rows AS (
        SELECT
          session_id,
          ${providerExpression} AS provider,
          ${customNameExpression} AS custom_name,
          ${projectPathExpression} AS project_path,
          ${jsonlPathExpression} AS jsonl_path,
          ${createdAtExpression} AS created_at,
          ${updatedAtExpression} AS updated_at,
          rowid AS source_rowid
        FROM sessions
        WHERE session_id IS NOT NULL AND trim(session_id) <> ''
      ),
      ranked_rows AS (
        SELECT
          session_id,
          provider,
          custom_name,
          project_path,
          jsonl_path,
          created_at,
          updated_at,
          ROW_NUMBER() OVER (
            PARTITION BY session_id
            ORDER BY datetime(COALESCE(updated_at, created_at)) DESC, source_rowid DESC
          ) AS session_rank
        FROM source_rows
      )
      INSERT INTO sessions__new (
        session_id,
        provider,
        custom_name,
        project_path,
        jsonl_path,
        created_at,
        updated_at
      )
      SELECT
        session_id,
        provider,
        custom_name,
        project_path,
        jsonl_path,
        created_at,
        updated_at
      FROM ranked_rows
      WHERE session_rank = 1
    `);
    db.exec('DROP TABLE sessions');
    db.exec('ALTER TABLE sessions__new RENAME TO sessions');
    db.exec('COMMIT');
  } catch (migrationError) {
    db.exec('ROLLBACK');
    throw migrationError;
  } finally {
    db.exec('PRAGMA foreign_keys = ON');
  }
};
|
||||
|
||||
const ensureProjectsForSessionPaths = (db: Database): void => {
|
||||
if (!tableExists(db, 'sessions')) {
|
||||
return;
|
||||
}
|
||||
|
||||
db.exec(`
|
||||
INSERT INTO projects (project_id, project_path, custom_project_name, isStarred, isArchived)
|
||||
SELECT
|
||||
${SQLITE_UUID_SQL},
|
||||
project_path,
|
||||
NULL,
|
||||
0,
|
||||
0
|
||||
FROM sessions
|
||||
WHERE project_path IS NOT NULL AND trim(project_path) <> ''
|
||||
ON CONFLICT(project_path) DO NOTHING
|
||||
`);
|
||||
};
|
||||
|
||||
export const runMigrations = (db: Database) => {
|
||||
try {
|
||||
const usersTableInfo = db.prepare('PRAGMA table_info(users)').all() as { name: string }[];
|
||||
const userColumnNames = usersTableInfo.map((column) => column.name);
|
||||
|
||||
addColumnToTableIfNotExists(db, 'users', userColumnNames, 'git_name', 'TEXT');
|
||||
addColumnToTableIfNotExists(db, 'users', userColumnNames, 'git_email', 'TEXT');
|
||||
addColumnToTableIfNotExists(
|
||||
db,
|
||||
'users',
|
||||
userColumnNames,
|
||||
'has_completed_onboarding',
|
||||
'BOOLEAN DEFAULT 0'
|
||||
);
|
||||
|
||||
db.exec(APP_CONFIG_TABLE_SCHEMA_SQL);
|
||||
db.exec(USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL);
|
||||
db.exec(VAPID_KEYS_TABLE_SCHEMA_SQL);
|
||||
db.exec(PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL);
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_push_subscriptions_user_id ON push_subscriptions(user_id)');
|
||||
|
||||
db.exec(PROJECTS_TABLE_SCHEMA_SQL);
|
||||
rebuildProjectsTableWithPrimaryKeySchema(db);
|
||||
|
||||
migrateLegacyWorkspaceTableIntoProjects(db);
|
||||
rebuildSessionsTableWithProjectSchema(db);
|
||||
migrateLegacySessionNames(db);
|
||||
ensureProjectsForSessionPaths(db);
|
||||
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_session_ids_lookup ON sessions(session_id)');
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_sessions_project_path ON sessions(project_path)');
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_projects_is_starred ON projects(isStarred)');
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_projects_is_archived ON projects(isArchived)');
|
||||
|
||||
db.exec('DROP INDEX IF EXISTS idx_session_names_lookup');
|
||||
db.exec('DROP INDEX IF EXISTS idx_sessions_workspace_path');
|
||||
db.exec('DROP INDEX IF EXISTS idx_workspace_original_paths_is_starred');
|
||||
db.exec('DROP INDEX IF EXISTS idx_workspace_original_paths_workspace_id');
|
||||
|
||||
if (tableExists(db, 'workspace_original_paths')) {
|
||||
console.log('Running migration: Dropping legacy workspace_original_paths table');
|
||||
db.exec('DROP TABLE workspace_original_paths');
|
||||
}
|
||||
|
||||
db.exec(LAST_SCANNED_AT_SQL);
|
||||
console.log('Database migrations completed successfully');
|
||||
} catch (error: any) {
|
||||
console.error('Error running migrations:', error.message);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
119
server/modules/database/repositories/api-keys.ts
Normal file
119
server/modules/database/repositories/api-keys.ts
Normal file
@@ -0,0 +1,119 @@
|
||||
/**
|
||||
* API keys repository.
|
||||
*
|
||||
* Manages API keys used for external/programmatic access to the backend.
|
||||
* Keys are prefixed with `ck_` and tied to a user via foreign key.
|
||||
*/
|
||||
|
||||
import crypto from 'crypto';
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
// Row shape returned when listing a user's API keys. The raw `api_key` is
// included (getApiKeys selects it); `is_active` and timestamps come back as
// SQLite's 0/1 integer and text representations.
type ApiKeyRow = {
  id: number;
  key_name: string;
  api_key: string;
  created_at: string;
  last_used: string | null;
  is_active: number;
};

// Result of createApiKey, returned for one-time display of the raw key.
// `id` is number | bigint to match the driver's lastInsertRowid value.
type CreateApiKeyResult = {
  id: number | bigint;
  keyName: string;
  apiKey: string;
};

// User resolved from a valid API key, plus the matched key's own id
// (used to bump that key's last_used timestamp).
type ValidatedApiKeyUser = {
  id: number;
  username: string;
  api_key_id: number;
};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Generates a cryptographically random API key with the `ck_` prefix. */
|
||||
function generateApiKey(): string {
|
||||
return 'ck_' + crypto.randomBytes(32).toString('hex');
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Queries
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const apiKeysDb = {
|
||||
generateApiKey,
|
||||
|
||||
/** Creates a new API key for the given user and returns it for one-time display. */
|
||||
createApiKey(userId: number, keyName: string): CreateApiKeyResult {
|
||||
const db = getConnection();
|
||||
const apiKey = generateApiKey();
|
||||
const result = db
|
||||
.prepare(
|
||||
'INSERT INTO api_keys (user_id, key_name, api_key) VALUES (?, ?, ?)'
|
||||
)
|
||||
.run(userId, keyName, apiKey);
|
||||
return { id: result.lastInsertRowid, keyName, apiKey };
|
||||
},
|
||||
|
||||
/** Lists all API keys for a user, most recent first. */
|
||||
getApiKeys(userId: number): ApiKeyRow[] {
|
||||
const db = getConnection();
|
||||
return db
|
||||
.prepare(
|
||||
'SELECT id, key_name, api_key, created_at, last_used, is_active FROM api_keys WHERE user_id = ? ORDER BY created_at DESC'
|
||||
)
|
||||
.all(userId) as ApiKeyRow[];
|
||||
},
|
||||
|
||||
/**
|
||||
* Validates an API key and resolves the owning user.
|
||||
* If the key is valid, its `last_used` timestamp is updated as a side effect.
|
||||
* Returns undefined when the key is invalid or the user is inactive.
|
||||
*/
|
||||
validateApiKey(apiKey: string): ValidatedApiKeyUser | undefined {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
`SELECT u.id, u.username, ak.id as api_key_id
|
||||
FROM api_keys ak
|
||||
JOIN users u ON ak.user_id = u.id
|
||||
WHERE ak.api_key = ? AND ak.is_active = 1 AND u.is_active = 1`
|
||||
)
|
||||
.get(apiKey) as ValidatedApiKeyUser | undefined;
|
||||
|
||||
if (row) {
|
||||
db.prepare(
|
||||
'UPDATE api_keys SET last_used = CURRENT_TIMESTAMP WHERE id = ?'
|
||||
).run(row.api_key_id);
|
||||
}
|
||||
|
||||
return row;
|
||||
},
|
||||
|
||||
/** Permanently removes an API key. Returns true if a row was deleted. */
|
||||
deleteApiKey(userId: number, apiKeyId: number): boolean {
|
||||
const db = getConnection();
|
||||
const result = db
|
||||
.prepare('DELETE FROM api_keys WHERE id = ? AND user_id = ?')
|
||||
.run(apiKeyId, userId);
|
||||
return result.changes > 0;
|
||||
},
|
||||
|
||||
/** Enables or disables an API key without deleting it. */
|
||||
toggleApiKey(
|
||||
userId: number,
|
||||
apiKeyId: number,
|
||||
isActive: boolean
|
||||
): boolean {
|
||||
const db = getConnection();
|
||||
const result = db
|
||||
.prepare(
|
||||
'UPDATE api_keys SET is_active = ? WHERE id = ? AND user_id = ?'
|
||||
)
|
||||
.run(isActive ? 1 : 0, apiKeyId, userId);
|
||||
return result.changes > 0;
|
||||
},
|
||||
};
|
||||
53
server/modules/database/repositories/app-config.ts
Normal file
53
server/modules/database/repositories/app-config.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
/**
|
||||
* App config repository.
|
||||
*
|
||||
* Key-value store for application-level configuration that persists
|
||||
* across restarts (JWT secret, feature flags, etc.). Values are always
|
||||
* stored as strings; callers handle parsing.
|
||||
*/
|
||||
|
||||
import crypto from 'crypto';
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Queries
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const appConfigDb = {
|
||||
/** Returns the stored value for a config key, or null if missing. */
|
||||
get(key: string): string | null {
|
||||
try {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare('SELECT value FROM app_config WHERE key = ?')
|
||||
.get(key) as { value: string } | undefined;
|
||||
return row?.value ?? null;
|
||||
} catch {
|
||||
// Swallow errors so early-startup reads (e.g. JWT secret) do not crash.
|
||||
return null;
|
||||
}
|
||||
},
|
||||
|
||||
/** Inserts or updates a config key (upsert). */
|
||||
set(key: string, value: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare(
|
||||
'INSERT INTO app_config (key, value) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET value = excluded.value'
|
||||
).run(key, value);
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns the JWT signing secret, generating and persisting one
|
||||
* if it does not already exist. This ensures the secret survives
|
||||
* server restarts while being created automatically on first boot.
|
||||
*/
|
||||
getOrCreateJwtSecret(): string {
|
||||
let secret = appConfigDb.get('jwt_secret');
|
||||
if (!secret) {
|
||||
secret = crypto.randomBytes(64).toString('hex');
|
||||
appConfigDb.set('jwt_secret', secret);
|
||||
}
|
||||
return secret;
|
||||
},
|
||||
};
|
||||
106
server/modules/database/repositories/credentials.ts
Normal file
106
server/modules/database/repositories/credentials.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
/**
|
||||
* User credentials repository.
|
||||
*
|
||||
* Manages external service tokens (GitHub, GitLab, Bitbucket, etc.)
|
||||
* stored per-user. Each credential has a type discriminator so multiple
|
||||
* credential kinds can coexist in the same table.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
import type {
|
||||
CreateCredentialResult,
|
||||
CredentialPublicRow,
|
||||
} from '@/shared/types.js';
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Queries
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const credentialsDb = {
|
||||
/** Stores a new credential and returns a safe (no raw value) result. */
|
||||
createCredential(
|
||||
userId: number,
|
||||
credentialName: string,
|
||||
credentialType: string,
|
||||
credentialValue: string,
|
||||
description: string | null = null
|
||||
): CreateCredentialResult {
|
||||
const db = getConnection();
|
||||
const result = db
|
||||
.prepare(
|
||||
'INSERT INTO user_credentials (user_id, credential_name, credential_type, credential_value, description) VALUES (?, ?, ?, ?, ?)'
|
||||
)
|
||||
.run(userId, credentialName, credentialType, credentialValue, description);
|
||||
return {
|
||||
id: result.lastInsertRowid,
|
||||
credentialName,
|
||||
credentialType,
|
||||
};
|
||||
},
|
||||
|
||||
/**
|
||||
* Lists credentials for a user (excluding raw values).
|
||||
* Optionally filters by credential type (e.g. 'github_token').
|
||||
*/
|
||||
getCredentials(
|
||||
userId: number,
|
||||
credentialType: string | null = null
|
||||
): CredentialPublicRow[] {
|
||||
const db = getConnection();
|
||||
|
||||
if (credentialType) {
|
||||
return db
|
||||
.prepare(
|
||||
'SELECT id, credential_name, credential_type, description, created_at, is_active FROM user_credentials WHERE user_id = ? AND credential_type = ? ORDER BY created_at DESC'
|
||||
)
|
||||
.all(userId, credentialType) as CredentialPublicRow[];
|
||||
}
|
||||
|
||||
return db
|
||||
.prepare(
|
||||
'SELECT id, credential_name, credential_type, description, created_at, is_active FROM user_credentials WHERE user_id = ? ORDER BY created_at DESC'
|
||||
)
|
||||
.all(userId) as CredentialPublicRow[];
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns the raw credential value for the most recent active
|
||||
* credential of the given type, or null if none exists.
|
||||
*/
|
||||
getActiveCredential(
|
||||
userId: number,
|
||||
credentialType: string
|
||||
): string | null {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
'SELECT credential_value FROM user_credentials WHERE user_id = ? AND credential_type = ? AND is_active = 1 ORDER BY created_at DESC LIMIT 1'
|
||||
)
|
||||
.get(userId, credentialType) as { credential_value: string } | undefined;
|
||||
return row?.credential_value ?? null;
|
||||
},
|
||||
|
||||
/** Permanently removes a credential. Returns true if a row was deleted. */
|
||||
deleteCredential(userId: number, credentialId: number): boolean {
|
||||
const db = getConnection();
|
||||
const result = db
|
||||
.prepare('DELETE FROM user_credentials WHERE id = ? AND user_id = ?')
|
||||
.run(credentialId, userId);
|
||||
return result.changes > 0;
|
||||
},
|
||||
|
||||
/** Enables or disables a credential without deleting it. */
|
||||
toggleCredential(
|
||||
userId: number,
|
||||
credentialId: number,
|
||||
isActive: boolean
|
||||
): boolean {
|
||||
const db = getConnection();
|
||||
const result = db
|
||||
.prepare(
|
||||
'UPDATE user_credentials SET is_active = ? WHERE id = ? AND user_id = ?'
|
||||
)
|
||||
.run(isActive ? 1 : 0, credentialId, userId);
|
||||
return result.changes > 0;
|
||||
},
|
||||
};
|
||||
100
server/modules/database/repositories/github-tokens.ts
Normal file
100
server/modules/database/repositories/github-tokens.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
/**
|
||||
* GitHub tokens repository.
|
||||
*
|
||||
* Backward-compatible helper layer over generic credentials storage.
|
||||
* Tokens are stored in `user_credentials` with `credential_type = 'github_token'`.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
import { credentialsDb } from '@/modules/database/repositories/credentials.js';
|
||||
import type {
|
||||
CredentialPublicRow,
|
||||
CreateCredentialResult,
|
||||
} from '@/shared/types.js';
|
||||
|
||||
// Discriminator value used in user_credentials.credential_type for GitHub tokens.
const GITHUB_TOKEN_TYPE = 'github_token';

// Full user_credentials row including the raw stored value
// (only used internally by getGithubTokenById's SELECT *).
type CredentialRow = {
  id: number;
  user_id: number;
  credential_name: string;
  credential_type: string;
  credential_value: string;
  description: string | null;
  created_at: string;
  is_active: number;
};

// CredentialRow plus a `github_token` alias of credential_value,
// kept for callers written against the pre-credentials schema.
type GithubTokenLookup = CredentialRow & {
  github_token: string;
};
|
||||
|
||||
export const githubTokensDb = {
|
||||
/** Creates a GitHub token credential entry. */
|
||||
createGithubToken(
|
||||
userId: number,
|
||||
tokenName: string,
|
||||
githubToken: string,
|
||||
description: string | null = null
|
||||
): CreateCredentialResult {
|
||||
return credentialsDb.createCredential(
|
||||
userId,
|
||||
tokenName,
|
||||
GITHUB_TOKEN_TYPE,
|
||||
githubToken,
|
||||
description
|
||||
);
|
||||
},
|
||||
|
||||
/** Returns all GitHub tokens (safe shape: no credential value). */
|
||||
getGithubTokens(userId: number): CredentialPublicRow[] {
|
||||
return credentialsDb.getCredentials(userId, GITHUB_TOKEN_TYPE);
|
||||
},
|
||||
|
||||
/** Returns the most recent active GitHub token value for a user. */
|
||||
getActiveGithubToken(userId: number): string | null {
|
||||
return credentialsDb.getActiveCredential(userId, GITHUB_TOKEN_TYPE);
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns a specific active GitHub token row by id/user, including
|
||||
* a `github_token` compatibility field.
|
||||
*/
|
||||
getGithubTokenById(userId: number, tokenId: number): GithubTokenLookup | null {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
`SELECT *
|
||||
FROM user_credentials
|
||||
WHERE id = ? AND user_id = ? AND credential_type = ? AND is_active = 1`
|
||||
)
|
||||
.get(tokenId, userId, GITHUB_TOKEN_TYPE) as CredentialRow | undefined;
|
||||
|
||||
if (!row) return null;
|
||||
|
||||
return {
|
||||
...row,
|
||||
github_token: row.credential_value,
|
||||
};
|
||||
},
|
||||
|
||||
/** Updates active state for a GitHub token. */
|
||||
updateGithubToken(
|
||||
userId: number,
|
||||
tokenId: number,
|
||||
isActive: boolean
|
||||
): boolean {
|
||||
return credentialsDb.toggleCredential(userId, tokenId, isActive);
|
||||
},
|
||||
|
||||
/** Deletes a GitHub token. */
|
||||
deleteGithubToken(userId: number, tokenId: number): boolean {
|
||||
return credentialsDb.deleteCredential(userId, tokenId);
|
||||
},
|
||||
|
||||
// Legacy alias used by existing routes
|
||||
toggleGithubToken(userId: number, tokenId: number, isActive: boolean): boolean {
|
||||
return githubTokensDb.updateGithubToken(userId, tokenId, isActive);
|
||||
},
|
||||
};
|
||||
|
||||
103
server/modules/database/repositories/notification-preferences.ts
Normal file
103
server/modules/database/repositories/notification-preferences.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
/**
|
||||
* Notification preferences repository.
|
||||
*
|
||||
* Stores per-user notification channel/event preferences as JSON.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type NotificationPreferences = {
|
||||
channels: {
|
||||
inApp: boolean;
|
||||
webPush: boolean;
|
||||
};
|
||||
events: {
|
||||
actionRequired: boolean;
|
||||
stop: boolean;
|
||||
error: boolean;
|
||||
};
|
||||
};
|
||||
|
||||
const DEFAULT_NOTIFICATION_PREFERENCES: NotificationPreferences = {
|
||||
channels: {
|
||||
inApp: false,
|
||||
webPush: false,
|
||||
},
|
||||
events: {
|
||||
actionRequired: true,
|
||||
stop: true,
|
||||
error: true,
|
||||
},
|
||||
};
|
||||
|
||||
function normalizeNotificationPreferences(value: unknown): NotificationPreferences {
|
||||
const source = value && typeof value === 'object' ? (value as Record<string, any>) : {};
|
||||
|
||||
return {
|
||||
channels: {
|
||||
inApp: source.channels?.inApp === true,
|
||||
webPush: source.channels?.webPush === true,
|
||||
},
|
||||
events: {
|
||||
actionRequired: source.events?.actionRequired !== false,
|
||||
stop: source.events?.stop !== false,
|
||||
error: source.events?.error !== false,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export const notificationPreferencesDb = {
|
||||
/** Returns the normalized preferences for a user, creating defaults on first read. */
|
||||
getNotificationPreferences(userId: number): NotificationPreferences {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
'SELECT preferences_json FROM user_notification_preferences WHERE user_id = ?'
|
||||
)
|
||||
.get(userId) as { preferences_json: string } | undefined;
|
||||
|
||||
if (!row) {
|
||||
const defaults = normalizeNotificationPreferences(DEFAULT_NOTIFICATION_PREFERENCES);
|
||||
db.prepare(
|
||||
'INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)'
|
||||
).run(userId, JSON.stringify(defaults));
|
||||
return defaults;
|
||||
}
|
||||
|
||||
let parsed: unknown;
|
||||
try {
|
||||
parsed = JSON.parse(row.preferences_json);
|
||||
} catch {
|
||||
parsed = DEFAULT_NOTIFICATION_PREFERENCES;
|
||||
}
|
||||
return normalizeNotificationPreferences(parsed);
|
||||
},
|
||||
|
||||
/** Upserts normalized preferences for a user and returns the stored value. */
|
||||
updateNotificationPreferences(
|
||||
userId: number,
|
||||
preferences: unknown
|
||||
): NotificationPreferences {
|
||||
const normalized = normalizeNotificationPreferences(preferences);
|
||||
const db = getConnection();
|
||||
|
||||
db.prepare(
|
||||
`INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at)
|
||||
VALUES (?, ?, CURRENT_TIMESTAMP)
|
||||
ON CONFLICT(user_id) DO UPDATE SET
|
||||
preferences_json = excluded.preferences_json,
|
||||
updated_at = CURRENT_TIMESTAMP`
|
||||
).run(userId, JSON.stringify(normalized));
|
||||
|
||||
return normalized;
|
||||
},
|
||||
|
||||
// Legacy aliases used by existing services/routes
|
||||
getPreferences(userId: number): NotificationPreferences {
|
||||
return notificationPreferencesDb.getNotificationPreferences(userId);
|
||||
},
|
||||
updatePreferences(userId: number, preferences: unknown): NotificationPreferences {
|
||||
return notificationPreferencesDb.updateNotificationPreferences(userId, preferences);
|
||||
},
|
||||
};
|
||||
|
||||
@@ -0,0 +1,72 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtemp, rm } from 'node:fs/promises';
|
||||
import { tmpdir } from 'node:os';
|
||||
import path from 'node:path';
|
||||
import test from 'node:test';
|
||||
|
||||
import { closeConnection } from '@/modules/database/connection.js';
|
||||
import { initializeDatabase } from '@/modules/database/init-db.js';
|
||||
import { projectsDb } from '@/modules/database/repositories/projects.db.js';
|
||||
|
||||
/**
 * Runs a test body against a fresh SQLite database in a temp directory.
 * Swaps DATABASE_PATH for the duration, re-initializes the connection,
 * then restores the previous env value and removes the temp dir — even
 * when the test body throws.
 */
async function withIsolatedDatabase(runTest: () => void | Promise<void>): Promise<void> {
  const previousDatabasePath = process.env.DATABASE_PATH;
  const tempDirectory = await mkdtemp(path.join(tmpdir(), 'projects-db-'));
  const databasePath = path.join(tempDirectory, 'auth.db');

  // Close any connection bound to the previous path before re-pointing.
  closeConnection();
  process.env.DATABASE_PATH = databasePath;
  await initializeDatabase();

  try {
    await runTest();
  } finally {
    closeConnection();
    // Restore the env var exactly: delete it if it was previously unset.
    if (previousDatabasePath === undefined) {
      delete process.env.DATABASE_PATH;
    } else {
      process.env.DATABASE_PATH = previousDatabasePath;
    }
    await rm(tempDirectory, { recursive: true, force: true });
  }
}
|
||||
|
||||
// Fresh path: the insert succeeds and the project starts unarchived.
test('projectsDb.createProjectPath returns created for fresh paths', async () => {
  await withIsolatedDatabase(() => {
    const created = projectsDb.createProjectPath('/workspace/new-project');

    assert.equal(created.outcome, 'created');
    assert.ok(created.project);
    assert.equal(created.project?.project_path, '/workspace/new-project');
    assert.equal(created.project?.isArchived, 0);
  });
});

// Archived duplicate: the existing row is un-archived and its id is reused.
test('projectsDb.createProjectPath returns reactivated_archived for archived duplicates', async () => {
  await withIsolatedDatabase(() => {
    const initial = projectsDb.createProjectPath('/workspace/archived-project', 'Archived Project');
    assert.equal(initial.outcome, 'created');
    assert.ok(initial.project);

    projectsDb.updateProjectIsArchived('/workspace/archived-project', true);

    const reused = projectsDb.createProjectPath('/workspace/archived-project', 'Renamed Project');
    assert.equal(reused.outcome, 'reactivated_archived');
    assert.ok(reused.project);
    assert.equal(reused.project?.project_id, initial.project?.project_id);
    assert.equal(reused.project?.isArchived, 0);
  });
});

// Active duplicate: no new row is created; the existing active project is reported.
test('projectsDb.createProjectPath returns active_conflict for active duplicates', async () => {
  await withIsolatedDatabase(() => {
    const initial = projectsDb.createProjectPath('/workspace/active-project');
    assert.equal(initial.outcome, 'created');
    assert.ok(initial.project);

    const conflict = projectsDb.createProjectPath('/workspace/active-project');
    assert.equal(conflict.outcome, 'active_conflict');
    assert.ok(conflict.project);
    assert.equal(conflict.project?.project_id, initial.project?.project_id);
    assert.equal(conflict.project?.isArchived, 0);
  });
});
|
||||
183
server/modules/database/repositories/projects.db.ts
Normal file
183
server/modules/database/repositories/projects.db.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import path from 'node:path';
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
import type { CreateProjectPathResult, ProjectRepositoryRow } from '@/shared/types.js';
|
||||
import { normalizeProjectPath } from '@/shared/utils.js';
|
||||
|
||||
function normalizeProjectDisplayName(projectPath: string, customProjectName: string | null): string {
|
||||
const trimmedCustomName = typeof customProjectName === 'string' ? customProjectName.trim() : '';
|
||||
if (trimmedCustomName.length > 0) {
|
||||
return trimmedCustomName;
|
||||
}
|
||||
|
||||
const directoryName = path.basename(projectPath);
|
||||
return directoryName || projectPath;
|
||||
}
|
||||
|
||||
/**
 * Repository for the `projects` table.
 *
 * Every path-keyed method normalizes the incoming path with
 * `normalizeProjectPath` before querying, so callers may pass unnormalized
 * paths. Boolean columns (`isStarred`, `isArchived`) are stored as 0/1 integers.
 */
export const projectsDb = {
  /**
   * Creates a project row for `projectPath`, reusing (and un-archiving) an
   * archived row for the same path when one exists — all in a single upsert.
   *
   * Outcome semantics:
   * - 'created':              a brand-new row was inserted
   * - 'reactivated_archived': an archived duplicate was un-archived and reused
   * - 'active_conflict':      an active duplicate already exists; nothing was written
   */
  createProjectPath(projectPath: string, customProjectName: string | null = null): CreateProjectPathResult {
    const db = getConnection();
    const normalizedProjectPath = normalizeProjectPath(projectPath);
    const normalizedProjectName = normalizeProjectDisplayName(normalizedProjectPath, customProjectName);
    // Generated up front so the outcome can be derived from RETURNING: the
    // stored project_id equals attemptedId only when the INSERT branch ran.
    const attemptedId = randomUUID();
    // Upsert: insert a fresh active row, or flip an *archived* duplicate back
    // to active. An active duplicate satisfies neither branch, so RETURNING
    // yields no row in that case.
    const row = db.prepare(`
      INSERT INTO projects (project_id, project_path, custom_project_name, isArchived)
      VALUES (?, ?, ?, 0)
      ON CONFLICT(project_path) DO UPDATE SET
        isArchived = 0
      WHERE projects.isArchived = 1
      RETURNING project_id, project_path, custom_project_name, isStarred, isArchived
    `).get(attemptedId, normalizedProjectPath, normalizedProjectName) as ProjectRepositoryRow | undefined;

    if (row) {
      return {
        outcome: row.project_id === attemptedId ? 'created' : 'reactivated_archived',
        project: row,
      };
    }

    // No row returned: the conflicting row is active. Fetch it so the caller
    // can report which project clashed.
    const existingProject = projectsDb.getProjectPath(normalizedProjectPath);
    return {
      outcome: 'active_conflict',
      project: existingProject,
    };
  },

  /** Looks up a project row by (normalized) filesystem path; null when absent. */
  getProjectPath(projectPath: string): ProjectRepositoryRow | null {
    const db = getConnection();
    const normalizedProjectPath = normalizeProjectPath(projectPath);
    const row = db.prepare(`
      SELECT project_id, project_path, custom_project_name, isStarred, isArchived
      FROM projects
      WHERE project_path = ?
    `).get(normalizedProjectPath) as ProjectRepositoryRow | undefined;

    return row ?? null;
  },

  /** Looks up a project row by its DB-assigned id; null when absent. */
  getProjectById(projectId: string): ProjectRepositoryRow | null {
    const db = getConnection();
    const row = db.prepare(`
      SELECT project_id, project_path, custom_project_name, isStarred, isArchived
      FROM projects
      WHERE project_id = ?
    `).get(projectId) as ProjectRepositoryRow | undefined;

    return row ?? null;
  },

  /**
   * Resolve the absolute project directory from a database project_id.
   *
   * This is the canonical lookup used after the projectName → projectId migration:
   * API routes receive the DB-assigned `projectId` and must resolve the real folder
   * path through this helper before touching the filesystem. Returns `null` when the
   * project row does not exist so callers can respond with a 404.
   */
  getProjectPathById(projectId: string): string | null {
    const db = getConnection();
    const row = db.prepare(`
      SELECT project_path
      FROM projects
      WHERE project_id = ?
    `).get(projectId) as Pick<ProjectRepositoryRow, 'project_path'> | undefined;

    return row?.project_path ?? null;
  },

  /** Returns every non-archived project row. */
  getProjectPaths(): ProjectRepositoryRow[] {
    const db = getConnection();
    return db.prepare(`
      SELECT project_id, project_path, custom_project_name, isStarred, isArchived
      FROM projects
      WHERE isArchived = 0
    `).all() as ProjectRepositoryRow[];
  },

  /** Returns the stored custom display name for a path, or null when unset/absent. */
  getCustomProjectName(projectPath: string): string | null {
    const db = getConnection();
    const normalizedProjectPath = normalizeProjectPath(projectPath);
    const row = db.prepare(`
      SELECT custom_project_name
      FROM projects
      WHERE project_path = ?
    `).get(normalizedProjectPath) as Pick<ProjectRepositoryRow, 'custom_project_name'> | undefined;

    return row?.custom_project_name ?? null;
  },

  /**
   * Sets (or clears, with null) the custom display name for a path.
   * Upsert: inserts the project row first when it does not exist yet; on
   * conflict only the name column is overwritten.
   */
  updateCustomProjectName(projectPath: string, customProjectName: string | null): void {
    const db = getConnection();
    const normalizedProjectPath = normalizeProjectPath(projectPath);
    db.prepare(`
      INSERT INTO projects (project_id, project_path, custom_project_name)
      VALUES (?, ?, ?)
      ON CONFLICT(project_path) DO UPDATE SET custom_project_name = excluded.custom_project_name
    `).run(randomUUID(), normalizedProjectPath, customProjectName);
  },

  /** Sets or clears the custom display name for an existing row, by id (no upsert). */
  updateCustomProjectNameById(projectId: string, customProjectName: string | null): void {
    const db = getConnection();
    db.prepare(`
      UPDATE projects
      SET custom_project_name = ?
      WHERE project_id = ?
    `).run(customProjectName, projectId);
  },

  /** Sets the starred flag (stored as 0/1) for a project by path. */
  updateProjectIsStarred(projectPath: string, isStarred: boolean): void {
    const db = getConnection();
    const normalizedProjectPath = normalizeProjectPath(projectPath);
    db.prepare(`
      UPDATE projects
      SET isStarred = ?
      WHERE project_path = ?
    `).run(isStarred ? 1 : 0, normalizedProjectPath);
  },

  /** Sets the starred flag (stored as 0/1) for a project by id. */
  updateProjectIsStarredById(projectId: string, isStarred: boolean): void {
    const db = getConnection();
    db.prepare(`
      UPDATE projects
      SET isStarred = ?
      WHERE project_id = ?
    `).run(isStarred ? 1 : 0, projectId);
  },

  /** Sets the archived flag (stored as 0/1) for a project by path. */
  updateProjectIsArchived(projectPath: string, isArchived: boolean): void {
    const db = getConnection();
    const normalizedProjectPath = normalizeProjectPath(projectPath);
    db.prepare(`
      UPDATE projects
      SET isArchived = ?
      WHERE project_path = ?
    `).run(isArchived ? 1 : 0, normalizedProjectPath);
  },

  /** Sets the archived flag (stored as 0/1) for a project by id. */
  updateProjectIsArchivedById(projectId: string, isArchived: boolean): void {
    const db = getConnection();
    db.prepare(`
      UPDATE projects
      SET isArchived = ?
      WHERE project_id = ?
    `).run(isArchived ? 1 : 0, projectId);
  },

  /** Permanently deletes the project row for a (normalized) path. */
  deleteProjectPath(projectPath: string): void {
    const db = getConnection();
    const normalizedProjectPath = normalizeProjectPath(projectPath);
    db.prepare(`
      DELETE FROM projects
      WHERE project_path = ?
    `).run(normalizedProjectPath);
  },

  /** Permanently deletes the project row by id. */
  deleteProjectById(projectId: string): void {
    const db = getConnection();
    db.prepare(`
      DELETE FROM projects
      WHERE project_id = ?
    `).run(projectId);
  },
};
|
||||
80
server/modules/database/repositories/push-subscriptions.ts
Normal file
80
server/modules/database/repositories/push-subscriptions.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
/**
|
||||
* Push subscriptions repository.
|
||||
*
|
||||
* Persists browser push subscription endpoints and keys per user.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type PushSubscriptionLookupRow = {
|
||||
endpoint: string;
|
||||
keys_p256dh: string;
|
||||
keys_auth: string;
|
||||
};
|
||||
|
||||
export const pushSubscriptionsDb = {
|
||||
/** Upserts a push subscription endpoint for a user. */
|
||||
createPushSubscription(
|
||||
userId: number,
|
||||
endpoint: string,
|
||||
keysP256dh: string,
|
||||
keysAuth: string
|
||||
): void {
|
||||
const db = getConnection();
|
||||
db.prepare(
|
||||
`INSERT INTO push_subscriptions (user_id, endpoint, keys_p256dh, keys_auth)
|
||||
VALUES (?, ?, ?, ?)
|
||||
ON CONFLICT(endpoint) DO UPDATE SET
|
||||
user_id = excluded.user_id,
|
||||
keys_p256dh = excluded.keys_p256dh,
|
||||
keys_auth = excluded.keys_auth`
|
||||
).run(userId, endpoint, keysP256dh, keysAuth);
|
||||
},
|
||||
|
||||
/** Returns all subscriptions for a user. */
|
||||
getPushSubscriptions(userId: number): PushSubscriptionLookupRow[] {
|
||||
const db = getConnection();
|
||||
return db
|
||||
.prepare(
|
||||
'SELECT endpoint, keys_p256dh, keys_auth FROM push_subscriptions WHERE user_id = ?'
|
||||
)
|
||||
.all(userId) as PushSubscriptionLookupRow[];
|
||||
},
|
||||
|
||||
/** Deletes one subscription by endpoint. */
|
||||
deletePushSubscription(endpoint: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare('DELETE FROM push_subscriptions WHERE endpoint = ?').run(endpoint);
|
||||
},
|
||||
|
||||
/** Deletes all subscriptions for a user. */
|
||||
deletePushSubscriptionsForUser(userId: number): void {
|
||||
const db = getConnection();
|
||||
db.prepare('DELETE FROM push_subscriptions WHERE user_id = ?').run(userId);
|
||||
},
|
||||
|
||||
// Legacy aliases used by existing services/routes
|
||||
saveSubscription(
|
||||
userId: number,
|
||||
endpoint: string,
|
||||
keysP256dh: string,
|
||||
keysAuth: string
|
||||
): void {
|
||||
pushSubscriptionsDb.createPushSubscription(
|
||||
userId,
|
||||
endpoint,
|
||||
keysP256dh,
|
||||
keysAuth
|
||||
);
|
||||
},
|
||||
getSubscriptions(userId: number): PushSubscriptionLookupRow[] {
|
||||
return pushSubscriptionsDb.getPushSubscriptions(userId);
|
||||
},
|
||||
removeSubscription(endpoint: string): void {
|
||||
pushSubscriptionsDb.deletePushSubscription(endpoint);
|
||||
},
|
||||
removeAllForUser(userId: number): void {
|
||||
pushSubscriptionsDb.deletePushSubscriptionsForUser(userId);
|
||||
},
|
||||
};
|
||||
|
||||
42
server/modules/database/repositories/scan-state.db.ts
Normal file
42
server/modules/database/repositories/scan-state.db.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type ScanStateRow = {
|
||||
last_scanned_at: string;
|
||||
};
|
||||
|
||||
export const scanStateDb = {
|
||||
getLastScannedAt() {
|
||||
const db = getConnection();
|
||||
|
||||
const row = db
|
||||
.prepare(`SELECT last_scanned_at FROM scan_state WHERE id = 1`)
|
||||
.get() as ScanStateRow;
|
||||
|
||||
if (!row) {
|
||||
return null; // Before any scan, the row is undefined.
|
||||
}
|
||||
|
||||
let lastScannedDate: Date | null = null;
|
||||
const lastScannedStr = row.last_scanned_at;
|
||||
|
||||
if (lastScannedStr) {
|
||||
// SQLite CURRENT_TIMESTAMP returns UTC in "YYYY-MM-DD HH:MM:SS" format.
|
||||
// Replace space with 'T' and append 'Z' to parse reliably in JS across all platforms.
|
||||
lastScannedDate = new Date(lastScannedStr.replace(' ', 'T') + 'Z');
|
||||
}
|
||||
|
||||
return lastScannedDate;
|
||||
},
|
||||
|
||||
updateLastScannedAt(scannedAt: Date = new Date()) {
|
||||
const db = getConnection();
|
||||
const sqliteTimestamp = scannedAt.toISOString().slice(0, 19).replace('T', ' ');
|
||||
|
||||
db.prepare(`
|
||||
INSERT INTO scan_state (id, last_scanned_at)
|
||||
VALUES (1, ?)
|
||||
ON CONFLICT (id)
|
||||
DO UPDATE SET last_scanned_at = excluded.last_scanned_at
|
||||
`).run(sqliteTimestamp);
|
||||
}
|
||||
};
|
||||
174
server/modules/database/repositories/sessions.db.ts
Normal file
174
server/modules/database/repositories/sessions.db.ts
Normal file
@@ -0,0 +1,174 @@
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
import { projectsDb } from '@/modules/database/repositories/projects.db.js';
|
||||
import { normalizeProjectPath } from '@/shared/utils.js';
|
||||
|
||||
type SessionRow = {
|
||||
session_id: string;
|
||||
provider: string;
|
||||
project_path: string | null;
|
||||
jsonl_path: string | null;
|
||||
custom_name: string | null;
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
};
|
||||
|
||||
type SessionMetadataLookupRow = Pick<
|
||||
SessionRow,
|
||||
'session_id' | 'provider' | 'project_path' | 'jsonl_path' | 'custom_name' | 'created_at' | 'updated_at'
|
||||
>;
|
||||
|
||||
function normalizeTimestamp(value?: string): string | null {
|
||||
if (!value) return null;
|
||||
|
||||
const parsed = new Date(value);
|
||||
if (Number.isNaN(parsed.getTime())) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return parsed.toISOString();
|
||||
}
|
||||
|
||||
// Normalizes a project path for storage/lookup. The provider argument is
// accepted so provider-specific path rules can be added later; it is
// currently unused, and `void` marks that as intentional (suppresses
// unused-parameter lint noise).
function normalizeProjectPathForProvider(provider: string, projectPath: string): string {
  void provider;
  return normalizeProjectPath(projectPath);
}
|
||||
|
||||
/**
 * Repository for the `sessions` table (one row per provider session).
 *
 * `project_path` is a foreign key into `projects`, so `createSession` upserts
 * the owning project row first. All project paths are normalized before
 * being stored or used in lookups.
 */
export const sessionsDb = {
  /**
   * Inserts or updates a session row keyed by session_id.
   *
   * On conflict the provider, paths and updated_at are overwritten, but a
   * previously stored custom_name is kept when the new one is null.
   * Invalid/missing timestamps fall back to CURRENT_TIMESTAMP via COALESCE.
   * Returns the (unchanged) sessionId for caller convenience.
   */
  createSession(
    sessionId: string,
    provider: string,
    projectPath: string,
    customName?: string,
    createdAt?: string,
    updatedAt?: string,
    jsonlPath?: string | null
  ): string {
    const db = getConnection();
    // Normalize caller-supplied timestamps to ISO; null means "let SQLite fill it".
    const createdAtValue = normalizeTimestamp(createdAt);
    const updatedAtValue = normalizeTimestamp(updatedAt);
    const normalizedProjectPath = normalizeProjectPathForProvider(provider, projectPath);

    // First, ensure the project path is recorded in the projects table,
    // since it's a foreign key in the sessions table.
    projectsDb.createProjectPath(normalizedProjectPath);

    db.prepare(
      `INSERT INTO sessions (session_id, provider, custom_name, project_path, jsonl_path, created_at, updated_at)
       VALUES (?, ?, ?, ?, ?, COALESCE(?, CURRENT_TIMESTAMP), COALESCE(?, CURRENT_TIMESTAMP))
       ON CONFLICT(session_id) DO UPDATE SET
         provider = excluded.provider,
         updated_at = excluded.updated_at,
         project_path = excluded.project_path,
         jsonl_path = excluded.jsonl_path,
         custom_name = COALESCE(excluded.custom_name, sessions.custom_name)`
    ).run(
      sessionId,
      provider,
      customName ?? null,
      normalizedProjectPath,
      jsonlPath ?? null,
      createdAtValue,
      updatedAtValue
    );

    return sessionId;
  },

  /** Overwrites the display name for a session. */
  updateSessionCustomName(sessionId: string, customName: string): void {
    const db = getConnection();
    db.prepare(
      `UPDATE sessions
       SET custom_name = ?
       WHERE session_id = ?`
    ).run(customName, sessionId);
  },

  /** Fetches session metadata by id; null when no row exists. */
  getSessionById(sessionId: string): SessionMetadataLookupRow | null {
    const db = getConnection();
    // ORDER BY/LIMIT look defensive against legacy duplicate session_ids
    // (session_id used to be unique only per provider) — TODO confirm still needed.
    const row = db
      .prepare(
        `SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at
         FROM sessions
         WHERE session_id = ?
         ORDER BY updated_at DESC
         LIMIT 1`
      )
      .get(sessionId) as SessionMetadataLookupRow | undefined;

    return row ?? null;
  },

  /** Returns every session row, across all providers and projects. */
  getAllSessions(): SessionRow[] {
    const db = getConnection();
    return db
      .prepare(
        `SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at
         FROM sessions`
      )
      .all() as SessionRow[];
  },

  /** Returns all sessions belonging to a (normalized) project path. */
  getSessionsByProjectPath(projectPath: string): SessionRow[] {
    const db = getConnection();
    const normalizedProjectPath = normalizeProjectPath(projectPath);
    return db
      .prepare(
        `SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at
         FROM sessions
         WHERE project_path = ?`
      )
      .all(normalizedProjectPath) as SessionRow[];
  },

  /**
   * Returns one page of a project's sessions, newest first (updated_at with a
   * created_at fallback; session_id breaks ties for a stable page order).
   */
  getSessionsByProjectPathPage(projectPath: string, limit: number, offset: number): SessionRow[] {
    const db = getConnection();
    const normalizedProjectPath = normalizeProjectPath(projectPath);
    return db
      .prepare(
        `SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at
         FROM sessions
         WHERE project_path = ?
         ORDER BY datetime(COALESCE(updated_at, created_at)) DESC, session_id DESC
         LIMIT ? OFFSET ?`
      )
      .all(normalizedProjectPath, limit, offset) as SessionRow[];
  },

  /** Counts sessions for a (normalized) project path. */
  countSessionsByProjectPath(projectPath: string): number {
    const db = getConnection();
    const normalizedProjectPath = normalizeProjectPath(projectPath);
    const row = db
      .prepare(
        `SELECT COUNT(*) AS count
         FROM sessions
         WHERE project_path = ?`
      )
      .get(normalizedProjectPath) as { count: number } | undefined;

    return Number(row?.count ?? 0);
  },

  /** Deletes every session row for a (normalized) project path. */
  deleteSessionsByProjectPath(projectPath: string): void {
    const db = getConnection();
    const normalizedProjectPath = normalizeProjectPath(projectPath);
    db.prepare(`DELETE FROM sessions WHERE project_path = ?`).run(normalizedProjectPath);
  },

  /** Returns the stored custom name for (sessionId, provider), or null. */
  getSessionName(sessionId: string, provider: string): string | null {
    const db = getConnection();
    const row = db
      .prepare(
        `SELECT custom_name
         FROM sessions
         WHERE session_id = ? AND provider = ?`
      )
      .get(sessionId, provider) as { custom_name: string | null } | undefined;

    return row?.custom_name ?? null;
  },

  /** Deletes a session row; true when a row was actually removed. */
  deleteSessionById(sessionId: string): boolean {
    const db = getConnection();
    return db.prepare('DELETE FROM sessions WHERE session_id = ?').run(sessionId).changes > 0;
  },
};
|
||||
140
server/modules/database/repositories/users.ts
Normal file
140
server/modules/database/repositories/users.ts
Normal file
@@ -0,0 +1,140 @@
|
||||
/**
|
||||
* User repository.
|
||||
*
|
||||
* Provides typed CRUD operations for the `users` table.
|
||||
* This is a single-user system, but the schema supports multiple
|
||||
* users for forward compatibility.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type UserRow = {
|
||||
id: number;
|
||||
username: string;
|
||||
password_hash: string;
|
||||
created_at: string;
|
||||
last_login: string | null;
|
||||
is_active: number;
|
||||
git_name: string | null;
|
||||
git_email: string | null;
|
||||
has_completed_onboarding: number;
|
||||
};
|
||||
|
||||
type UserPublicRow = Pick<UserRow, 'id' | 'username' | 'created_at' | 'last_login'>;
|
||||
|
||||
type UserGitConfig = {
|
||||
git_name: string | null;
|
||||
git_email: string | null;
|
||||
};
|
||||
|
||||
type CreateUserResult = {
|
||||
id: number | bigint;
|
||||
username: string;
|
||||
};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Queries
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const userDb = {
  /** Returns true if at least one user exists in the database. */
  hasUsers(): boolean {
    const db = getConnection();
    const row = db.prepare('SELECT COUNT(*) as count FROM users').get() as {
      count: number;
    };
    return row.count > 0;
  },

  /**
   * Inserts a new user and returns the created ID + username.
   * The driver may report `lastInsertRowid` as a bigint for large rowids,
   * which is why CreateUserResult allows `number | bigint`.
   */
  createUser(username: string, passwordHash: string): CreateUserResult {
    const db = getConnection();
    const result = db
      .prepare('INSERT INTO users (username, password_hash) VALUES (?, ?)')
      .run(username, passwordHash);
    return { id: result.lastInsertRowid, username };
  },

  /**
   * Looks up an active user by username.
   * Returns the full row (including password hash) for auth verification.
   */
  getUserByUsername(username: string): UserRow | undefined {
    const db = getConnection();
    return db
      .prepare('SELECT * FROM users WHERE username = ? AND is_active = 1')
      .get(username) as UserRow | undefined;
  },

  /** Updates the last_login timestamp. Non-fatal — logs but does not throw. */
  updateLastLogin(userId: number): void {
    try {
      const db = getConnection();
      db.prepare(
        'UPDATE users SET last_login = CURRENT_TIMESTAMP WHERE id = ?'
      ).run(userId);
    } catch (err) {
      // A lost last-login update is not worth failing the login flow itself.
      const message = err instanceof Error ? err.message : String(err);
      console.error('Failed to update last login', { error: message });
    }
  },

  /** Returns public user fields by ID (no password hash). */
  getUserById(userId: number): UserPublicRow | undefined {
    const db = getConnection();
    return db
      .prepare(
        'SELECT id, username, created_at, last_login FROM users WHERE id = ? AND is_active = 1'
      )
      .get(userId) as UserPublicRow | undefined;
  },

  /** Returns the first active user. Used for single-user mode lookups. */
  getFirstUser(): UserPublicRow | undefined {
    const db = getConnection();
    return db
      .prepare(
        'SELECT id, username, created_at, last_login FROM users WHERE is_active = 1 LIMIT 1'
      )
      .get() as UserPublicRow | undefined;
  },

  /** Stores the user's preferred git name and email. */
  updateGitConfig(
    userId: number,
    gitName: string,
    gitEmail: string
  ): void {
    const db = getConnection();
    db.prepare('UPDATE users SET git_name = ?, git_email = ? WHERE id = ?').run(
      gitName,
      gitEmail,
      userId
    );
  },

  /** Retrieves the user's git identity (name + email). */
  getGitConfig(userId: number): UserGitConfig | undefined {
    const db = getConnection();
    return db
      .prepare('SELECT git_name, git_email FROM users WHERE id = ?')
      .get(userId) as UserGitConfig | undefined;
  },

  /** Marks onboarding as complete for the given user. */
  completeOnboarding(userId: number): void {
    const db = getConnection();
    db.prepare(
      'UPDATE users SET has_completed_onboarding = 1 WHERE id = ?'
    ).run(userId);
  },

  /** Returns true if the user has finished the onboarding flow. */
  hasCompletedOnboarding(userId: number): boolean {
    const db = getConnection();
    // Missing row (unknown user) reads as "not onboarded".
    const row = db
      .prepare('SELECT has_completed_onboarding FROM users WHERE id = ?')
      .get(userId) as { has_completed_onboarding: number } | undefined;
    return row?.has_completed_onboarding === 1;
  },
};
|
||||
57
server/modules/database/repositories/vapid-keys.ts
Normal file
57
server/modules/database/repositories/vapid-keys.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
/**
|
||||
* VAPID keys repository.
|
||||
*
|
||||
* Stores and retrieves the Web Push VAPID key pair.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type VapidKeyRow = {
|
||||
public_key: string;
|
||||
private_key: string;
|
||||
};
|
||||
|
||||
type VapidKeyPair = {
|
||||
publicKey: string;
|
||||
privateKey: string;
|
||||
};
|
||||
|
||||
export const vapidKeysDb = {
|
||||
/** Returns the latest stored VAPID key pair, or null when unset. */
|
||||
getVapidKeys(): VapidKeyPair | null {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
'SELECT public_key, private_key FROM vapid_keys ORDER BY id DESC LIMIT 1'
|
||||
)
|
||||
.get() as Pick<VapidKeyRow, 'public_key' | 'private_key'> | undefined;
|
||||
|
||||
if (!row) return null;
|
||||
return {
|
||||
publicKey: row.public_key,
|
||||
privateKey: row.private_key,
|
||||
};
|
||||
},
|
||||
|
||||
/** Persists a new VAPID key pair. */
|
||||
createVapidKeys(publicKey: string, privateKey: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare(
|
||||
'INSERT INTO vapid_keys (public_key, private_key) VALUES (?, ?)'
|
||||
).run(publicKey, privateKey);
|
||||
},
|
||||
|
||||
/** Replaces all existing keys with a fresh pair. */
|
||||
updateVapidKeys(publicKey: string, privateKey: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare('DELETE FROM vapid_keys').run();
|
||||
vapidKeysDb.createVapidKeys(publicKey, privateKey);
|
||||
},
|
||||
|
||||
/** Deletes all VAPID key rows. */
|
||||
deleteVapidKeys(): void {
|
||||
const db = getConnection();
|
||||
db.prepare('DELETE FROM vapid_keys').run();
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,7 +1,4 @@
|
||||
-- Initialize authentication database
|
||||
PRAGMA foreign_keys = ON;
|
||||
|
||||
-- Users table (single user system)
|
||||
const USER_TABLE_SCHEMA_SQL = `
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
username TEXT UNIQUE NOT NULL,
|
||||
@@ -13,12 +10,9 @@ CREATE TABLE IF NOT EXISTS users (
|
||||
git_email TEXT,
|
||||
has_completed_onboarding BOOLEAN DEFAULT 0
|
||||
);
|
||||
`;
|
||||
|
||||
-- Indexes for performance
|
||||
CREATE INDEX IF NOT EXISTS idx_users_username ON users(username);
|
||||
CREATE INDEX IF NOT EXISTS idx_users_active ON users(is_active);
|
||||
|
||||
-- API Keys table for external API access
|
||||
export const API_KEYS_TABLE_SCHEMA_SQL = `
|
||||
CREATE TABLE IF NOT EXISTS api_keys (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id INTEGER NOT NULL,
|
||||
@@ -29,12 +23,9 @@ CREATE TABLE IF NOT EXISTS api_keys (
|
||||
is_active BOOLEAN DEFAULT 1,
|
||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
|
||||
);
|
||||
`;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_key ON api_keys(api_key);
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_user_id ON api_keys(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_active ON api_keys(is_active);
|
||||
|
||||
-- User credentials table for storing various tokens/credentials (GitHub, GitLab, etc.)
|
||||
export const USER_CREDENTIALS_TABLE_SCHEMA_SQL = `
|
||||
CREATE TABLE IF NOT EXISTS user_credentials (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id INTEGER NOT NULL,
|
||||
@@ -46,28 +37,27 @@ CREATE TABLE IF NOT EXISTS user_credentials (
|
||||
is_active BOOLEAN DEFAULT 1,
|
||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
|
||||
);
|
||||
`;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_user_credentials_user_id ON user_credentials(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_user_credentials_type ON user_credentials(credential_type);
|
||||
CREATE INDEX IF NOT EXISTS idx_user_credentials_active ON user_credentials(is_active);
|
||||
|
||||
-- User notification preferences (backend-owned, provider-agnostic)
|
||||
export const USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL = `
|
||||
CREATE TABLE IF NOT EXISTS user_notification_preferences (
|
||||
user_id INTEGER PRIMARY KEY,
|
||||
preferences_json TEXT NOT NULL,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
|
||||
);
|
||||
`;
|
||||
|
||||
-- VAPID key pair for Web Push notifications
|
||||
export const VAPID_KEYS_TABLE_SCHEMA_SQL = `
|
||||
CREATE TABLE IF NOT EXISTS vapid_keys (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_key TEXT NOT NULL,
|
||||
private_key TEXT NOT NULL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
`;
|
||||
|
||||
-- Browser push subscriptions
|
||||
export const PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL = `
|
||||
CREATE TABLE IF NOT EXISTS push_subscriptions (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id INTEGER NOT NULL,
|
||||
@@ -77,23 +67,86 @@ CREATE TABLE IF NOT EXISTS push_subscriptions (
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
|
||||
);
|
||||
`;
|
||||
|
||||
-- Session custom names (provider-agnostic display name overrides)
|
||||
CREATE TABLE IF NOT EXISTS session_names (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
export const PROJECTS_TABLE_SCHEMA_SQL = `
|
||||
CREATE TABLE IF NOT EXISTS projects (
|
||||
project_id TEXT PRIMARY KEY NOT NULL,
|
||||
project_path TEXT NOT NULL UNIQUE,
|
||||
custom_project_name TEXT DEFAULT NULL,
|
||||
isStarred BOOLEAN DEFAULT 0,
|
||||
isArchived BOOLEAN DEFAULT 0
|
||||
);
|
||||
`;
|
||||
|
||||
export const SESSIONS_TABLE_SCHEMA_SQL = `
|
||||
CREATE TABLE IF NOT EXISTS sessions (
|
||||
session_id TEXT NOT NULL,
|
||||
provider TEXT NOT NULL DEFAULT 'claude',
|
||||
custom_name TEXT NOT NULL,
|
||||
custom_name TEXT,
|
||||
project_path TEXT,
|
||||
jsonl_path TEXT,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
UNIQUE(session_id, provider)
|
||||
PRIMARY KEY (session_id),
|
||||
FOREIGN KEY (project_path) REFERENCES projects(project_path)
|
||||
ON DELETE SET NULL
|
||||
ON UPDATE CASCADE
|
||||
);
|
||||
`;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_session_names_lookup ON session_names(session_id, provider);
|
||||
export const LAST_SCANNED_AT_SQL = `
|
||||
CREATE TABLE IF NOT EXISTS scan_state (
|
||||
id INTEGER PRIMARY KEY CHECK (id = 1),
|
||||
last_scanned_at TIMESTAMP NULL
|
||||
);
|
||||
`;
|
||||
|
||||
-- App configuration table (auto-generated secrets, settings, etc.)
|
||||
export const APP_CONFIG_TABLE_SCHEMA_SQL = `
|
||||
CREATE TABLE IF NOT EXISTS app_config (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT NOT NULL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
`;
|
||||
|
||||
export const INIT_SCHEMA_SQL = `
|
||||
-- Initialize authentication database
|
||||
PRAGMA foreign_keys = ON;
|
||||
|
||||
${USER_TABLE_SCHEMA_SQL}
|
||||
-- Indexes for performance for user lookups
|
||||
CREATE INDEX IF NOT EXISTS idx_users_username ON users(username);
|
||||
CREATE INDEX IF NOT EXISTS idx_users_active ON users(is_active);
|
||||
|
||||
${API_KEYS_TABLE_SCHEMA_SQL}
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_key ON api_keys(api_key);
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_user_id ON api_keys(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_active ON api_keys(is_active);
|
||||
|
||||
${USER_CREDENTIALS_TABLE_SCHEMA_SQL}
|
||||
CREATE INDEX IF NOT EXISTS idx_user_credentials_user_id ON user_credentials(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_user_credentials_type ON user_credentials(credential_type);
|
||||
CREATE INDEX IF NOT EXISTS idx_user_credentials_active ON user_credentials(is_active);
|
||||
|
||||
${USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL}
|
||||
CREATE INDEX IF NOT EXISTS idx_user_notification_preferences_user_id ON user_notification_preferences(user_id);
|
||||
|
||||
${VAPID_KEYS_TABLE_SCHEMA_SQL}
|
||||
|
||||
${PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL}
|
||||
CREATE INDEX IF NOT EXISTS idx_push_subscriptions_user_id ON push_subscriptions(user_id);
|
||||
|
||||
${PROJECTS_TABLE_SCHEMA_SQL}
|
||||
-- NOTE: These indexes are created in migrations after legacy table-shape repairs.
|
||||
-- Creating them here can fail on upgraded installs where projects lacks those columns.
|
||||
|
||||
${SESSIONS_TABLE_SCHEMA_SQL}
|
||||
CREATE INDEX IF NOT EXISTS idx_session_ids_lookup ON sessions(session_id);
|
||||
-- NOTE: This index is created in migrations after sessions is rebuilt to include project_path.
|
||||
-- Creating it here can fail on upgraded installs where the legacy sessions table has no project_path.
|
||||
|
||||
${LAST_SCANNED_AT_SQL}
|
||||
|
||||
${APP_CONFIG_TABLE_SCHEMA_SQL}
|
||||
`;
|
||||
6
server/modules/projects/index.ts
Normal file
6
server/modules/projects/index.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
export {
|
||||
generateDisplayName,
|
||||
getProjectsWithSessions,
|
||||
} from './services/projects-with-sessions-fetch.service.js';
|
||||
export { updateProjectDisplayName } from './services/project-management.service.js';
|
||||
export { deleteOrArchiveProject, deleteSessionJsonlFilesForProjectPath } from './services/project-delete.service.js';
|
||||
247
server/modules/projects/projects.routes.ts
Normal file
247
server/modules/projects/projects.routes.ts
Normal file
@@ -0,0 +1,247 @@
|
||||
import express from 'express';
|
||||
|
||||
import { createProject, updateProjectDisplayName } from '@/modules/projects/services/project-management.service.js';
|
||||
import { startCloneProject } from '@/modules/projects/services/project-clone.service.js';
|
||||
import { getProjectTaskMaster } from '@/modules/projects/services/projects-has-taskmaster.service.js';
|
||||
import { AppError, asyncHandler } from '@/shared/utils.js';
|
||||
import { getProjectSessionsPage, getProjectsWithSessions } from '@/modules/projects/services/projects-with-sessions-fetch.service.js';
|
||||
import { deleteOrArchiveProject } from '@/modules/projects/services/project-delete.service.js';
|
||||
import { applyLegacyStarredProjectIds, toggleProjectStar } from '@/modules/projects/services/project-star.service.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
type AuthenticatedUser = {
|
||||
id?: number | string;
|
||||
};
|
||||
|
||||
function readQueryStringValue(value: unknown): string {
|
||||
if (typeof value === 'string') {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (Array.isArray(value) && typeof value[0] === 'string') {
|
||||
return value[0];
|
||||
}
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
function readOptionalNumericQueryValue(value: unknown): number | null {
|
||||
const rawValue = readQueryStringValue(value).trim();
|
||||
if (!rawValue) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const parsedValue = Number.parseInt(rawValue, 10);
|
||||
return Number.isNaN(parsedValue) ? null : parsedValue;
|
||||
}
|
||||
|
||||
function parseNonNegativeIntQuery(value: unknown, name: string, fallback: number): number {
|
||||
const rawValue = readQueryStringValue(value).trim();
|
||||
if (!rawValue) {
|
||||
return fallback;
|
||||
}
|
||||
|
||||
const parsedValue = Number.parseInt(rawValue, 10);
|
||||
if (Number.isNaN(parsedValue) || parsedValue < 0) {
|
||||
throw new AppError(`${name} must be a non-negative integer`, {
|
||||
code: 'INVALID_QUERY_PARAMETER',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return parsedValue;
|
||||
}
|
||||
|
||||
function resolveRouteErrorMessage(error: unknown): string {
|
||||
if (error instanceof AppError) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
if (error instanceof Error && error.message) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
return 'Failed to clone repository';
|
||||
}
|
||||
|
||||
// GET / — full projects-with-sessions payload for the project list.
router.get(
  '/',
  asyncHandler(async (_req, res) => {
    const projects = await getProjectsWithSessions();
    res.json(projects);
  }),
);

// GET /:projectId/sessions — one page of sessions; limit defaults to 20, offset to 0.
router.get(
  '/:projectId/sessions',
  asyncHandler(async (req, res) => {
    const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
    const limit = parseNonNegativeIntQuery(req.query.limit, 'limit', 20);
    const offset = parseNonNegativeIntQuery(req.query.offset, 'offset', 0);
    const sessionsPage = await getProjectSessionsPage(projectId, { limit, offset });
    res.json(sessionsPage);
  }),
);

// POST /create-project — create (or reuse an archived) project for a local path.
// Legacy `workspaceType` and any clone-related fields are rejected with 400.
router.post(
  '/create-project',
  asyncHandler(async (req, res) => {
    const requestBody = req.body as Record<string, unknown>;
    const projectPath = typeof requestBody.path === 'string' ? requestBody.path : '';
    const customName = typeof requestBody.customName === 'string' ? requestBody.customName : null;

    if (requestBody.workspaceType !== undefined) {
      throw new AppError('workspaceType is no longer supported. Use the single create-project flow.', {
        code: 'LEGACY_WORKSPACE_TYPE_UNSUPPORTED',
        statusCode: 400,
      });
    }

    // Cloning has its own SSE endpoint; reject clone fields here explicitly.
    if (requestBody.githubUrl || requestBody.githubTokenId || requestBody.newGithubToken) {
      throw new AppError('Repository cloning is not supported on create-project', {
        code: 'CLONE_NOT_SUPPORTED_ON_CREATE_PROJECT',
        statusCode: 400,
        details: 'Use /api/projects/clone-progress for cloning workflows',
      });
    }

    const projectCreationResult = await createProject({
      projectPath,
      customName,
    });

    res.json({
      success: true,
      project: projectCreationResult.project,
      // Message distinguishes a brand-new row from a reused archived one.
      message:
        projectCreationResult.outcome === 'reactivated_archived'
          ? 'Archived project path reused successfully'
          : 'Project created successfully',
    });
  }),
);

/**
 * One-time (or idempotent) migration: apply legacy `localStorage` starred projectIds to the DB, then clear client storage.
 */
router.post(
  '/migrate-legacy-stars',
  asyncHandler(async (req, res) => {
    // Accept any array-ish payload; every entry is stringified defensively.
    const projectIds = Array.isArray((req.body as { projectIds?: unknown })?.projectIds)
      ? ((req.body as { projectIds: unknown[] }).projectIds as unknown[]).map((x) => String(x))
      : [];
    const { updated } = applyLegacyStarredProjectIds(projectIds);
    res.json({ success: true, updated });
  }),
);

// GET /clone-progress — Server-Sent Events stream for a git clone.
// Not wrapped in asyncHandler: errors must be emitted as SSE events, not as
// an HTTP error response (headers are already flushed).
router.get('/clone-progress', async (req, res) => {
  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  res.flushHeaders();

  // Serializes one SSE frame; silently dropped once the response has ended.
  const sendEvent = (type: string, data: Record<string, unknown>) => {
    if (res.writableEnded) {
      return;
    }

    res.write(`data: ${JSON.stringify({ type, ...data })}\n\n`);
  };

  // Client disconnect cancels the in-flight clone (kills the git process).
  let cloneOperation: Awaited<ReturnType<typeof startCloneProject>> | null = null;
  const closeListener = () => {
    cloneOperation?.cancel();
  };
  req.on('close', closeListener);

  try {
    const queryParams = req.query as Record<string, unknown>;
    const workspacePath = readQueryStringValue(queryParams.path);
    const githubUrl = readQueryStringValue(queryParams.githubUrl);
    const githubTokenId = readOptionalNumericQueryValue(queryParams.githubTokenId);
    const newGithubToken = readQueryStringValue(queryParams.newGithubToken) || null;

    // `req.user` is presumably set by auth middleware upstream — confirm.
    const authenticatedUser = (req as typeof req & { user?: AuthenticatedUser }).user;
    const userId = authenticatedUser?.id;
    if (userId === undefined || userId === null) {
      throw new AppError('Authenticated user is required', {
        code: 'AUTHENTICATION_REQUIRED',
        statusCode: 401,
      });
    }

    cloneOperation = await startCloneProject(
      {
        workspacePath,
        githubUrl,
        githubTokenId,
        newGithubToken,
        userId,
      },
      {
        onProgress: (message) => {
          sendEvent('progress', { message });
        },
        onComplete: ({ project, message }) => {
          sendEvent('complete', { project, message });
        },
      },
    );

    await cloneOperation.waitForCompletion;
  } catch (error) {
    // Failures surface as an SSE 'error' event, never as an HTTP status.
    sendEvent('error', { message: resolveRouteErrorMessage(error) });
  } finally {
    req.off('close', closeListener);
    if (!res.writableEnded) {
      res.end();
    }
  }
});

// GET /:projectId/taskmaster — TaskMaster detection/metadata for one project.
router.get(
  '/:projectId/taskmaster',
  asyncHandler(async (req, res) => {
    const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
    const taskMasterDetails = await getProjectTaskMaster(projectId);
    res.json(taskMasterDetails);
  }),
);
|
||||
|
||||
router.put('/:projectId/rename', (req, res) => {
|
||||
try {
|
||||
const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
|
||||
const { displayName } = req.body as { displayName?: unknown };
|
||||
updateProjectDisplayName(projectId, displayName);
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
res.status(500).json({ error: error instanceof Error ? error.message : 'Failed to rename project' });
|
||||
}
|
||||
});
|
||||
|
||||
// POST /:projectId/toggle-star — flip the starred flag; returns the new state.
router.post(
  '/:projectId/toggle-star',
  asyncHandler(async (req, res) => {
    const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
    const { isStarred } = toggleProjectStar(projectId);
    res.json({ success: true, isStarred });
  }),
);

/**
 * - `force` not set / false: archive project in DB only (`isArchived` = 1; hidden from active list).
 * - `force=true`: remove DB row, delete session rows for that path, remove all `*.jsonl` under the Claude project dir.
 */
router.delete(
  '/:projectId',
  asyncHandler(async (req, res) => {
    const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
    // Only the literal string 'true' enables destructive delete.
    const force = req.query.force === 'true';
    await deleteOrArchiveProject(projectId, force);
    res.json({ success: true });
  }),
);

export default router;
|
||||
321
server/modules/projects/services/project-clone.service.ts
Normal file
321
server/modules/projects/services/project-clone.service.ts
Normal file
@@ -0,0 +1,321 @@
|
||||
import { spawn } from 'node:child_process';
|
||||
import { access, mkdir, rm } from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
|
||||
import { githubTokensDb } from '@/modules/database/index.js';
|
||||
import { createProject } from '@/modules/projects/services/project-management.service.js';
|
||||
import type { WorkspacePathValidationResult } from '@/shared/types.js';
|
||||
import { AppError, validateWorkspacePath } from '@/shared/utils.js';
|
||||
|
||||
type CloneProjectInput = {
|
||||
workspacePath: string;
|
||||
githubUrl: string;
|
||||
githubTokenId?: number | null;
|
||||
newGithubToken?: string | null;
|
||||
userId: number | string;
|
||||
};
|
||||
|
||||
type CloneCompletePayload = {
|
||||
project: Record<string, unknown>;
|
||||
message: string;
|
||||
};
|
||||
|
||||
type CloneProjectEventHandlers = {
|
||||
onProgress: (message: string) => void;
|
||||
onComplete: (payload: CloneCompletePayload) => void;
|
||||
};
|
||||
|
||||
type GitCloneProcess = {
|
||||
stdout: NodeJS.ReadableStream | null;
|
||||
stderr: NodeJS.ReadableStream | null;
|
||||
on(event: 'close', listener: (code: number | null) => void): void;
|
||||
on(event: 'error', listener: (error: NodeJS.ErrnoException) => void): void;
|
||||
kill(): void;
|
||||
};
|
||||
|
||||
type CloneProjectDependencies = {
|
||||
validatePath: (requestedPath: string) => Promise<WorkspacePathValidationResult>;
|
||||
ensureDirectory: (directoryPath: string) => Promise<void>;
|
||||
pathExists: (targetPath: string) => Promise<boolean>;
|
||||
removePath: (targetPath: string) => Promise<void>;
|
||||
getGithubTokenById: (
|
||||
tokenId: number,
|
||||
userId: number,
|
||||
) => Promise<{ github_token: string } | null>;
|
||||
spawnGitClone: (cloneUrl: string, clonePath: string) => GitCloneProcess;
|
||||
registerProject: (projectPath: string, customName: string) => Promise<{ project: Record<string, unknown> }>;
|
||||
logError: (message: string, error: unknown) => void;
|
||||
};
|
||||
|
||||
export type CloneProjectOperation = {
|
||||
waitForCompletion: Promise<void>;
|
||||
cancel: () => void;
|
||||
};
|
||||
|
||||
async function defaultPathExists(targetPath: string): Promise<boolean> {
|
||||
try {
|
||||
await access(targetPath);
|
||||
return true;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
|
||||
return false;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
function sanitizeGitError(message: string, token: string | null): string {
|
||||
if (!message || !token) {
|
||||
return message;
|
||||
}
|
||||
|
||||
const escapedToken = token.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
return message.replace(new RegExp(escapedToken, 'g'), '***');
|
||||
}
|
||||
|
||||
function resolveCloneFailureMessage(lastError: string, sanitizedError: string): string {
|
||||
if (lastError.includes('Authentication failed') || lastError.includes('could not read Username')) {
|
||||
return 'Authentication failed. Please check your credentials.';
|
||||
}
|
||||
|
||||
if (lastError.includes('Repository not found')) {
|
||||
return 'Repository not found. Please check the URL and ensure you have access.';
|
||||
}
|
||||
|
||||
if (lastError.includes('already exists')) {
|
||||
return 'Directory already exists';
|
||||
}
|
||||
|
||||
if (sanitizedError) {
|
||||
return sanitizedError;
|
||||
}
|
||||
|
||||
return 'Git clone failed';
|
||||
}
|
||||
|
||||
function resolveErrorMessage(error: unknown): string {
|
||||
if (error instanceof AppError) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
if (error instanceof Error && error.message) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
return 'Unexpected error';
|
||||
}
|
||||
|
||||
/**
 * Production wiring for `startCloneProject`: real filesystem calls, a real
 * `git` subprocess, and the GitHub-token / project persistence services.
 * Tests substitute individual members via the `dependencies` parameter.
 */
const defaultDependencies: CloneProjectDependencies = {
  validatePath: validateWorkspacePath,
  ensureDirectory: async (directoryPath: string): Promise<void> => {
    await mkdir(directoryPath, { recursive: true });
  },
  pathExists: defaultPathExists,
  removePath: async (targetPath: string): Promise<void> => {
    // force: true — removing an already-missing path is not an error.
    await rm(targetPath, { recursive: true, force: true });
  },
  getGithubTokenById: async (
    tokenId: number,
    userId: number,
  ): Promise<{ github_token: string } | null> => {
    const tokenRow = githubTokensDb.getGithubTokenById(userId, tokenId) as
      | { github_token: string }
      | null;
    return tokenRow;
  },
  // `--` stops the URL/path from being parsed as git options;
  // GIT_TERMINAL_PROMPT=0 makes git fail instead of blocking on a credential prompt.
  spawnGitClone: (cloneUrl: string, clonePath: string): GitCloneProcess =>
    spawn('git', ['clone', '--progress', '--', cloneUrl, clonePath], {
      stdio: ['ignore', 'pipe', 'pipe'],
      env: {
        ...process.env,
        GIT_TERMINAL_PROMPT: '0',
      },
    }) as unknown as GitCloneProcess,
  registerProject: async (
    projectPath: string,
    customName: string,
  ): Promise<{ project: Record<string, unknown> }> =>
    createProject({
      projectPath,
      customName,
    }) as Promise<{ project: Record<string, unknown> }>,
  logError: (message: string, error: unknown): void => {
    console.error(message, error);
  },
};
|
||||
|
||||
export async function startCloneProject(
|
||||
input: CloneProjectInput,
|
||||
handlers: CloneProjectEventHandlers,
|
||||
dependencies: CloneProjectDependencies = defaultDependencies,
|
||||
): Promise<CloneProjectOperation> {
|
||||
const normalizedWorkspacePath = input.workspacePath.trim();
|
||||
const normalizedGithubUrl = input.githubUrl.trim();
|
||||
|
||||
if (!normalizedWorkspacePath) {
|
||||
throw new AppError('workspacePath and githubUrl are required', {
|
||||
code: 'WORKSPACE_PATH_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
if (!normalizedGithubUrl) {
|
||||
throw new AppError('workspacePath and githubUrl are required', {
|
||||
code: 'GITHUB_URL_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
if (normalizedGithubUrl.startsWith('-')) {
|
||||
throw new AppError('Invalid githubUrl', {
|
||||
code: 'INVALID_GITHUB_URL',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const pathValidation = await dependencies.validatePath(normalizedWorkspacePath);
|
||||
if (!pathValidation.valid || !pathValidation.resolvedPath) {
|
||||
throw new AppError(pathValidation.error || 'Invalid workspace path', {
|
||||
code: 'INVALID_PROJECT_PATH',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const absolutePath = pathValidation.resolvedPath;
|
||||
await dependencies.ensureDirectory(absolutePath);
|
||||
|
||||
let githubToken: string | null = null;
|
||||
if (typeof input.githubTokenId === 'number') {
|
||||
const numericUserId =
|
||||
typeof input.userId === 'number' ? input.userId : Number.parseInt(String(input.userId), 10);
|
||||
if (Number.isNaN(numericUserId)) {
|
||||
throw new AppError('Authenticated user is required', {
|
||||
code: 'AUTHENTICATION_REQUIRED',
|
||||
statusCode: 401,
|
||||
});
|
||||
}
|
||||
|
||||
const token = await dependencies.getGithubTokenById(input.githubTokenId, numericUserId);
|
||||
if (!token) {
|
||||
throw new AppError('GitHub token not found', {
|
||||
code: 'GITHUB_TOKEN_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
githubToken = token.github_token;
|
||||
} else if (input.newGithubToken && input.newGithubToken.trim().length > 0) {
|
||||
githubToken = input.newGithubToken.trim();
|
||||
}
|
||||
|
||||
const sanitizedGithubUrl = normalizedGithubUrl.replace(/\/+$/, '').replace(/\.git$/, '');
|
||||
const repoName = sanitizedGithubUrl.split('/').pop() || 'repository';
|
||||
const clonePath = path.join(absolutePath, repoName);
|
||||
|
||||
if (await dependencies.pathExists(clonePath)) {
|
||||
throw new AppError(
|
||||
`Directory "${repoName}" already exists. Please choose a different location or remove the existing directory.`,
|
||||
{
|
||||
code: 'CLONE_TARGET_ALREADY_EXISTS',
|
||||
statusCode: 409,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
let cloneUrl = normalizedGithubUrl;
|
||||
if (githubToken) {
|
||||
try {
|
||||
const url = new URL(normalizedGithubUrl);
|
||||
url.username = githubToken;
|
||||
url.password = '';
|
||||
cloneUrl = url.toString();
|
||||
} catch {
|
||||
// SSH URLs cannot be represented by URL constructor and are used as-is.
|
||||
}
|
||||
}
|
||||
|
||||
handlers.onProgress(`Cloning into '${repoName}'...`);
|
||||
const gitProcess = dependencies.spawnGitClone(cloneUrl, clonePath);
|
||||
let lastError = '';
|
||||
|
||||
gitProcess.stdout?.on('data', (data: Buffer | string) => {
|
||||
const message = data.toString().trim();
|
||||
if (message) {
|
||||
handlers.onProgress(message);
|
||||
}
|
||||
});
|
||||
|
||||
gitProcess.stderr?.on('data', (data: Buffer | string) => {
|
||||
const message = data.toString().trim();
|
||||
lastError = message;
|
||||
if (message) {
|
||||
handlers.onProgress(message);
|
||||
}
|
||||
});
|
||||
|
||||
const waitForCompletion = new Promise<void>((resolve, reject) => {
|
||||
gitProcess.on('close', async (code) => {
|
||||
if (code === 0) {
|
||||
try {
|
||||
const createdProject = await dependencies.registerProject(clonePath, repoName);
|
||||
handlers.onComplete({
|
||||
project: createdProject.project,
|
||||
message: 'Repository cloned successfully',
|
||||
});
|
||||
resolve();
|
||||
} catch (error) {
|
||||
reject(
|
||||
new AppError(`Clone succeeded but failed to add project: ${resolveErrorMessage(error)}`, {
|
||||
code: 'CLONE_PROJECT_REGISTRATION_FAILED',
|
||||
statusCode: 500,
|
||||
}),
|
||||
);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const sanitizedError = sanitizeGitError(lastError, githubToken);
|
||||
const errorMessage = resolveCloneFailureMessage(lastError, sanitizedError);
|
||||
|
||||
try {
|
||||
await dependencies.removePath(clonePath);
|
||||
} catch (cleanupError) {
|
||||
dependencies.logError('Failed to clean up after clone failure:', cleanupError);
|
||||
}
|
||||
|
||||
reject(
|
||||
new AppError(errorMessage, {
|
||||
code: 'GIT_CLONE_FAILED',
|
||||
statusCode: 500,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
gitProcess.on('error', (error) => {
|
||||
if (error.code === 'ENOENT') {
|
||||
reject(
|
||||
new AppError('Git is not installed or not in PATH', {
|
||||
code: 'GIT_NOT_FOUND',
|
||||
statusCode: 500,
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
reject(
|
||||
new AppError(error.message, {
|
||||
code: 'GIT_EXECUTION_FAILED',
|
||||
statusCode: 500,
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
return {
|
||||
waitForCompletion,
|
||||
cancel: () => {
|
||||
gitProcess.kill();
|
||||
},
|
||||
};
|
||||
}
|
||||
75
server/modules/projects/services/project-delete.service.ts
Normal file
75
server/modules/projects/services/project-delete.service.ts
Normal file
@@ -0,0 +1,75 @@
|
||||
import { promises as fs } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
|
||||
import { projectsDb, sessionsDb } from '@/modules/database/index.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
function uniqueJsonlPathsFromSessions(
|
||||
sessions: Array<{ jsonl_path: string | null }>,
|
||||
): string[] {
|
||||
const seen = new Set<string>();
|
||||
const result: string[] = [];
|
||||
|
||||
for (const row of sessions) {
|
||||
const raw = row.jsonl_path?.trim();
|
||||
if (!raw) {
|
||||
continue;
|
||||
}
|
||||
const absolute = path.isAbsolute(raw) ? path.normalize(raw) : path.resolve(raw);
|
||||
if (seen.has(absolute)) {
|
||||
continue;
|
||||
}
|
||||
seen.add(absolute);
|
||||
result.push(absolute);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
async function unlinkJsonlIfExists(filePath: string): Promise<void> {
|
||||
try {
|
||||
await fs.unlink(filePath);
|
||||
} catch (error) {
|
||||
const code = (error as NodeJS.ErrnoException).code;
|
||||
if (code === 'ENOENT') {
|
||||
return;
|
||||
}
|
||||
console.warn(`[project-delete] Failed to remove ${filePath}:`, (error as Error).message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads all session rows for the project path and removes each distinct `jsonl_path` file on disk.
|
||||
*/
|
||||
export async function deleteSessionJsonlFilesForProjectPath(projectPath: string): Promise<void> {
|
||||
const sessions = sessionsDb.getSessionsByProjectPath(projectPath);
|
||||
const paths = uniqueJsonlPathsFromSessions(sessions);
|
||||
|
||||
for (const filePath of paths) {
|
||||
await unlinkJsonlIfExists(filePath);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* - **Soft delete** (`force` false): set `isArchived` on the `projects` row (hide from the active list; DB only).
|
||||
* - **Force** (`force` true): for each session row for that `project_path`, delete the file at `jsonl_path`
|
||||
* (when set), then remove session rows and the `projects` row.
|
||||
*/
|
||||
export async function deleteOrArchiveProject(projectId: string, force: boolean): Promise<void> {
|
||||
const row = projectsDb.getProjectById(projectId);
|
||||
if (!row) {
|
||||
throw new AppError(`Unknown projectId: ${projectId}`, {
|
||||
code: 'PROJECT_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
if (!force) {
|
||||
projectsDb.updateProjectIsArchivedById(projectId, true);
|
||||
return;
|
||||
}
|
||||
|
||||
await deleteSessionJsonlFilesForProjectPath(row.project_path);
|
||||
sessionsDb.deleteSessionsByProjectPath(row.project_path);
|
||||
projectsDb.deleteProjectById(projectId);
|
||||
}
|
||||
150
server/modules/projects/services/project-management.service.ts
Normal file
150
server/modules/projects/services/project-management.service.ts
Normal file
@@ -0,0 +1,150 @@
|
||||
import fs from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
|
||||
import { projectsDb } from '@/modules/database/index.js';
|
||||
import type {
|
||||
CreateProjectPathResult,
|
||||
ProjectRepositoryRow,
|
||||
WorkspacePathValidationResult,
|
||||
} from '@/shared/types.js';
|
||||
import { AppError, normalizeProjectPath, validateWorkspacePath } from '@/shared/utils.js';
|
||||
|
||||
type CreateProjectInput = {
|
||||
projectPath: string;
|
||||
customName?: string | null;
|
||||
};
|
||||
|
||||
type CreateProjectDependencies = {
|
||||
validatePath: (projectPath: string) => Promise<WorkspacePathValidationResult>;
|
||||
ensureWorkspaceDirectory: (projectPath: string) => Promise<void>;
|
||||
persistProjectPath: (projectPath: string, customName: string | null) => CreateProjectPathResult;
|
||||
getProjectByPath: (projectPath: string) => ProjectRepositoryRow | null;
|
||||
};
|
||||
|
||||
type ProjectApiView = {
|
||||
projectId: string;
|
||||
path: string;
|
||||
fullPath: string;
|
||||
displayName: string;
|
||||
customName: string | null;
|
||||
isArchived: boolean;
|
||||
isStarred: boolean;
|
||||
sessions: [];
|
||||
cursorSessions: [];
|
||||
codexSessions: [];
|
||||
geminiSessions: [];
|
||||
sessionMeta: {
|
||||
hasMore: false;
|
||||
total: 0;
|
||||
};
|
||||
};
|
||||
|
||||
type CreateProjectServiceResult = {
|
||||
outcome: 'created' | 'reactivated_archived';
|
||||
project: ProjectApiView;
|
||||
};
|
||||
|
||||
/**
 * Production wiring for `createProject`: real filesystem checks plus the
 * `projects` repository. Tests can override any member via the
 * `dependencies` parameter.
 */
const defaultDependencies: CreateProjectDependencies = {
  validatePath: validateWorkspacePath,
  ensureWorkspaceDirectory: async (projectPath: string): Promise<void> => {
    await fs.mkdir(projectPath, { recursive: true });
    // stat after mkdir guards against the path resolving to a non-directory.
    const directoryStats = await fs.stat(projectPath);
    if (!directoryStats.isDirectory()) {
      throw new AppError('Path exists but is not a directory', {
        code: 'PROJECT_PATH_NOT_DIRECTORY',
        statusCode: 400,
      });
    }
  },
  persistProjectPath: (projectPath: string, customName: string | null): CreateProjectPathResult =>
    projectsDb.createProjectPath(projectPath, customName),
  getProjectByPath: (projectPath: string): ProjectRepositoryRow | null =>
    projectsDb.getProjectPath(projectPath),
};
|
||||
|
||||
function resolveDisplayName(customName: string | null | undefined, projectPath: string): string {
|
||||
const trimmedCustomName = typeof customName === 'string' ? customName.trim() : '';
|
||||
if (trimmedCustomName.length > 0) {
|
||||
return trimmedCustomName;
|
||||
}
|
||||
|
||||
return path.basename(projectPath) || projectPath;
|
||||
}
|
||||
|
||||
function mapProjectRowToApiView(projectRow: ProjectRepositoryRow): ProjectApiView {
|
||||
return {
|
||||
projectId: projectRow.project_id,
|
||||
path: projectRow.project_path,
|
||||
fullPath: projectRow.project_path,
|
||||
displayName: resolveDisplayName(projectRow.custom_project_name, projectRow.project_path),
|
||||
customName: projectRow.custom_project_name,
|
||||
isArchived: Boolean(projectRow.isArchived),
|
||||
isStarred: Boolean(projectRow.isStarred),
|
||||
sessions: [],
|
||||
cursorSessions: [],
|
||||
codexSessions: [],
|
||||
geminiSessions: [],
|
||||
sessionMeta: {
|
||||
hasMore: false,
|
||||
total: 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Creates (or reuses an archived row for) a project at a workspace path.
 *
 * Normalizes and validates the path, ensures the directory exists on disk,
 * then persists it. An existing *active* row with the same path is a 409
 * conflict; an archived row is reused and intentionally stays archived.
 *
 * @throws AppError (400) for a missing/invalid path, (409) on an active
 *   duplicate, (500) when the row cannot be read back after persisting.
 */
export async function createProject(
  input: CreateProjectInput,
  dependencies: CreateProjectDependencies = defaultDependencies,
): Promise<CreateProjectServiceResult> {
  const normalizedPath = normalizeProjectPath(input.projectPath || '');
  if (!normalizedPath) {
    throw new AppError('path is required', {
      code: 'PROJECT_PATH_REQUIRED',
      statusCode: 400,
    });
  }

  const pathValidation = await dependencies.validatePath(normalizedPath);
  if (!pathValidation.valid || !pathValidation.resolvedPath) {
    throw new AppError('Invalid project path', {
      code: 'INVALID_PROJECT_PATH',
      statusCode: 400,
      details: pathValidation.error ?? 'Path validation failed',
    });
  }

  // Re-normalize because validation returns its own resolved path.
  const resolvedProjectPath = normalizeProjectPath(pathValidation.resolvedPath);
  await dependencies.ensureWorkspaceDirectory(resolvedProjectPath);

  // Persist with an explicit display name (custom name or path basename).
  const normalizedCustomName = resolveDisplayName(input.customName ?? null, resolvedProjectPath);
  const persistedProject = dependencies.persistProjectPath(resolvedProjectPath, normalizedCustomName);

  if (persistedProject.outcome === 'active_conflict') {
    throw new AppError('Project path already exists and is active', {
      code: 'PROJECT_ALREADY_EXISTS',
      statusCode: 409,
      details: `Project path already exists: ${resolvedProjectPath}`,
    });
  }

  // Fall back to a by-path lookup when persistence did not return the row.
  const projectRow = persistedProject.project ?? dependencies.getProjectByPath(resolvedProjectPath);
  if (!projectRow) {
    throw new AppError('Failed to resolve project after creation', {
      code: 'PROJECT_CREATE_FAILED',
      statusCode: 500,
    });
  }

  // Archived rows intentionally remain archived when reused, as requested.
  return {
    outcome: persistedProject.outcome,
    project: mapProjectRowToApiView(projectRow),
  };
}
|
||||
|
||||
/**
|
||||
* Sets `projects.custom_project_name` for the given `projectId` (or clears it when empty).
|
||||
*/
|
||||
export function updateProjectDisplayName(projectId: string, newDisplayName: unknown): void {
|
||||
const trimmed = typeof newDisplayName === 'string' ? newDisplayName.trim() : '';
|
||||
projectsDb.updateCustomProjectNameById(projectId, trimmed.length > 0 ? trimmed : null);
|
||||
}
|
||||
78
server/modules/projects/services/project-star.service.ts
Normal file
78
server/modules/projects/services/project-star.service.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import { projectsDb } from '@/modules/database/index.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
type ToggleProjectStarResult = {
|
||||
isStarred: boolean;
|
||||
};
|
||||
|
||||
type ApplyLegacyStarredProjectIdsResult = {
|
||||
updated: number;
|
||||
};
|
||||
|
||||
function normalizeProjectId(projectId: string): string {
|
||||
return projectId.trim();
|
||||
}
|
||||
|
||||
function uniqueProjectIds(projectIds: string[]): string[] {
|
||||
const uniqueIds = new Set<string>();
|
||||
for (const projectId of projectIds) {
|
||||
const normalizedProjectId = normalizeProjectId(projectId);
|
||||
if (!normalizedProjectId) {
|
||||
continue;
|
||||
}
|
||||
uniqueIds.add(normalizedProjectId);
|
||||
}
|
||||
return [...uniqueIds];
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies legacy `localStorage` stars keyed by DB `projectId` onto `projects.isStarred`.
|
||||
*
|
||||
* The operation is idempotent: already-starred projects are ignored, unknown ids are skipped.
|
||||
*/
|
||||
export function applyLegacyStarredProjectIds(projectIds: string[]): ApplyLegacyStarredProjectIdsResult {
|
||||
const normalizedProjectIds = uniqueProjectIds(projectIds);
|
||||
let updated = 0;
|
||||
|
||||
for (const projectId of normalizedProjectIds) {
|
||||
const project = projectsDb.getProjectById(projectId);
|
||||
if (!project) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (Boolean(project.isStarred)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
projectsDb.updateProjectIsStarredById(projectId, true);
|
||||
updated += 1;
|
||||
}
|
||||
|
||||
return { updated };
|
||||
}
|
||||
|
||||
/**
|
||||
* Flips `projects.isStarred` for one project and returns the new state.
|
||||
*/
|
||||
export function toggleProjectStar(projectId: string): ToggleProjectStarResult {
|
||||
const normalizedProjectId = normalizeProjectId(projectId);
|
||||
if (!normalizedProjectId) {
|
||||
throw new AppError('projectId is required', {
|
||||
code: 'PROJECT_ID_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const project = projectsDb.getProjectById(normalizedProjectId);
|
||||
if (!project) {
|
||||
throw new AppError('Project not found', {
|
||||
code: 'PROJECT_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
const nextStarredState = !Boolean(project.isStarred);
|
||||
projectsDb.updateProjectIsStarredById(normalizedProjectId, nextStarredState);
|
||||
|
||||
return { isStarred: nextStarredState };
|
||||
}
|
||||
@@ -0,0 +1,248 @@
|
||||
import { access, readFile, stat } from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
|
||||
import { projectsDb } from '@/modules/database/index.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
type TaskMasterTask = {
|
||||
status?: string;
|
||||
subtasks?: Array<{
|
||||
status?: string;
|
||||
}>;
|
||||
};
|
||||
|
||||
type TaskMasterMetadata =
|
||||
| {
|
||||
taskCount: number;
|
||||
subtaskCount: number;
|
||||
completed: number;
|
||||
pending: number;
|
||||
inProgress: number;
|
||||
review: number;
|
||||
completionPercentage: number;
|
||||
lastModified: string;
|
||||
}
|
||||
| {
|
||||
error: string;
|
||||
}
|
||||
| null;
|
||||
|
||||
type TaskMasterDetectionResult = {
|
||||
hasTaskmaster: boolean;
|
||||
hasEssentialFiles?: boolean;
|
||||
files?: Record<string, boolean>;
|
||||
metadata?: TaskMasterMetadata;
|
||||
path?: string;
|
||||
reason?: string;
|
||||
};
|
||||
|
||||
type NormalizedTaskMasterInfo = {
|
||||
hasTaskmaster: boolean;
|
||||
hasEssentialFiles: boolean;
|
||||
metadata: TaskMasterMetadata;
|
||||
status: 'configured' | 'not-configured';
|
||||
};
|
||||
|
||||
type GetProjectTaskMasterByIdResult = {
|
||||
projectId: string;
|
||||
projectPath: string;
|
||||
taskmaster: NormalizedTaskMasterInfo;
|
||||
};
|
||||
|
||||
type GetProjectTaskMasterDependencies = {
|
||||
resolveProjectPathById: (projectId: string) => string | null;
|
||||
detectTaskMasterFolder: (projectPath: string) => Promise<TaskMasterDetectionResult>;
|
||||
};
|
||||
|
||||
type GetProjectTaskMasterResolver = (projectId: string) => Promise<GetProjectTaskMasterByIdResult | null>;
|
||||
|
||||
function extractTasksFromJson(tasksData: unknown): TaskMasterTask[] {
|
||||
if (!tasksData || typeof tasksData !== 'object') {
|
||||
return [];
|
||||
}
|
||||
|
||||
const legacyTasks = (tasksData as { tasks?: unknown }).tasks;
|
||||
if (Array.isArray(legacyTasks)) {
|
||||
return legacyTasks as TaskMasterTask[];
|
||||
}
|
||||
|
||||
const taggedTaskCollections: TaskMasterTask[] = [];
|
||||
for (const tagValue of Object.values(tasksData)) {
|
||||
if (!tagValue || typeof tagValue !== 'object') {
|
||||
continue;
|
||||
}
|
||||
|
||||
const tagTasks = (tagValue as { tasks?: unknown }).tasks;
|
||||
if (Array.isArray(tagTasks)) {
|
||||
taggedTaskCollections.push(...(tagTasks as TaskMasterTask[]));
|
||||
}
|
||||
}
|
||||
|
||||
return taggedTaskCollections;
|
||||
}
|
||||
|
||||
async function detectTaskMasterFolder(projectPath: string): Promise<TaskMasterDetectionResult> {
|
||||
try {
|
||||
const taskMasterPath = path.join(projectPath, '.taskmaster');
|
||||
|
||||
try {
|
||||
const taskMasterStats = await stat(taskMasterPath);
|
||||
if (!taskMasterStats.isDirectory()) {
|
||||
return {
|
||||
hasTaskmaster: false,
|
||||
reason: '.taskmaster exists but is not a directory',
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
const fileError = error as NodeJS.ErrnoException;
|
||||
if (fileError.code === 'ENOENT') {
|
||||
return {
|
||||
hasTaskmaster: false,
|
||||
reason: '.taskmaster directory not found',
|
||||
};
|
||||
}
|
||||
|
||||
throw fileError;
|
||||
}
|
||||
|
||||
const keyFiles = ['tasks/tasks.json', 'config.json'];
|
||||
const fileStatus: Record<string, boolean> = {};
|
||||
let hasEssentialFiles = true;
|
||||
|
||||
for (const fileName of keyFiles) {
|
||||
const absoluteFilePath = path.join(taskMasterPath, fileName);
|
||||
try {
|
||||
await access(absoluteFilePath);
|
||||
fileStatus[fileName] = true;
|
||||
} catch {
|
||||
fileStatus[fileName] = false;
|
||||
if (fileName === 'tasks/tasks.json') {
|
||||
hasEssentialFiles = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let taskMetadata: TaskMasterMetadata = null;
|
||||
if (fileStatus['tasks/tasks.json']) {
|
||||
const tasksPath = path.join(taskMasterPath, 'tasks/tasks.json');
|
||||
try {
|
||||
const tasksContent = await readFile(tasksPath, 'utf8');
|
||||
const parsedTasksJson = JSON.parse(tasksContent) as unknown;
|
||||
const tasks = extractTasksFromJson(parsedTasksJson);
|
||||
|
||||
const stats = tasks.reduce(
|
||||
(accumulator, currentTask) => {
|
||||
accumulator.total += 1;
|
||||
const normalizedTaskStatus = currentTask.status || 'pending';
|
||||
accumulator.byStatus[normalizedTaskStatus] = (accumulator.byStatus[normalizedTaskStatus] || 0) + 1;
|
||||
|
||||
if (Array.isArray(currentTask.subtasks)) {
|
||||
for (const subtask of currentTask.subtasks) {
|
||||
accumulator.subtotalTasks += 1;
|
||||
const normalizedSubtaskStatus = subtask.status || 'pending';
|
||||
accumulator.subtaskByStatus[normalizedSubtaskStatus] =
|
||||
(accumulator.subtaskByStatus[normalizedSubtaskStatus] || 0) + 1;
|
||||
}
|
||||
}
|
||||
|
||||
return accumulator;
|
||||
},
|
||||
{
|
||||
total: 0,
|
||||
subtotalTasks: 0,
|
||||
byStatus: {} as Record<string, number>,
|
||||
subtaskByStatus: {} as Record<string, number>,
|
||||
},
|
||||
);
|
||||
|
||||
const tasksStat = await stat(tasksPath);
|
||||
taskMetadata = {
|
||||
taskCount: stats.total,
|
||||
subtaskCount: stats.subtotalTasks,
|
||||
completed: stats.byStatus.done || 0,
|
||||
pending: stats.byStatus.pending || 0,
|
||||
inProgress: stats.byStatus['in-progress'] || 0,
|
||||
review: stats.byStatus.review || 0,
|
||||
completionPercentage: stats.total > 0 ? Math.round(((stats.byStatus.done || 0) / stats.total) * 100) : 0,
|
||||
lastModified: tasksStat.mtime.toISOString(),
|
||||
};
|
||||
} catch (parseError) {
|
||||
console.warn('Failed to parse tasks.json:', (parseError as Error).message);
|
||||
taskMetadata = {
|
||||
error: 'Failed to parse tasks.json',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
hasTaskmaster: true,
|
||||
hasEssentialFiles,
|
||||
files: fileStatus,
|
||||
metadata: taskMetadata,
|
||||
path: taskMasterPath,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error detecting TaskMaster folder:', error);
|
||||
return {
|
||||
hasTaskmaster: false,
|
||||
reason: `Error checking directory: ${(error as Error).message}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeTaskMasterInfo(taskMasterResult: TaskMasterDetectionResult | null = null): NormalizedTaskMasterInfo {
|
||||
const hasTaskmaster = Boolean(taskMasterResult?.hasTaskmaster);
|
||||
const hasEssentialFiles = Boolean(taskMasterResult?.hasEssentialFiles);
|
||||
|
||||
return {
|
||||
hasTaskmaster,
|
||||
hasEssentialFiles,
|
||||
metadata: taskMasterResult?.metadata ?? null,
|
||||
status: hasTaskmaster && hasEssentialFiles ? 'configured' : 'not-configured',
|
||||
};
|
||||
}
|
||||
|
||||
const defaultDependencies: GetProjectTaskMasterDependencies = {
|
||||
resolveProjectPathById: (projectId: string): string | null => projectsDb.getProjectPathById(projectId),
|
||||
detectTaskMasterFolder,
|
||||
};
|
||||
|
||||
export async function getProjectTaskMasterById(
|
||||
projectId: string,
|
||||
dependencies: GetProjectTaskMasterDependencies = defaultDependencies,
|
||||
): Promise<GetProjectTaskMasterByIdResult | null> {
|
||||
const projectPath = dependencies.resolveProjectPathById(projectId);
|
||||
if (!projectPath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const taskMasterResult = await dependencies.detectTaskMasterFolder(projectPath);
|
||||
return {
|
||||
projectId,
|
||||
projectPath,
|
||||
taskmaster: normalizeTaskMasterInfo(taskMasterResult),
|
||||
};
|
||||
}
|
||||
|
||||
export async function getProjectTaskMaster(
|
||||
projectId: string,
|
||||
resolveById: GetProjectTaskMasterResolver = getProjectTaskMasterById,
|
||||
): Promise<GetProjectTaskMasterByIdResult> {
|
||||
const normalizedProjectId = projectId.trim();
|
||||
if (!normalizedProjectId) {
|
||||
throw new AppError('projectId is required', {
|
||||
code: 'PROJECT_ID_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const taskMasterDetails = await resolveById(normalizedProjectId);
|
||||
if (!taskMasterDetails) {
|
||||
throw new AppError('Project not found', {
|
||||
code: 'PROJECT_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
return taskMasterDetails;
|
||||
}
|
||||
@@ -0,0 +1,285 @@
|
||||
import fs from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
|
||||
import { projectsDb, sessionsDb } from '@/modules/database/index.js';
|
||||
import { sessionSynchronizerService } from '@/modules/providers/index.js';
|
||||
import { WS_OPEN_STATE, connectedClients } from '@/modules/websocket/index.js';
|
||||
import type { RealtimeClientConnection } from '@/shared/types.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
type SessionSummary = {
|
||||
id: string;
|
||||
summary: string;
|
||||
messageCount: number;
|
||||
lastActivity: string;
|
||||
};
|
||||
|
||||
type SessionsByProvider = Record<'claude' | 'cursor' | 'codex' | 'gemini', SessionSummary[]>;
|
||||
|
||||
type SessionRepositoryRow = {
|
||||
provider: string;
|
||||
session_id: string;
|
||||
custom_name?: string | null;
|
||||
updated_at?: string | null;
|
||||
created_at?: string | null;
|
||||
};
|
||||
|
||||
export type ProjectListItem = {
|
||||
projectId: string;
|
||||
path: string;
|
||||
displayName: string;
|
||||
fullPath: string;
|
||||
isStarred: boolean;
|
||||
sessions: SessionSummary[];
|
||||
cursorSessions: SessionSummary[];
|
||||
codexSessions: SessionSummary[];
|
||||
geminiSessions: SessionSummary[];
|
||||
sessionMeta: {
|
||||
hasMore: boolean;
|
||||
total: number;
|
||||
};
|
||||
};
|
||||
|
||||
type ProgressUpdate = {
|
||||
phase: 'loading' | 'complete';
|
||||
current: number;
|
||||
total: number;
|
||||
currentProject?: string;
|
||||
};
|
||||
|
||||
type GetProjectsWithSessionsOptions = {
|
||||
skipSynchronization?: boolean;
|
||||
sessionsLimit?: number;
|
||||
sessionsOffset?: number;
|
||||
};
|
||||
|
||||
type SessionPaginationOptions = {
|
||||
limit?: number;
|
||||
offset?: number;
|
||||
};
|
||||
|
||||
type ProjectSessionsPageResult = {
|
||||
sessionsByProvider: SessionsByProvider;
|
||||
total: number;
|
||||
hasMore: boolean;
|
||||
};
|
||||
|
||||
export type ProjectSessionsPageApiView = {
|
||||
projectId: string;
|
||||
sessions: SessionSummary[];
|
||||
cursorSessions: SessionSummary[];
|
||||
codexSessions: SessionSummary[];
|
||||
geminiSessions: SessionSummary[];
|
||||
sessionMeta: {
|
||||
hasMore: boolean;
|
||||
total: number;
|
||||
};
|
||||
};
|
||||
|
||||
const DEFAULT_PROJECT_SESSIONS_PAGE_SIZE = 20;
|
||||
const MAX_PROJECT_SESSIONS_PAGE_SIZE = 200;
|
||||
|
||||
/**
|
||||
* Generate better display name from path.
|
||||
*/
|
||||
export async function generateDisplayName(projectName: string, actualProjectDir: string | null = null): Promise<string> {
|
||||
// Use actual project directory if provided, otherwise decode from project name.
|
||||
const projectPath = actualProjectDir || projectName.replace(/-/g, '/');
|
||||
|
||||
// Try to read package.json from the project path.
|
||||
try {
|
||||
const packageJsonPath = path.join(projectPath, 'package.json');
|
||||
const packageData = await fs.readFile(packageJsonPath, 'utf8');
|
||||
const packageJson = JSON.parse(packageData) as { name?: string };
|
||||
|
||||
// Return the name from package.json if it exists.
|
||||
if (packageJson.name) {
|
||||
return packageJson.name;
|
||||
}
|
||||
} catch {
|
||||
// Fall back to path-based naming if package.json doesn't exist or can't be read.
|
||||
}
|
||||
|
||||
// If it starts with /, it's an absolute path.
|
||||
if (projectPath.startsWith('/')) {
|
||||
const parts = projectPath.split('/').filter(Boolean);
|
||||
// Return only the last folder name.
|
||||
return parts[parts.length - 1] || projectPath;
|
||||
}
|
||||
|
||||
return projectPath;
|
||||
}
|
||||
|
||||
function normalizeSessionPagination(options: SessionPaginationOptions = {}): { limit: number; offset: number } {
|
||||
const rawLimit = Number.isFinite(options.limit) ? Math.floor(Number(options.limit)) : DEFAULT_PROJECT_SESSIONS_PAGE_SIZE;
|
||||
const rawOffset = Number.isFinite(options.offset) ? Math.floor(Number(options.offset)) : 0;
|
||||
|
||||
return {
|
||||
limit: Math.min(Math.max(1, rawLimit), MAX_PROJECT_SESSIONS_PAGE_SIZE),
|
||||
offset: Math.max(0, rawOffset),
|
||||
};
|
||||
}
|
||||
|
||||
function mapSessionRowToSummary(row: SessionRepositoryRow): SessionSummary {
|
||||
return {
|
||||
id: row.session_id,
|
||||
summary: row.custom_name || '',
|
||||
messageCount: 0,
|
||||
lastActivity: row.updated_at ?? row.created_at ?? new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
function bucketSessionRowsByProvider(rows: SessionRepositoryRow[]): SessionsByProvider {
|
||||
const byProvider: SessionsByProvider = {
|
||||
claude: [],
|
||||
cursor: [],
|
||||
codex: [],
|
||||
gemini: [],
|
||||
};
|
||||
|
||||
for (const row of rows) {
|
||||
const provider = row.provider as keyof SessionsByProvider;
|
||||
const bucket = byProvider[provider];
|
||||
if (!bucket) {
|
||||
continue;
|
||||
}
|
||||
|
||||
bucket.push(mapSessionRowToSummary(row));
|
||||
}
|
||||
|
||||
return byProvider;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads one paginated project session slice from the DB and groups rows by provider.
|
||||
*/
|
||||
function readProjectSessionsPageByPath(
|
||||
projectPath: string,
|
||||
options: SessionPaginationOptions = {},
|
||||
): ProjectSessionsPageResult {
|
||||
const pagination = normalizeSessionPagination(options);
|
||||
const rows = sessionsDb.getSessionsByProjectPathPage(
|
||||
projectPath,
|
||||
pagination.limit,
|
||||
pagination.offset,
|
||||
) as SessionRepositoryRow[];
|
||||
const total = sessionsDb.countSessionsByProjectPath(projectPath);
|
||||
|
||||
return {
|
||||
sessionsByProvider: bucketSessionRowsByProvider(rows),
|
||||
total,
|
||||
hasMore: pagination.offset + rows.length < total,
|
||||
};
|
||||
}
|
||||
|
||||
// Broadcast progress to all connected WebSocket clients
|
||||
function broadcastProgress(progress: ProgressUpdate) {
|
||||
const message = JSON.stringify({
|
||||
type: 'loading_progress',
|
||||
...progress,
|
||||
});
|
||||
|
||||
connectedClients.forEach((client: RealtimeClientConnection) => {
|
||||
if (client.readyState === WS_OPEN_STATE) {
|
||||
client.send(message);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads all projects from DB and returns provider-bucketed session summaries.
|
||||
*/
|
||||
export async function getProjectsWithSessions(
|
||||
options: GetProjectsWithSessionsOptions = {}
|
||||
): Promise<ProjectListItem[]> {
|
||||
if (!options.skipSynchronization) {
|
||||
await sessionSynchronizerService.synchronizeSessions();
|
||||
}
|
||||
|
||||
const projectRows = projectsDb.getProjectPaths() as Array<{
|
||||
project_id: string;
|
||||
project_path: string;
|
||||
custom_project_name?: string | null;
|
||||
isStarred?: number;
|
||||
}>;
|
||||
const totalProjects = projectRows.length;
|
||||
const projects: ProjectListItem[] = [];
|
||||
let processedProjects = 0;
|
||||
|
||||
for (const row of projectRows) {
|
||||
processedProjects += 1;
|
||||
|
||||
const projectId = row.project_id;
|
||||
const projectPath = row.project_path;
|
||||
|
||||
broadcastProgress({
|
||||
phase: 'loading',
|
||||
current: processedProjects,
|
||||
total: totalProjects,
|
||||
currentProject: projectPath,
|
||||
});
|
||||
|
||||
const displayName =
|
||||
row.custom_project_name && row.custom_project_name.trim().length > 0
|
||||
? row.custom_project_name
|
||||
: await generateDisplayName(path.basename(projectPath) || projectPath, projectPath);
|
||||
|
||||
const sessionsPage = readProjectSessionsPageByPath(projectPath, {
|
||||
limit: options.sessionsLimit,
|
||||
offset: options.sessionsOffset,
|
||||
});
|
||||
|
||||
projects.push({
|
||||
projectId,
|
||||
path: projectPath,
|
||||
displayName,
|
||||
fullPath: projectPath,
|
||||
isStarred: Boolean(row.isStarred),
|
||||
sessions: sessionsPage.sessionsByProvider.claude,
|
||||
cursorSessions: sessionsPage.sessionsByProvider.cursor,
|
||||
codexSessions: sessionsPage.sessionsByProvider.codex,
|
||||
geminiSessions: sessionsPage.sessionsByProvider.gemini,
|
||||
sessionMeta: {
|
||||
hasMore: sessionsPage.hasMore,
|
||||
total: sessionsPage.total,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
broadcastProgress({
|
||||
phase: 'complete',
|
||||
current: totalProjects,
|
||||
total: totalProjects,
|
||||
});
|
||||
|
||||
return projects;
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads one paginated session slice for a specific project id.
|
||||
*/
|
||||
export async function getProjectSessionsPage(
|
||||
projectId: string,
|
||||
options: SessionPaginationOptions = {},
|
||||
): Promise<ProjectSessionsPageApiView> {
|
||||
const projectRow = projectsDb.getProjectById(projectId);
|
||||
if (!projectRow) {
|
||||
throw new AppError(`Project "${projectId}" was not found.`, {
|
||||
code: 'PROJECT_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
const sessionsPage = readProjectSessionsPageByPath(projectRow.project_path, options);
|
||||
return {
|
||||
projectId: projectRow.project_id,
|
||||
sessions: sessionsPage.sessionsByProvider.claude,
|
||||
cursorSessions: sessionsPage.sessionsByProvider.cursor,
|
||||
codexSessions: sessionsPage.sessionsByProvider.codex,
|
||||
geminiSessions: sessionsPage.sessionsByProvider.gemini,
|
||||
sessionMeta: {
|
||||
hasMore: sessionsPage.hasMore,
|
||||
total: sessionsPage.total,
|
||||
},
|
||||
};
|
||||
}
|
||||
183
server/modules/projects/tests/project-clone.service.test.ts
Normal file
183
server/modules/projects/tests/project-clone.service.test.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import { EventEmitter } from 'node:events';
|
||||
import path from 'node:path';
|
||||
import { PassThrough } from 'node:stream';
|
||||
import test from 'node:test';
|
||||
|
||||
import { startCloneProject } from '@/modules/projects/services/project-clone.service.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
type TestDependencies = Parameters<typeof startCloneProject>[2];
|
||||
|
||||
function buildDependencies(overrides: Partial<NonNullable<TestDependencies>> = {}): NonNullable<TestDependencies> {
|
||||
return {
|
||||
validatePath: async () => ({ valid: true, resolvedPath: '/workspace/root' }),
|
||||
ensureDirectory: async () => undefined,
|
||||
pathExists: async () => false,
|
||||
removePath: async () => undefined,
|
||||
getGithubTokenById: async () => ({ github_token: 'token-value' }),
|
||||
spawnGitClone: () => {
|
||||
throw new Error('spawnGitClone should be overridden in this test');
|
||||
},
|
||||
registerProject: async () => ({ project: { projectId: 'project-1' } }),
|
||||
logError: () => undefined,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function createMockGitProcess() {
|
||||
const emitter = new EventEmitter() as EventEmitter & {
|
||||
stdout: PassThrough;
|
||||
stderr: PassThrough;
|
||||
kill: () => void;
|
||||
};
|
||||
|
||||
emitter.stdout = new PassThrough();
|
||||
emitter.stderr = new PassThrough();
|
||||
emitter.kill = () => {
|
||||
emitter.emit('close', null);
|
||||
};
|
||||
|
||||
return emitter;
|
||||
}
|
||||
|
||||
test('startCloneProject rejects when workspace path is missing', async () => {
|
||||
await assert.rejects(
|
||||
async () =>
|
||||
startCloneProject(
|
||||
{
|
||||
workspacePath: '',
|
||||
githubUrl: 'https://github.com/example/repo',
|
||||
userId: 1,
|
||||
},
|
||||
{
|
||||
onProgress: () => undefined,
|
||||
onComplete: () => undefined,
|
||||
},
|
||||
buildDependencies(),
|
||||
),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'WORKSPACE_PATH_REQUIRED');
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test('startCloneProject rejects when github URL is missing', async () => {
|
||||
await assert.rejects(
|
||||
async () =>
|
||||
startCloneProject(
|
||||
{
|
||||
workspacePath: '/workspace/root',
|
||||
githubUrl: '',
|
||||
userId: 1,
|
||||
},
|
||||
{
|
||||
onProgress: () => undefined,
|
||||
onComplete: () => undefined,
|
||||
},
|
||||
buildDependencies(),
|
||||
),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'GITHUB_URL_REQUIRED');
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test('startCloneProject rejects github URL values that begin with option prefixes', async () => {
|
||||
await assert.rejects(
|
||||
async () =>
|
||||
startCloneProject(
|
||||
{
|
||||
workspacePath: '/workspace/root',
|
||||
githubUrl: '--upload-pack=malicious',
|
||||
userId: 1,
|
||||
},
|
||||
{
|
||||
onProgress: () => undefined,
|
||||
onComplete: () => undefined,
|
||||
},
|
||||
buildDependencies(),
|
||||
),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'INVALID_GITHUB_URL');
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test('startCloneProject rejects when selected github token does not exist', async () => {
|
||||
await assert.rejects(
|
||||
async () =>
|
||||
startCloneProject(
|
||||
{
|
||||
workspacePath: '/workspace/root',
|
||||
githubUrl: 'https://github.com/example/repo',
|
||||
githubTokenId: 12,
|
||||
userId: 1,
|
||||
},
|
||||
{
|
||||
onProgress: () => undefined,
|
||||
onComplete: () => undefined,
|
||||
},
|
||||
buildDependencies({
|
||||
getGithubTokenById: async () => null,
|
||||
}),
|
||||
),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'GITHUB_TOKEN_NOT_FOUND');
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test('startCloneProject completes and emits complete payload when git exits successfully', async () => {
|
||||
const gitProcess = createMockGitProcess();
|
||||
const progressMessages: string[] = [];
|
||||
let completePayload: { project: Record<string, unknown>; message: string } | null = null;
|
||||
let capturedProjectPath = '';
|
||||
let capturedCustomName = '';
|
||||
|
||||
const operation = await startCloneProject(
|
||||
{
|
||||
workspacePath: '/workspace/root',
|
||||
githubUrl: 'https://github.com/example/repo.git',
|
||||
userId: 1,
|
||||
},
|
||||
{
|
||||
onProgress: (message) => {
|
||||
progressMessages.push(message);
|
||||
},
|
||||
onComplete: (payload: { project: Record<string, unknown>; message: string }) => {
|
||||
completePayload = payload;
|
||||
},
|
||||
},
|
||||
buildDependencies({
|
||||
spawnGitClone: () => gitProcess as any,
|
||||
registerProject: async (projectPath, customName) => {
|
||||
capturedProjectPath = projectPath;
|
||||
capturedCustomName = customName;
|
||||
return { project: { projectId: 'project-1', path: projectPath } };
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
gitProcess.emit('close', 0);
|
||||
await operation.waitForCompletion;
|
||||
|
||||
assert.ok(progressMessages.some((message) => message.includes("Cloning into 'repo'")));
|
||||
assert.equal(capturedCustomName, 'repo');
|
||||
assert.equal(path.basename(capturedProjectPath), 'repo');
|
||||
assert.notEqual(completePayload, null);
|
||||
const resolvedCompletePayload = completePayload as unknown as {
|
||||
project: Record<string, unknown>;
|
||||
message: string;
|
||||
};
|
||||
assert.equal(resolvedCompletePayload.message, 'Repository cloned successfully');
|
||||
assert.equal((resolvedCompletePayload.project.projectId as string) || '', 'project-1');
|
||||
});
|
||||
117
server/modules/projects/tests/project-management.service.test.ts
Normal file
117
server/modules/projects/tests/project-management.service.test.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
|
||||
import { createProject } from '@/modules/projects/services/project-management.service.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
const projectRow = {
|
||||
project_id: 'project-1',
|
||||
project_path: '/workspace/my-project',
|
||||
custom_project_name: 'my-project',
|
||||
isStarred: 0,
|
||||
isArchived: 0,
|
||||
};
|
||||
|
||||
test('createProject throws when project path is missing', async () => {
|
||||
await assert.rejects(
|
||||
async () => createProject({ projectPath: '' }),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'PROJECT_PATH_REQUIRED');
|
||||
assert.equal(error.statusCode, 400);
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test('createProject throws when path validation fails', async () => {
|
||||
await assert.rejects(
|
||||
async () =>
|
||||
createProject(
|
||||
{ projectPath: '/invalid/path' },
|
||||
{
|
||||
validatePath: async () => ({ valid: false, error: 'blocked path' }),
|
||||
ensureWorkspaceDirectory: async () => undefined,
|
||||
persistProjectPath: () => ({ outcome: 'created', project: projectRow }),
|
||||
getProjectByPath: () => projectRow,
|
||||
},
|
||||
),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'INVALID_PROJECT_PATH');
|
||||
assert.equal(error.statusCode, 400);
|
||||
assert.equal(error.details, 'blocked path');
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test('createProject throws conflict when active project path already exists', async () => {
|
||||
await assert.rejects(
|
||||
async () =>
|
||||
createProject(
|
||||
{ projectPath: '/workspace/my-project' },
|
||||
{
|
||||
validatePath: async () => ({ valid: true, resolvedPath: '/workspace/my-project' }),
|
||||
ensureWorkspaceDirectory: async () => undefined,
|
||||
persistProjectPath: () => ({ outcome: 'active_conflict', project: projectRow }),
|
||||
getProjectByPath: () => projectRow,
|
||||
},
|
||||
),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'PROJECT_ALREADY_EXISTS');
|
||||
assert.equal(error.statusCode, 409);
|
||||
assert.equal(error.details, 'Project path already exists: /workspace/my-project');
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test('createProject falls back to directory name when custom name is not provided', async () => {
|
||||
let capturedCustomName: string | null = null;
|
||||
|
||||
const result = await createProject(
|
||||
{ projectPath: '/workspace/my-project', customName: '' },
|
||||
{
|
||||
validatePath: async () => ({ valid: true, resolvedPath: '/workspace/my-project' }),
|
||||
ensureWorkspaceDirectory: async () => undefined,
|
||||
persistProjectPath: (_projectPath, customName) => {
|
||||
capturedCustomName = customName;
|
||||
return {
|
||||
outcome: 'created',
|
||||
project: {
|
||||
...projectRow,
|
||||
custom_project_name: customName,
|
||||
},
|
||||
};
|
||||
},
|
||||
getProjectByPath: () => projectRow,
|
||||
},
|
||||
);
|
||||
|
||||
assert.equal(capturedCustomName, 'my-project');
|
||||
assert.equal(result.outcome, 'created');
|
||||
assert.equal(result.project.displayName, 'my-project');
|
||||
});
|
||||
|
||||
test('createProject returns archived reuse outcome when archived row is reused', async () => {
|
||||
const result = await createProject(
|
||||
{ projectPath: '/workspace/my-project' },
|
||||
{
|
||||
validatePath: async () => ({ valid: true, resolvedPath: '/workspace/my-project' }),
|
||||
ensureWorkspaceDirectory: async () => undefined,
|
||||
persistProjectPath: () => ({
|
||||
outcome: 'reactivated_archived',
|
||||
project: {
|
||||
...projectRow,
|
||||
isArchived: 1,
|
||||
},
|
||||
}),
|
||||
getProjectByPath: () => projectRow,
|
||||
},
|
||||
);
|
||||
|
||||
assert.equal(result.outcome, 'reactivated_archived');
|
||||
assert.equal(result.project.isArchived, true);
|
||||
});
|
||||
123
server/modules/projects/tests/project-star.service.test.ts
Normal file
123
server/modules/projects/tests/project-star.service.test.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
|
||||
import { projectsDb } from '@/modules/database/index.js';
|
||||
import { applyLegacyStarredProjectIds, toggleProjectStar } from '@/modules/projects/services/project-star.service.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
type ProjectRow = {
|
||||
project_id: string;
|
||||
project_path: string;
|
||||
custom_project_name: string | null;
|
||||
isStarred: number;
|
||||
isArchived: number;
|
||||
};
|
||||
|
||||
test('toggleProjectStar throws when projectId is missing', () => {
|
||||
assert.throws(
|
||||
() => toggleProjectStar(' '),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError
|
||||
&& error.code === 'PROJECT_ID_REQUIRED'
|
||||
&& error.statusCode === 400,
|
||||
);
|
||||
});
|
||||
|
||||
test('toggleProjectStar throws when project does not exist', () => {
|
||||
const originalGetProjectById = projectsDb.getProjectById;
|
||||
try {
|
||||
projectsDb.getProjectById = () => null;
|
||||
assert.throws(
|
||||
() => toggleProjectStar('project-1'),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError
|
||||
&& error.code === 'PROJECT_NOT_FOUND'
|
||||
&& error.statusCode === 404,
|
||||
);
|
||||
} finally {
|
||||
projectsDb.getProjectById = originalGetProjectById;
|
||||
}
|
||||
});
|
||||
|
||||
test('toggleProjectStar flips star state and persists it', () => {
|
||||
const originalGetProjectById = projectsDb.getProjectById;
|
||||
const originalUpdateProjectIsStarredById = projectsDb.updateProjectIsStarredById;
|
||||
|
||||
let capturedProjectId = '';
|
||||
let capturedState = false;
|
||||
|
||||
try {
|
||||
projectsDb.getProjectById = () =>
|
||||
({
|
||||
project_id: 'project-1',
|
||||
project_path: '/workspace/project-1',
|
||||
custom_project_name: 'project-1',
|
||||
isStarred: 0,
|
||||
isArchived: 0,
|
||||
}) as ProjectRow;
|
||||
projectsDb.updateProjectIsStarredById = (projectId: string, isStarred: boolean) => {
|
||||
capturedProjectId = projectId;
|
||||
capturedState = isStarred;
|
||||
};
|
||||
|
||||
const result = toggleProjectStar('project-1');
|
||||
|
||||
assert.equal(result.isStarred, true);
|
||||
assert.equal(capturedProjectId, 'project-1');
|
||||
assert.equal(capturedState, true);
|
||||
} finally {
|
||||
projectsDb.getProjectById = originalGetProjectById;
|
||||
projectsDb.updateProjectIsStarredById = originalUpdateProjectIsStarredById;
|
||||
}
|
||||
});
|
||||
|
||||
test('applyLegacyStarredProjectIds stars only valid, unstarred projects', () => {
|
||||
const originalGetProjectById = projectsDb.getProjectById;
|
||||
const originalUpdateProjectIsStarredById = projectsDb.updateProjectIsStarredById;
|
||||
|
||||
const updatedProjectIds: string[] = [];
|
||||
|
||||
try {
|
||||
projectsDb.getProjectById = (projectId: string) => {
|
||||
if (projectId === 'project-a') {
|
||||
return {
|
||||
project_id: 'project-a',
|
||||
project_path: '/workspace/project-a',
|
||||
custom_project_name: 'A',
|
||||
isStarred: 0,
|
||||
isArchived: 0,
|
||||
} as ProjectRow;
|
||||
}
|
||||
|
||||
if (projectId === 'project-b') {
|
||||
return {
|
||||
project_id: 'project-b',
|
||||
project_path: '/workspace/project-b',
|
||||
custom_project_name: 'B',
|
||||
isStarred: 1,
|
||||
isArchived: 0,
|
||||
} as ProjectRow;
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
projectsDb.updateProjectIsStarredById = (projectId: string) => {
|
||||
updatedProjectIds.push(projectId);
|
||||
};
|
||||
|
||||
const result = applyLegacyStarredProjectIds([
|
||||
'project-a',
|
||||
'project-b',
|
||||
'missing-project',
|
||||
'project-a',
|
||||
'',
|
||||
' ',
|
||||
]);
|
||||
|
||||
assert.equal(result.updated, 1);
|
||||
assert.deepEqual(updatedProjectIds, ['project-a']);
|
||||
} finally {
|
||||
projectsDb.getProjectById = originalGetProjectById;
|
||||
projectsDb.updateProjectIsStarredById = originalUpdateProjectIsStarredById;
|
||||
}
|
||||
});
|
||||
@@ -0,0 +1,105 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
|
||||
import {
|
||||
getProjectTaskMaster,
|
||||
getProjectTaskMasterById,
|
||||
} from '@/modules/projects/services/projects-has-taskmaster.service.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
test('getProjectTaskMasterById returns null when project path is missing', async () => {
|
||||
const result = await getProjectTaskMasterById('project-1', {
|
||||
resolveProjectPathById: () => null,
|
||||
detectTaskMasterFolder: async () => {
|
||||
throw new Error('detectTaskMasterFolder should not be called when path is missing');
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(result, null);
|
||||
});
|
||||
|
||||
test('getProjectTaskMasterById returns configured status when taskmaster exists with essential files', async () => {
|
||||
const result = await getProjectTaskMasterById('project-1', {
|
||||
resolveProjectPathById: () => '/workspace/project-1',
|
||||
detectTaskMasterFolder: async () => ({
|
||||
hasTaskmaster: true,
|
||||
hasEssentialFiles: true,
|
||||
metadata: {
|
||||
taskCount: 3,
|
||||
subtaskCount: 0,
|
||||
completed: 1,
|
||||
pending: 2,
|
||||
inProgress: 0,
|
||||
review: 0,
|
||||
completionPercentage: 33,
|
||||
lastModified: '2026-01-01T00:00:00.000Z',
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
assert.ok(result);
|
||||
assert.equal(result.projectId, 'project-1');
|
||||
assert.equal(result.projectPath, '/workspace/project-1');
|
||||
assert.equal(result.taskmaster.hasTaskmaster, true);
|
||||
assert.equal(result.taskmaster.hasEssentialFiles, true);
|
||||
assert.equal(result.taskmaster.status, 'configured');
|
||||
assert.deepEqual(result.taskmaster.metadata, {
|
||||
taskCount: 3,
|
||||
subtaskCount: 0,
|
||||
completed: 1,
|
||||
pending: 2,
|
||||
inProgress: 0,
|
||||
review: 0,
|
||||
completionPercentage: 33,
|
||||
lastModified: '2026-01-01T00:00:00.000Z',
|
||||
});
|
||||
});
|
||||
|
||||
test('getProjectTaskMasterById returns not-configured status when taskmaster is missing', async () => {
|
||||
const result = await getProjectTaskMasterById('project-1', {
|
||||
resolveProjectPathById: () => '/workspace/project-1',
|
||||
detectTaskMasterFolder: async () => ({
|
||||
hasTaskmaster: false,
|
||||
}),
|
||||
});
|
||||
|
||||
assert.ok(result);
|
||||
assert.equal(result.taskmaster.hasTaskmaster, false);
|
||||
assert.equal(result.taskmaster.hasEssentialFiles, false);
|
||||
assert.equal(result.taskmaster.status, 'not-configured');
|
||||
assert.equal(result.taskmaster.metadata, null);
|
||||
});
|
||||
|
||||
test('getProjectTaskMaster throws when project id is missing', async () => {
|
||||
await assert.rejects(
|
||||
async () =>
|
||||
getProjectTaskMaster('', async () => ({
|
||||
projectId: 'project-1',
|
||||
projectPath: '/workspace/project-1',
|
||||
taskmaster: {
|
||||
hasTaskmaster: true,
|
||||
hasEssentialFiles: true,
|
||||
metadata: null,
|
||||
status: 'configured',
|
||||
},
|
||||
})),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'PROJECT_ID_REQUIRED');
|
||||
assert.equal(error.statusCode, 400);
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test('getProjectTaskMaster throws when project does not exist', async () => {
|
||||
await assert.rejects(
|
||||
async () => getProjectTaskMaster('project-that-does-not-exist', async () => null),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'PROJECT_NOT_FOUND');
|
||||
assert.equal(error.statusCode, 404);
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
4
server/modules/providers/index.ts
Normal file
4
server/modules/providers/index.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
export { sessionSynchronizerService } from './services/session-synchronizer.service.js';
|
||||
|
||||
export { initializeSessionsWatcher } from './services/sessions-watcher.service.js';
|
||||
export { closeSessionsWatcher } from './services/sessions-watcher.service.js';
|
||||
123
server/modules/providers/list/claude/claude-auth.provider.ts
Normal file
123
server/modules/providers/list/claude/claude-auth.provider.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import spawn from 'cross-spawn';
|
||||
|
||||
import type { IProviderAuth } from '@/shared/interfaces.js';
|
||||
import type { ProviderAuthStatus } from '@/shared/types.js';
|
||||
import { readObjectRecord, readOptionalString } from '@/shared/utils.js';
|
||||
|
||||
type ClaudeCredentialsStatus = {
|
||||
authenticated: boolean;
|
||||
email: string | null;
|
||||
method: string | null;
|
||||
error?: string;
|
||||
};
|
||||
|
||||
export class ClaudeProviderAuth implements IProviderAuth {
|
||||
/**
|
||||
* Checks whether the Claude Code CLI is available on this host.
|
||||
*/
|
||||
private checkInstalled(): boolean {
|
||||
const cliPath = process.env.CLAUDE_CLI_PATH || 'claude';
|
||||
try {
|
||||
spawn.sync(cliPath, ['--version'], { stdio: 'ignore', timeout: 5000 });
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns Claude installation and credential status using Claude Code's auth priority.
|
||||
*/
|
||||
async getStatus(): Promise<ProviderAuthStatus> {
|
||||
const installed = this.checkInstalled();
|
||||
|
||||
if (!installed) {
|
||||
return {
|
||||
installed,
|
||||
provider: 'claude',
|
||||
authenticated: false,
|
||||
email: null,
|
||||
method: null,
|
||||
error: 'Claude Code CLI is not installed',
|
||||
};
|
||||
}
|
||||
|
||||
const credentials = await this.checkCredentials();
|
||||
|
||||
return {
|
||||
installed,
|
||||
provider: 'claude',
|
||||
authenticated: credentials.authenticated,
|
||||
email: credentials.authenticated ? credentials.email || 'Authenticated' : credentials.email,
|
||||
method: credentials.method,
|
||||
error: credentials.authenticated ? undefined : credentials.error || 'Not authenticated',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads Claude settings env values that the CLI can use even when the server process env is empty.
|
||||
*/
|
||||
private async loadSettingsEnv(): Promise<Record<string, unknown>> {
|
||||
try {
|
||||
const settingsPath = path.join(os.homedir(), '.claude', 'settings.json');
|
||||
const content = await readFile(settingsPath, 'utf8');
|
||||
const settings = readObjectRecord(JSON.parse(content));
|
||||
return readObjectRecord(settings?.env) ?? {};
|
||||
} catch {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks Claude credentials in the same priority order used by Claude Code.
|
||||
*/
|
||||
private async checkCredentials(): Promise<ClaudeCredentialsStatus> {
|
||||
if (process.env.ANTHROPIC_API_KEY?.trim()) {
|
||||
return { authenticated: true, email: 'API Key Auth', method: 'api_key' };
|
||||
}
|
||||
|
||||
const settingsEnv = await this.loadSettingsEnv();
|
||||
if (readOptionalString(settingsEnv.ANTHROPIC_API_KEY)) {
|
||||
return { authenticated: true, email: 'API Key Auth', method: 'api_key' };
|
||||
}
|
||||
|
||||
if (readOptionalString(settingsEnv.ANTHROPIC_AUTH_TOKEN)) {
|
||||
return { authenticated: true, email: 'Configured via settings.json', method: 'api_key' };
|
||||
}
|
||||
|
||||
try {
|
||||
const credPath = path.join(os.homedir(), '.claude', '.credentials.json');
|
||||
const content = await readFile(credPath, 'utf8');
|
||||
const creds = readObjectRecord(JSON.parse(content)) ?? {};
|
||||
const oauth = readObjectRecord(creds.claudeAiOauth);
|
||||
const accessToken = readOptionalString(oauth?.accessToken);
|
||||
|
||||
if (accessToken) {
|
||||
const expiresAt = typeof oauth?.expiresAt === 'number' ? oauth.expiresAt : undefined;
|
||||
const email = readOptionalString(creds.email) ?? readOptionalString(creds.user) ?? null;
|
||||
if (!expiresAt || Date.now() < expiresAt) {
|
||||
return {
|
||||
authenticated: true,
|
||||
email,
|
||||
method: 'credentials_file',
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
authenticated: false,
|
||||
email,
|
||||
method: 'credentials_file',
|
||||
error: 'OAuth token has expired. Please re-authenticate with claude login',
|
||||
};
|
||||
}
|
||||
|
||||
return { authenticated: false, email: null, method: null };
|
||||
} catch {
|
||||
return { authenticated: false, email: null, method: null };
|
||||
}
|
||||
}
|
||||
}
|
||||
135
server/modules/providers/list/claude/claude-mcp.provider.ts
Normal file
135
server/modules/providers/list/claude/claude-mcp.provider.ts
Normal file
@@ -0,0 +1,135 @@
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import { McpProvider } from '@/modules/providers/shared/mcp/mcp.provider.js';
|
||||
import type { McpScope, ProviderMcpServer, UpsertProviderMcpServerInput } from '@/shared/types.js';
|
||||
import {
|
||||
AppError,
|
||||
readJsonConfig,
|
||||
readObjectRecord,
|
||||
readOptionalString,
|
||||
readStringArray,
|
||||
readStringRecord,
|
||||
writeJsonConfig,
|
||||
} from '@/shared/utils.js';
|
||||
|
||||
export class ClaudeMcpProvider extends McpProvider {
|
||||
constructor() {
|
||||
super('claude', ['user', 'local', 'project'], ['stdio', 'http', 'sse']);
|
||||
}
|
||||
|
||||
protected async readScopedServers(scope: McpScope, workspacePath: string): Promise<Record<string, unknown>> {
|
||||
if (scope === 'project') {
|
||||
const filePath = path.join(workspacePath, '.mcp.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
return readObjectRecord(config.mcpServers) ?? {};
|
||||
}
|
||||
|
||||
const filePath = path.join(os.homedir(), '.claude.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
if (scope === 'user') {
|
||||
return readObjectRecord(config.mcpServers) ?? {};
|
||||
}
|
||||
|
||||
const projects = readObjectRecord(config.projects) ?? {};
|
||||
const projectConfig = readObjectRecord(projects[workspacePath]) ?? {};
|
||||
return readObjectRecord(projectConfig.mcpServers) ?? {};
|
||||
}
|
||||
|
||||
protected async writeScopedServers(
|
||||
scope: McpScope,
|
||||
workspacePath: string,
|
||||
servers: Record<string, unknown>,
|
||||
): Promise<void> {
|
||||
if (scope === 'project') {
|
||||
const filePath = path.join(workspacePath, '.mcp.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
config.mcpServers = servers;
|
||||
await writeJsonConfig(filePath, config);
|
||||
return;
|
||||
}
|
||||
|
||||
const filePath = path.join(os.homedir(), '.claude.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
if (scope === 'user') {
|
||||
config.mcpServers = servers;
|
||||
await writeJsonConfig(filePath, config);
|
||||
return;
|
||||
}
|
||||
|
||||
const projects = readObjectRecord(config.projects) ?? {};
|
||||
const projectConfig = readObjectRecord(projects[workspacePath]) ?? {};
|
||||
projectConfig.mcpServers = servers;
|
||||
projects[workspacePath] = projectConfig;
|
||||
config.projects = projects;
|
||||
await writeJsonConfig(filePath, config);
|
||||
}
|
||||
|
||||
protected buildServerConfig(input: UpsertProviderMcpServerInput): Record<string, unknown> {
|
||||
if (input.transport === 'stdio') {
|
||||
if (!input.command?.trim()) {
|
||||
throw new AppError('command is required for stdio MCP servers.', {
|
||||
code: 'MCP_COMMAND_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'stdio',
|
||||
command: input.command,
|
||||
args: input.args ?? [],
|
||||
env: input.env ?? {},
|
||||
};
|
||||
}
|
||||
|
||||
if (!input.url?.trim()) {
|
||||
throw new AppError('url is required for http/sse MCP servers.', {
|
||||
code: 'MCP_URL_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
type: input.transport,
|
||||
url: input.url,
|
||||
headers: input.headers ?? {},
|
||||
};
|
||||
}
|
||||
|
||||
protected normalizeServerConfig(
|
||||
scope: McpScope,
|
||||
name: string,
|
||||
rawConfig: unknown,
|
||||
): ProviderMcpServer | null {
|
||||
if (!rawConfig || typeof rawConfig !== 'object') {
|
||||
return null;
|
||||
}
|
||||
|
||||
const config = rawConfig as Record<string, unknown>;
|
||||
if (typeof config.command === 'string') {
|
||||
return {
|
||||
provider: 'claude',
|
||||
name,
|
||||
scope,
|
||||
transport: 'stdio',
|
||||
command: config.command,
|
||||
args: readStringArray(config.args),
|
||||
env: readStringRecord(config.env),
|
||||
};
|
||||
}
|
||||
|
||||
if (typeof config.url === 'string') {
|
||||
const transport = readOptionalString(config.type) === 'sse' ? 'sse' : 'http';
|
||||
return {
|
||||
provider: 'claude',
|
||||
name,
|
||||
scope,
|
||||
transport,
|
||||
url: config.url,
|
||||
headers: readStringRecord(config.headers),
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,110 @@
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import { sessionsDb } from '@/modules/database/index.js';
|
||||
import {
|
||||
buildLookupMap,
|
||||
extractFirstValidJsonlData,
|
||||
findFilesRecursivelyCreatedAfter,
|
||||
normalizeSessionName,
|
||||
readFileTimestamps,
|
||||
} from '@/shared/utils.js';
|
||||
import type { IProviderSessionSynchronizer } from '@/shared/interfaces.js';
|
||||
|
||||
type ParsedSession = {
|
||||
sessionId: string;
|
||||
projectPath: string;
|
||||
sessionName?: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Session indexer for Claude transcript artifacts.
|
||||
*/
|
||||
export class ClaudeSessionSynchronizer implements IProviderSessionSynchronizer {
|
||||
private readonly provider = 'claude' as const;
|
||||
private readonly claudeHome = path.join(os.homedir(), '.claude');
|
||||
|
||||
/**
|
||||
* Scans ~/.claude/projects and upserts discovered sessions into DB.
|
||||
*/
|
||||
async synchronize(since?: Date): Promise<number> {
|
||||
const nameMap = await buildLookupMap(path.join(this.claudeHome, 'history.jsonl'), 'sessionId', 'display');
|
||||
const files = await findFilesRecursivelyCreatedAfter(
|
||||
path.join(this.claudeHome, 'projects'),
|
||||
'.jsonl',
|
||||
since ?? null
|
||||
);
|
||||
|
||||
let processed = 0;
|
||||
for (const filePath of files) {
|
||||
const parsed = await this.processSessionFile(filePath, nameMap);
|
||||
if (!parsed) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const timestamps = await readFileTimestamps(filePath);
|
||||
sessionsDb.createSession(
|
||||
parsed.sessionId,
|
||||
this.provider,
|
||||
parsed.projectPath,
|
||||
parsed.sessionName,
|
||||
timestamps.createdAt,
|
||||
timestamps.updatedAt,
|
||||
filePath
|
||||
);
|
||||
processed += 1;
|
||||
}
|
||||
|
||||
return processed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses and upserts one Claude session JSONL file.
|
||||
*/
|
||||
async synchronizeFile(filePath: string): Promise<string | null> {
|
||||
if (!filePath.endsWith('.jsonl')) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const nameMap = await buildLookupMap(path.join(this.claudeHome, 'history.jsonl'), 'sessionId', 'display');
|
||||
const parsed = await this.processSessionFile(filePath, nameMap);
|
||||
if (!parsed) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const timestamps = await readFileTimestamps(filePath);
|
||||
return sessionsDb.createSession(
|
||||
parsed.sessionId,
|
||||
this.provider,
|
||||
parsed.projectPath,
|
||||
parsed.sessionName,
|
||||
timestamps.createdAt,
|
||||
timestamps.updatedAt,
|
||||
filePath
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts session metadata from one Claude JSONL session file.
|
||||
*/
|
||||
private async processSessionFile(
|
||||
filePath: string,
|
||||
nameMap: Map<string, string>
|
||||
): Promise<ParsedSession | null> {
|
||||
return extractFirstValidJsonlData(filePath, (rawData) => {
|
||||
const data = rawData as Record<string, unknown>;
|
||||
const sessionId = typeof data.sessionId === 'string' ? data.sessionId : undefined;
|
||||
const projectPath = typeof data.cwd === 'string' ? data.cwd : undefined;
|
||||
|
||||
if (!sessionId || !projectPath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
sessionId,
|
||||
projectPath,
|
||||
sessionName: normalizeSessionName(nameMap.get(sessionId), 'Untitled Claude Session'),
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
475
server/modules/providers/list/claude/claude-sessions.provider.ts
Normal file
475
server/modules/providers/list/claude/claude-sessions.provider.ts
Normal file
@@ -0,0 +1,475 @@
|
||||
import fs from 'node:fs';
|
||||
import fsp from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
import readline from 'node:readline';
|
||||
|
||||
import type { IProviderSessions } from '@/shared/interfaces.js';
|
||||
import type { AnyRecord, FetchHistoryOptions, FetchHistoryResult, NormalizedMessage } from '@/shared/types.js';
|
||||
import { createNormalizedMessage, generateMessageId, readObjectRecord } from '@/shared/utils.js';
|
||||
import { sessionsDb } from '@/modules/database/index.js';
|
||||
|
||||
const PROVIDER = 'claude';
|
||||
|
||||
type ClaudeToolResult = {
|
||||
content: unknown;
|
||||
isError: boolean;
|
||||
subagentTools?: unknown;
|
||||
toolUseResult?: unknown;
|
||||
};
|
||||
|
||||
type ClaudeHistoryResult =
|
||||
| AnyRecord[]
|
||||
| {
|
||||
messages?: AnyRecord[];
|
||||
total?: number;
|
||||
hasMore?: boolean;
|
||||
};
|
||||
|
||||
type ClaudeHistoryMessagesResult =
|
||||
| AnyRecord[]
|
||||
| {
|
||||
messages: AnyRecord[];
|
||||
total: number;
|
||||
hasMore: boolean;
|
||||
offset?: number;
|
||||
limit?: number | null;
|
||||
};
|
||||
|
||||
async function parseAgentTools(filePath: string): Promise<AnyRecord[]> {
|
||||
const tools: AnyRecord[] = [];
|
||||
|
||||
try {
|
||||
const fileStream = fs.createReadStream(filePath);
|
||||
const rl = readline.createInterface({
|
||||
input: fileStream,
|
||||
crlfDelay: Infinity,
|
||||
});
|
||||
|
||||
for await (const line of rl) {
|
||||
if (!line.trim()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const entry = JSON.parse(line) as AnyRecord;
|
||||
|
||||
if (entry.message?.role === 'assistant' && Array.isArray(entry.message?.content)) {
|
||||
for (const part of entry.message.content as AnyRecord[]) {
|
||||
if (part.type === 'tool_use') {
|
||||
tools.push({
|
||||
toolId: part.id,
|
||||
toolName: part.name,
|
||||
toolInput: part.input,
|
||||
timestamp: entry.timestamp,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (entry.message?.role === 'user' && Array.isArray(entry.message?.content)) {
|
||||
for (const part of entry.message.content as AnyRecord[]) {
|
||||
if (part.type !== 'tool_result') {
|
||||
continue;
|
||||
}
|
||||
|
||||
const tool = tools.find((candidate) => candidate.toolId === part.tool_use_id);
|
||||
if (!tool) {
|
||||
continue;
|
||||
}
|
||||
|
||||
tool.toolResult = {
|
||||
content: typeof part.content === 'string'
|
||||
? part.content
|
||||
: Array.isArray(part.content)
|
||||
? part.content
|
||||
.map((contentPart: AnyRecord) => contentPart?.text || '')
|
||||
.join('\n')
|
||||
: JSON.stringify(part.content),
|
||||
isError: Boolean(part.is_error),
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Skip malformed lines that can happen during concurrent writes.
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.warn(`Error parsing agent file ${filePath}:`, message);
|
||||
}
|
||||
|
||||
return tools;
|
||||
}
|
||||
|
||||
async function getSessionMessages(
|
||||
sessionId: string,
|
||||
limit: number | null,
|
||||
offset: number,
|
||||
): Promise<ClaudeHistoryMessagesResult> {
|
||||
try {
|
||||
const jsonLPath = sessionsDb.getSessionById(sessionId)?.jsonl_path;
|
||||
|
||||
if (!jsonLPath) {
|
||||
return { messages: [], total: 0, hasMore: false };
|
||||
}
|
||||
|
||||
const projectDir = path.dirname(jsonLPath);
|
||||
const files = await fsp.readdir(projectDir);
|
||||
const agentFiles = files.filter((file) => file.endsWith('.jsonl') && file.startsWith('agent-'));
|
||||
|
||||
const messages: AnyRecord[] = [];
|
||||
const agentToolsCache = new Map<string, AnyRecord[]>();
|
||||
|
||||
const fileStream = fs.createReadStream(jsonLPath);
|
||||
const rl = readline.createInterface({
|
||||
input: fileStream,
|
||||
crlfDelay: Infinity,
|
||||
});
|
||||
|
||||
for await (const line of rl) {
|
||||
if (!line.trim()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const entry = JSON.parse(line) as AnyRecord;
|
||||
if (entry.sessionId === sessionId) {
|
||||
messages.push(entry);
|
||||
}
|
||||
} catch {
|
||||
// Skip malformed JSONL lines that can happen during concurrent writes.
|
||||
}
|
||||
}
|
||||
|
||||
const agentIds = new Set<string>();
|
||||
for (const message of messages) {
|
||||
const agentId = message.toolUseResult?.agentId;
|
||||
if (agentId) {
|
||||
agentIds.add(String(agentId));
|
||||
}
|
||||
}
|
||||
|
||||
for (const agentId of agentIds) {
|
||||
const agentFileName = `agent-${agentId}.jsonl`;
|
||||
if (!agentFiles.includes(agentFileName)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const agentFilePath = path.join(projectDir, agentFileName);
|
||||
const tools = await parseAgentTools(agentFilePath);
|
||||
agentToolsCache.set(agentId, tools);
|
||||
}
|
||||
|
||||
for (const message of messages) {
|
||||
const agentId = message.toolUseResult?.agentId;
|
||||
if (!agentId) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const agentTools = agentToolsCache.get(String(agentId));
|
||||
if (agentTools && agentTools.length > 0) {
|
||||
message.subagentTools = agentTools;
|
||||
}
|
||||
}
|
||||
|
||||
const sortedMessages = messages.sort(
|
||||
(a, b) => new Date(a.timestamp || 0).getTime() - new Date(b.timestamp || 0).getTime(),
|
||||
);
|
||||
const total = sortedMessages.length;
|
||||
|
||||
if (limit === null) {
|
||||
return sortedMessages;
|
||||
}
|
||||
|
||||
const startIndex = Math.max(0, total - offset - limit);
|
||||
const endIndex = total - offset;
|
||||
const paginatedMessages = sortedMessages.slice(startIndex, endIndex);
|
||||
const hasMore = startIndex > 0;
|
||||
|
||||
return {
|
||||
messages: paginatedMessages,
|
||||
total,
|
||||
hasMore,
|
||||
offset,
|
||||
limit,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error(`Error reading messages for session ${sessionId}:`, error);
|
||||
return limit === null ? [] : { messages: [], total: 0, hasMore: false };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Claude writes internal command and system reminder entries into history.
|
||||
* Those are useful for the CLI but should not appear in the user-facing chat.
|
||||
*/
|
||||
const INTERNAL_CONTENT_PREFIXES = [
|
||||
'<command-name>',
|
||||
'<command-message>',
|
||||
'<command-args>',
|
||||
'<local-command-stdout>',
|
||||
'<system-reminder>',
|
||||
'Caveat:',
|
||||
'This session is being continued from a previous',
|
||||
'[Request interrupted',
|
||||
] as const;
|
||||
|
||||
function isInternalContent(content: string): boolean {
|
||||
return INTERNAL_CONTENT_PREFIXES.some((prefix) => content.startsWith(prefix));
|
||||
}
|
||||
|
||||
export class ClaudeSessionsProvider implements IProviderSessions {
|
||||
/**
|
||||
* Normalizes one Claude JSONL entry or live SDK stream event into the shared
|
||||
* message shape consumed by REST and WebSocket clients.
|
||||
*/
|
||||
normalizeMessage(rawMessage: unknown, sessionId: string | null): NormalizedMessage[] {
|
||||
const raw = readObjectRecord(rawMessage);
|
||||
if (!raw) {
|
||||
return [];
|
||||
}
|
||||
|
||||
if (raw.type === 'content_block_delta' && raw.delta?.text) {
|
||||
return [createNormalizedMessage({ kind: 'stream_delta', content: raw.delta.text, sessionId, provider: PROVIDER })];
|
||||
}
|
||||
if (raw.type === 'content_block_stop') {
|
||||
return [createNormalizedMessage({ kind: 'stream_end', sessionId, provider: PROVIDER })];
|
||||
}
|
||||
|
||||
const messages: NormalizedMessage[] = [];
|
||||
const ts = raw.timestamp || new Date().toISOString();
|
||||
const baseId = raw.uuid || generateMessageId('claude');
|
||||
|
||||
if (raw.message?.role === 'user' && raw.message?.content) {
|
||||
if (Array.isArray(raw.message.content)) {
|
||||
for (let partIndex = 0; partIndex < raw.message.content.length; partIndex++) {
|
||||
const part = raw.message.content[partIndex];
|
||||
if (part.type === 'tool_result') {
|
||||
messages.push(createNormalizedMessage({
|
||||
id: `${baseId}_tr_${part.tool_use_id}`,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'tool_result',
|
||||
toolId: part.tool_use_id,
|
||||
content: typeof part.content === 'string' ? part.content : JSON.stringify(part.content),
|
||||
isError: Boolean(part.is_error),
|
||||
subagentTools: raw.subagentTools,
|
||||
toolUseResult: raw.toolUseResult,
|
||||
}));
|
||||
} else if (part.type === 'text') {
|
||||
const text = part.text || '';
|
||||
if (text && !isInternalContent(text)) {
|
||||
messages.push(createNormalizedMessage({
|
||||
id: `${baseId}_text_${partIndex}`,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'text',
|
||||
role: 'user',
|
||||
content: text,
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (messages.length === 0) {
|
||||
const textParts = raw.message.content
|
||||
.filter((part: AnyRecord) => part.type === 'text')
|
||||
.map((part: AnyRecord) => part.text)
|
||||
.filter(Boolean)
|
||||
.join('\n');
|
||||
if (textParts && !isInternalContent(textParts)) {
|
||||
messages.push(createNormalizedMessage({
|
||||
id: `${baseId}_text`,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'text',
|
||||
role: 'user',
|
||||
content: textParts,
|
||||
}));
|
||||
}
|
||||
}
|
||||
} else if (typeof raw.message.content === 'string') {
|
||||
const text = raw.message.content;
|
||||
if (text && !isInternalContent(text)) {
|
||||
messages.push(createNormalizedMessage({
|
||||
id: baseId,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'text',
|
||||
role: 'user',
|
||||
content: text,
|
||||
}));
|
||||
}
|
||||
}
|
||||
return messages;
|
||||
}
|
||||
|
||||
if (raw.type === 'thinking' && raw.message?.content) {
|
||||
messages.push(createNormalizedMessage({
|
||||
id: baseId,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'thinking',
|
||||
content: raw.message.content,
|
||||
}));
|
||||
return messages;
|
||||
}
|
||||
|
||||
if (raw.type === 'tool_use' && raw.toolName) {
|
||||
messages.push(createNormalizedMessage({
|
||||
id: baseId,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'tool_use',
|
||||
toolName: raw.toolName,
|
||||
toolInput: raw.toolInput,
|
||||
toolId: raw.toolCallId || baseId,
|
||||
}));
|
||||
return messages;
|
||||
}
|
||||
|
||||
if (raw.type === 'tool_result') {
|
||||
messages.push(createNormalizedMessage({
|
||||
id: baseId,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'tool_result',
|
||||
toolId: raw.toolCallId || '',
|
||||
content: raw.output || '',
|
||||
isError: false,
|
||||
}));
|
||||
return messages;
|
||||
}
|
||||
|
||||
if (raw.message?.role === 'assistant' && raw.message?.content) {
|
||||
if (Array.isArray(raw.message.content)) {
|
||||
let partIndex = 0;
|
||||
for (const part of raw.message.content) {
|
||||
if (part.type === 'text' && part.text) {
|
||||
messages.push(createNormalizedMessage({
|
||||
id: `${baseId}_${partIndex}`,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'text',
|
||||
role: 'assistant',
|
||||
content: part.text,
|
||||
}));
|
||||
} else if (part.type === 'tool_use') {
|
||||
messages.push(createNormalizedMessage({
|
||||
id: `${baseId}_${partIndex}`,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'tool_use',
|
||||
toolName: part.name,
|
||||
toolInput: part.input,
|
||||
toolId: part.id,
|
||||
}));
|
||||
} else if (part.type === 'thinking' && part.thinking) {
|
||||
messages.push(createNormalizedMessage({
|
||||
id: `${baseId}_${partIndex}`,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'thinking',
|
||||
content: part.thinking,
|
||||
}));
|
||||
}
|
||||
partIndex++;
|
||||
}
|
||||
} else if (typeof raw.message.content === 'string') {
|
||||
messages.push(createNormalizedMessage({
|
||||
id: baseId,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'text',
|
||||
role: 'assistant',
|
||||
content: raw.message.content,
|
||||
}));
|
||||
}
|
||||
return messages;
|
||||
}
|
||||
|
||||
return messages;
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads Claude JSONL history for a project/session and returns normalized
|
||||
* messages, preserving the existing pagination behavior from projects.js.
|
||||
*/
|
||||
async fetchHistory(
|
||||
sessionId: string,
|
||||
options: FetchHistoryOptions = {},
|
||||
): Promise<FetchHistoryResult> {
|
||||
const { limit = null, offset = 0 } = options;
|
||||
|
||||
let result: ClaudeHistoryResult;
|
||||
try {
|
||||
result = await getSessionMessages(sessionId, limit, offset);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.warn(`[ClaudeProvider] Failed to load session ${sessionId}:`, message);
|
||||
return { messages: [], total: 0, hasMore: false, offset: 0, limit: null };
|
||||
}
|
||||
|
||||
const rawMessages = Array.isArray(result) ? result : (result.messages || []);
|
||||
const total = Array.isArray(result) ? rawMessages.length : (result.total || 0);
|
||||
const hasMore = Array.isArray(result) ? false : Boolean(result.hasMore);
|
||||
|
||||
const toolResultMap = new Map<string, ClaudeToolResult>();
|
||||
for (const raw of rawMessages) {
|
||||
if (raw.message?.role === 'user' && Array.isArray(raw.message?.content)) {
|
||||
for (const part of raw.message.content) {
|
||||
if (part.type === 'tool_result' && part.tool_use_id) {
|
||||
toolResultMap.set(part.tool_use_id, {
|
||||
content: part.content,
|
||||
isError: Boolean(part.is_error),
|
||||
subagentTools: raw.subagentTools,
|
||||
toolUseResult: raw.toolUseResult,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const normalized: NormalizedMessage[] = [];
|
||||
for (const raw of rawMessages) {
|
||||
normalized.push(...this.normalizeMessage(raw, sessionId));
|
||||
}
|
||||
|
||||
for (const msg of normalized) {
|
||||
if (msg.kind === 'tool_use' && msg.toolId && toolResultMap.has(msg.toolId)) {
|
||||
const toolResult = toolResultMap.get(msg.toolId);
|
||||
if (!toolResult) {
|
||||
continue;
|
||||
}
|
||||
|
||||
msg.toolResult = {
|
||||
content: typeof toolResult.content === 'string'
|
||||
? toolResult.content
|
||||
: JSON.stringify(toolResult.content),
|
||||
isError: toolResult.isError,
|
||||
toolUseResult: toolResult.toolUseResult,
|
||||
};
|
||||
msg.subagentTools = toolResult.subagentTools;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
messages: normalized,
|
||||
total,
|
||||
hasMore,
|
||||
offset,
|
||||
limit,
|
||||
};
|
||||
}
|
||||
}
|
||||
17
server/modules/providers/list/claude/claude.provider.ts
Normal file
17
server/modules/providers/list/claude/claude.provider.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { AbstractProvider } from '@/modules/providers/shared/base/abstract.provider.js';
|
||||
import { ClaudeProviderAuth } from '@/modules/providers/list/claude/claude-auth.provider.js';
|
||||
import { ClaudeMcpProvider } from '@/modules/providers/list/claude/claude-mcp.provider.js';
|
||||
import { ClaudeSessionSynchronizer } from '@/modules/providers/list/claude/claude-session-synchronizer.provider.js';
|
||||
import { ClaudeSessionsProvider } from '@/modules/providers/list/claude/claude-sessions.provider.js';
|
||||
import type { IProviderAuth, IProviderSessionSynchronizer, IProviderSessions } from '@/shared/interfaces.js';
|
||||
|
||||
export class ClaudeProvider extends AbstractProvider {
|
||||
readonly mcp = new ClaudeMcpProvider();
|
||||
readonly auth: IProviderAuth = new ClaudeProviderAuth();
|
||||
readonly sessions: IProviderSessions = new ClaudeSessionsProvider();
|
||||
readonly sessionSynchronizer: IProviderSessionSynchronizer = new ClaudeSessionSynchronizer();
|
||||
|
||||
constructor() {
|
||||
super('claude');
|
||||
}
|
||||
}
|
||||
100
server/modules/providers/list/codex/codex-auth.provider.ts
Normal file
100
server/modules/providers/list/codex/codex-auth.provider.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import spawn from 'cross-spawn';
|
||||
|
||||
import type { IProviderAuth } from '@/shared/interfaces.js';
|
||||
import type { ProviderAuthStatus } from '@/shared/types.js';
|
||||
import { readObjectRecord, readOptionalString } from '@/shared/utils.js';
|
||||
|
||||
// Result of probing local Codex credentials (~/.codex/auth.json).
type CodexCredentialsStatus = {
  // True when OAuth tokens or an API key were found in auth.json.
  authenticated: boolean;
  // Best-effort identity label: an email decoded from the id_token, a
  // generic 'Authenticated'/'API Key Auth' marker, or null when unauthenticated.
  email: string | null;
  // How credentials were obtained: 'credentials_file', 'api_key', or null.
  method: string | null;
  // Human-readable failure reason; set only when not authenticated.
  error?: string;
};
|
||||
|
||||
export class CodexProviderAuth implements IProviderAuth {
|
||||
/**
|
||||
* Checks whether Codex is available to the server runtime.
|
||||
*/
|
||||
private checkInstalled(): boolean {
|
||||
try {
|
||||
spawn.sync('codex', ['--version'], { stdio: 'ignore', timeout: 5000 });
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns Codex SDK availability and credential status.
|
||||
*/
|
||||
async getStatus(): Promise<ProviderAuthStatus> {
|
||||
const installed = this.checkInstalled();
|
||||
const credentials = await this.checkCredentials();
|
||||
|
||||
return {
|
||||
installed,
|
||||
provider: 'codex',
|
||||
authenticated: credentials.authenticated,
|
||||
email: credentials.email,
|
||||
method: credentials.method,
|
||||
error: credentials.authenticated ? undefined : credentials.error || 'Not authenticated',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads Codex auth.json and checks OAuth tokens or an API key fallback.
|
||||
*/
|
||||
private async checkCredentials(): Promise<CodexCredentialsStatus> {
|
||||
try {
|
||||
const authPath = path.join(os.homedir(), '.codex', 'auth.json');
|
||||
const content = await readFile(authPath, 'utf8');
|
||||
const auth = readObjectRecord(JSON.parse(content)) ?? {};
|
||||
const tokens = readObjectRecord(auth.tokens) ?? {};
|
||||
const idToken = readOptionalString(tokens.id_token);
|
||||
const accessToken = readOptionalString(tokens.access_token);
|
||||
|
||||
if (idToken || accessToken) {
|
||||
return {
|
||||
authenticated: true,
|
||||
email: idToken ? this.readEmailFromIdToken(idToken) : 'Authenticated',
|
||||
method: 'credentials_file',
|
||||
};
|
||||
}
|
||||
|
||||
if (readOptionalString(auth.OPENAI_API_KEY)) {
|
||||
return { authenticated: true, email: 'API Key Auth', method: 'api_key' };
|
||||
}
|
||||
|
||||
return { authenticated: false, email: null, method: null, error: 'No valid tokens found' };
|
||||
} catch (error) {
|
||||
const code = (error as NodeJS.ErrnoException).code;
|
||||
return {
|
||||
authenticated: false,
|
||||
email: null,
|
||||
method: null,
|
||||
error: code === 'ENOENT' ? 'Codex not configured' : error instanceof Error ? error.message : 'Failed to read Codex auth',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the user email from a Codex id_token when a readable JWT payload exists.
|
||||
*/
|
||||
private readEmailFromIdToken(idToken: string): string {
|
||||
try {
|
||||
const parts = idToken.split('.');
|
||||
if (parts.length >= 2) {
|
||||
const payload = readObjectRecord(JSON.parse(Buffer.from(parts[1], 'base64url').toString('utf8')));
|
||||
return readOptionalString(payload?.email) ?? readOptionalString(payload?.user) ?? 'Authenticated';
|
||||
}
|
||||
} catch {
|
||||
// Fall back to a generic authenticated marker if the token payload is not readable.
|
||||
}
|
||||
|
||||
return 'Authenticated';
|
||||
}
|
||||
}
|
||||
135
server/modules/providers/list/codex/codex-mcp.provider.ts
Normal file
135
server/modules/providers/list/codex/codex-mcp.provider.ts
Normal file
@@ -0,0 +1,135 @@
|
||||
import { mkdir, readFile, writeFile } from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import TOML from '@iarna/toml';
|
||||
|
||||
import { McpProvider } from '@/modules/providers/shared/mcp/mcp.provider.js';
|
||||
import type { McpScope, ProviderMcpServer, UpsertProviderMcpServerInput } from '@/shared/types.js';
|
||||
import {
|
||||
AppError,
|
||||
readObjectRecord,
|
||||
readOptionalString,
|
||||
readStringArray,
|
||||
readStringRecord,
|
||||
} from '@/shared/utils.js';
|
||||
|
||||
const readTomlConfig = async (filePath: string): Promise<Record<string, unknown>> => {
|
||||
try {
|
||||
const content = await readFile(filePath, 'utf8');
|
||||
const parsed = TOML.parse(content) as Record<string, unknown>;
|
||||
return readObjectRecord(parsed) ?? {};
|
||||
} catch (error) {
|
||||
const code = (error as NodeJS.ErrnoException).code;
|
||||
if (code === 'ENOENT') {
|
||||
return {};
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const writeTomlConfig = async (filePath: string, data: Record<string, unknown>): Promise<void> => {
|
||||
await mkdir(path.dirname(filePath), { recursive: true });
|
||||
const toml = TOML.stringify(data as never);
|
||||
await writeFile(filePath, toml, 'utf8');
|
||||
};
|
||||
|
||||
export class CodexMcpProvider extends McpProvider {
|
||||
constructor() {
|
||||
super('codex', ['user', 'project'], ['stdio', 'http']);
|
||||
}
|
||||
|
||||
protected async readScopedServers(scope: McpScope, workspacePath: string): Promise<Record<string, unknown>> {
|
||||
const filePath = scope === 'user'
|
||||
? path.join(os.homedir(), '.codex', 'config.toml')
|
||||
: path.join(workspacePath, '.codex', 'config.toml');
|
||||
const config = await readTomlConfig(filePath);
|
||||
return readObjectRecord(config.mcp_servers) ?? {};
|
||||
}
|
||||
|
||||
protected async writeScopedServers(
|
||||
scope: McpScope,
|
||||
workspacePath: string,
|
||||
servers: Record<string, unknown>,
|
||||
): Promise<void> {
|
||||
const filePath = scope === 'user'
|
||||
? path.join(os.homedir(), '.codex', 'config.toml')
|
||||
: path.join(workspacePath, '.codex', 'config.toml');
|
||||
const config = await readTomlConfig(filePath);
|
||||
config.mcp_servers = servers;
|
||||
await writeTomlConfig(filePath, config);
|
||||
}
|
||||
|
||||
protected buildServerConfig(input: UpsertProviderMcpServerInput): Record<string, unknown> {
|
||||
if (input.transport === 'stdio') {
|
||||
if (!input.command?.trim()) {
|
||||
throw new AppError('command is required for stdio MCP servers.', {
|
||||
code: 'MCP_COMMAND_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
command: input.command,
|
||||
args: input.args ?? [],
|
||||
env: input.env ?? {},
|
||||
env_vars: input.envVars ?? [],
|
||||
cwd: input.cwd,
|
||||
};
|
||||
}
|
||||
|
||||
if (!input.url?.trim()) {
|
||||
throw new AppError('url is required for http MCP servers.', {
|
||||
code: 'MCP_URL_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
url: input.url,
|
||||
bearer_token_env_var: input.bearerTokenEnvVar,
|
||||
http_headers: input.headers ?? {},
|
||||
env_http_headers: input.envHttpHeaders ?? {},
|
||||
};
|
||||
}
|
||||
|
||||
protected normalizeServerConfig(
|
||||
scope: McpScope,
|
||||
name: string,
|
||||
rawConfig: unknown,
|
||||
): ProviderMcpServer | null {
|
||||
if (!rawConfig || typeof rawConfig !== 'object') {
|
||||
return null;
|
||||
}
|
||||
|
||||
const config = rawConfig as Record<string, unknown>;
|
||||
if (typeof config.command === 'string') {
|
||||
return {
|
||||
provider: 'codex',
|
||||
name,
|
||||
scope,
|
||||
transport: 'stdio',
|
||||
command: config.command,
|
||||
args: readStringArray(config.args),
|
||||
env: readStringRecord(config.env),
|
||||
cwd: readOptionalString(config.cwd),
|
||||
envVars: readStringArray(config.env_vars),
|
||||
};
|
||||
}
|
||||
|
||||
if (typeof config.url === 'string') {
|
||||
return {
|
||||
provider: 'codex',
|
||||
name,
|
||||
scope,
|
||||
transport: 'http',
|
||||
url: config.url,
|
||||
headers: readStringRecord(config.http_headers),
|
||||
bearerTokenEnvVar: readOptionalString(config.bearer_token_env_var),
|
||||
envHttpHeaders: readStringRecord(config.env_http_headers),
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,119 @@
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import { sessionsDb } from '@/modules/database/index.js';
|
||||
import {
|
||||
buildLookupMap,
|
||||
extractFirstValidJsonlData,
|
||||
findFilesRecursivelyCreatedAfter,
|
||||
normalizeSessionName,
|
||||
readFileTimestamps,
|
||||
} from '@/shared/utils.js';
|
||||
import type { IProviderSessionSynchronizer } from '@/shared/interfaces.js';
|
||||
|
||||
// Minimal session metadata extracted from the first valid line of a Codex
// session JSONL transcript.
type ParsedSession = {
  // Codex session id (payload.id from the transcript header line).
  sessionId: string;
  // Workspace the session ran in (the `cwd` recorded by Codex).
  projectPath: string;
  // Display name resolved from ~/.codex/session_index.jsonl, when present.
  sessionName?: string;
};
|
||||
|
||||
/**
|
||||
* Session indexer for Codex transcript artifacts.
|
||||
*/
|
||||
export class CodexSessionSynchronizer implements IProviderSessionSynchronizer {
|
||||
private readonly provider = 'codex' as const;
|
||||
private readonly codexHome = path.join(os.homedir(), '.codex');
|
||||
|
||||
/**
|
||||
* Scans ~/.codex/sessions and upserts discovered sessions into DB.
|
||||
*/
|
||||
async synchronize(since?: Date): Promise<number> {
|
||||
const nameMap = await buildLookupMap(path.join(this.codexHome, 'session_index.jsonl'), 'id', 'thread_name');
|
||||
const files = await findFilesRecursivelyCreatedAfter(
|
||||
path.join(this.codexHome, 'sessions'),
|
||||
'.jsonl',
|
||||
since ?? null
|
||||
);
|
||||
|
||||
let processed = 0;
|
||||
for (const filePath of files) {
|
||||
const parsed = await this.processSessionFile(filePath, nameMap);
|
||||
if (!parsed) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const existingSession = sessionsDb.getSessionById(parsed.sessionId);
|
||||
if (existingSession) {
|
||||
// If session name is untitled and we now have a name, update it
|
||||
if (existingSession.custom_name === 'Untitled Codex Session' && parsed.sessionName && parsed.sessionName !== 'Untitled Codex Session') {
|
||||
sessionsDb.updateSessionCustomName(parsed.sessionId, parsed.sessionName);
|
||||
}
|
||||
}
|
||||
|
||||
const timestamps = await readFileTimestamps(filePath);
|
||||
sessionsDb.createSession(
|
||||
parsed.sessionId,
|
||||
this.provider,
|
||||
parsed.projectPath,
|
||||
parsed.sessionName,
|
||||
timestamps.createdAt,
|
||||
timestamps.updatedAt,
|
||||
filePath
|
||||
);
|
||||
processed += 1;
|
||||
}
|
||||
|
||||
return processed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses and upserts one Codex session JSONL file.
|
||||
*/
|
||||
async synchronizeFile(filePath: string): Promise<string | null> {
|
||||
if (!filePath.endsWith('.jsonl')) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const nameMap = await buildLookupMap(path.join(this.codexHome, 'session_index.jsonl'), 'id', 'thread_name');
|
||||
const parsed = await this.processSessionFile(filePath, nameMap);
|
||||
if (!parsed) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const timestamps = await readFileTimestamps(filePath);
|
||||
return sessionsDb.createSession(
|
||||
parsed.sessionId,
|
||||
this.provider,
|
||||
parsed.projectPath,
|
||||
parsed.sessionName,
|
||||
timestamps.createdAt,
|
||||
timestamps.updatedAt,
|
||||
filePath
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts session metadata from one Codex JSONL session file.
|
||||
*/
|
||||
private async processSessionFile(
|
||||
filePath: string,
|
||||
nameMap: Map<string, string>
|
||||
): Promise<ParsedSession | null> {
|
||||
return extractFirstValidJsonlData(filePath, (rawData) => {
|
||||
const data = rawData as Record<string, unknown>;
|
||||
const payload = data.payload as Record<string, unknown> | undefined;
|
||||
const sessionId = typeof payload?.id === 'string' ? payload.id : undefined;
|
||||
const projectPath = typeof payload?.cwd === 'string' ? payload.cwd : undefined;
|
||||
|
||||
if (!sessionId || !projectPath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
sessionId,
|
||||
projectPath,
|
||||
sessionName: normalizeSessionName(nameMap.get(sessionId), 'Untitled Codex Session'),
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
564
server/modules/providers/list/codex/codex-sessions.provider.ts
Normal file
564
server/modules/providers/list/codex/codex-sessions.provider.ts
Normal file
@@ -0,0 +1,564 @@
|
||||
import fsSync from 'node:fs';
|
||||
import readline from 'node:readline';
|
||||
|
||||
import { sessionsDb } from '@/modules/database/index.js';
|
||||
import type { IProviderSessions } from '@/shared/interfaces.js';
|
||||
import type { AnyRecord, FetchHistoryOptions, FetchHistoryResult, NormalizedMessage } from '@/shared/types.js';
|
||||
import { createNormalizedMessage, generateMessageId, readObjectRecord } from '@/shared/utils.js';
|
||||
|
||||
// Provider identifier stamped onto every normalized Codex message.
const PROVIDER = 'codex';

// Result shape produced by getCodexSessionMessages: either a bare message
// array, or a paginated envelope with totals and optional token usage.
type CodexHistoryResult =
  | AnyRecord[]
  | {
    messages?: AnyRecord[];
    total?: number;
    hasMore?: boolean;
    offset?: number;
    limit?: number | null;
    // Accumulated token usage from the transcript's token_count events.
    tokenUsage?: unknown;
  };
|
||||
|
||||
function isVisibleCodexUserMessage(payload: AnyRecord | null | undefined): boolean {
|
||||
if (!payload || payload.type !== 'user_message') {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (payload.kind && payload.kind !== 'plain') {
|
||||
return false;
|
||||
}
|
||||
|
||||
return typeof payload.message === 'string' && payload.message.trim().length > 0;
|
||||
}
|
||||
|
||||
function extractCodexTextContent(content: unknown): string {
|
||||
if (!Array.isArray(content)) {
|
||||
return typeof content === 'string' ? content : '';
|
||||
}
|
||||
|
||||
return content
|
||||
.map((item) => {
|
||||
if (!item || typeof item !== 'object') {
|
||||
return '';
|
||||
}
|
||||
|
||||
const record = item as AnyRecord;
|
||||
if (
|
||||
(record.type === 'input_text' || record.type === 'output_text' || record.type === 'text')
|
||||
&& typeof record.text === 'string'
|
||||
) {
|
||||
return record.text;
|
||||
}
|
||||
|
||||
return '';
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
/**
 * Streams one Codex session JSONL transcript and converts it into a flat,
 * time-sorted list of UI-facing message records.
 *
 * The file path is looked up in the sessions DB by id. Each JSONL line is an
 * event; recognized shapes are mapped to user/assistant/thinking/tool_use/
 * tool_result entries, malformed lines are skipped. Pagination (when `limit`
 * is given) is taken from the END of the sorted list: `offset` counts back
 * from the newest message.
 *
 * @param sessionId - Session id to resolve via sessionsDb.
 * @param limit - Page size; null returns every message un-paginated.
 * @param offset - Messages to skip from the tail (newest-first paging).
 * @returns Paginated envelope when `limit` is set, otherwise a bare
 *   `{ messages, tokenUsage }` object; never throws (errors yield an empty result).
 */
async function getCodexSessionMessages(
  sessionId: string,
  limit: number | null = null,
  offset = 0,
): Promise<CodexHistoryResult> {
  try {
    const sessionFilePath = sessionsDb.getSessionById(sessionId)?.jsonl_path;

    if (!sessionFilePath) {
      console.warn(`Codex session file not found for session ${sessionId}`);
      return { messages: [], total: 0, hasMore: false };
    }

    const messages: AnyRecord[] = [];
    // Latest token_count event wins; null when the transcript has none.
    let tokenUsage: AnyRecord | null = null;
    const fileStream = fsSync.createReadStream(sessionFilePath);
    const rl = readline.createInterface({
      input: fileStream,
      crlfDelay: Infinity,
    });

    for await (const line of rl) {
      if (!line.trim()) {
        continue;
      }

      try {
        const entry = JSON.parse(line) as AnyRecord;

        // token_count events carry cumulative usage; keep overwriting so the
        // final value reflects the whole session.
        if (entry.type === 'event_msg' && entry.payload?.type === 'token_count' && entry.payload?.info) {
          const info = entry.payload.info as AnyRecord;
          if (info.total_token_usage) {
            const usage = info.total_token_usage as AnyRecord;
            tokenUsage = {
              used: usage.total_tokens || 0,
              // 200000 is a fallback when the model context window is absent
              // from the event — TODO confirm this matches Codex defaults.
              total: info.model_context_window || 200000,
            };
          }
        }

        // Visible (plain, non-empty) user messages.
        if (entry.type === 'event_msg' && isVisibleCodexUserMessage(entry.payload as AnyRecord)) {
          messages.push({
            type: 'user',
            timestamp: entry.timestamp,
            message: {
              role: 'user',
              content: entry.payload.message,
            },
          });
        }

        // Assistant text responses (only when they flatten to non-blank text).
        if (
          entry.type === 'response_item' &&
          entry.payload?.type === 'message' &&
          entry.payload.role === 'assistant'
        ) {
          const textContent = extractCodexTextContent(entry.payload.content);
          if (textContent.trim()) {
            messages.push({
              type: 'assistant',
              timestamp: entry.timestamp,
              message: {
                role: 'assistant',
                content: textContent,
              },
            });
          }
        }

        // Reasoning summaries become 'thinking' entries.
        if (entry.type === 'response_item' && entry.payload?.type === 'reasoning') {
          const summaryText = Array.isArray(entry.payload.summary)
            ? entry.payload.summary
              .map((item: AnyRecord) => item?.text)
              .filter(Boolean)
              .join('\n')
            : '';

          if (summaryText.trim()) {
            messages.push({
              type: 'thinking',
              timestamp: entry.timestamp,
              message: {
                role: 'assistant',
                content: summaryText,
              },
            });
          }
        }

        // Built-in function calls; shell_command is renamed to the UI's
        // 'Bash' tool with just the command extracted from its JSON args.
        if (entry.type === 'response_item' && entry.payload?.type === 'function_call') {
          let toolName = entry.payload.name;
          let toolInput = entry.payload.arguments;

          if (toolName === 'shell_command') {
            toolName = 'Bash';
            try {
              const args = JSON.parse(entry.payload.arguments) as AnyRecord;
              toolInput = JSON.stringify({ command: args.command });
            } catch {
              // Keep original arguments when parsing fails.
            }
          }

          messages.push({
            type: 'tool_use',
            timestamp: entry.timestamp,
            toolName,
            toolInput,
            toolCallId: entry.payload.call_id,
          });
        }

        // Function-call outputs, correlated later by call_id.
        if (entry.type === 'response_item' && entry.payload?.type === 'function_call_output') {
          messages.push({
            type: 'tool_result',
            timestamp: entry.timestamp,
            toolCallId: entry.payload.call_id,
            output: entry.payload.output,
          });
        }

        // Custom tool calls; apply_patch is rewritten into an Edit-shaped
        // payload by scraping the patch text for file path and +/- lines.
        if (entry.type === 'response_item' && entry.payload?.type === 'custom_tool_call') {
          const toolName = entry.payload.name || 'custom_tool';
          const input = entry.payload.input || '';

          if (toolName === 'apply_patch') {
            const fileMatch = String(input).match(/\*\*\* Update File: (.+)/);
            const filePath = fileMatch ? fileMatch[1].trim() : 'unknown';
            const lines = String(input).split('\n');
            const oldLines: string[] = [];
            const newLines: string[] = [];

            for (const lineContent of lines) {
              // '-'/'+' are diff content; '---'/'+++' are diff headers.
              if (lineContent.startsWith('-') && !lineContent.startsWith('---')) {
                oldLines.push(lineContent.slice(1));
              } else if (lineContent.startsWith('+') && !lineContent.startsWith('+++')) {
                newLines.push(lineContent.slice(1));
              }
            }

            messages.push({
              type: 'tool_use',
              timestamp: entry.timestamp,
              toolName: 'Edit',
              toolInput: JSON.stringify({
                file_path: filePath,
                old_string: oldLines.join('\n'),
                new_string: newLines.join('\n'),
              }),
              toolCallId: entry.payload.call_id,
            });
          } else {
            messages.push({
              type: 'tool_use',
              timestamp: entry.timestamp,
              toolName,
              toolInput: input,
              toolCallId: entry.payload.call_id,
            });
          }
        }

        // Custom tool outputs, also correlated by call_id.
        if (entry.type === 'response_item' && entry.payload?.type === 'custom_tool_call_output') {
          messages.push({
            type: 'tool_result',
            timestamp: entry.timestamp,
            toolCallId: entry.payload.call_id,
            output: entry.payload.output || '',
          });
        }
      } catch {
        // Skip malformed lines.
      }
    }

    // Chronological order; entries without timestamps sort to the front (epoch 0).
    messages.sort(
      (a, b) => new Date(a.timestamp || 0).getTime() - new Date(b.timestamp || 0).getTime(),
    );
    const total = messages.length;

    if (limit !== null) {
      // Page from the tail: offset counts back from the newest message.
      const startIndex = Math.max(0, total - offset - limit);
      const endIndex = total - offset;
      const paginatedMessages = messages.slice(startIndex, endIndex);
      const hasMore = startIndex > 0;

      return {
        messages: paginatedMessages,
        total,
        hasMore,
        offset,
        limit,
        tokenUsage,
      };
    }

    return { messages, tokenUsage };
  } catch (error) {
    console.error(`Error reading Codex session messages for ${sessionId}:`, error);
    return { messages: [], total: 0, hasMore: false };
  }
}
|
||||
|
||||
/**
 * History and message normalization for the Codex provider.
 *
 * Accepts two input families: compact history entries produced by
 * getCodexSessionMessages / projects.js, and live SDK events (type 'item',
 * 'turn_complete', 'turn_failed'); both are mapped to NormalizedMessage.
 */
export class CodexSessionsProvider implements IProviderSessions {
  /**
   * Normalizes a persisted Codex JSONL entry.
   *
   * Live Codex SDK events are transformed before they reach normalizeMessage(),
   * while history entries already use a compact message/tool shape from projects.js.
   * Returns [] for blank content or unrecognized shapes.
   */
  private normalizeHistoryEntry(raw: AnyRecord, sessionId: string | null): NormalizedMessage[] {
    const ts = raw.timestamp || new Date().toISOString();
    // Stable id when the entry carries one; otherwise a fresh generated id.
    const baseId = raw.uuid || generateMessageId('codex');

    // Reasoning/thinking entries (string content only; blanks dropped).
    if (raw.type === 'thinking' || raw.isReasoning) {
      const thinkingContent = typeof raw.message?.content === 'string'
        ? raw.message.content
        : '';
      if (!thinkingContent.trim()) {
        return [];
      }
      return [createNormalizedMessage({
        id: baseId,
        sessionId,
        timestamp: ts,
        provider: PROVIDER,
        kind: 'thinking',
        content: thinkingContent,
      })];
    }

    // User text: string passes through, arrays are joined from their
    // string/text parts, anything else is stringified.
    if (raw.message?.role === 'user') {
      const content = typeof raw.message.content === 'string'
        ? raw.message.content
        : Array.isArray(raw.message.content)
          ? raw.message.content
            .map((part: string | AnyRecord) => typeof part === 'string' ? part : part?.text || '')
            .filter(Boolean)
            .join('\n')
          : String(raw.message.content || '');
      if (!content.trim()) {
        return [];
      }
      return [createNormalizedMessage({
        id: baseId,
        sessionId,
        timestamp: ts,
        provider: PROVIDER,
        kind: 'text',
        role: 'user',
        content,
      })];
    }

    // Assistant text: same flattening, but non-string/non-array falls back
    // to '' (and is then dropped) rather than being stringified.
    if (raw.message?.role === 'assistant') {
      const content = typeof raw.message.content === 'string'
        ? raw.message.content
        : Array.isArray(raw.message.content)
          ? raw.message.content
            .map((part: string | AnyRecord) => typeof part === 'string' ? part : part?.text || '')
            .filter(Boolean)
            .join('\n')
          : '';
      if (!content.trim()) {
        return [];
      }
      return [createNormalizedMessage({
        id: baseId,
        sessionId,
        timestamp: ts,
        provider: PROVIDER,
        kind: 'text',
        role: 'assistant',
        content,
      })];
    }

    // Tool invocations (shape produced by getCodexSessionMessages).
    if (raw.type === 'tool_use' || raw.toolName) {
      return [createNormalizedMessage({
        id: baseId,
        sessionId,
        timestamp: ts,
        provider: PROVIDER,
        kind: 'tool_use',
        toolName: raw.toolName || 'Unknown',
        toolInput: raw.toolInput,
        toolId: raw.toolCallId || baseId,
      })];
    }

    // Tool outputs, matched to tool_use entries via toolId.
    if (raw.type === 'tool_result') {
      return [createNormalizedMessage({
        id: baseId,
        sessionId,
        timestamp: ts,
        provider: PROVIDER,
        kind: 'tool_result',
        toolId: raw.toolCallId || '',
        content: raw.output || '',
        isError: Boolean(raw.isError),
      })];
    }

    return [];
  }

  /**
   * Normalizes either a Codex history entry or a transformed live SDK event.
   *
   * Entries carrying message.role are routed to normalizeHistoryEntry();
   * everything else is treated as a live event ('item' with an itemType,
   * or a turn_complete/turn_failed terminator). Unknown shapes yield [].
   */
  normalizeMessage(rawMessage: unknown, sessionId: string | null): NormalizedMessage[] {
    const raw = readObjectRecord(rawMessage);
    if (!raw) {
      return [];
    }

    // History-shaped entries carry message.role; delegate to the history path.
    if (raw.message?.role) {
      return this.normalizeHistoryEntry(raw, sessionId);
    }

    const ts = raw.timestamp || new Date().toISOString();
    const baseId = raw.uuid || generateMessageId('codex');

    // Live SDK stream items, dispatched by their itemType.
    if (raw.type === 'item') {
      switch (raw.itemType) {
        case 'agent_message':
          return [createNormalizedMessage({
            id: baseId,
            sessionId,
            timestamp: ts,
            provider: PROVIDER,
            kind: 'text',
            role: 'assistant',
            content: raw.message?.content || '',
          })];
        case 'reasoning':
          return [createNormalizedMessage({
            id: baseId,
            sessionId,
            timestamp: ts,
            provider: PROVIDER,
            kind: 'thinking',
            content: raw.message?.content || '',
          })];
        case 'command_execution':
          // Shell runs are surfaced as the UI's 'Bash' tool.
          return [createNormalizedMessage({
            id: baseId,
            sessionId,
            timestamp: ts,
            provider: PROVIDER,
            kind: 'tool_use',
            toolName: 'Bash',
            toolInput: { command: raw.command },
            toolId: baseId,
            output: raw.output,
            exitCode: raw.exitCode,
            status: raw.status,
          })];
        case 'file_change':
          return [createNormalizedMessage({
            id: baseId,
            sessionId,
            timestamp: ts,
            provider: PROVIDER,
            kind: 'tool_use',
            toolName: 'FileChanges',
            toolInput: raw.changes,
            toolId: baseId,
            status: raw.status,
          })];
        case 'mcp_tool_call':
          return [createNormalizedMessage({
            id: baseId,
            sessionId,
            timestamp: ts,
            provider: PROVIDER,
            kind: 'tool_use',
            toolName: raw.tool || 'MCP',
            toolInput: raw.arguments,
            toolId: baseId,
            server: raw.server,
            result: raw.result,
            error: raw.error,
            status: raw.status,
          })];
        case 'web_search':
          return [createNormalizedMessage({
            id: baseId,
            sessionId,
            timestamp: ts,
            provider: PROVIDER,
            kind: 'tool_use',
            toolName: 'WebSearch',
            toolInput: { query: raw.query },
            toolId: baseId,
          })];
        case 'todo_list':
          return [createNormalizedMessage({
            id: baseId,
            sessionId,
            timestamp: ts,
            provider: PROVIDER,
            kind: 'tool_use',
            toolName: 'TodoList',
            toolInput: { items: raw.items },
            toolId: baseId,
          })];
        case 'error':
          return [createNormalizedMessage({
            id: baseId,
            sessionId,
            timestamp: ts,
            provider: PROVIDER,
            kind: 'error',
            content: raw.message?.content || 'Unknown error',
          })];
        default:
          // Unknown item types are still surfaced as generic tool activity
          // rather than dropped.
          return [createNormalizedMessage({
            id: baseId,
            sessionId,
            timestamp: ts,
            provider: PROVIDER,
            kind: 'tool_use',
            toolName: raw.itemType || 'Unknown',
            toolInput: raw.item || raw,
            toolId: baseId,
          })];
      }
    }

    // Turn terminators.
    if (raw.type === 'turn_complete') {
      return [createNormalizedMessage({
        id: baseId,
        sessionId,
        timestamp: ts,
        provider: PROVIDER,
        kind: 'complete',
      })];
    }
    if (raw.type === 'turn_failed') {
      return [createNormalizedMessage({
        id: baseId,
        sessionId,
        timestamp: ts,
        provider: PROVIDER,
        kind: 'error',
        content: raw.error?.message || 'Turn failed',
      })];
    }

    return [];
  }

  /**
   * Loads Codex JSONL history and keeps token usage metadata when projects.js
   * provides it.
   *
   * Normalizes every raw entry, then pairs tool_result entries back onto
   * their tool_use messages by toolId. Load failures yield an empty result
   * instead of throwing.
   */
  async fetchHistory(
    sessionId: string,
    options: FetchHistoryOptions = {},
  ): Promise<FetchHistoryResult> {
    const { limit = null, offset = 0 } = options;

    let result: CodexHistoryResult;
    try {
      result = await getCodexSessionMessages(sessionId, limit, offset);
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      console.warn(`[CodexProvider] Failed to load session ${sessionId}:`, message);
      return { messages: [], total: 0, hasMore: false, offset: 0, limit: null };
    }

    // The loader may return a bare array (legacy) or a paginated envelope.
    const rawMessages = Array.isArray(result) ? result : (result.messages || []);
    const total = Array.isArray(result) ? rawMessages.length : (result.total || 0);
    const hasMore = Array.isArray(result) ? false : Boolean(result.hasMore);
    const tokenUsage = Array.isArray(result) ? undefined : result.tokenUsage;

    const normalized: NormalizedMessage[] = [];
    for (const raw of rawMessages) {
      normalized.push(...this.normalizeHistoryEntry(raw, sessionId));
    }

    // Attach each tool_result onto its originating tool_use (matched by toolId)
    // so the UI can render call + outcome together.
    const toolResultMap = new Map<string, NormalizedMessage>();
    for (const msg of normalized) {
      if (msg.kind === 'tool_result' && msg.toolId) {
        toolResultMap.set(msg.toolId, msg);
      }
    }
    for (const msg of normalized) {
      if (msg.kind === 'tool_use' && msg.toolId && toolResultMap.has(msg.toolId)) {
        const toolResult = toolResultMap.get(msg.toolId);
        if (toolResult) {
          msg.toolResult = { content: toolResult.content, isError: toolResult.isError };
        }
      }
    }

    return {
      messages: normalized,
      total,
      hasMore,
      offset,
      limit,
      tokenUsage,
    };
  }
}
|
||||
17
server/modules/providers/list/codex/codex.provider.ts
Normal file
17
server/modules/providers/list/codex/codex.provider.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { AbstractProvider } from '@/modules/providers/shared/base/abstract.provider.js';
|
||||
import { CodexProviderAuth } from '@/modules/providers/list/codex/codex-auth.provider.js';
|
||||
import { CodexMcpProvider } from '@/modules/providers/list/codex/codex-mcp.provider.js';
|
||||
import { CodexSessionSynchronizer } from '@/modules/providers/list/codex/codex-session-synchronizer.provider.js';
|
||||
import { CodexSessionsProvider } from '@/modules/providers/list/codex/codex-sessions.provider.js';
|
||||
import type { IProviderAuth, IProviderSessionSynchronizer, IProviderSessions } from '@/shared/interfaces.js';
|
||||
|
||||
export class CodexProvider extends AbstractProvider {
|
||||
readonly mcp = new CodexMcpProvider();
|
||||
readonly auth: IProviderAuth = new CodexProviderAuth();
|
||||
readonly sessions: IProviderSessions = new CodexSessionsProvider();
|
||||
readonly sessionSynchronizer: IProviderSessionSynchronizer = new CodexSessionSynchronizer();
|
||||
|
||||
constructor() {
|
||||
super('codex');
|
||||
}
|
||||
}
|
||||
143
server/modules/providers/list/cursor/cursor-auth.provider.ts
Normal file
143
server/modules/providers/list/cursor/cursor-auth.provider.ts
Normal file
@@ -0,0 +1,143 @@
|
||||
import spawn from 'cross-spawn';
|
||||
|
||||
import type { IProviderAuth } from '@/shared/interfaces.js';
|
||||
import type { ProviderAuthStatus } from '@/shared/types.js';
|
||||
|
||||
// Outcome of probing `cursor-agent status` for the current login state.
type CursorLoginStatus = {
  // True when the CLI reported a logged-in account.
  authenticated: boolean;
  // Email parsed from the "Logged in as <email>" output, a generic
  // 'Logged in' marker, or null when unauthenticated.
  email: string | null;
  // 'cli' on success (status always comes from the cursor-agent CLI), else null.
  method: string | null;
  // Failure reason (stderr, timeout, or missing CLI); set only on failure.
  error?: string;
};
|
||||
|
||||
export class CursorProviderAuth implements IProviderAuth {
|
||||
/**
|
||||
* Checks whether the cursor-agent CLI is available on this host.
|
||||
*/
|
||||
private checkInstalled(): boolean {
|
||||
try {
|
||||
spawn.sync('cursor-agent', ['--version'], { stdio: 'ignore', timeout: 5000 });
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns Cursor CLI installation and login status.
|
||||
*/
|
||||
async getStatus(): Promise<ProviderAuthStatus> {
|
||||
const installed = this.checkInstalled();
|
||||
|
||||
if (!installed) {
|
||||
return {
|
||||
installed,
|
||||
provider: 'cursor',
|
||||
authenticated: false,
|
||||
email: null,
|
||||
method: null,
|
||||
error: 'Cursor CLI is not installed',
|
||||
};
|
||||
}
|
||||
|
||||
const login = await this.checkCursorLogin();
|
||||
|
||||
return {
|
||||
installed,
|
||||
provider: 'cursor',
|
||||
authenticated: login.authenticated,
|
||||
email: login.email,
|
||||
method: login.method,
|
||||
error: login.authenticated ? undefined : login.error || 'Not logged in',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs cursor-agent status and parses the login marker from stdout.
|
||||
*/
|
||||
private checkCursorLogin(): Promise<CursorLoginStatus> {
|
||||
return new Promise((resolve) => {
|
||||
let processCompleted = false;
|
||||
let childProcess: ReturnType<typeof spawn> | undefined;
|
||||
|
||||
const timeout = setTimeout(() => {
|
||||
if (!processCompleted) {
|
||||
processCompleted = true;
|
||||
childProcess?.kill();
|
||||
resolve({
|
||||
authenticated: false,
|
||||
email: null,
|
||||
method: null,
|
||||
error: 'Command timeout',
|
||||
});
|
||||
}
|
||||
}, 5000);
|
||||
|
||||
try {
|
||||
childProcess = spawn('cursor-agent', ['status']);
|
||||
} catch {
|
||||
clearTimeout(timeout);
|
||||
processCompleted = true;
|
||||
resolve({
|
||||
authenticated: false,
|
||||
email: null,
|
||||
method: null,
|
||||
error: 'Cursor CLI not found or not installed',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
let stdout = '';
|
||||
let stderr = '';
|
||||
|
||||
childProcess.stdout?.on('data', (data: Buffer) => {
|
||||
stdout += data.toString();
|
||||
});
|
||||
|
||||
childProcess.stderr?.on('data', (data: Buffer) => {
|
||||
stderr += data.toString();
|
||||
});
|
||||
|
||||
childProcess.on('close', (code) => {
|
||||
if (processCompleted) {
|
||||
return;
|
||||
}
|
||||
processCompleted = true;
|
||||
clearTimeout(timeout);
|
||||
|
||||
if (code === 0) {
|
||||
const emailMatch = stdout.match(/Logged in as ([a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,})/i);
|
||||
if (emailMatch?.[1]) {
|
||||
resolve({ authenticated: true, email: emailMatch[1], method: 'cli' });
|
||||
return;
|
||||
}
|
||||
|
||||
if (stdout.includes('Logged in')) {
|
||||
resolve({ authenticated: true, email: 'Logged in', method: 'cli' });
|
||||
return;
|
||||
}
|
||||
|
||||
resolve({ authenticated: false, email: null, method: null, error: 'Not logged in' });
|
||||
return;
|
||||
}
|
||||
|
||||
resolve({ authenticated: false, email: null, method: null, error: stderr || 'Not logged in' });
|
||||
});
|
||||
|
||||
childProcess.on('error', () => {
|
||||
if (processCompleted) {
|
||||
return;
|
||||
}
|
||||
processCompleted = true;
|
||||
clearTimeout(timeout);
|
||||
|
||||
resolve({
|
||||
authenticated: false,
|
||||
email: null,
|
||||
method: null,
|
||||
error: 'Cursor CLI not found or not installed',
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
108
server/modules/providers/list/cursor/cursor-mcp.provider.ts
Normal file
108
server/modules/providers/list/cursor/cursor-mcp.provider.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import { McpProvider } from '@/modules/providers/shared/mcp/mcp.provider.js';
|
||||
import type { McpScope, ProviderMcpServer, UpsertProviderMcpServerInput } from '@/shared/types.js';
|
||||
import {
|
||||
AppError,
|
||||
readJsonConfig,
|
||||
readObjectRecord,
|
||||
readOptionalString,
|
||||
readStringArray,
|
||||
readStringRecord,
|
||||
writeJsonConfig,
|
||||
} from '@/shared/utils.js';
|
||||
|
||||
export class CursorMcpProvider extends McpProvider {
|
||||
constructor() {
|
||||
super('cursor', ['user', 'project'], ['stdio', 'http']);
|
||||
}
|
||||
|
||||
protected async readScopedServers(scope: McpScope, workspacePath: string): Promise<Record<string, unknown>> {
|
||||
const filePath = scope === 'user'
|
||||
? path.join(os.homedir(), '.cursor', 'mcp.json')
|
||||
: path.join(workspacePath, '.cursor', 'mcp.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
return readObjectRecord(config.mcpServers) ?? {};
|
||||
}
|
||||
|
||||
protected async writeScopedServers(
|
||||
scope: McpScope,
|
||||
workspacePath: string,
|
||||
servers: Record<string, unknown>,
|
||||
): Promise<void> {
|
||||
const filePath = scope === 'user'
|
||||
? path.join(os.homedir(), '.cursor', 'mcp.json')
|
||||
: path.join(workspacePath, '.cursor', 'mcp.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
config.mcpServers = servers;
|
||||
await writeJsonConfig(filePath, config);
|
||||
}
|
||||
|
||||
protected buildServerConfig(input: UpsertProviderMcpServerInput): Record<string, unknown> {
|
||||
if (input.transport === 'stdio') {
|
||||
if (!input.command?.trim()) {
|
||||
throw new AppError('command is required for stdio MCP servers.', {
|
||||
code: 'MCP_COMMAND_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
command: input.command,
|
||||
args: input.args ?? [],
|
||||
env: input.env ?? {},
|
||||
cwd: input.cwd,
|
||||
};
|
||||
}
|
||||
|
||||
if (!input.url?.trim()) {
|
||||
throw new AppError('url is required for http MCP servers.', {
|
||||
code: 'MCP_URL_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
url: input.url,
|
||||
headers: input.headers ?? {},
|
||||
};
|
||||
}
|
||||
|
||||
protected normalizeServerConfig(
|
||||
scope: McpScope,
|
||||
name: string,
|
||||
rawConfig: unknown,
|
||||
): ProviderMcpServer | null {
|
||||
if (!rawConfig || typeof rawConfig !== 'object') {
|
||||
return null;
|
||||
}
|
||||
|
||||
const config = rawConfig as Record<string, unknown>;
|
||||
if (typeof config.command === 'string') {
|
||||
return {
|
||||
provider: 'cursor',
|
||||
name,
|
||||
scope,
|
||||
transport: 'stdio',
|
||||
command: config.command,
|
||||
args: readStringArray(config.args),
|
||||
env: readStringRecord(config.env),
|
||||
cwd: readOptionalString(config.cwd),
|
||||
};
|
||||
}
|
||||
|
||||
if (typeof config.url === 'string') {
|
||||
return {
|
||||
provider: 'cursor',
|
||||
name,
|
||||
scope,
|
||||
transport: 'http',
|
||||
url: config.url,
|
||||
headers: readStringRecord(config.headers),
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,176 @@
|
||||
import crypto from 'node:crypto';
|
||||
import fs from 'node:fs';
|
||||
import fsp from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import readline from 'node:readline';
|
||||
|
||||
import { sessionsDb } from '@/modules/database/index.js';
|
||||
import {
|
||||
extractFirstValidJsonlData,
|
||||
findFilesRecursivelyCreatedAfter,
|
||||
normalizeSessionName,
|
||||
readFileTimestamps,
|
||||
} from '@/shared/utils.js';
|
||||
import type { IProviderSessionSynchronizer } from '@/shared/interfaces.js';
|
||||
|
||||
// Metadata extracted from one Cursor session JSONL file.
type ParsedSession = {
  sessionId: string;    // derived from the JSONL file's basename
  projectPath: string;  // workspace path recovered from the sibling worker.log
  sessionName?: string; // first line of the first user message, when present
};
|
||||
|
||||
/**
|
||||
* Returns directory entries or an empty list when the folder is missing.
|
||||
*/
|
||||
async function listDirectoryEntriesSafe(
|
||||
directoryPath: string
|
||||
): Promise<import('node:fs').Dirent[]> {
|
||||
try {
|
||||
return await fsp.readdir(directoryPath, { withFileTypes: true });
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Session indexer for Cursor transcript artifacts.
 *
 * Discovers Cursor chat JSONL files under ~/.cursor/chats/<md5(projectPath)>
 * and upserts their metadata into the sessions database. Project paths are
 * recovered from each project's worker.log.
 */
export class CursorSessionSynchronizer implements IProviderSessionSynchronizer {
  private readonly provider = 'cursor' as const;
  // Root of Cursor's on-disk state (projects/ and chats/ live under it).
  private readonly cursorHome = path.join(os.homedir(), '.cursor');

  /**
   * Scans Cursor chats and upserts discovered sessions into DB.
   *
   * @param since - when provided, only JSONL files created after this moment
   *                are considered.
   * @returns number of session files successfully processed.
   */
  async synchronize(since?: Date): Promise<number> {
    const projectsDir = path.join(this.cursorHome, 'projects');
    const projectEntries = await listDirectoryEntriesSafe(projectsDir);
    // Multiple project directories can resolve to the same workspace path;
    // only the first occurrence is indexed.
    const seenProjectPaths = new Set<string>();

    let processed = 0;
    for (const entry of projectEntries) {
      if (!entry.isDirectory()) {
        continue;
      }

      // The workspace path is only recorded in the project's worker.log.
      const workerLogPath = path.join(projectsDir, entry.name, 'worker.log');
      const projectPath = await this.extractProjectPathFromWorkerLog(workerLogPath);
      if (!projectPath || seenProjectPaths.has(projectPath)) {
        continue;
      }

      seenProjectPaths.add(projectPath);
      // Cursor names chat directories after the md5 of the workspace path.
      const projectHash = this.md5(projectPath);
      const chatsDir = path.join(this.cursorHome, 'chats', projectHash);
      const files = await findFilesRecursivelyCreatedAfter(chatsDir, '.jsonl', since ?? null);

      for (const filePath of files) {
        const parsed = await this.processSessionFile(filePath);
        if (!parsed) {
          continue;
        }

        const timestamps = await readFileTimestamps(filePath);
        sessionsDb.createSession(
          parsed.sessionId,
          this.provider,
          parsed.projectPath,
          parsed.sessionName,
          timestamps.createdAt,
          timestamps.updatedAt,
          filePath
        );
        processed += 1;
      }
    }

    return processed;
  }

  /**
   * Parses and upserts one Cursor session JSONL file.
   *
   * @param filePath - path to a .jsonl transcript file.
   * @returns the created/updated session id, or null when the file is not a
   *          JSONL transcript or could not be parsed.
   */
  async synchronizeFile(filePath: string): Promise<string | null> {
    if (!filePath.endsWith('.jsonl')) {
      return null;
    }

    const parsed = await this.processSessionFile(filePath);
    if (!parsed) {
      return null;
    }

    const timestamps = await readFileTimestamps(filePath);
    return sessionsDb.createSession(
      parsed.sessionId,
      this.provider,
      parsed.projectPath,
      parsed.sessionName,
      timestamps.createdAt,
      timestamps.updatedAt,
      filePath
    );
  }

  /**
   * Produces the same project hash Cursor uses in chat directory names.
   * md5 is used purely as a directory-name fingerprint, not for security.
   */
  private md5(input: string): string {
    return crypto.createHash('md5').update(input).digest('hex');
  }

  /**
   * Extracts project path from Cursor worker.log.
   *
   * Streams the log line by line and returns the first `workspacePath=...`
   * value; returns null when the file is missing or no marker is found.
   */
  private async extractProjectPathFromWorkerLog(filePath: string): Promise<string | null> {
    try {
      const fileStream = fs.createReadStream(filePath, { encoding: 'utf8' });
      const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });

      for await (const line of lineReader) {
        const match = line.match(/workspacePath=(.*)$/);
        const projectPath = match?.[1]?.trim();
        if (projectPath) {
          // Stop reading as soon as the marker is found and release the stream.
          lineReader.close();
          fileStream.close();
          return projectPath;
        }
      }
    } catch {
      // Missing worker logs are valid for partial or incomplete session data.
    }

    return null;
  }

  /**
   * Extracts session metadata from one Cursor JSONL session file.
   *
   * The session id is the file's basename; the project path comes from the
   * worker.log two levels above the file (chats/<hash>/<session>.jsonl layout
   * is assumed here — confirm against Cursor's on-disk structure).
   */
  private async processSessionFile(filePath: string): Promise<ParsedSession | null> {
    const sessionId = path.basename(filePath, '.jsonl');
    const grandparentDir = path.dirname(path.dirname(filePath));
    const workerLogPath = path.join(grandparentDir, 'worker.log');
    const projectPath = await this.extractProjectPathFromWorkerLog(workerLogPath);

    if (!projectPath) {
      return null;
    }

    // Use the first user message as the session title source.
    return extractFirstValidJsonlData(filePath, (rawData) => {
      const data = rawData as Record<string, any>;
      if (data.role !== 'user') {
        return null;
      }

      const text = typeof data.message?.content?.[0]?.text === 'string' ? data.message.content[0].text : '';
      // Strip Cursor's <user_query> wrapper tags and keep only the first line.
      const firstLine = text.replace(/<\/?user_query>/g, '').trim().split('\n')[0];

      return {
        sessionId,
        projectPath,
        sessionName: normalizeSessionName(firstLine, 'Untitled Cursor Session'),
      };
    });
  }
}
|
||||
421
server/modules/providers/list/cursor/cursor-sessions.provider.ts
Normal file
421
server/modules/providers/list/cursor/cursor-sessions.provider.ts
Normal file
@@ -0,0 +1,421 @@
|
||||
import crypto from 'node:crypto';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import type { IProviderSessions } from '@/shared/interfaces.js';
|
||||
import type { AnyRecord, FetchHistoryOptions, FetchHistoryResult, NormalizedMessage } from '@/shared/types.js';
|
||||
import { createNormalizedMessage, generateMessageId, readObjectRecord } from '@/shared/utils.js';
|
||||
|
||||
const PROVIDER = 'cursor';

// Raw row from Cursor's store.db `blobs` table.
type CursorDbBlob = {
  rowid: number;
  id: string;      // content-address of the blob (hex string)
  data?: Buffer;   // raw payload; JSON payloads start with '{' (0x7B)
};

// A blob whose payload parsed as JSON — a candidate chat message.
type CursorJsonBlob = CursorDbBlob & {
  parsed: AnyRecord;
};

// A message blob with its resolved position in the conversation.
type CursorMessageBlob = {
  id: string;
  sequence: number; // 1-based conversation order
  rowid: number;    // tie-breaker when order cannot be derived
  content: AnyRecord;
};
|
||||
|
||||
function sanitizeCursorSessionId(sessionId: string): string {
|
||||
const normalized = sessionId.trim();
|
||||
if (!normalized) {
|
||||
throw new Error('Cursor session id is required.');
|
||||
}
|
||||
|
||||
if (
|
||||
normalized.includes('..')
|
||||
|| normalized.includes(path.posix.sep)
|
||||
|| normalized.includes(path.win32.sep)
|
||||
|| normalized !== path.basename(normalized)
|
||||
) {
|
||||
throw new Error(`Invalid cursor session id "${sessionId}".`);
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
export class CursorSessionsProvider implements IProviderSessions {
  /**
   * Loads Cursor's SQLite blob DAG and returns message blobs in conversation
   * order. Cursor history is stored as content-addressed blobs rather than JSONL.
   *
   * @param sessionId - Cursor session id (validated against path traversal).
   * @param projectPath - workspace path; falls back to process.cwd() for hashing.
   * @throws when the session id or resolved path is invalid, or the store.db
   *         does not exist (fileMustExist).
   */
  private async loadCursorBlobs(sessionId: string, projectPath: string): Promise<CursorMessageBlob[]> {
    // Lazy-import better-sqlite3 so the module doesn't fail if it's unavailable
    const { default: Database } = await import('better-sqlite3');

    // Cursor keys chat directories by md5 of the workspace path (fingerprint only).
    const cwdId = crypto.createHash('md5').update(projectPath || process.cwd()).digest('hex');
    const safeSessionId = sanitizeCursorSessionId(sessionId);
    const baseChatsPath = path.join(os.homedir(), '.cursor', 'chats', cwdId);
    const storeDbPath = path.join(baseChatsPath, safeSessionId, 'store.db');
    // Defense in depth: re-verify the resolved DB path stays inside the chats dir.
    const resolvedBaseChatsPath = path.resolve(baseChatsPath);
    const resolvedStoreDbPath = path.resolve(storeDbPath);
    const relativeStorePath = path.relative(resolvedBaseChatsPath, resolvedStoreDbPath);
    if (relativeStorePath.startsWith('..') || path.isAbsolute(relativeStorePath)) {
      throw new Error(`Invalid cursor session path for "${sessionId}".`);
    }

    const db = new Database(resolvedStoreDbPath, { readonly: true, fileMustExist: true });

    try {
      const allBlobs = db.prepare<[], CursorDbBlob>('SELECT rowid, id, data FROM blobs').all();

      const blobMap = new Map<string, CursorDbBlob>();
      const parentRefs = new Map<string, string[]>();
      const childRefs = new Map<string, string[]>();
      const jsonBlobs: CursorJsonBlob[] = [];

      // Pass 1: index every blob and collect the JSON ones (payload starts with '{').
      for (const blob of allBlobs) {
        blobMap.set(blob.id, blob);

        if (blob.data && blob.data[0] === 0x7B) {
          try {
            const parsed = JSON.parse(blob.data.toString('utf8')) as AnyRecord;
            jsonBlobs.push({ ...blob, parsed });
          } catch {
            // Cursor can include binary or partial blobs; only JSON blobs become messages.
          }
        }
      }

      // Pass 2: scan non-JSON (binary) blobs for embedded parent references.
      // NOTE(review): the 0x0A 0x20 marker followed by a 32-byte hash appears
      // to be empirically derived from Cursor's blob format — confirm before
      // relying on it for other store.db versions.
      for (const blob of allBlobs) {
        if (!blob.data || blob.data[0] === 0x7B) {
          continue;
        }

        const parents: string[] = [];
        let i = 0;
        while (i < blob.data.length - 33) {
          if (blob.data[i] === 0x0A && blob.data[i + 1] === 0x20) {
            const parentHash = blob.data.slice(i + 2, i + 34).toString('hex');
            if (blobMap.has(parentHash)) {
              parents.push(parentHash);
            }
            i += 34;
          } else {
            i++;
          }
        }

        if (parents.length > 0) {
          parentRefs.set(blob.id, parents);
          for (const parentId of parents) {
            if (!childRefs.has(parentId)) {
              childRefs.set(parentId, []);
            }
            childRefs.get(parentId)?.push(blob.id);
          }
        }
      }

      // Pass 3: depth-first topological order — each blob is visited after its parents.
      const visited = new Set<string>();
      const sorted: CursorDbBlob[] = [];
      const visit = (nodeId: string): void => {
        if (visited.has(nodeId)) {
          return;
        }
        visited.add(nodeId);
        for (const parentId of parentRefs.get(nodeId) || []) {
          visit(parentId);
        }
        const blob = blobMap.get(nodeId);
        if (blob) {
          sorted.push(blob);
        }
      };

      // Roots (no recorded parents) first, then everything else as a catch-all.
      for (const blob of allBlobs) {
        if (!parentRefs.has(blob.id)) {
          visit(blob.id);
        }
      }
      for (const blob of allBlobs) {
        visit(blob.id);
      }

      // Pass 4: derive an ordering for JSON message blobs by finding which
      // binary blobs reference them (the reference position fixes the order).
      const messageOrder = new Map<string, number>();
      let orderIndex = 0;
      for (const blob of sorted) {
        if (blob.data && blob.data[0] !== 0x7B) {
          for (const jsonBlob of jsonBlobs) {
            try {
              const idBytes = Buffer.from(jsonBlob.id, 'hex');
              if (blob.data.includes(idBytes) && !messageOrder.has(jsonBlob.id)) {
                messageOrder.set(jsonBlob.id, orderIndex++);
              }
            } catch {
              // Ignore malformed blob ids that cannot be decoded as hex.
            }
          }
        }
      }

      // Unreferenced JSON blobs sink to the end; rowid breaks ties.
      const sortedJsonBlobs = jsonBlobs.sort((a, b) => {
        const aOrder = messageOrder.get(a.id) ?? Number.MAX_SAFE_INTEGER;
        const bOrder = messageOrder.get(b.id) ?? Number.MAX_SAFE_INTEGER;
        return aOrder !== bOrder ? aOrder - bOrder : a.rowid - b.rowid;
      });

      // Pass 5: drop system messages and assign 1-based sequence numbers.
      const messages: CursorMessageBlob[] = [];
      for (let idx = 0; idx < sortedJsonBlobs.length; idx++) {
        const blob = sortedJsonBlobs[idx];
        const parsed = blob.parsed;
        const role = parsed?.role || parsed?.message?.role;
        if (role === 'system') {
          continue;
        }
        messages.push({
          id: blob.id,
          sequence: idx + 1,
          rowid: blob.rowid,
          content: parsed,
        });
      }

      return messages;
    } finally {
      db.close();
    }
  }

  /**
   * Normalizes live Cursor CLI NDJSON events. Persisted Cursor history is
   * normalized from SQLite blobs in fetchHistory().
   *
   * @returns zero or one stream_delta messages; unrecognized shapes yield [].
   */
  normalizeMessage(rawMessage: unknown, sessionId: string | null): NormalizedMessage[] {
    const raw = readObjectRecord(rawMessage);
    if (raw?.type === 'assistant' && raw.message?.content?.[0]?.text) {
      return [createNormalizedMessage({
        kind: 'stream_delta',
        content: raw.message.content[0].text,
        sessionId,
        provider: PROVIDER,
      })];
    }

    // Bare non-empty strings are forwarded as raw stream deltas.
    if (typeof rawMessage === 'string' && rawMessage.trim()) {
      return [createNormalizedMessage({
        kind: 'stream_delta',
        content: rawMessage,
        sessionId,
        provider: PROVIDER,
      })];
    }

    return [];
  }

  /**
   * Fetches and paginates Cursor session history from its project-scoped store.db.
   *
   * @param options.limit - null returns everything; 0 returns an empty page
   *                        (but still reports hasMore/total).
   * @returns empty result (never throws) when the session cannot be loaded.
   */
  async fetchHistory(
    sessionId: string,
    options: FetchHistoryOptions = {},
  ): Promise<FetchHistoryResult> {
    const { projectPath = '', limit = null, offset = 0 } = options;

    try {
      const blobs = await this.loadCursorBlobs(sessionId, projectPath);
      const allNormalized = this.normalizeCursorBlobs(blobs, sessionId);
      const total = allNormalized.length;

      if (limit !== null) {
        const start = offset;
        // limit === 0 is a "count only" request: empty page, hasMore if any remain.
        const page = limit === 0
          ? []
          : allNormalized.slice(start, start + limit);
        const hasMore = limit === 0
          ? start < total
          : start + limit < total;
        return {
          messages: page,
          total,
          hasMore,
          offset,
          limit,
        };
      }

      return {
        messages: allNormalized,
        total,
        hasMore: false,
        offset: 0,
        limit: null,
      };
    } catch (error) {
      // Missing store.db or parse failures degrade to an empty history.
      const message = error instanceof Error ? error.message : String(error);
      console.warn(`[CursorProvider] Failed to load session ${sessionId}:`, message);
      return { messages: [], total: 0, hasMore: false, offset: 0, limit: null };
    }
  }

  /**
   * Converts Cursor SQLite message blobs into normalized messages and attaches
   * matching tool results to their tool_use entries.
   *
   * Timestamps are synthetic: Date.now() plus 100ms per sequence step, since
   * the blobs carry no wall-clock time.
   */
  private normalizeCursorBlobs(blobs: CursorMessageBlob[], sessionId: string | null): NormalizedMessage[] {
    const messages: NormalizedMessage[] = [];
    const toolUseMap = new Map<string, NormalizedMessage>();
    const baseTime = Date.now();

    for (let i = 0; i < blobs.length; i++) {
      const blob = blobs[i];
      const content = blob.content;
      const ts = new Date(baseTime + (blob.sequence ?? i) * 100).toISOString();
      const baseId = blob.id || generateMessageId('cursor');

      try {
        // Shape A: { message: { role, content } } wrapper.
        if (!content?.role || !content?.content) {
          if (content?.message?.role && content?.message?.content) {
            if (content.message.role === 'system') {
              continue;
            }
            const role = content.message.role === 'user' ? 'user' : 'assistant';
            let text = '';
            if (Array.isArray(content.message.content)) {
              text = content.message.content
                .map((part: string | AnyRecord) => typeof part === 'string' ? part : part?.text || '')
                .filter(Boolean)
                .join('\n');
            } else if (typeof content.message.content === 'string') {
              text = content.message.content;
            }
            if (text?.trim()) {
              messages.push(createNormalizedMessage({
                id: baseId,
                sessionId,
                timestamp: ts,
                provider: PROVIDER,
                kind: 'text',
                role,
                content: text,
                sequence: blob.sequence,
                rowid: blob.rowid,
              }));
            }
          }
          continue;
        }

        if (content.role === 'system') {
          continue;
        }

        // Shape B: tool role — emit tool_result entries to pair up later.
        if (content.role === 'tool') {
          const toolItems = Array.isArray(content.content) ? content.content : [];
          for (const item of toolItems) {
            if (item?.type !== 'tool-result') {
              continue;
            }
            const toolCallId = item.toolCallId || content.id;
            messages.push(createNormalizedMessage({
              id: `${baseId}_tr`,
              sessionId,
              timestamp: ts,
              provider: PROVIDER,
              kind: 'tool_result',
              toolId: toolCallId,
              content: item.result || '',
              isError: false,
            }));
          }
          continue;
        }

        const role = content.role === 'user' ? 'user' : 'assistant';

        // Shape C: { role, content: [...] } — text / reasoning / tool-call parts.
        if (Array.isArray(content.content)) {
          for (let partIdx = 0; partIdx < content.content.length; partIdx++) {
            const part = content.content[partIdx];

            if (part?.type === 'text' && part?.text) {
              messages.push(createNormalizedMessage({
                id: `${baseId}_${partIdx}`,
                sessionId,
                timestamp: ts,
                provider: PROVIDER,
                kind: 'text',
                role,
                content: part.text,
                sequence: blob.sequence,
                rowid: blob.rowid,
              }));
            } else if (part?.type === 'reasoning' && part?.text) {
              messages.push(createNormalizedMessage({
                id: `${baseId}_${partIdx}`,
                sessionId,
                timestamp: ts,
                provider: PROVIDER,
                kind: 'thinking',
                content: part.text,
              }));
            } else if (part?.type === 'tool-call' || part?.type === 'tool_use') {
              const rawToolName = part.toolName || part.name || 'Unknown Tool';
              // Cursor's ApplyPatch is displayed as the UI's Edit tool.
              const toolName = rawToolName === 'ApplyPatch' ? 'Edit' : rawToolName;
              const toolId = part.toolCallId || part.id || `tool_${i}_${partIdx}`;
              const message = createNormalizedMessage({
                id: `${baseId}_${partIdx}`,
                sessionId,
                timestamp: ts,
                provider: PROVIDER,
                kind: 'tool_use',
                toolName,
                toolInput: part.args || part.input,
                toolId,
              });
              messages.push(message);
              toolUseMap.set(toolId, message);
            }
          }
        } else if (typeof content.content === 'string' && content.content.trim()) {
          // Shape D: plain string content.
          messages.push(createNormalizedMessage({
            id: baseId,
            sessionId,
            timestamp: ts,
            provider: PROVIDER,
            kind: 'text',
            role,
            content: content.content,
            sequence: blob.sequence,
            rowid: blob.rowid,
          }));
        }
      } catch (error) {
        console.warn('Error normalizing cursor blob:', error);
      }
    }

    // Attach each tool_result to the tool_use it answers.
    for (const msg of messages) {
      if (msg.kind === 'tool_result' && msg.toolId && toolUseMap.has(msg.toolId)) {
        const toolUse = toolUseMap.get(msg.toolId);
        if (toolUse) {
          toolUse.toolResult = {
            content: msg.content,
            isError: msg.isError,
          };
        }
      }
    }

    // Final ordering: sequence, then rowid, then synthetic timestamp.
    messages.sort((a, b) => {
      if (a.sequence !== undefined && b.sequence !== undefined) {
        return a.sequence - b.sequence;
      }
      if (a.rowid !== undefined && b.rowid !== undefined) {
        return a.rowid - b.rowid;
      }
      return new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime();
    });

    return messages;
  }
}
|
||||
17
server/modules/providers/list/cursor/cursor.provider.ts
Normal file
17
server/modules/providers/list/cursor/cursor.provider.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { AbstractProvider } from '@/modules/providers/shared/base/abstract.provider.js';
|
||||
import { CursorProviderAuth } from '@/modules/providers/list/cursor/cursor-auth.provider.js';
|
||||
import { CursorMcpProvider } from '@/modules/providers/list/cursor/cursor-mcp.provider.js';
|
||||
import { CursorSessionSynchronizer } from '@/modules/providers/list/cursor/cursor-session-synchronizer.provider.js';
|
||||
import { CursorSessionsProvider } from '@/modules/providers/list/cursor/cursor-sessions.provider.js';
|
||||
import type { IProviderAuth, IProviderSessionSynchronizer, IProviderSessions } from '@/shared/interfaces.js';
|
||||
|
||||
export class CursorProvider extends AbstractProvider {
|
||||
readonly mcp = new CursorMcpProvider();
|
||||
readonly auth: IProviderAuth = new CursorProviderAuth();
|
||||
readonly sessions: IProviderSessions = new CursorSessionsProvider();
|
||||
readonly sessionSynchronizer: IProviderSessionSynchronizer = new CursorSessionSynchronizer();
|
||||
|
||||
constructor() {
|
||||
super('cursor');
|
||||
}
|
||||
}
|
||||
151
server/modules/providers/list/gemini/gemini-auth.provider.ts
Normal file
151
server/modules/providers/list/gemini/gemini-auth.provider.ts
Normal file
@@ -0,0 +1,151 @@
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import spawn from 'cross-spawn';
|
||||
|
||||
import type { IProviderAuth } from '@/shared/interfaces.js';
|
||||
import type { ProviderAuthStatus } from '@/shared/types.js';
|
||||
import { readObjectRecord, readOptionalString } from '@/shared/utils.js';
|
||||
|
||||
type GeminiCredentialsStatus = {
|
||||
authenticated: boolean;
|
||||
email: string | null;
|
||||
method: string | null;
|
||||
error?: string;
|
||||
};
|
||||
|
||||
export class GeminiProviderAuth implements IProviderAuth {
|
||||
/**
|
||||
* Checks whether the Gemini CLI is available on this host.
|
||||
*/
|
||||
private checkInstalled(): boolean {
|
||||
const cliPath = process.env.GEMINI_PATH || 'gemini';
|
||||
try {
|
||||
spawn.sync(cliPath, ['--version'], { stdio: 'ignore', timeout: 5000 });
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns Gemini CLI installation and credential status.
|
||||
*/
|
||||
async getStatus(): Promise<ProviderAuthStatus> {
|
||||
const installed = this.checkInstalled();
|
||||
|
||||
if (!installed) {
|
||||
return {
|
||||
installed,
|
||||
provider: 'gemini',
|
||||
authenticated: false,
|
||||
email: null,
|
||||
method: null,
|
||||
error: 'Gemini CLI is not installed',
|
||||
};
|
||||
}
|
||||
|
||||
const credentials = await this.checkCredentials();
|
||||
|
||||
return {
|
||||
installed,
|
||||
provider: 'gemini',
|
||||
authenticated: credentials.authenticated,
|
||||
email: credentials.email,
|
||||
method: credentials.method,
|
||||
error: credentials.authenticated ? undefined : credentials.error || 'Not authenticated',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks Gemini credentials from API key env vars or local OAuth credential files.
|
||||
*/
|
||||
private async checkCredentials(): Promise<GeminiCredentialsStatus> {
|
||||
if (process.env.GEMINI_API_KEY?.trim()) {
|
||||
return { authenticated: true, email: 'API Key Auth', method: 'api_key' };
|
||||
}
|
||||
|
||||
try {
|
||||
const credsPath = path.join(os.homedir(), '.gemini', 'oauth_creds.json');
|
||||
const content = await readFile(credsPath, 'utf8');
|
||||
const creds = readObjectRecord(JSON.parse(content)) ?? {};
|
||||
const accessToken = readOptionalString(creds.access_token);
|
||||
|
||||
if (!accessToken) {
|
||||
return {
|
||||
authenticated: false,
|
||||
email: null,
|
||||
method: null,
|
||||
error: 'No valid tokens found in oauth_creds',
|
||||
};
|
||||
}
|
||||
|
||||
const refreshToken = readOptionalString(creds.refresh_token);
|
||||
const tokenInfo = await this.getTokenInfoEmail(accessToken);
|
||||
if (tokenInfo.valid) {
|
||||
return {
|
||||
authenticated: true,
|
||||
email: tokenInfo.email || 'OAuth Session',
|
||||
method: 'credentials_file',
|
||||
};
|
||||
}
|
||||
|
||||
if (!refreshToken) {
|
||||
return {
|
||||
authenticated: false,
|
||||
email: null,
|
||||
method: 'credentials_file',
|
||||
error: 'Access token invalid and no refresh token found',
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
authenticated: true,
|
||||
email: await this.getActiveAccountEmail() || 'OAuth Session',
|
||||
method: 'credentials_file',
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
authenticated: false,
|
||||
email: null,
|
||||
method: null,
|
||||
error: 'Gemini CLI not configured',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates a Gemini OAuth access token and returns an email when Google reports one.
|
||||
*/
|
||||
private async getTokenInfoEmail(accessToken: string): Promise<{ valid: boolean; email: string | null }> {
|
||||
try {
|
||||
const tokenRes = await fetch(`https://oauth2.googleapis.com/tokeninfo?access_token=${accessToken}`);
|
||||
if (!tokenRes.ok) {
|
||||
return { valid: false, email: null };
|
||||
}
|
||||
|
||||
const tokenInfo = readObjectRecord(await tokenRes.json());
|
||||
return {
|
||||
valid: true,
|
||||
email: readOptionalString(tokenInfo?.email) ?? null,
|
||||
};
|
||||
} catch {
|
||||
return { valid: false, email: null };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads Gemini's active local Google account as an offline fallback for display.
|
||||
*/
|
||||
private async getActiveAccountEmail(): Promise<string | null> {
|
||||
try {
|
||||
const accPath = path.join(os.homedir(), '.gemini', 'google_accounts.json');
|
||||
const accContent = await readFile(accPath, 'utf8');
|
||||
const accounts = readObjectRecord(JSON.parse(accContent));
|
||||
return readOptionalString(accounts?.active) ?? null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
110
server/modules/providers/list/gemini/gemini-mcp.provider.ts
Normal file
110
server/modules/providers/list/gemini/gemini-mcp.provider.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import { McpProvider } from '@/modules/providers/shared/mcp/mcp.provider.js';
|
||||
import type { McpScope, ProviderMcpServer, UpsertProviderMcpServerInput } from '@/shared/types.js';
|
||||
import {
|
||||
AppError,
|
||||
readJsonConfig,
|
||||
readObjectRecord,
|
||||
readOptionalString,
|
||||
readStringArray,
|
||||
readStringRecord,
|
||||
writeJsonConfig,
|
||||
} from '@/shared/utils.js';
|
||||
|
||||
export class GeminiMcpProvider extends McpProvider {
|
||||
constructor() {
|
||||
super('gemini', ['user', 'project'], ['stdio', 'http', 'sse']);
|
||||
}
|
||||
|
||||
protected async readScopedServers(scope: McpScope, workspacePath: string): Promise<Record<string, unknown>> {
|
||||
const filePath = scope === 'user'
|
||||
? path.join(os.homedir(), '.gemini', 'settings.json')
|
||||
: path.join(workspacePath, '.gemini', 'settings.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
return readObjectRecord(config.mcpServers) ?? {};
|
||||
}
|
||||
|
||||
protected async writeScopedServers(
|
||||
scope: McpScope,
|
||||
workspacePath: string,
|
||||
servers: Record<string, unknown>,
|
||||
): Promise<void> {
|
||||
const filePath = scope === 'user'
|
||||
? path.join(os.homedir(), '.gemini', 'settings.json')
|
||||
: path.join(workspacePath, '.gemini', 'settings.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
config.mcpServers = servers;
|
||||
await writeJsonConfig(filePath, config);
|
||||
}
|
||||
|
||||
protected buildServerConfig(input: UpsertProviderMcpServerInput): Record<string, unknown> {
|
||||
if (input.transport === 'stdio') {
|
||||
if (!input.command?.trim()) {
|
||||
throw new AppError('command is required for stdio MCP servers.', {
|
||||
code: 'MCP_COMMAND_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
command: input.command,
|
||||
args: input.args ?? [],
|
||||
env: input.env ?? {},
|
||||
cwd: input.cwd,
|
||||
};
|
||||
}
|
||||
|
||||
if (!input.url?.trim()) {
|
||||
throw new AppError('url is required for http/sse MCP servers.', {
|
||||
code: 'MCP_URL_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
type: input.transport,
|
||||
url: input.url,
|
||||
headers: input.headers ?? {},
|
||||
};
|
||||
}
|
||||
|
||||
protected normalizeServerConfig(
|
||||
scope: McpScope,
|
||||
name: string,
|
||||
rawConfig: unknown,
|
||||
): ProviderMcpServer | null {
|
||||
if (!rawConfig || typeof rawConfig !== 'object') {
|
||||
return null;
|
||||
}
|
||||
|
||||
const config = rawConfig as Record<string, unknown>;
|
||||
if (typeof config.command === 'string') {
|
||||
return {
|
||||
provider: 'gemini',
|
||||
name,
|
||||
scope,
|
||||
transport: 'stdio',
|
||||
command: config.command,
|
||||
args: readStringArray(config.args),
|
||||
env: readStringRecord(config.env),
|
||||
cwd: readOptionalString(config.cwd),
|
||||
};
|
||||
}
|
||||
|
||||
if (typeof config.url === 'string') {
|
||||
const transport = readOptionalString(config.type) === 'sse' ? 'sse' : 'http';
|
||||
return {
|
||||
provider: 'gemini',
|
||||
name,
|
||||
scope,
|
||||
transport,
|
||||
url: config.url,
|
||||
headers: readStringRecord(config.headers),
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,401 @@
|
||||
import crypto from 'node:crypto';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
|
||||
import { projectsDb, sessionsDb } from '@/modules/database/index.js';
|
||||
import {
|
||||
findFilesRecursivelyCreatedAfter,
|
||||
normalizeProjectPath,
|
||||
normalizeSessionName,
|
||||
readFileTimestamps,
|
||||
} from '@/shared/utils.js';
|
||||
import type { IProviderSessionSynchronizer } from '@/shared/interfaces.js';
|
||||
import type { AnyRecord } from '@/shared/types.js';
|
||||
|
||||
/** Minimal session identity extracted from a Gemini transcript artifact. */
type ParsedSession = {
  sessionId: string;
  projectPath: string;
  // Human-readable name derived from the first user message, when available.
  sessionName?: string;
};

/** First useful metadata fields scanned out of a Gemini JSONL transcript. */
type GeminiJsonlMetadata = {
  sessionId: string;
  // Either an explicit path, or a hash that must be resolved via lookup.
  projectPath?: string;
  projectHash?: string;
  // Used to derive the session's display name.
  firstUserMessage?: string;
};
|
||||
|
||||
/**
 * Session indexer for Gemini transcript artifacts.
 *
 * Scans legacy JSON session files and newer JSONL transcripts under
 * ~/.gemini (both `sessions/` and `tmp/.../chats/`) and upserts them into
 * the shared sessions database.
 */
export class GeminiSessionSynchronizer implements IProviderSessionSynchronizer {
  private readonly provider = 'gemini' as const;
  // Root directory the Gemini CLI writes its local state into.
  private readonly geminiHome = path.join(os.homedir(), '.gemini');

  /**
   * Scans Gemini legacy JSON and new JSONL artifacts and upserts sessions into DB.
   *
   * @param since - When provided, only files created after this time are scanned.
   * @returns Number of artifacts successfully parsed and upserted.
   */
  async synchronize(since?: Date): Promise<number> {
    const projectHashLookup = this.buildProjectHashLookup();

    const legacySessionFiles = await findFilesRecursivelyCreatedAfter(
      path.join(this.geminiHome, 'sessions'),
      '.json',
      since ?? null
    );
    const legacyTempFiles = await findFilesRecursivelyCreatedAfter(
      path.join(this.geminiHome, 'tmp'),
      '.json',
      since ?? null
    );
    const jsonlSessionFiles = await findFilesRecursivelyCreatedAfter(
      path.join(this.geminiHome, 'sessions'),
      '.jsonl',
      since ?? null
    );
    const jsonlTempFiles = await findFilesRecursivelyCreatedAfter(
      path.join(this.geminiHome, 'tmp'),
      '.jsonl',
      since ?? null
    );

    // Process legacy JSON first, then JSONL. If both exist for a session id,
    // the JSONL artifact becomes the canonical jsonl_path via upsert.
    const files = [
      ...legacySessionFiles,
      ...legacyTempFiles,
      ...jsonlSessionFiles,
      ...jsonlTempFiles,
    ];

    let processed = 0;
    for (const filePath of files) {
      if (this.shouldSkipTempArtifact(filePath)) {
        continue;
      }

      const parsed = filePath.endsWith('.jsonl')
        ? await this.processJsonlSessionFile(filePath, projectHashLookup)
        : await this.processLegacySessionFile(filePath);
      if (!parsed) {
        // Unparseable or unresolvable files are skipped, not counted.
        continue;
      }

      const timestamps = await readFileTimestamps(filePath);
      sessionsDb.createSession(
        parsed.sessionId,
        this.provider,
        parsed.projectPath,
        parsed.sessionName,
        timestamps.createdAt,
        timestamps.updatedAt,
        filePath
      );
      processed += 1;
    }

    return processed;
  }

  /**
   * Parses and upserts one Gemini legacy JSON or JSONL artifact.
   *
   * @param filePath - Absolute path to the artifact on disk.
   * @returns The upserted session id, or null when the file is not a
   *   recognizable Gemini session artifact.
   */
  async synchronizeFile(filePath: string): Promise<string | null> {
    if (!filePath.endsWith('.json') && !filePath.endsWith('.jsonl')) {
      return null;
    }

    if (this.shouldSkipTempArtifact(filePath)) {
      return null;
    }

    const parsed = filePath.endsWith('.jsonl')
      ? await this.processJsonlSessionFile(filePath, this.buildProjectHashLookup())
      : await this.processLegacySessionFile(filePath);
    if (!parsed) {
      return null;
    }

    const timestamps = await readFileTimestamps(filePath);
    return sessionsDb.createSession(
      parsed.sessionId,
      this.provider,
      parsed.projectPath,
      parsed.sessionName,
      timestamps.createdAt,
      timestamps.updatedAt,
      filePath
    );
  }

  /**
   * Extracts session metadata from one Gemini legacy JSON artifact.
   * Returns null for files without a session id or a resolvable project path.
   */
  private async processLegacySessionFile(filePath: string): Promise<ParsedSession | null> {
    try {
      const content = await readFile(filePath, 'utf8');
      const data = JSON.parse(content) as AnyRecord;

      // Legacy files use either `sessionId` or just `id`.
      const sessionId =
        typeof data.sessionId === 'string'
          ? data.sessionId
          : typeof data.id === 'string'
            ? data.id
            : undefined;
      if (!sessionId) {
        return null;
      }

      // Prefer an explicit projectPath; fall back to the chat workspace marker.
      const workspaceProjectPath = await this.resolveProjectPathFromChatWorkspace(filePath);
      const projectPath = typeof data.projectPath === 'string' && data.projectPath.trim().length > 0
        ? data.projectPath
        : workspaceProjectPath;
      if (!projectPath) {
        return null;
      }

      // Session name comes from the first message's text, whatever its shape.
      const messages = Array.isArray(data.messages) ? data.messages : [];
      const firstMessage = messages[0] as AnyRecord | undefined;
      let rawName: string | undefined;

      if (Array.isArray(firstMessage?.content) && typeof firstMessage.content[0]?.text === 'string') {
        rawName = firstMessage.content[0].text;
      } else if (typeof firstMessage?.content === 'string') {
        rawName = firstMessage.content;
      }

      return {
        sessionId,
        projectPath,
        sessionName: normalizeSessionName(rawName, 'New Gemini Chat'),
      };
    } catch {
      // Unreadable/invalid JSON: treat as "not a session file".
      return null;
    }
  }

  /**
   * Extracts session metadata from one Gemini JSONL artifact.
   * Project path resolution order: embedded path → chat workspace marker →
   * projectHash lookup. Sessions with no resolvable path are skipped.
   */
  private async processJsonlSessionFile(
    filePath: string,
    projectHashLookup: Map<string, string>
  ): Promise<ParsedSession | null> {
    const metadata = await this.extractJsonlMetadata(filePath);
    if (!metadata) {
      return null;
    }

    let projectPath = typeof metadata.projectPath === 'string' ? metadata.projectPath.trim() : '';
    if (!projectPath) {
      const workspaceProjectPath = await this.resolveProjectPathFromChatWorkspace(filePath);
      if (workspaceProjectPath) {
        projectPath = workspaceProjectPath;
      }
    }
    if (!projectPath && typeof metadata.projectHash === 'string') {
      projectPath = projectHashLookup.get(metadata.projectHash.trim().toLowerCase()) ?? '';
    }
    if (!projectPath) {
      return null;
    }

    // Once we resolve a project hash/path pair, keep it in-memory for this sync run.
    if (typeof metadata.projectHash === 'string' && metadata.projectHash.trim()) {
      projectHashLookup.set(metadata.projectHash.trim().toLowerCase(), projectPath);
    }

    return {
      sessionId: metadata.sessionId,
      projectPath,
      sessionName: normalizeSessionName(metadata.firstUserMessage, 'New Gemini Chat'),
    };
  }

  /**
   * Reads first useful metadata from Gemini JSONL files.
   * Scans line by line and stops early once all fields of interest are found.
   */
  private async extractJsonlMetadata(filePath: string): Promise<GeminiJsonlMetadata | null> {
    try {
      const content = await readFile(filePath, 'utf8');
      const lines = content.split('\n');

      let sessionId: string | undefined;
      let projectPath: string | undefined;
      let projectHash: string | undefined;
      let firstUserMessage: string | undefined;

      for (const line of lines) {
        const trimmed = line.trim();
        if (!trimmed) {
          continue;
        }

        let parsed: AnyRecord;
        try {
          parsed = JSON.parse(trimmed) as AnyRecord;
        } catch {
          // Skip malformed lines rather than aborting the whole file.
          continue;
        }

        if (!sessionId && typeof parsed.sessionId === 'string') {
          sessionId = parsed.sessionId;
        }
        if (!projectPath && typeof parsed.projectPath === 'string') {
          projectPath = parsed.projectPath;
        }
        if (!projectHash && typeof parsed.projectHash === 'string') {
          projectHash = parsed.projectHash;
        }

        if (!firstUserMessage && parsed.type === 'user') {
          firstUserMessage = this.extractGeminiTextContent(parsed.content);
        }

        // All fields of interest found — no need to read further.
        if (sessionId && (projectPath || projectHash) && firstUserMessage) {
          break;
        }
      }

      if (!sessionId) {
        return null;
      }

      return {
        sessionId,
        projectPath,
        projectHash,
        firstUserMessage,
      };
    } catch {
      return null;
    }
  }

  /**
   * Tries to resolve project root from Gemini tmp chat workspaces.
   * Gemini writes a `.project_root` marker file one level above the `chats`
   * directory; returns its trimmed content, or '' when absent.
   */
  private async resolveProjectPathFromChatWorkspace(filePath: string): Promise<string> {
    if (!filePath.includes(`${path.sep}chats${path.sep}`)) {
      return '';
    }

    const chatsDir = path.dirname(filePath);
    const workspaceDir = path.dirname(chatsDir);
    const projectRootPath = path.join(workspaceDir, '.project_root');

    try {
      const rootContent = await readFile(projectRootPath, 'utf8');
      return rootContent.trim();
    } catch {
      return '';
    }
  }

  /**
   * Builds a hash->path lookup for Gemini JSONL metadata that stores projectHash.
   * Candidate paths come from both the projects table and previously synced
   * Gemini sessions.
   */
  private buildProjectHashLookup(): Map<string, string> {
    const lookup = new Map<string, string>();
    const knownPaths = new Set<string>();

    for (const project of projectsDb.getProjectPaths()) {
      if (typeof project.project_path === 'string' && project.project_path.trim()) {
        knownPaths.add(project.project_path.trim());
      }
    }

    for (const session of sessionsDb.getAllSessions()) {
      if (session.provider === this.provider && typeof session.project_path === 'string' && session.project_path.trim()) {
        knownPaths.add(session.project_path.trim());
      }
    }

    for (const knownPath of knownPaths) {
      this.addProjectHashCandidates(lookup, knownPath);
    }

    return lookup;
  }

  /**
   * Adds likely Gemini hash variants for one project path.
   * Hashes several normalizations of the path (raw, normalized, resolved,
   * and lowercase on Windows) because the exact form Gemini hashed is unknown.
   */
  private addProjectHashCandidates(lookup: Map<string, string>, projectPath: string): void {
    const trimmed = projectPath.trim();
    if (!trimmed) {
      return;
    }

    const normalized = normalizeProjectPath(trimmed);
    const resolved = path.resolve(trimmed);
    const resolvedNormalized = normalizeProjectPath(resolved);

    const candidates = new Set<string>([
      trimmed,
      normalized,
      resolved,
      resolvedNormalized,
    ]);

    if (process.platform === 'win32') {
      // Windows paths are case-insensitive, so also try lowercase forms.
      for (const candidate of [...candidates]) {
        candidates.add(candidate.toLowerCase());
      }
    }

    for (const candidate of candidates) {
      if (!candidate) {
        continue;
      }

      const hash = this.sha256(candidate);
      // First writer wins: keep the earliest path mapped to a given hash.
      if (!lookup.has(hash)) {
        lookup.set(hash, trimmed);
      }
    }
  }

  /**
   * Returns first user text from Gemini content payload shapes.
   * Accepts a plain string or an array mixing strings and `{ text }` parts.
   */
  private extractGeminiTextContent(content: unknown): string | undefined {
    if (typeof content === 'string' && content.trim().length > 0) {
      return content;
    }

    if (!Array.isArray(content)) {
      return undefined;
    }

    for (const part of content) {
      if (typeof part === 'string' && part.trim().length > 0) {
        return part;
      }

      if (part && typeof part === 'object' && typeof (part as AnyRecord).text === 'string') {
        const text = (part as AnyRecord).text;
        if (text.trim().length > 0) {
          return text;
        }
      }
    }

    return undefined;
  }

  /**
   * Keeps tmp scanning scoped to chat artifacts only.
   * Files under ~/.gemini/tmp that are not inside a `chats` directory are
   * unrelated scratch data and must be ignored.
   */
  private shouldSkipTempArtifact(filePath: string): boolean {
    return (
      filePath.startsWith(path.join(this.geminiHome, 'tmp'))
      && !filePath.includes(`${path.sep}chats${path.sep}`)
    );
  }

  // Hex-encoded SHA-256 of a candidate project path.
  private sha256(value: string): string {
    return crypto.createHash('sha256').update(value).digest('hex');
  }
}
|
||||
541
server/modules/providers/list/gemini/gemini-sessions.provider.ts
Normal file
541
server/modules/providers/list/gemini/gemini-sessions.provider.ts
Normal file
@@ -0,0 +1,541 @@
|
||||
import fsSync from 'node:fs';
|
||||
import fs from 'node:fs/promises';
|
||||
import readline from 'node:readline';
|
||||
|
||||
import { sessionsDb } from '@/modules/database/index.js';
|
||||
import type { IProviderSessions } from '@/shared/interfaces.js';
|
||||
import type { AnyRecord, FetchHistoryOptions, FetchHistoryResult, NormalizedMessage } from '@/shared/types.js';
|
||||
import { createNormalizedMessage, generateMessageId, readObjectRecord } from '@/shared/utils.js';
|
||||
|
||||
// Provider id stamped onto every normalized message emitted by this module.
const PROVIDER = 'gemini';

/** Raw messages loaded from a Gemini session file, plus optional token usage. */
type GeminiHistoryResult = {
  messages: AnyRecord[];
  // Usage summary from the last assistant entry that carried token stats.
  tokenUsage?: unknown;
};
|
||||
|
||||
function mapGeminiRole(value: unknown): 'user' | 'assistant' | null {
|
||||
if (value === 'user') {
|
||||
return 'user';
|
||||
}
|
||||
|
||||
if (value === 'gemini' || value === 'assistant') {
|
||||
return 'assistant';
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function extractGeminiTextContent(content: unknown): string {
|
||||
if (typeof content === 'string') {
|
||||
return content;
|
||||
}
|
||||
|
||||
if (!Array.isArray(content)) {
|
||||
return '';
|
||||
}
|
||||
|
||||
return content
|
||||
.map((part) => {
|
||||
if (typeof part === 'string') {
|
||||
return part;
|
||||
}
|
||||
if (!part || typeof part !== 'object') {
|
||||
return '';
|
||||
}
|
||||
|
||||
const record = part as AnyRecord;
|
||||
if (typeof record.text === 'string') {
|
||||
return record.text;
|
||||
}
|
||||
|
||||
return '';
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
function extractGeminiThoughts(thoughts: unknown): string {
|
||||
if (!Array.isArray(thoughts)) {
|
||||
return '';
|
||||
}
|
||||
|
||||
return thoughts
|
||||
.map((item) => {
|
||||
if (!item || typeof item !== 'object') {
|
||||
return '';
|
||||
}
|
||||
|
||||
const record = item as AnyRecord;
|
||||
const subject = typeof record.subject === 'string' ? record.subject.trim() : '';
|
||||
const description = typeof record.description === 'string' ? record.description.trim() : '';
|
||||
|
||||
if (subject && description) {
|
||||
return `${subject}: ${description}`;
|
||||
}
|
||||
|
||||
return description || subject;
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
function buildGeminiTokenUsage(tokens: unknown): AnyRecord | undefined {
|
||||
if (!tokens || typeof tokens !== 'object') {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const record = tokens as AnyRecord;
|
||||
const input = Number(record.input || 0);
|
||||
const output = Number(record.output || 0);
|
||||
const cached = Number(record.cached || 0);
|
||||
const thoughts = Number(record.thoughts || 0);
|
||||
const tool = Number(record.tool || 0);
|
||||
|
||||
const totalFromFields = input + output + cached + thoughts + tool;
|
||||
const total = Number(record.total || totalFromFields || 0);
|
||||
|
||||
return {
|
||||
used: total,
|
||||
total: total,
|
||||
breakdown: {
|
||||
input,
|
||||
output,
|
||||
cached,
|
||||
thoughts,
|
||||
tool,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async function getGeminiLegacySessionMessages(sessionFilePath: string): Promise<GeminiHistoryResult> {
|
||||
try {
|
||||
const data = await fs.readFile(sessionFilePath, 'utf8');
|
||||
const session = JSON.parse(data) as AnyRecord;
|
||||
const sourceMessages = Array.isArray(session.messages) ? session.messages : [];
|
||||
|
||||
const messages: AnyRecord[] = [];
|
||||
for (const msg of sourceMessages) {
|
||||
const role = mapGeminiRole(msg.type ?? msg.role);
|
||||
if (!role) {
|
||||
continue;
|
||||
}
|
||||
|
||||
messages.push({
|
||||
type: 'message',
|
||||
uuid: typeof msg.id === 'string' ? msg.id : undefined,
|
||||
message: { role, content: msg.content },
|
||||
timestamp: msg.timestamp || null,
|
||||
});
|
||||
}
|
||||
|
||||
return { messages };
|
||||
} catch {
|
||||
return { messages: [] };
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Streams a Gemini JSONL session file line by line and converts each entry
 * into the intermediate records consumed by fetchHistory: chat messages,
 * thinking blocks, tool calls, and tool results.
 *
 * I/O or stream errors yield an empty history; individual malformed lines
 * are skipped. Messages are sorted by timestamp before returning.
 */
async function getGeminiJsonlSessionMessages(sessionFilePath: string): Promise<GeminiHistoryResult> {
  const messages: AnyRecord[] = [];
  // Last assistant entry with token stats wins.
  let tokenUsage: AnyRecord | undefined;

  try {
    const fileStream = fsSync.createReadStream(sessionFilePath);
    const lineReader = readline.createInterface({
      input: fileStream,
      crlfDelay: Infinity,
    });

    for await (const line of lineReader) {
      const trimmed = line.trim();
      if (!trimmed) {
        continue;
      }

      let entry: AnyRecord;
      try {
        entry = JSON.parse(trimmed) as AnyRecord;
      } catch {
        // Skip unparseable lines rather than aborting the whole file.
        continue;
      }

      // Metadata/update lines (e.g. {$set:{lastUpdated:...}}) do not represent chat messages.
      if (entry.$set) {
        continue;
      }

      const role = mapGeminiRole(entry.type);
      if (role) {
        // One entry may produce both a visible message and a thinking block.
        const textContent = extractGeminiTextContent(entry.content);
        if (textContent.trim()) {
          messages.push({
            type: 'message',
            uuid: typeof entry.id === 'string' ? entry.id : undefined,
            message: { role, content: textContent },
            timestamp: entry.timestamp || null,
          });
        }

        const thinkingContent = extractGeminiThoughts(entry.thoughts);
        if (thinkingContent.trim()) {
          messages.push({
            type: 'thinking',
            // Suffix keeps the thinking record's id distinct from the message's.
            uuid: typeof entry.id === 'string' ? `${entry.id}_thinking` : undefined,
            message: { role: 'assistant', content: thinkingContent },
            timestamp: entry.timestamp || null,
            isReasoning: true,
          });
        }

        if (role === 'assistant') {
          const usage = buildGeminiTokenUsage(entry.tokens);
          if (usage) {
            tokenUsage = usage;
          }
        }

        continue;
      }

      if (entry.type === 'tool_use') {
        // Field names vary across Gemini versions; try each known alias.
        messages.push({
          type: 'tool_use',
          uuid: typeof entry.id === 'string' ? entry.id : undefined,
          timestamp: entry.timestamp || null,
          toolName: entry.tool_name || entry.name || 'Tool',
          toolInput: entry.parameters ?? entry.input ?? entry.arguments ?? '',
          toolCallId: entry.tool_id || entry.toolCallId || entry.id,
        });
        continue;
      }

      if (entry.type === 'tool_result') {
        messages.push({
          type: 'tool_result',
          uuid: typeof entry.id === 'string' ? entry.id : undefined,
          timestamp: entry.timestamp || null,
          toolCallId: entry.tool_id || entry.toolCallId || entry.id || '',
          output: entry.output ?? entry.result ?? '',
          isError: Boolean(entry.error) || entry.status === 'error',
        });
      }
    }
  } catch {
    return { messages: [] };
  }

  // Missing timestamps sort as the epoch, i.e. before any dated entries.
  messages.sort(
    (a, b) => new Date(a.timestamp || 0).getTime() - new Date(b.timestamp || 0).getTime(),
  );

  return { messages, tokenUsage };
}
|
||||
|
||||
async function getGeminiCliSessionMessages(sessionId: string): Promise<GeminiHistoryResult> {
|
||||
const sessionFilePath = sessionsDb.getSessionById(sessionId)?.jsonl_path;
|
||||
if (!sessionFilePath) {
|
||||
return { messages: [] };
|
||||
}
|
||||
|
||||
if (sessionFilePath.endsWith('.jsonl')) {
|
||||
return getGeminiJsonlSessionMessages(sessionFilePath);
|
||||
}
|
||||
|
||||
return getGeminiLegacySessionMessages(sessionFilePath);
|
||||
}
|
||||
|
||||
/**
 * Session history and live-stream normalization for the Gemini provider.
 */
export class GeminiSessionsProvider implements IProviderSessions {
  /**
   * Normalizes live Gemini stream-json events into the shared message shape.
   *
   * Gemini history uses a different session file shape, so fetchHistory handles
   * that separately after loading raw persisted messages.
   *
   * @param rawMessage - One event from the Gemini CLI stream.
   * @param sessionId - Session the event belongs to, if known.
   * @returns Zero or more normalized messages (one event may expand to several).
   */
  normalizeMessage(rawMessage: unknown, sessionId: string | null): NormalizedMessage[] {
    const raw = readObjectRecord(rawMessage);
    if (!raw) {
      return [];
    }

    const ts = raw.timestamp || new Date().toISOString();
    const baseId = raw.uuid || generateMessageId('gemini');

    if (raw.type === 'message' && raw.role === 'assistant') {
      const content = raw.content || '';
      const messages: NormalizedMessage[] = [];
      if (content) {
        messages.push(createNormalizedMessage({
          id: baseId,
          sessionId,
          timestamp: ts,
          provider: PROVIDER,
          kind: 'stream_delta',
          content,
        }));
      }
      // A non-delta assistant message also terminates the current stream.
      if (raw.delta !== true) {
        messages.push(createNormalizedMessage({
          sessionId,
          timestamp: ts,
          provider: PROVIDER,
          kind: 'stream_end',
        }));
      }
      return messages;
    }

    if (raw.type === 'tool_use') {
      return [createNormalizedMessage({
        id: baseId,
        sessionId,
        timestamp: ts,
        provider: PROVIDER,
        kind: 'tool_use',
        toolName: raw.tool_name,
        toolInput: raw.parameters || {},
        toolId: raw.tool_id || baseId,
      })];
    }

    if (raw.type === 'tool_result') {
      return [createNormalizedMessage({
        id: baseId,
        sessionId,
        timestamp: ts,
        provider: PROVIDER,
        kind: 'tool_result',
        toolId: raw.tool_id || '',
        content: raw.output === undefined ? '' : String(raw.output),
        isError: raw.status === 'error',
      })];
    }

    if (raw.type === 'result') {
      // Final event: close the stream and optionally surface token totals.
      const messages = [createNormalizedMessage({
        sessionId,
        timestamp: ts,
        provider: PROVIDER,
        kind: 'stream_end',
      })];
      if (raw.stats?.total_tokens) {
        messages.push(createNormalizedMessage({
          sessionId,
          timestamp: ts,
          provider: PROVIDER,
          kind: 'status',
          text: 'Complete',
          tokens: raw.stats.total_tokens,
          canInterrupt: false,
        }));
      }
      return messages;
    }

    if (raw.type === 'error') {
      return [createNormalizedMessage({
        id: baseId,
        sessionId,
        timestamp: ts,
        provider: PROVIDER,
        kind: 'error',
        content: raw.error || raw.message || 'Unknown Gemini streaming error',
      })];
    }

    // Unknown event types are dropped.
    return [];
  }

  /**
   * Loads Gemini history from Gemini CLI session files on disk.
   *
   * @param sessionId - Session to load.
   * @param options - Optional paging (`limit` null means "everything").
   * @returns Normalized, paged history plus any token usage found in the file.
   */
  async fetchHistory(
    sessionId: string,
    options: FetchHistoryOptions = {},
  ): Promise<FetchHistoryResult> {
    const { limit = null, offset = 0 } = options;

    let result: GeminiHistoryResult;
    try {
      result = await getGeminiCliSessionMessages(sessionId);
    } catch (error) {
      // Loader failures degrade to an empty history instead of surfacing.
      const message = error instanceof Error ? error.message : String(error);
      console.warn(`[GeminiProvider] Failed to load session ${sessionId}:`, message);
      return { messages: [], total: 0, hasMore: false, offset: 0, limit: null };
    }

    const rawMessages = result.messages;
    const normalized: NormalizedMessage[] = [];

    for (let i = 0; i < rawMessages.length; i++) {
      const raw = rawMessages[i];
      const ts = raw.timestamp || new Date().toISOString();
      const baseId = raw.uuid || generateMessageId('gemini');

      if (raw.type === 'thinking' || raw.isReasoning) {
        // Thinking content may live under message.content or directly on content.
        const thinkingContent = typeof raw.message?.content === 'string'
          ? raw.message.content
          : typeof raw.content === 'string'
            ? raw.content
            : '';

        if (thinkingContent.trim()) {
          normalized.push(createNormalizedMessage({
            id: baseId,
            sessionId,
            timestamp: ts,
            provider: PROVIDER,
            kind: 'thinking',
            content: thinkingContent,
          }));
        }
        continue;
      }

      if (raw.type === 'tool_use' || raw.toolName) {
        normalized.push(createNormalizedMessage({
          id: baseId,
          sessionId,
          timestamp: ts,
          provider: PROVIDER,
          kind: 'tool_use',
          toolName: raw.toolName || 'Tool',
          toolInput: raw.toolInput,
          toolId: raw.toolCallId || baseId,
        }));
        continue;
      }

      if (raw.type === 'tool_result') {
        normalized.push(createNormalizedMessage({
          id: baseId,
          sessionId,
          timestamp: ts,
          provider: PROVIDER,
          kind: 'tool_result',
          toolId: raw.toolCallId || '',
          content: raw.output === undefined ? '' : String(raw.output),
          isError: Boolean(raw.isError),
        }));
        continue;
      }

      const role = raw.message?.role || raw.role;
      const content = raw.message?.content || raw.content;
      if (!role || !content) {
        continue;
      }

      const normalizedRole = role === 'user' ? 'user' : 'assistant';

      if (Array.isArray(content)) {
        // Multi-part content: each part becomes its own normalized message.
        for (let partIdx = 0; partIdx < content.length; partIdx++) {
          const part = content[partIdx] as AnyRecord | string;

          if (typeof part === 'string' && part.trim()) {
            normalized.push(createNormalizedMessage({
              id: `${baseId}_${partIdx}`,
              sessionId,
              timestamp: ts,
              provider: PROVIDER,
              kind: 'text',
              role: normalizedRole,
              content: part,
            }));
            continue;
          }

          if (!part || typeof part !== 'object') {
            continue;
          }

          if ((part.type === 'text' || !part.type) && typeof part.text === 'string' && part.text.trim()) {
            normalized.push(createNormalizedMessage({
              id: `${baseId}_${partIdx}`,
              sessionId,
              timestamp: ts,
              provider: PROVIDER,
              kind: 'text',
              role: normalizedRole,
              content: part.text,
            }));
          } else if (part.type === 'tool_use') {
            normalized.push(createNormalizedMessage({
              id: `${baseId}_${partIdx}`,
              sessionId,
              timestamp: ts,
              provider: PROVIDER,
              kind: 'tool_use',
              toolName: part.name,
              toolInput: part.input,
              toolId: part.id || generateMessageId('gemini_tool'),
            }));
          } else if (part.type === 'tool_result') {
            normalized.push(createNormalizedMessage({
              id: `${baseId}_${partIdx}`,
              sessionId,
              timestamp: ts,
              provider: PROVIDER,
              kind: 'tool_result',
              toolId: part.tool_use_id || '',
              content: part.content === undefined ? '' : String(part.content),
              isError: Boolean(part.is_error),
            }));
          }
        }
      } else if (typeof content === 'string' && content.trim()) {
        normalized.push(createNormalizedMessage({
          id: baseId,
          sessionId,
          timestamp: ts,
          provider: PROVIDER,
          kind: 'text',
          role: normalizedRole,
          content,
        }));
      } else {
        // Last resort: flatten whatever content shape this is into text.
        const textContent = extractGeminiTextContent(content);
        if (textContent.trim()) {
          normalized.push(createNormalizedMessage({
            id: baseId,
            sessionId,
            timestamp: ts,
            provider: PROVIDER,
            kind: 'text',
            role: normalizedRole,
            content: textContent,
          }));
        }
      }
    }

    // Second pass: attach each tool result to its originating tool_use message.
    const toolResultMap = new Map<string, NormalizedMessage>();
    for (const msg of normalized) {
      if (msg.kind === 'tool_result' && msg.toolId) {
        toolResultMap.set(msg.toolId, msg);
      }
    }
    for (const msg of normalized) {
      if (msg.kind === 'tool_use' && msg.toolId && toolResultMap.has(msg.toolId)) {
        const toolResult = toolResultMap.get(msg.toolId);
        if (toolResult) {
          msg.toolResult = { content: toolResult.content, isError: toolResult.isError };
        }
      }
    }

    // Apply paging after normalization so totals reflect the full history.
    const start = Math.max(0, offset);
    const pageLimit = limit === null ? null : Math.max(0, limit);
    const messages = pageLimit === null
      ? normalized.slice(start)
      : normalized.slice(start, start + pageLimit);

    return {
      messages,
      total: normalized.length,
      hasMore: pageLimit === null ? false : start + pageLimit < normalized.length,
      offset: start,
      limit: pageLimit,
      tokenUsage: result.tokenUsage,
    };
  }
}
|
||||
17
server/modules/providers/list/gemini/gemini.provider.ts
Normal file
17
server/modules/providers/list/gemini/gemini.provider.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { AbstractProvider } from '@/modules/providers/shared/base/abstract.provider.js';
|
||||
import { GeminiProviderAuth } from '@/modules/providers/list/gemini/gemini-auth.provider.js';
|
||||
import { GeminiMcpProvider } from '@/modules/providers/list/gemini/gemini-mcp.provider.js';
|
||||
import { GeminiSessionSynchronizer } from '@/modules/providers/list/gemini/gemini-session-synchronizer.provider.js';
|
||||
import { GeminiSessionsProvider } from '@/modules/providers/list/gemini/gemini-sessions.provider.js';
|
||||
import type { IProviderAuth, IProviderSessionSynchronizer, IProviderSessions } from '@/shared/interfaces.js';
|
||||
|
||||
export class GeminiProvider extends AbstractProvider {
|
||||
readonly mcp = new GeminiMcpProvider();
|
||||
readonly auth: IProviderAuth = new GeminiProviderAuth();
|
||||
readonly sessions: IProviderSessions = new GeminiSessionsProvider();
|
||||
readonly sessionSynchronizer: IProviderSessionSynchronizer = new GeminiSessionSynchronizer();
|
||||
|
||||
constructor() {
|
||||
super('gemini');
|
||||
}
|
||||
}
|
||||
36
server/modules/providers/provider.registry.ts
Normal file
36
server/modules/providers/provider.registry.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
import { ClaudeProvider } from '@/modules/providers/list/claude/claude.provider.js';
|
||||
import { CodexProvider } from '@/modules/providers/list/codex/codex.provider.js';
|
||||
import { CursorProvider } from '@/modules/providers/list/cursor/cursor.provider.js';
|
||||
import { GeminiProvider } from '@/modules/providers/list/gemini/gemini.provider.js';
|
||||
import type { IProvider } from '@/shared/interfaces.js';
|
||||
import type { LLMProvider } from '@/shared/types.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
const providers: Record<LLMProvider, IProvider> = {
|
||||
claude: new ClaudeProvider(),
|
||||
codex: new CodexProvider(),
|
||||
cursor: new CursorProvider(),
|
||||
gemini: new GeminiProvider(),
|
||||
};
|
||||
|
||||
/**
|
||||
* Central registry for resolving concrete provider implementations by id.
|
||||
*/
|
||||
export const providerRegistry = {
|
||||
listProviders(): IProvider[] {
|
||||
return Object.values(providers);
|
||||
},
|
||||
|
||||
resolveProvider(provider: string): IProvider {
|
||||
const key = provider as LLMProvider;
|
||||
const resolvedProvider = providers[key];
|
||||
if (!resolvedProvider) {
|
||||
throw new AppError(`Unsupported provider "${provider}".`, {
|
||||
code: 'UNSUPPORTED_PROVIDER',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return resolvedProvider;
|
||||
},
|
||||
};
|
||||
425
server/modules/providers/provider.routes.ts
Normal file
425
server/modules/providers/provider.routes.ts
Normal file
@@ -0,0 +1,425 @@
|
||||
import express, { type Request, type Response } from 'express';
|
||||
|
||||
import { providerAuthService } from '@/modules/providers/services/provider-auth.service.js';
|
||||
import { providerMcpService } from '@/modules/providers/services/mcp.service.js';
|
||||
import { sessionConversationsSearchService } from '@/modules/providers/services/session-conversations-search.service.js';
|
||||
import { sessionsService } from '@/modules/providers/services/sessions.service.js';
|
||||
import type { LLMProvider, McpScope, McpTransport, UpsertProviderMcpServerInput } from '@/shared/types.js';
|
||||
import { AppError, asyncHandler, createApiSuccessResponse } from '@/shared/utils.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
const readPathParam = (value: unknown, name: string): string => {
|
||||
if (typeof value === 'string') {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (Array.isArray(value) && typeof value[0] === 'string') {
|
||||
return value[0];
|
||||
}
|
||||
|
||||
throw new AppError(`${name} path parameter is invalid.`, {
|
||||
code: 'INVALID_PATH_PARAMETER',
|
||||
statusCode: 400,
|
||||
});
|
||||
};
|
||||
|
||||
const normalizeProviderParam = (value: unknown): string =>
|
||||
readPathParam(value, 'provider').trim().toLowerCase();
|
||||
|
||||
const SESSION_ID_PATTERN = /^[a-zA-Z0-9._-]{1,120}$/;
|
||||
|
||||
const parseSessionId = (value: unknown): string => {
|
||||
const sessionId = readPathParam(value, 'sessionId').trim();
|
||||
if (!SESSION_ID_PATTERN.test(sessionId)) {
|
||||
throw new AppError('Invalid sessionId.', {
|
||||
code: 'INVALID_SESSION_ID',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return sessionId;
|
||||
};
|
||||
|
||||
const readOptionalQueryString = (value: unknown): string | undefined => {
|
||||
if (typeof value !== 'string') {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const normalized = value.trim();
|
||||
return normalized.length > 0 ? normalized : undefined;
|
||||
};
|
||||
|
||||
const parseOptionalBooleanQuery = (value: unknown, name: string): boolean | undefined => {
|
||||
if (value === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const normalized = readOptionalQueryString(value);
|
||||
if (!normalized) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (normalized === 'true') {
|
||||
return true;
|
||||
}
|
||||
if (normalized === 'false') {
|
||||
return false;
|
||||
}
|
||||
|
||||
throw new AppError(`${name} must be "true" or "false".`, {
|
||||
code: 'INVALID_QUERY_PARAMETER',
|
||||
statusCode: 400,
|
||||
});
|
||||
};
|
||||
|
||||
const parseMcpScope = (value: unknown): McpScope | undefined => {
|
||||
if (value === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const normalized = readOptionalQueryString(value);
|
||||
if (!normalized) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (normalized === 'user' || normalized === 'local' || normalized === 'project') {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
throw new AppError(`Unsupported MCP scope "${normalized}".`, {
|
||||
code: 'INVALID_MCP_SCOPE',
|
||||
statusCode: 400,
|
||||
});
|
||||
};
|
||||
|
||||
const parseMcpTransport = (value: unknown): McpTransport => {
|
||||
const normalized = readOptionalQueryString(value);
|
||||
if (!normalized) {
|
||||
throw new AppError('transport is required.', {
|
||||
code: 'MCP_TRANSPORT_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
if (normalized === 'stdio' || normalized === 'http' || normalized === 'sse') {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
throw new AppError(`Unsupported MCP transport "${normalized}".`, {
|
||||
code: 'INVALID_MCP_TRANSPORT',
|
||||
statusCode: 400,
|
||||
});
|
||||
};
|
||||
|
||||
const parseMcpUpsertPayload = (payload: unknown): UpsertProviderMcpServerInput => {
|
||||
if (!payload || typeof payload !== 'object') {
|
||||
throw new AppError('Request body must be an object.', {
|
||||
code: 'INVALID_REQUEST_BODY',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const body = payload as Record<string, unknown>;
|
||||
const name = readOptionalQueryString(body.name);
|
||||
if (!name) {
|
||||
throw new AppError('name is required.', {
|
||||
code: 'MCP_NAME_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const transport = parseMcpTransport(body.transport);
|
||||
const scope = parseMcpScope(body.scope);
|
||||
const workspacePath = readOptionalQueryString(body.workspacePath);
|
||||
|
||||
return {
|
||||
name,
|
||||
transport,
|
||||
scope,
|
||||
workspacePath,
|
||||
command: readOptionalQueryString(body.command),
|
||||
args: Array.isArray(body.args) ? body.args.filter((entry): entry is string => typeof entry === 'string') : undefined,
|
||||
env: typeof body.env === 'object' && body.env !== null
|
||||
? Object.fromEntries(
|
||||
Object.entries(body.env as Record<string, unknown>).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === 'string',
|
||||
),
|
||||
)
|
||||
: undefined,
|
||||
cwd: readOptionalQueryString(body.cwd),
|
||||
url: readOptionalQueryString(body.url),
|
||||
headers: typeof body.headers === 'object' && body.headers !== null
|
||||
? Object.fromEntries(
|
||||
Object.entries(body.headers as Record<string, unknown>).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === 'string',
|
||||
),
|
||||
)
|
||||
: undefined,
|
||||
envVars: Array.isArray(body.envVars)
|
||||
? body.envVars.filter((entry): entry is string => typeof entry === 'string')
|
||||
: undefined,
|
||||
bearerTokenEnvVar: readOptionalQueryString(body.bearerTokenEnvVar),
|
||||
envHttpHeaders: typeof body.envHttpHeaders === 'object' && body.envHttpHeaders !== null
|
||||
? Object.fromEntries(
|
||||
Object.entries(body.envHttpHeaders as Record<string, unknown>).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === 'string',
|
||||
),
|
||||
)
|
||||
: undefined,
|
||||
};
|
||||
};
|
||||
|
||||
const parseProvider = (value: unknown): LLMProvider => {
|
||||
const normalized = normalizeProviderParam(value);
|
||||
if (normalized === 'claude' || normalized === 'codex' || normalized === 'cursor' || normalized === 'gemini') {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
throw new AppError(`Unsupported provider "${normalized}".`, {
|
||||
code: 'UNSUPPORTED_PROVIDER',
|
||||
statusCode: 400,
|
||||
});
|
||||
};
|
||||
|
||||
const parseSessionRenameSummary = (payload: unknown): string => {
|
||||
if (!payload || typeof payload !== 'object') {
|
||||
throw new AppError('Request body must be an object.', {
|
||||
code: 'INVALID_REQUEST_BODY',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const body = payload as Record<string, unknown>;
|
||||
const summary = typeof body.summary === 'string' ? body.summary.trim() : '';
|
||||
if (!summary) {
|
||||
throw new AppError('Summary is required.', {
|
||||
code: 'INVALID_SESSION_SUMMARY',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
if (summary.length > 500) {
|
||||
throw new AppError('Summary must not exceed 500 characters.', {
|
||||
code: 'INVALID_SESSION_SUMMARY',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return summary;
|
||||
};
|
||||
|
||||
const parseSessionSearchQuery = (value: unknown): string => {
|
||||
const query = readOptionalQueryString(value) ?? '';
|
||||
if (query.length < 2) {
|
||||
throw new AppError('Query must be at least 2 characters', {
|
||||
code: 'INVALID_SEARCH_QUERY',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return query;
|
||||
};
|
||||
|
||||
const parseSessionSearchLimit = (value: unknown): number => {
|
||||
const raw = readOptionalQueryString(value);
|
||||
if (!raw) {
|
||||
return 50;
|
||||
}
|
||||
|
||||
const parsed = Number.parseInt(raw, 10);
|
||||
if (Number.isNaN(parsed)) {
|
||||
throw new AppError('limit must be a valid integer.', {
|
||||
code: 'INVALID_QUERY_PARAMETER',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return Math.max(1, Math.min(parsed, 100));
|
||||
};
|
||||
|
||||
router.get(
|
||||
'/:provider/auth/status',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const status = await providerAuthService.getProviderAuthStatus(provider);
|
||||
res.json(createApiSuccessResponse(status));
|
||||
}),
|
||||
);
|
||||
|
||||
// ----------------- MCP routes -----------------
|
||||
router.get(
|
||||
'/:provider/mcp/servers',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const workspacePath = readOptionalQueryString(req.query.workspacePath);
|
||||
const scope = parseMcpScope(req.query.scope);
|
||||
|
||||
if (scope) {
|
||||
const servers = await providerMcpService.listProviderMcpServersForScope(provider, scope, { workspacePath });
|
||||
res.json(createApiSuccessResponse({ provider, scope, servers }));
|
||||
return;
|
||||
}
|
||||
|
||||
const groupedServers = await providerMcpService.listProviderMcpServers(provider, { workspacePath });
|
||||
res.json(createApiSuccessResponse({ provider, scopes: groupedServers }));
|
||||
}),
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/:provider/mcp/servers',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const payload = parseMcpUpsertPayload(req.body);
|
||||
const server = await providerMcpService.upsertProviderMcpServer(provider, payload);
|
||||
res.status(201).json(createApiSuccessResponse({ server }));
|
||||
}),
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/:provider/mcp/servers/:name',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const scope = parseMcpScope(req.query.scope);
|
||||
const workspacePath = readOptionalQueryString(req.query.workspacePath);
|
||||
const result = await providerMcpService.removeProviderMcpServer(provider, {
|
||||
name: readPathParam(req.params.name, 'name'),
|
||||
scope,
|
||||
workspacePath,
|
||||
});
|
||||
res.json(createApiSuccessResponse(result));
|
||||
}),
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/mcp/servers/global',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const payload = parseMcpUpsertPayload(req.body);
|
||||
if (payload.scope === 'local') {
|
||||
throw new AppError('Global MCP add supports only "user" or "project" scopes.', {
|
||||
code: 'INVALID_GLOBAL_MCP_SCOPE',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const results = await providerMcpService.addMcpServerToAllProviders({
|
||||
...payload,
|
||||
scope: payload.scope === 'user' ? 'user' : 'project',
|
||||
});
|
||||
res.status(201).json(createApiSuccessResponse({ results }));
|
||||
}),
|
||||
);
|
||||
|
||||
// ----------------- Session routes -----------------
|
||||
router.delete(
|
||||
'/sessions/:sessionId',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const sessionId = parseSessionId(req.params.sessionId);
|
||||
const deletedFromDisk = parseOptionalBooleanQuery(req.query.deletedFromDisk, 'deletedFromDisk') ?? false;
|
||||
const result = await sessionsService.deleteSessionById(sessionId, deletedFromDisk);
|
||||
res.json(createApiSuccessResponse(result));
|
||||
}),
|
||||
);
|
||||
|
||||
router.put(
|
||||
'/sessions/:sessionId',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const sessionId = parseSessionId(req.params.sessionId);
|
||||
const summary = parseSessionRenameSummary(req.body);
|
||||
const result = sessionsService.renameSessionById(sessionId, summary);
|
||||
res.json(createApiSuccessResponse(result));
|
||||
}),
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/sessions/:sessionId/messages',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const sessionId = parseSessionId(req.params.sessionId);
|
||||
const limitRaw = readOptionalQueryString(req.query.limit);
|
||||
const offsetRaw = readOptionalQueryString(req.query.offset);
|
||||
|
||||
let limit: number | null = null;
|
||||
if (limitRaw !== undefined) {
|
||||
const parsedLimit = Number.parseInt(limitRaw, 10);
|
||||
if (Number.isNaN(parsedLimit) || parsedLimit < 0) {
|
||||
throw new AppError('limit must be a non-negative integer.', {
|
||||
code: 'INVALID_QUERY_PARAMETER',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
limit = parsedLimit;
|
||||
}
|
||||
|
||||
let offset = 0;
|
||||
if (offsetRaw !== undefined) {
|
||||
const parsedOffset = Number.parseInt(offsetRaw, 10);
|
||||
if (Number.isNaN(parsedOffset) || parsedOffset < 0) {
|
||||
throw new AppError('offset must be a non-negative integer.', {
|
||||
code: 'INVALID_QUERY_PARAMETER',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
offset = parsedOffset;
|
||||
}
|
||||
|
||||
const result = await sessionsService.fetchHistory(sessionId, {
|
||||
limit,
|
||||
offset,
|
||||
});
|
||||
res.json(result);
|
||||
}),
|
||||
);
|
||||
|
||||
router.get('/search/sessions', asyncHandler(async (req: Request, res: Response) => {
|
||||
const query = parseSessionSearchQuery(req.query.q);
|
||||
const limit = parseSessionSearchLimit(req.query.limit);
|
||||
|
||||
res.writeHead(200, {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
Connection: 'keep-alive',
|
||||
'X-Accel-Buffering': 'no',
|
||||
});
|
||||
|
||||
let closed = false;
|
||||
const abortController = new AbortController();
|
||||
req.on('close', () => {
|
||||
closed = true;
|
||||
abortController.abort();
|
||||
});
|
||||
|
||||
try {
|
||||
await sessionConversationsSearchService.search({
|
||||
query,
|
||||
limit,
|
||||
signal: abortController.signal,
|
||||
onProgress: ({ projectResult, totalMatches, scannedProjects, totalProjects }) => {
|
||||
if (closed) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (projectResult) {
|
||||
res.write(`event: result\ndata: ${JSON.stringify({ projectResult, totalMatches, scannedProjects, totalProjects })}\n\n`);
|
||||
return;
|
||||
}
|
||||
|
||||
res.write(`event: progress\ndata: ${JSON.stringify({ totalMatches, scannedProjects, totalProjects })}\n\n`);
|
||||
},
|
||||
});
|
||||
|
||||
if (!closed) {
|
||||
res.write('event: done\ndata: {}\n\n');
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error searching conversations:', error);
|
||||
if (!closed) {
|
||||
res.write(`event: error\ndata: ${JSON.stringify({ error: 'Search failed' })}\n\n`);
|
||||
}
|
||||
} finally {
|
||||
if (!closed) {
|
||||
res.end();
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
export default router;
|
||||
94
server/modules/providers/services/mcp.service.ts
Normal file
94
server/modules/providers/services/mcp.service.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
import os from 'node:os';
|
||||
|
||||
import { providerRegistry } from '@/modules/providers/provider.registry.js';
|
||||
import type { LLMProvider, McpScope, ProviderMcpServer, UpsertProviderMcpServerInput } from '@/shared/types.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
/** Cursor MCP is not supported on Windows hosts (no Cursor CLI integration). */
|
||||
function includeProviderInGlobalMcp(providerId: LLMProvider): boolean {
|
||||
if (providerId === 'cursor' && os.platform() === 'win32') {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
export const providerMcpService = {
|
||||
/**
|
||||
* Lists MCP servers for one provider grouped by supported scopes.
|
||||
*/
|
||||
async listProviderMcpServers(
|
||||
providerName: string,
|
||||
options?: { workspacePath?: string },
|
||||
): Promise<Record<McpScope, ProviderMcpServer[]>> {
|
||||
const provider = providerRegistry.resolveProvider(providerName);
|
||||
return provider.mcp.listServers(options);
|
||||
},
|
||||
|
||||
/**
|
||||
* Lists MCP servers for one provider scope.
|
||||
*/
|
||||
async listProviderMcpServersForScope(
|
||||
providerName: string,
|
||||
scope: McpScope,
|
||||
options?: { workspacePath?: string },
|
||||
): Promise<ProviderMcpServer[]> {
|
||||
const provider = providerRegistry.resolveProvider(providerName);
|
||||
return provider.mcp.listServersForScope(scope, options);
|
||||
},
|
||||
|
||||
/**
|
||||
* Adds or updates one provider MCP server.
|
||||
*/
|
||||
async upsertProviderMcpServer(
|
||||
providerName: string,
|
||||
input: UpsertProviderMcpServerInput,
|
||||
): Promise<ProviderMcpServer> {
|
||||
const provider = providerRegistry.resolveProvider(providerName);
|
||||
return provider.mcp.upsertServer(input);
|
||||
},
|
||||
|
||||
/**
|
||||
* Removes one provider MCP server.
|
||||
*/
|
||||
async removeProviderMcpServer(
|
||||
providerName: string,
|
||||
input: { name: string; scope?: McpScope; workspacePath?: string },
|
||||
): Promise<{ removed: boolean; provider: LLMProvider; name: string; scope: McpScope }> {
|
||||
const provider = providerRegistry.resolveProvider(providerName);
|
||||
return provider.mcp.removeServer(input);
|
||||
},
|
||||
|
||||
/**
|
||||
* Adds one HTTP/stdio MCP server to every provider.
|
||||
*/
|
||||
async addMcpServerToAllProviders(
|
||||
input: Omit<UpsertProviderMcpServerInput, 'scope'> & { scope?: Exclude<McpScope, 'local'> },
|
||||
): Promise<Array<{ provider: LLMProvider; created: boolean; error?: string }>> {
|
||||
if (input.transport !== 'stdio' && input.transport !== 'http') {
|
||||
throw new AppError('Global MCP add supports only "stdio" and "http".', {
|
||||
code: 'INVALID_GLOBAL_MCP_TRANSPORT',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const scope = input.scope ?? 'project';
|
||||
const results: Array<{ provider: LLMProvider; created: boolean; error?: string }> = [];
|
||||
const providers = providerRegistry.listProviders().filter((p) => includeProviderInGlobalMcp(p.id));
|
||||
for (const provider of providers) {
|
||||
try {
|
||||
await provider.mcp.upsertServer({ ...input, scope });
|
||||
results.push({ provider: provider.id, created: true });
|
||||
} catch (error) {
|
||||
results.push({
|
||||
provider: provider.id,
|
||||
created: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
},
|
||||
};
|
||||
26
server/modules/providers/services/provider-auth.service.ts
Normal file
26
server/modules/providers/services/provider-auth.service.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
import { providerRegistry } from '@/modules/providers/provider.registry.js';
|
||||
import type { LLMProvider, ProviderAuthStatus } from '@/shared/types.js';
|
||||
|
||||
export const providerAuthService = {
|
||||
/**
|
||||
* Resolves a provider and returns its installation/authentication status.
|
||||
*/
|
||||
async getProviderAuthStatus(providerName: string): Promise<ProviderAuthStatus> {
|
||||
const provider = providerRegistry.resolveProvider(providerName);
|
||||
return provider.auth.getStatus();
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns whether a provider runtime appears installed.
|
||||
* Falls back to true if status lookup itself fails so callers preserve the
|
||||
* original runtime error instead of replacing it with a status-check failure.
|
||||
*/
|
||||
async isProviderInstalled(providerName: LLMProvider): Promise<boolean> {
|
||||
try {
|
||||
const status = await this.getProviderAuthStatus(providerName);
|
||||
return status.installed;
|
||||
} catch {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
};
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,74 @@
|
||||
import { scanStateDb } from '@/modules/database/index.js';
|
||||
import { providerRegistry } from '@/modules/providers/provider.registry.js';
|
||||
import type { LLMProvider } from '@/shared/types.js';
|
||||
|
||||
type SessionSynchronizeResult = {
|
||||
processedByProvider: Record<LLMProvider, number>;
|
||||
failures: string[];
|
||||
};
|
||||
|
||||
/**
|
||||
* Orchestrates provider-specific session indexers and indexed-session lifecycle operations.
|
||||
*/
|
||||
export const sessionSynchronizerService = {
|
||||
/**
|
||||
* Runs all provider synchronizers and updates scan_state.last_scanned_at.
|
||||
*/
|
||||
async synchronizeSessions(): Promise<SessionSynchronizeResult> {
|
||||
const lastScanAt = scanStateDb.getLastScannedAt();
|
||||
const scanBoundary = new Date();
|
||||
const processedByProvider: Record<LLMProvider, number> = {
|
||||
claude: 0,
|
||||
codex: 0,
|
||||
cursor: 0,
|
||||
gemini: 0,
|
||||
};
|
||||
const failures: string[] = [];
|
||||
|
||||
const results = await Promise.allSettled(
|
||||
providerRegistry.listProviders().map(async (provider) => ({
|
||||
provider: provider.id,
|
||||
processed: await provider.sessionSynchronizer.synchronize(lastScanAt ?? undefined),
|
||||
}))
|
||||
);
|
||||
|
||||
for (const result of results) {
|
||||
if (result.status === 'fulfilled') {
|
||||
processedByProvider[result.value.provider] = result.value.processed;
|
||||
continue;
|
||||
}
|
||||
|
||||
const reason = result.reason instanceof Error ? result.reason.message : String(result.reason);
|
||||
failures.push(reason);
|
||||
}
|
||||
|
||||
if (failures.length === 0) {
|
||||
scanStateDb.updateLastScannedAt(scanBoundary);
|
||||
} else {
|
||||
console.warn(
|
||||
`[Sessions] Skipping scan_state cursor advance because ${failures.length} provider sync(s) failed.`,
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
processedByProvider,
|
||||
failures,
|
||||
};
|
||||
},
|
||||
|
||||
/**
|
||||
* Indexes one provider artifact file without running a full provider rescan.
|
||||
*/
|
||||
async synchronizeProviderFile(
|
||||
provider: LLMProvider,
|
||||
filePath: string
|
||||
): Promise<{ provider: LLMProvider; indexed: boolean; sessionId: string | null }> {
|
||||
const resolvedProvider = providerRegistry.resolveProvider(provider);
|
||||
const sessionId = await resolvedProvider.sessionSynchronizer.synchronizeFile(filePath);
|
||||
return {
|
||||
provider,
|
||||
indexed: Boolean(sessionId),
|
||||
sessionId,
|
||||
};
|
||||
},
|
||||
};
|
||||
283
server/modules/providers/services/sessions-watcher.service.ts
Normal file
283
server/modules/providers/services/sessions-watcher.service.ts
Normal file
@@ -0,0 +1,283 @@
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import { promises as fsPromises } from 'node:fs';
|
||||
|
||||
import chokidar, { type FSWatcher } from 'chokidar';
|
||||
|
||||
import { sessionSynchronizerService } from '@/modules/providers/services/session-synchronizer.service.js';
|
||||
import { WS_OPEN_STATE, connectedClients } from '@/modules/websocket/index.js';
|
||||
import type { LLMProvider } from '@/shared/types.js';
|
||||
import { getProjectsWithSessions } from '@/modules/projects/index.js';
|
||||
|
||||
type WatcherEventType = 'add' | 'change';
|
||||
|
||||
const PROVIDER_WATCH_PATHS: Array<{ provider: LLMProvider; rootPath: string }> = [
|
||||
{
|
||||
provider: 'claude',
|
||||
rootPath: path.join(os.homedir(), '.claude', 'projects'),
|
||||
},
|
||||
{
|
||||
provider: 'cursor',
|
||||
rootPath: path.join(os.homedir(), '.cursor', 'chats'),
|
||||
},
|
||||
{
|
||||
provider: 'codex',
|
||||
rootPath: path.join(os.homedir(), '.codex', 'sessions'),
|
||||
},
|
||||
{
|
||||
provider: 'gemini',
|
||||
rootPath: path.join(os.homedir(), '.gemini', 'sessions'),
|
||||
},
|
||||
{
|
||||
provider: 'gemini',
|
||||
rootPath: path.join(os.homedir(), '.gemini', 'tmp'),
|
||||
},
|
||||
];
|
||||
|
||||
const WATCHER_IGNORED_PATTERNS = [
|
||||
'**/node_modules/**',
|
||||
'**/.git/**',
|
||||
'**/dist/**',
|
||||
'**/build/**',
|
||||
'**/*.tmp',
|
||||
'**/*.swp',
|
||||
'**/.DS_Store',
|
||||
];
|
||||
|
||||
const PROJECTS_UPDATE_DEBOUNCE_MS = 500;
|
||||
const PROJECTS_UPDATE_MAX_WAIT_MS = 2_000;
|
||||
|
||||
const watchers: FSWatcher[] = [];
|
||||
|
||||
type PendingWatcherUpdate = {
|
||||
providers: Set<LLMProvider>;
|
||||
changeTypes: Set<WatcherEventType>;
|
||||
updatedSessionIds: Set<string>;
|
||||
};
|
||||
|
||||
let pendingWatcherUpdate: PendingWatcherUpdate | null = null;
|
||||
let pendingWatcherUpdateStartedAt: number | null = null;
|
||||
let pendingWatcherFlushTimer: ReturnType<typeof setTimeout> | null = null;
|
||||
let watcherRefreshInFlight = false;
|
||||
let watcherRescheduleAfterRefresh = false;
|
||||
|
||||
/**
|
||||
* Filters watcher events to provider-specific session artifact file types.
|
||||
*/
|
||||
function isWatcherTargetFile(provider: LLMProvider, filePath: string): boolean {
|
||||
if (provider === 'gemini') {
|
||||
return filePath.endsWith('.json') || filePath.endsWith('.jsonl');
|
||||
}
|
||||
|
||||
return filePath.endsWith('.jsonl');
|
||||
}
|
||||
|
||||
function clearPendingWatcherFlushTimer(): void {
|
||||
if (pendingWatcherFlushTimer) {
|
||||
clearTimeout(pendingWatcherFlushTimer);
|
||||
pendingWatcherFlushTimer = null;
|
||||
}
|
||||
}
|
||||
|
||||
function schedulePendingWatcherFlush(): void {
|
||||
if (!pendingWatcherUpdate) {
|
||||
return;
|
||||
}
|
||||
|
||||
const now = Date.now();
|
||||
if (pendingWatcherUpdateStartedAt === null) {
|
||||
pendingWatcherUpdateStartedAt = now;
|
||||
}
|
||||
|
||||
const elapsed = now - pendingWatcherUpdateStartedAt;
|
||||
const remainingMaxWait = Math.max(0, PROJECTS_UPDATE_MAX_WAIT_MS - elapsed);
|
||||
const delay = Math.min(PROJECTS_UPDATE_DEBOUNCE_MS, remainingMaxWait);
|
||||
|
||||
clearPendingWatcherFlushTimer();
|
||||
pendingWatcherFlushTimer = setTimeout(() => {
|
||||
void flushPendingWatcherUpdate();
|
||||
}, delay);
|
||||
}
|
||||
|
||||
function queuePendingWatcherUpdate(
|
||||
eventType: WatcherEventType,
|
||||
provider: LLMProvider,
|
||||
updatedSessionId: string | null
|
||||
): void {
|
||||
if (!pendingWatcherUpdate) {
|
||||
pendingWatcherUpdate = {
|
||||
providers: new Set<LLMProvider>(),
|
||||
changeTypes: new Set<WatcherEventType>(),
|
||||
updatedSessionIds: new Set<string>(),
|
||||
};
|
||||
}
|
||||
|
||||
pendingWatcherUpdate.providers.add(provider);
|
||||
pendingWatcherUpdate.changeTypes.add(eventType);
|
||||
if (updatedSessionId) {
|
||||
pendingWatcherUpdate.updatedSessionIds.add(updatedSessionId);
|
||||
}
|
||||
|
||||
schedulePendingWatcherFlush();
|
||||
}
|
||||
|
||||
async function flushPendingWatcherUpdate(): Promise<void> {
|
||||
clearPendingWatcherFlushTimer();
|
||||
|
||||
if (!pendingWatcherUpdate) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (watcherRefreshInFlight) {
|
||||
watcherRescheduleAfterRefresh = true;
|
||||
return;
|
||||
}
|
||||
|
||||
const queuedUpdate = pendingWatcherUpdate;
|
||||
pendingWatcherUpdate = null;
|
||||
pendingWatcherUpdateStartedAt = null;
|
||||
watcherRefreshInFlight = true;
|
||||
|
||||
try {
|
||||
const updatedProjects = await getProjectsWithSessions({ skipSynchronization: true });
|
||||
const changeTypes = Array.from(queuedUpdate.changeTypes);
|
||||
const watchProviders = Array.from(queuedUpdate.providers);
|
||||
const updatedSessionIds = Array.from(queuedUpdate.updatedSessionIds);
|
||||
|
||||
// Backward-compatible fields stay populated with the first queued values.
|
||||
const updateMessage = JSON.stringify({
|
||||
type: 'projects_updated',
|
||||
projects: updatedProjects,
|
||||
timestamp: new Date().toISOString(),
|
||||
changeType: changeTypes[0] ?? 'change',
|
||||
updatedSessionId: updatedSessionIds[0] ?? undefined,
|
||||
watchProvider: watchProviders[0] ?? undefined,
|
||||
changeTypes,
|
||||
updatedSessionIds,
|
||||
watchProviders,
|
||||
batched: true,
|
||||
});
|
||||
|
||||
connectedClients.forEach(client => {
|
||||
if (client.readyState === WS_OPEN_STATE) {
|
||||
client.send(updateMessage);
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error('Session watcher refresh failed while broadcasting projects_updated', { error: message });
|
||||
} finally {
|
||||
watcherRefreshInFlight = false;
|
||||
|
||||
if (pendingWatcherUpdate || watcherRescheduleAfterRefresh) {
|
||||
watcherRescheduleAfterRefresh = false;
|
||||
schedulePendingWatcherFlush();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles file watcher updates and triggers provider file-level synchronization.
|
||||
*/
|
||||
async function onUpdate(
|
||||
eventType: WatcherEventType,
|
||||
filePath: string,
|
||||
provider: LLMProvider
|
||||
): Promise<void> {
|
||||
if (!isWatcherTargetFile(provider, filePath)) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await sessionSynchronizerService.synchronizeProviderFile(provider, filePath);
|
||||
if (!result.indexed) {
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Session synchronization triggered by ${eventType} event for provider "${provider}"`, {
|
||||
filePath,
|
||||
sessionId: result.sessionId,
|
||||
});
|
||||
queuePendingWatcherUpdate(eventType, provider, result.sessionId);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error(`Session watcher sync failed for provider "${provider}"`, {
|
||||
eventType,
|
||||
filePath,
|
||||
error: message,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts provider filesystem watchers and performs initial DB synchronization.
|
||||
*/
|
||||
export async function initializeSessionsWatcher(): Promise<void> {
|
||||
console.log('Setting up session watchers');
|
||||
|
||||
const initialSync = await sessionSynchronizerService.synchronizeSessions();
|
||||
console.log('Initial session synchronization complete', {
|
||||
processedByProvider: initialSync.processedByProvider,
|
||||
failures: initialSync.failures,
|
||||
});
|
||||
|
||||
for (const { provider, rootPath } of PROVIDER_WATCH_PATHS) {
|
||||
try {
|
||||
await fsPromises.mkdir(rootPath, { recursive: true });
|
||||
|
||||
const watcher = chokidar.watch(rootPath, {
|
||||
ignored: WATCHER_IGNORED_PATTERNS,
|
||||
persistent: true,
|
||||
ignoreInitial: true,
|
||||
followSymlinks: false,
|
||||
depth: 6,
|
||||
usePolling: true,
|
||||
interval: 6_000,
|
||||
binaryInterval: 6_000,
|
||||
});
|
||||
|
||||
watcher
|
||||
.on('add', (filePath: string) => {
|
||||
void onUpdate('add', filePath, provider);
|
||||
})
|
||||
.on('change', (filePath: string) => {
|
||||
void onUpdate('change', filePath, provider);
|
||||
})
|
||||
.on('error', (error: unknown) => {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error(`Session watcher error for provider "${provider}"`, { error: message });
|
||||
});
|
||||
|
||||
watchers.push(watcher);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error(`Failed to initialize session watcher for provider "${provider}"`, {
|
||||
rootPath,
|
||||
error: message,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stops all active provider session watchers.
|
||||
*/
|
||||
export async function closeSessionsWatcher(): Promise<void> {
|
||||
clearPendingWatcherFlushTimer();
|
||||
|
||||
await Promise.all(
|
||||
watchers.map(async (watcher) => {
|
||||
try {
|
||||
await watcher.close();
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error('Failed to close session watcher', { error: message });
|
||||
}
|
||||
})
|
||||
);
|
||||
watchers.length = 0;
|
||||
pendingWatcherUpdate = null;
|
||||
pendingWatcherUpdateStartedAt = null;
|
||||
watcherRefreshInFlight = false;
|
||||
watcherRescheduleAfterRefresh = false;
|
||||
}
|
||||
130
server/modules/providers/services/sessions.service.ts
Normal file
130
server/modules/providers/services/sessions.service.ts
Normal file
@@ -0,0 +1,130 @@
|
||||
import fsp from 'node:fs/promises';
|
||||
|
||||
import { sessionsDb } from '@/modules/database/index.js';
|
||||
import { providerRegistry } from '@/modules/providers/provider.registry.js';
|
||||
import type {
|
||||
FetchHistoryOptions,
|
||||
FetchHistoryResult,
|
||||
LLMProvider,
|
||||
NormalizedMessage,
|
||||
} from '@/shared/types.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
/**
|
||||
* Removes one file if it exists.
|
||||
*/
|
||||
async function removeFileIfExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fsp.unlink(filePath);
|
||||
return true;
|
||||
} catch (error) {
|
||||
const code = (error as NodeJS.ErrnoException).code;
|
||||
if (code === 'ENOENT') {
|
||||
return false;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Application service for provider-backed session message operations.
|
||||
*
|
||||
* Callers pass a provider id and this service resolves the concrete provider
|
||||
* class, keeping normalization/history call sites decoupled from implementation
|
||||
* file layout.
|
||||
*/
|
||||
export const sessionsService = {
|
||||
/**
|
||||
* Lists provider ids that can load session history and normalize live messages.
|
||||
*/
|
||||
listProviderIds(): LLMProvider[] {
|
||||
return providerRegistry.listProviders().map((provider) => provider.id);
|
||||
},
|
||||
|
||||
/**
|
||||
* Normalizes one provider-native event into frontend session message events.
|
||||
*/
|
||||
normalizeMessage(
|
||||
providerName: string,
|
||||
raw: unknown,
|
||||
sessionId: string | null,
|
||||
): NormalizedMessage[] {
|
||||
return providerRegistry.resolveProvider(providerName).sessions.normalizeMessage(raw, sessionId);
|
||||
},
|
||||
|
||||
/**
|
||||
* Fetches persisted history by session id.
|
||||
*
|
||||
* Provider and provider-specific lookup hints are resolved from the indexed
|
||||
* session metadata in the database.
|
||||
*/
|
||||
fetchHistory(
|
||||
sessionId: string,
|
||||
options: Pick<FetchHistoryOptions, 'limit' | 'offset'> = {},
|
||||
): Promise<FetchHistoryResult> {
|
||||
const session = sessionsDb.getSessionById(sessionId);
|
||||
if (!session) {
|
||||
throw new AppError(`Session "${sessionId}" was not found.`, {
|
||||
code: 'SESSION_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
const provider = session.provider as LLMProvider;
|
||||
return providerRegistry.resolveProvider(provider).sessions.fetchHistory(sessionId, {
|
||||
limit: options.limit ?? null,
|
||||
offset: options.offset ?? 0,
|
||||
projectPath: session.project_path ?? '',
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Deletes one persisted session row by id.
|
||||
*
|
||||
* When `deletedFromDisk` is true and a session `jsonl_path` exists, the path
|
||||
* is deleted from disk before the DB row is removed.
|
||||
*/
|
||||
async deleteSessionById(
|
||||
sessionId: string,
|
||||
deletedFromDisk = false,
|
||||
): Promise<{ sessionId: string; deletedFromDisk: boolean }> {
|
||||
const session = sessionsDb.getSessionById(sessionId);
|
||||
if (!session) {
|
||||
throw new AppError(`Session "${sessionId}" was not found.`, {
|
||||
code: 'SESSION_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
let removedFromDisk = false;
|
||||
if (deletedFromDisk && session.jsonl_path) {
|
||||
removedFromDisk = await removeFileIfExists(session.jsonl_path);
|
||||
}
|
||||
|
||||
const deleted = sessionsDb.deleteSessionById(sessionId);
|
||||
if (!deleted) {
|
||||
throw new AppError(`Session "${sessionId}" was not found.`, {
|
||||
code: 'SESSION_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
return { sessionId, deletedFromDisk: removedFromDisk };
|
||||
},
|
||||
|
||||
/**
|
||||
* Renames one session by id without requiring the caller to pass provider.
|
||||
*/
|
||||
renameSessionById(sessionId: string, summary: string): { sessionId: string; summary: string } {
|
||||
const session = sessionsDb.getSessionById(sessionId);
|
||||
if (!session) {
|
||||
throw new AppError(`Session "${sessionId}" was not found.`, {
|
||||
code: 'SESSION_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
sessionsDb.updateSessionCustomName(sessionId, summary);
|
||||
return { sessionId, summary };
|
||||
},
|
||||
};
|
||||
27
server/modules/providers/shared/base/abstract.provider.ts
Normal file
27
server/modules/providers/shared/base/abstract.provider.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import type {
|
||||
IProvider,
|
||||
IProviderAuth,
|
||||
IProviderMcp,
|
||||
IProviderSessionSynchronizer,
|
||||
IProviderSessions,
|
||||
} from '@/shared/interfaces.js';
|
||||
import type { LLMProvider } from '@/shared/types.js';
|
||||
|
||||
/**
|
||||
* Shared provider base.
|
||||
*
|
||||
* Concrete providers must expose auth/MCP handlers and implement message
|
||||
* normalization/history loading because those behaviors depend on native
|
||||
* SDK/CLI formats.
|
||||
*/
|
||||
export abstract class AbstractProvider implements IProvider {
|
||||
readonly id: LLMProvider;
|
||||
abstract readonly mcp: IProviderMcp;
|
||||
abstract readonly auth: IProviderAuth;
|
||||
abstract readonly sessions: IProviderSessions;
|
||||
abstract readonly sessionSynchronizer: IProviderSessionSynchronizer;
|
||||
|
||||
protected constructor(id: LLMProvider) {
|
||||
this.id = id;
|
||||
}
|
||||
}
|
||||
151
server/modules/providers/shared/mcp/mcp.provider.ts
Normal file
151
server/modules/providers/shared/mcp/mcp.provider.ts
Normal file
@@ -0,0 +1,151 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import type { IProviderMcp } from '@/shared/interfaces.js';
|
||||
import type { LLMProvider, McpScope, McpTransport, ProviderMcpServer, UpsertProviderMcpServerInput } from '@/shared/types.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
const resolveWorkspacePath = (workspacePath?: string): string =>
|
||||
path.resolve(workspacePath ?? process.cwd());
|
||||
|
||||
const normalizeServerName = (name: string): string => {
|
||||
const normalized = name.trim();
|
||||
if (!normalized) {
|
||||
throw new AppError('MCP server name is required.', {
|
||||
code: 'MCP_SERVER_NAME_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return normalized;
|
||||
};
|
||||
|
||||
/**
|
||||
* Shared MCP provider for provider-specific config readers/writers.
|
||||
*/
|
||||
export abstract class McpProvider implements IProviderMcp {
|
||||
protected readonly provider: LLMProvider;
|
||||
protected readonly supportedScopes: McpScope[];
|
||||
protected readonly supportedTransports: McpTransport[];
|
||||
|
||||
protected constructor(
|
||||
provider: LLMProvider,
|
||||
supportedScopes: McpScope[],
|
||||
supportedTransports: McpTransport[],
|
||||
) {
|
||||
this.provider = provider;
|
||||
this.supportedScopes = supportedScopes;
|
||||
this.supportedTransports = supportedTransports;
|
||||
}
|
||||
|
||||
async listServers(options?: { workspacePath?: string }): Promise<Record<McpScope, ProviderMcpServer[]>> {
|
||||
const grouped: Record<McpScope, ProviderMcpServer[]> = {
|
||||
user: [],
|
||||
local: [],
|
||||
project: [],
|
||||
};
|
||||
|
||||
for (const scope of this.supportedScopes) {
|
||||
grouped[scope] = await this.listServersForScope(scope, options);
|
||||
}
|
||||
|
||||
return grouped;
|
||||
}
|
||||
|
||||
async listServersForScope(
|
||||
scope: McpScope,
|
||||
options?: { workspacePath?: string },
|
||||
): Promise<ProviderMcpServer[]> {
|
||||
if (!this.supportedScopes.includes(scope)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const workspacePath = resolveWorkspacePath(options?.workspacePath);
|
||||
const scopedServers = await this.readScopedServers(scope, workspacePath);
|
||||
return Object.entries(scopedServers)
|
||||
.map(([name, rawConfig]) => this.normalizeServerConfig(scope, name, rawConfig))
|
||||
.filter((entry): entry is ProviderMcpServer => entry !== null);
|
||||
}
|
||||
|
||||
async upsertServer(input: UpsertProviderMcpServerInput): Promise<ProviderMcpServer> {
|
||||
const scope = input.scope ?? 'project';
|
||||
this.assertScopeAndTransport(scope, input.transport);
|
||||
|
||||
const workspacePath = resolveWorkspacePath(input.workspacePath);
|
||||
const normalizedName = normalizeServerName(input.name);
|
||||
const scopedServers = await this.readScopedServers(scope, workspacePath);
|
||||
scopedServers[normalizedName] = this.buildServerConfig(input);
|
||||
await this.writeScopedServers(scope, workspacePath, scopedServers);
|
||||
|
||||
return {
|
||||
provider: this.provider,
|
||||
name: normalizedName,
|
||||
scope,
|
||||
transport: input.transport,
|
||||
command: input.command,
|
||||
args: input.args,
|
||||
env: input.env,
|
||||
cwd: input.cwd,
|
||||
url: input.url,
|
||||
headers: input.headers,
|
||||
envVars: input.envVars,
|
||||
bearerTokenEnvVar: input.bearerTokenEnvVar,
|
||||
envHttpHeaders: input.envHttpHeaders,
|
||||
};
|
||||
}
|
||||
|
||||
async removeServer(
|
||||
input: { name: string; scope?: McpScope; workspacePath?: string },
|
||||
): Promise<{ removed: boolean; provider: LLMProvider; name: string; scope: McpScope }> {
|
||||
const scope = input.scope ?? 'project';
|
||||
this.assertScope(scope);
|
||||
|
||||
const workspacePath = resolveWorkspacePath(input.workspacePath);
|
||||
const normalizedName = normalizeServerName(input.name);
|
||||
const scopedServers = await this.readScopedServers(scope, workspacePath);
|
||||
const removed = Object.prototype.hasOwnProperty.call(scopedServers, normalizedName);
|
||||
if (removed) {
|
||||
delete scopedServers[normalizedName];
|
||||
await this.writeScopedServers(scope, workspacePath, scopedServers);
|
||||
}
|
||||
|
||||
return { removed, provider: this.provider, name: normalizedName, scope };
|
||||
}
|
||||
|
||||
protected abstract readScopedServers(
|
||||
scope: McpScope,
|
||||
workspacePath: string,
|
||||
): Promise<Record<string, unknown>>;
|
||||
|
||||
protected abstract writeScopedServers(
|
||||
scope: McpScope,
|
||||
workspacePath: string,
|
||||
servers: Record<string, unknown>,
|
||||
): Promise<void>;
|
||||
|
||||
protected abstract buildServerConfig(input: UpsertProviderMcpServerInput): Record<string, unknown>;
|
||||
|
||||
protected abstract normalizeServerConfig(
|
||||
scope: McpScope,
|
||||
name: string,
|
||||
rawConfig: unknown,
|
||||
): ProviderMcpServer | null;
|
||||
|
||||
protected assertScope(scope: McpScope): void {
|
||||
if (!this.supportedScopes.includes(scope)) {
|
||||
throw new AppError(`Provider "${this.provider}" does not support "${scope}" MCP scope.`, {
|
||||
code: 'MCP_SCOPE_NOT_SUPPORTED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
protected assertScopeAndTransport(scope: McpScope, transport: McpTransport): void {
|
||||
this.assertScope(scope);
|
||||
if (!this.supportedTransports.includes(transport)) {
|
||||
throw new AppError(`Provider "${this.provider}" does not support "${transport}" MCP transport.`, {
|
||||
code: 'MCP_TRANSPORT_NOT_SUPPORTED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
293
server/modules/providers/tests/mcp.test.ts
Normal file
293
server/modules/providers/tests/mcp.test.ts
Normal file
@@ -0,0 +1,293 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import fs from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import test from 'node:test';
|
||||
|
||||
import TOML from '@iarna/toml';
|
||||
|
||||
import { providerMcpService } from '@/modules/providers/services/mcp.service.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
const patchHomeDir = (nextHomeDir: string) => {
|
||||
const original = os.homedir;
|
||||
(os as any).homedir = () => nextHomeDir;
|
||||
return () => {
|
||||
(os as any).homedir = original;
|
||||
};
|
||||
};
|
||||
|
||||
const readJson = async (filePath: string): Promise<Record<string, unknown>> => {
|
||||
const content = await fs.readFile(filePath, 'utf8');
|
||||
return JSON.parse(content) as Record<string, unknown>;
|
||||
};
|
||||
|
||||
/**
|
||||
* This test covers Claude MCP support for all scopes (user/local/project) and all transports (stdio/http/sse),
|
||||
* including add, update/list, and remove operations.
|
||||
*/
|
||||
test('providerMcpService handles claude MCP scopes/transports with file-backed persistence', { concurrency: false }, async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-mcp-claude-'));
|
||||
const workspacePath = path.join(tempRoot, 'workspace');
|
||||
await fs.mkdir(workspacePath, { recursive: true });
|
||||
|
||||
const restoreHomeDir = patchHomeDir(tempRoot);
|
||||
try {
|
||||
await providerMcpService.upsertProviderMcpServer('claude', {
|
||||
name: 'claude-user-stdio',
|
||||
scope: 'user',
|
||||
transport: 'stdio',
|
||||
command: 'npx',
|
||||
args: ['-y', 'my-server'],
|
||||
env: { API_KEY: 'secret' },
|
||||
});
|
||||
|
||||
await providerMcpService.upsertProviderMcpServer('claude', {
|
||||
name: 'claude-local-http',
|
||||
scope: 'local',
|
||||
transport: 'http',
|
||||
url: 'https://example.com/mcp',
|
||||
headers: { Authorization: 'Bearer token' },
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
await providerMcpService.upsertProviderMcpServer('claude', {
|
||||
name: 'claude-project-sse',
|
||||
scope: 'project',
|
||||
transport: 'sse',
|
||||
url: 'https://example.com/sse',
|
||||
headers: { 'X-API-Key': 'abc' },
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
const grouped = await providerMcpService.listProviderMcpServers('claude', { workspacePath });
|
||||
assert.ok(grouped.user.some((server) => server.name === 'claude-user-stdio' && server.transport === 'stdio'));
|
||||
assert.ok(grouped.local.some((server) => server.name === 'claude-local-http' && server.transport === 'http'));
|
||||
assert.ok(grouped.project.some((server) => server.name === 'claude-project-sse' && server.transport === 'sse'));
|
||||
|
||||
// update behavior is the same upsert route with same name
|
||||
await providerMcpService.upsertProviderMcpServer('claude', {
|
||||
name: 'claude-project-sse',
|
||||
scope: 'project',
|
||||
transport: 'sse',
|
||||
url: 'https://example.com/sse-updated',
|
||||
headers: { 'X-API-Key': 'updated' },
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
const projectConfig = await readJson(path.join(workspacePath, '.mcp.json'));
|
||||
const projectServers = projectConfig.mcpServers as Record<string, unknown>;
|
||||
const projectServer = projectServers['claude-project-sse'] as Record<string, unknown>;
|
||||
assert.equal(projectServer.url, 'https://example.com/sse-updated');
|
||||
|
||||
const removeResult = await providerMcpService.removeProviderMcpServer('claude', {
|
||||
name: 'claude-local-http',
|
||||
scope: 'local',
|
||||
workspacePath,
|
||||
});
|
||||
assert.equal(removeResult.removed, true);
|
||||
} finally {
|
||||
restoreHomeDir();
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* This test covers Codex MCP support for user/project scopes, stdio/http formats,
|
||||
* and validation for unsupported scope/transport combinations.
|
||||
*/
|
||||
test('providerMcpService handles codex MCP TOML config and capability validation', { concurrency: false }, async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-mcp-codex-'));
|
||||
const workspacePath = path.join(tempRoot, 'workspace');
|
||||
await fs.mkdir(workspacePath, { recursive: true });
|
||||
|
||||
const restoreHomeDir = patchHomeDir(tempRoot);
|
||||
try {
|
||||
await providerMcpService.upsertProviderMcpServer('codex', {
|
||||
name: 'codex-user-stdio',
|
||||
scope: 'user',
|
||||
transport: 'stdio',
|
||||
command: 'python',
|
||||
args: ['server.py'],
|
||||
env: { API_KEY: 'x' },
|
||||
envVars: ['API_KEY'],
|
||||
cwd: '/tmp',
|
||||
});
|
||||
|
||||
await providerMcpService.upsertProviderMcpServer('codex', {
|
||||
name: 'codex-project-http',
|
||||
scope: 'project',
|
||||
transport: 'http',
|
||||
url: 'https://codex.example.com/mcp',
|
||||
headers: { 'X-Custom-Header': 'value' },
|
||||
envHttpHeaders: { 'X-API-Key': 'MY_API_KEY_ENV' },
|
||||
bearerTokenEnvVar: 'MY_API_TOKEN',
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
const userTomlPath = path.join(tempRoot, '.codex', 'config.toml');
|
||||
const userConfig = TOML.parse(await fs.readFile(userTomlPath, 'utf8')) as Record<string, unknown>;
|
||||
const userServers = userConfig.mcp_servers as Record<string, unknown>;
|
||||
const userStdio = userServers['codex-user-stdio'] as Record<string, unknown>;
|
||||
assert.equal(userStdio.command, 'python');
|
||||
|
||||
const projectTomlPath = path.join(workspacePath, '.codex', 'config.toml');
|
||||
const projectConfig = TOML.parse(await fs.readFile(projectTomlPath, 'utf8')) as Record<string, unknown>;
|
||||
const projectServers = projectConfig.mcp_servers as Record<string, unknown>;
|
||||
const projectHttp = projectServers['codex-project-http'] as Record<string, unknown>;
|
||||
assert.equal(projectHttp.url, 'https://codex.example.com/mcp');
|
||||
|
||||
await assert.rejects(
|
||||
providerMcpService.upsertProviderMcpServer('codex', {
|
||||
name: 'codex-local',
|
||||
scope: 'local',
|
||||
transport: 'stdio',
|
||||
command: 'node',
|
||||
}),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'MCP_SCOPE_NOT_SUPPORTED' &&
|
||||
error.statusCode === 400,
|
||||
);
|
||||
|
||||
await assert.rejects(
|
||||
providerMcpService.upsertProviderMcpServer('codex', {
|
||||
name: 'codex-sse',
|
||||
scope: 'project',
|
||||
transport: 'sse',
|
||||
url: 'https://example.com/sse',
|
||||
workspacePath,
|
||||
}),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'MCP_TRANSPORT_NOT_SUPPORTED' &&
|
||||
error.statusCode === 400,
|
||||
);
|
||||
} finally {
|
||||
restoreHomeDir();
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* This test covers Gemini/Cursor MCP JSON formats and user/project scope persistence.
|
||||
*/
|
||||
test('providerMcpService handles gemini and cursor MCP JSON config formats', { concurrency: false }, async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-mcp-gc-'));
|
||||
const workspacePath = path.join(tempRoot, 'workspace');
|
||||
await fs.mkdir(workspacePath, { recursive: true });
|
||||
|
||||
const restoreHomeDir = patchHomeDir(tempRoot);
|
||||
try {
|
||||
await providerMcpService.upsertProviderMcpServer('gemini', {
|
||||
name: 'gemini-stdio',
|
||||
scope: 'user',
|
||||
transport: 'stdio',
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
env: { TOKEN: '$TOKEN' },
|
||||
cwd: './server',
|
||||
});
|
||||
|
||||
await providerMcpService.upsertProviderMcpServer('gemini', {
|
||||
name: 'gemini-http',
|
||||
scope: 'project',
|
||||
transport: 'http',
|
||||
url: 'https://gemini.example.com/mcp',
|
||||
headers: { Authorization: 'Bearer token' },
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
await providerMcpService.upsertProviderMcpServer('cursor', {
|
||||
name: 'cursor-stdio',
|
||||
scope: 'project',
|
||||
transport: 'stdio',
|
||||
command: 'npx',
|
||||
args: ['-y', 'mcp-server'],
|
||||
env: { API_KEY: 'value' },
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
await providerMcpService.upsertProviderMcpServer('cursor', {
|
||||
name: 'cursor-http',
|
||||
scope: 'user',
|
||||
transport: 'http',
|
||||
url: 'http://localhost:3333/mcp',
|
||||
headers: { API_KEY: 'value' },
|
||||
});
|
||||
|
||||
const geminiUserConfig = await readJson(path.join(tempRoot, '.gemini', 'settings.json'));
|
||||
const geminiUserServer = (geminiUserConfig.mcpServers as Record<string, unknown>)['gemini-stdio'] as Record<string, unknown>;
|
||||
assert.equal(geminiUserServer.command, 'node');
|
||||
assert.equal(geminiUserServer.type, undefined);
|
||||
|
||||
const geminiProjectConfig = await readJson(path.join(workspacePath, '.gemini', 'settings.json'));
|
||||
const geminiProjectServer = (geminiProjectConfig.mcpServers as Record<string, unknown>)['gemini-http'] as Record<string, unknown>;
|
||||
assert.equal(geminiProjectServer.type, 'http');
|
||||
|
||||
const cursorUserConfig = await readJson(path.join(tempRoot, '.cursor', 'mcp.json'));
|
||||
const cursorHttpServer = (cursorUserConfig.mcpServers as Record<string, unknown>)['cursor-http'] as Record<string, unknown>;
|
||||
assert.equal(cursorHttpServer.url, 'http://localhost:3333/mcp');
|
||||
assert.equal(cursorHttpServer.type, undefined);
|
||||
} finally {
|
||||
restoreHomeDir();
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* This test covers the global MCP adder requirement: only http/stdio are allowed and
|
||||
* one payload is written to all providers.
|
||||
*/
|
||||
test('providerMcpService global adder writes to all providers and rejects unsupported transports', { concurrency: false }, async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-mcp-global-'));
|
||||
const workspacePath = path.join(tempRoot, 'workspace');
|
||||
await fs.mkdir(workspacePath, { recursive: true });
|
||||
|
||||
const restoreHomeDir = patchHomeDir(tempRoot);
|
||||
try {
|
||||
const globalResult = await providerMcpService.addMcpServerToAllProviders({
|
||||
name: 'global-http',
|
||||
scope: 'project',
|
||||
transport: 'http',
|
||||
url: 'https://global.example.com/mcp',
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
const expectCursorGlobal = process.platform !== 'win32';
|
||||
assert.equal(globalResult.length, expectCursorGlobal ? 4 : 3);
|
||||
assert.ok(globalResult.every((entry) => entry.created === true));
|
||||
|
||||
const claudeProject = await readJson(path.join(workspacePath, '.mcp.json'));
|
||||
assert.ok((claudeProject.mcpServers as Record<string, unknown>)['global-http']);
|
||||
|
||||
const codexProject = TOML.parse(await fs.readFile(path.join(workspacePath, '.codex', 'config.toml'), 'utf8')) as Record<string, unknown>;
|
||||
assert.ok((codexProject.mcp_servers as Record<string, unknown>)['global-http']);
|
||||
|
||||
const geminiProject = await readJson(path.join(workspacePath, '.gemini', 'settings.json'));
|
||||
assert.ok((geminiProject.mcpServers as Record<string, unknown>)['global-http']);
|
||||
|
||||
if (expectCursorGlobal) {
|
||||
const cursorProject = await readJson(path.join(workspacePath, '.cursor', 'mcp.json'));
|
||||
assert.ok((cursorProject.mcpServers as Record<string, unknown>)['global-http']);
|
||||
}
|
||||
|
||||
await assert.rejects(
|
||||
providerMcpService.addMcpServerToAllProviders({
|
||||
name: 'global-sse',
|
||||
scope: 'project',
|
||||
transport: 'sse',
|
||||
url: 'https://example.com/sse',
|
||||
workspacePath,
|
||||
}),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'INVALID_GLOBAL_MCP_TRANSPORT' &&
|
||||
error.statusCode === 400,
|
||||
);
|
||||
} finally {
|
||||
restoreHomeDir();
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user