├── workspace-server ├── src │ ├── __tests__ │ │ ├── mocks │ │ │ ├── wasm.js │ │ │ ├── marked.js │ │ │ └── jsdom.ts │ │ ├── setup.ts │ │ ├── utils │ │ │ ├── paths.test.ts │ │ │ ├── DriveQueryBuilder.test.ts │ │ │ ├── IdUtils.test.ts │ │ │ ├── validation.test.ts │ │ │ └── logger.test.ts │ │ ├── services │ │ │ ├── TimeService.test.ts │ │ │ └── PeopleService.test.ts │ │ └── auth │ │ │ ├── token-storage │ │ │ ├── oauth-credential-storage.test.ts │ │ │ ├── base-token-storage.test.ts │ │ │ └── hybrid-token-storage.test.ts │ │ │ └── AuthManager.test.ts │ ├── utils │ │ ├── constants.ts │ │ ├── paths.ts │ │ ├── IdUtils.ts │ │ ├── GaxiosConfig.ts │ │ ├── logger.ts │ │ ├── open-wrapper.ts │ │ ├── DriveQueryBuilder.ts │ │ ├── validation.ts │ │ ├── MimeHelper.ts │ │ ├── secure-browser-launcher.ts │ │ └── markdownToDocsRequests.ts │ ├── auth │ │ └── token-storage │ │ │ ├── index.ts │ │ │ ├── types.ts │ │ │ ├── base-token-storage.ts │ │ │ ├── oauth-credential-storage.ts │ │ │ ├── hybrid-token-storage.ts │ │ │ ├── file-token-storage.ts │ │ │ └── keychain-token-storage.ts │ └── services │ │ ├── TimeService.ts │ │ ├── PeopleService.ts │ │ └── SlidesService.ts ├── tsconfig.test.json ├── tsconfig.json ├── jest.config.js ├── package.json ├── esbuild.auth-utils.js ├── esbuild.config.js ├── .github │ └── workflows │ │ ├── release.yml │ │ └── ci.yml └── WORKSPACE-Context.md ├── cloud_function └── package.json ├── SECURITY.md ├── gemini-extension.json ├── .gitignore ├── tsconfig.json ├── commands ├── drive │ └── search.toml ├── gmail │ └── search.toml └── calendar │ ├── get-schedule.toml │ └── clear-schedule.toml ├── scripts ├── list-deps.js ├── start.js ├── set-version.js ├── utils │ └── dependencies.js ├── auth-utils.js └── release.js ├── .github ├── dependabot.yml └── workflows │ ├── release.yml │ ├── deploy-docs.yml │ ├── weekly-preview.yml │ └── ci.yml ├── docs ├── .vitepress │ └── config.mts ├── release.md ├── index.md └── development.md ├── GEMINI.md ├── jest.config.js ├── 
package.json ├── README.md ├── eslint.config.js └── CONTRIBUTING.md /workspace-server/src/__tests__/mocks/wasm.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | module.exports = {}; -------------------------------------------------------------------------------- /workspace-server/src/utils/constants.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | export const GMAIL_SEARCH_MAX_RESULTS = 100; 8 | -------------------------------------------------------------------------------- /workspace-server/tsconfig.test.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "compilerOptions": { 4 | "strict": false, 5 | "noImplicitAny": false 6 | }, 7 | "include": [ 8 | "src/**/*.test.ts", 9 | "src/**/*.spec.ts" 10 | ] 11 | } -------------------------------------------------------------------------------- /cloud_function/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "oauth-handler", 3 | "version": "1.0.0", 4 | "main": "index.js", 5 | "dependencies": { 6 | "@google-cloud/functions-framework": "^3.0.0", 7 | "@google-cloud/secret-manager": "^5.0.0", 8 | "axios": "^1.0.0" 9 | } 10 | } -------------------------------------------------------------------------------- /workspace-server/src/auth/token-storage/index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | export * from './types'; 8 | export * from './base-token-storage'; 9 | export * from './file-token-storage'; 10 | export * from './hybrid-token-storage'; 
11 | -------------------------------------------------------------------------------- /workspace-server/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../tsconfig.json", 3 | "compilerOptions": { 4 | "rootDir": "./src", 5 | "outDir": "./dist" 6 | }, 7 | "include": [ 8 | "src/**/*" 9 | ], 10 | "exclude": [ 11 | "dist", 12 | "node_modules", 13 | "src/**/*.test.ts", 14 | "src/**/*.spec.ts" 15 | ] 16 | } -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Reporting Security Issues 2 | 3 | To report a security issue, please use [https://g.co/vulnz](https://g.co/vulnz). 4 | We use g.co/vulnz for our intake, and do coordination and disclosure here on 5 | GitHub (including using GitHub Security Advisory). The Google Security Team will 6 | respond within 5 working days of your report on g.co/vulnz. 7 | -------------------------------------------------------------------------------- /gemini-extension.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "google-workspace", 3 | "version": "0.0.2", 4 | "contextFileName": "workspace-server${/}WORKSPACE-Context.md", 5 | "mcpServers": { 6 | "google-workspace": { 7 | "program": "./workspace-server/dist/index.js", 8 | "command": "node", 9 | "args": [ 10 | "scripts${/}start.js" 11 | ], 12 | "cwd": "${extensionPath}" 13 | } 14 | } 15 | } -------------------------------------------------------------------------------- /workspace-server/jest.config.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | /** @type {import('jest').Config} */ 8 | module.exports = { 9 | // This workspace's tests are configured in the root jest.config.js 10 | // as part of 
the 'projects' array. This file is kept for backwards 11 | // compatibility and workspace-specific overrides if needed. 12 | }; -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # macOS 2 | .DS_Store 3 | 4 | # logs 5 | logs 6 | 7 | # Dependencies 8 | node_modules/ 9 | 10 | # Build outputs 11 | dist/ 12 | 13 | # Environment files 14 | .env 15 | .env.local 16 | 17 | # Logs 18 | *.log 19 | 20 | # Coverage 21 | coverage/ 22 | 23 | # Editor files 24 | *.swp 25 | *.swo 26 | *~ 27 | .idea/ 28 | .vscode/ 29 | 30 | # Auth tokens 31 | token.json 32 | gemini-cli-workspace-token.json 33 | .gemini-cli-workspace-master-key 34 | 35 | .gemini/ 36 | commit_message.txt 37 | 38 | # Release directory 39 | release/ 40 | 41 | # VitePress 42 | docs/.vitepress/dist 43 | docs/.vitepress/cache -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2020", 4 | "module": "commonjs", 5 | "esModuleInterop": true, 6 | "allowSyntheticDefaultImports": true, 7 | "forceConsistentCasingInFileNames": true, 8 | "strict": true, 9 | "skipLibCheck": true, 10 | "resolveJsonModule": true, 11 | "declaration": true, 12 | "declarationMap": true, 13 | "sourceMap": true 14 | }, 15 | "include": [ 16 | "workspace-server/src/**/*" 17 | ], 18 | "exclude": [ 19 | "node_modules", 20 | "**/node_modules", 21 | "**/dist", 22 | "**/*.test.ts", 23 | "**/*.spec.ts" 24 | ] 25 | } -------------------------------------------------------------------------------- /commands/drive/search.toml: -------------------------------------------------------------------------------- 1 | description = "Searches Google Drive for files matching a query." 2 | prompt = """ 3 | You are tasked with searching Google Drive for files. 
4 | 5 | 1) The user's search query is: {{args}} 6 | 2) Call the `drive.search` tool, passing the user's query as the `query` argument. 7 | 3) The `drive.search` tool returns a JSON object containing a list of files. 8 | 4) Format the result into a clear, readable list, showing only the `name` and `id` for each file found. 9 | 5) If no files are found, state that clearly. 10 | 11 | Example Output Format: 12 | > File Name: My Document (ID: 1a2b3c4d5e6f7g8h9i0j) 13 | > File Name: Project Report (ID: k1l2m3n4o5p6q7r8s9t0) 14 | """ 15 | -------------------------------------------------------------------------------- /scripts/list-deps.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | const path = require('node:path'); 8 | const { getTransitiveDependencies } = require('./utils/dependencies'); 9 | 10 | const root = path.join(__dirname, '..'); 11 | const targetPackages = process.argv.slice(2); 12 | 13 | if (targetPackages.length === 0) { 14 | console.log('Usage: node scripts/list-deps.js [package2...]'); 15 | process.exit(1); 16 | } 17 | 18 | console.log(`Analyzing dependencies for: ${targetPackages.join(', ')}`); 19 | 20 | const allDeps = getTransitiveDependencies(root, targetPackages); 21 | 22 | console.log('\nTransitive Dependencies:'); 23 | Array.from(allDeps).sort().forEach(dep => { 24 | console.log(`- ${dep}`); 25 | }); 26 | -------------------------------------------------------------------------------- /workspace-server/src/__tests__/setup.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | // Test setup file for Jest 8 | // This file runs before all tests 9 | import { jest } from '@jest/globals'; 10 | 11 | // Mock console methods to reduce noise in test output 12 | 
global.console = { 13 | ...console, 14 | // Keep errors and warnings 15 | error: jest.fn(console.error), 16 | warn: jest.fn(console.warn), 17 | // Silence other logs during tests unless explicitly needed 18 | log: jest.fn(), 19 | info: jest.fn(), 20 | debug: jest.fn(), 21 | }; 22 | 23 | // Set test environment variables 24 | process.env.NODE_ENV = 'test'; 25 | 26 | // Increase timeout for integration tests if needed 27 | jest.setTimeout(10000); 28 | 29 | // Clean up after all tests 30 | afterAll(() => { 31 | jest.clearAllMocks(); 32 | jest.restoreAllMocks(); 33 | }); -------------------------------------------------------------------------------- /commands/gmail/search.toml: -------------------------------------------------------------------------------- 1 | description = "Searches for emails in Gmail matching a query." 2 | prompt = """ 3 | You are tasked with searching Gmail for emails. 4 | 5 | 1) The user's search query is: {{args}} 6 | 2) Call the `gmail.search` tool, passing the user's query as the `query` argument. 7 | 3) The `gmail.search` tool returns a JSON object containing a list of emails (id and threadId). 8 | 4) For each email found, you MUST call `gmail.get` with the `messageId` and `format='metadata'` to get the From, Subject, and Snippet. 9 | 5) Format the result into a clear, readable list. 10 | 6) If no emails are found, state that clearly. 11 | 12 | Example Output Format: 13 | > From: sender@example.com 14 | Subject: Meeting Reminder 15 | Snippet: Don't forget about the meeting tomorrow... 16 | --- 17 | > From: newsletter@example.com 18 | Subject: Weekly News 19 | Snippet: Here are the top stories for this week... 
20 | """ 21 | -------------------------------------------------------------------------------- /workspace-server/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "workspace-server", 3 | "version": "0.0.3", 4 | "description": "", 5 | "main": "dist/index.js", 6 | "scripts": { 7 | "test": "cd .. && node --max-old-space-size=4096 node_modules/jest/bin/jest.js --runInBand --verbose", 8 | "test:watch": "cd .. && jest --watch", 9 | "test:coverage": "cd .. && node --max-old-space-size=4096 node_modules/jest/bin/jest.js --coverage", 10 | "test:ci": "cd .. && node --max-old-space-size=4096 node_modules/jest/bin/jest.js --ci --coverage --maxWorkers=2", 11 | "start": "ts-node src/index.ts", 12 | "clean": "rm -rf dist node_modules", 13 | "build": "node esbuild.config.js", 14 | "build:auth-utils": "node esbuild.auth-utils.js" 15 | }, 16 | "keywords": [], 17 | "author": "Allen Hutchison", 18 | "license": "Apache-2.0", 19 | "type": "commonjs", 20 | "devDependencies": { 21 | "esbuild": "^0.27.1" 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "npm" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | open-pull-requests-limit: 5 8 | labels: 9 | - "dependencies" 10 | - "npm" 11 | commit-message: 12 | prefix: "chore" 13 | include: "scope" 14 | 15 | - package-ecosystem: "npm" 16 | directory: "/workspace-server" 17 | schedule: 18 | interval: "weekly" 19 | open-pull-requests-limit: 5 20 | labels: 21 | - "dependencies" 22 | - "npm" 23 | commit-message: 24 | prefix: "chore" 25 | include: "scope" 26 | 27 | - package-ecosystem: "github-actions" 28 | directory: "/" 29 | schedule: 30 | interval: "weekly" 31 | open-pull-requests-limit: 5 32 | labels: 33 | - "dependencies" 34 | - "github-actions" 35 | commit-message: 
36 | prefix: "chore" 37 | include: "scope" 38 | -------------------------------------------------------------------------------- /workspace-server/esbuild.auth-utils.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | const esbuild = require('esbuild'); 8 | const path = require('node:path'); 9 | 10 | async function buildAuthUtils() { 11 | try { 12 | await esbuild.build({ 13 | entryPoints: ['src/auth/token-storage/oauth-credential-storage.ts'], 14 | bundle: true, 15 | platform: 'node', 16 | target: 'node20', 17 | outfile: 'dist/auth-utils.js', 18 | minify: true, 19 | sourcemap: true, 20 | external: [ 21 | 'keytar', // keytar is a native module and should not be bundled 22 | ], 23 | format: 'cjs', 24 | logLevel: 'info', 25 | }); 26 | 27 | console.log('Auth Utils build completed successfully!'); 28 | } catch (error) { 29 | console.error('Auth Utils build failed:', error); 30 | process.exit(1); 31 | } 32 | } 33 | 34 | buildAuthUtils(); 35 | -------------------------------------------------------------------------------- /workspace-server/src/utils/paths.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import path from 'node:path'; 8 | import * as fs from 'node:fs'; 9 | 10 | function findProjectRoot(): string { 11 | let dir = __dirname; 12 | while (dir !== path.dirname(dir)) { 13 | if (fs.existsSync(path.join(dir, 'gemini-extension.json'))) { 14 | return dir; 15 | } 16 | dir = path.dirname(dir); 17 | } 18 | throw new Error( 19 | `Could not find project root containing gemini-extension.json. Traversed up from ${__dirname}.`, 20 | ); 21 | } 22 | 23 | // Construct an absolute path to the project root. 
24 | export const PROJECT_ROOT = findProjectRoot(); 25 | export const ENCRYPTED_TOKEN_PATH = path.join( 26 | PROJECT_ROOT, 27 | 'gemini-cli-workspace-token.json', 28 | ); 29 | export const ENCRYPTION_MASTER_KEY_PATH = path.join( 30 | PROJECT_ROOT, 31 | '.gemini-cli-workspace-master-key', 32 | ); 33 | -------------------------------------------------------------------------------- /commands/calendar/get-schedule.toml: -------------------------------------------------------------------------------- 1 | description = "Show your schedule for today, or the date specified" 2 | prompt = """ 3 | Please show me my schedule for today. To do that use the following steps: 4 | 5 | 1) Use the people.getMe tool to get my information. 6 | 2) Use the time.getTimeZone to get my local time zone. 7 | 3) Either use the time.getCurrentDate or the user supplied date here: {{args}} to understand the current date. 8 | 4) Call calendar.list to identify the calendar associated with me from step 1. 9 | 5) Call calendar.listEvents to identify events on my calendar for the date we determined in step 3. 10 | 6) Format the list of events in my local time zone from step 2 in the following format: 11 | 12 | > HH:MM - HH:MM: Event Title (attendance status) - Event Location 13 | > HH:MM - HH:MM: Event 2 Title (attendance status) - Event Location 14 | 15 | Note: If a calendar event has conflicting timezone information, prioritize the dateTime field over the timeZone field. 
16 | 17 | """ 18 | -------------------------------------------------------------------------------- /docs/.vitepress/config.mts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vitepress' 2 | 3 | // https://vitepress.dev/reference/site-config 4 | export default defineConfig({ 5 | base: '/workspace/', 6 | title: "Gemini Workspace Extension", 7 | description: "Documentation for the Google Workspace Server Extension", 8 | themeConfig: { 9 | // https://vitepress.dev/reference/default-theme-config 10 | nav: [ 11 | { text: 'Home', link: '/' }, 12 | { text: 'Development', link: '/development' }, 13 | { text: 'Release', link: '/release' } 14 | ], 15 | 16 | sidebar: [ 17 | { 18 | text: 'Documentation', 19 | items: [ 20 | { text: 'Overview', link: '/' }, 21 | { text: 'Development Guide', link: '/development' }, 22 | { text: 'Release Guide', link: '/release' } 23 | ] 24 | } 25 | ], 26 | 27 | socialLinks: [ 28 | { icon: 'github', link: 'https://github.com/gemini-cli-extensions/workspace' } 29 | ] 30 | } 31 | }) 32 | -------------------------------------------------------------------------------- /workspace-server/src/utils/IdUtils.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { logToFile } from './logger'; 8 | 9 | const DOC_ID_REGEX = /\/d\/([a-zA-Z0-9-_]+)/; 10 | 11 | /** 12 | * Extracts a Google Doc/Sheet/etc. ID from a Google Workspace URL. 13 | * 14 | * @param url The URL to parse. 15 | * @returns The extracted document ID, or undefined if no ID could be found. 
16 | */ 17 | export function extractDocId(url: string): string | undefined { 18 | logToFile(`[IdUtils] Attempting to extract doc ID from URL: ${url}`); 19 | if (!url || typeof url !== 'string') { 20 | logToFile(`[IdUtils] Invalid input: URL is null or not a string.`); 21 | return undefined; 22 | } 23 | const match = url.match(DOC_ID_REGEX); 24 | if (match && match[1]) { 25 | const docId = match[1]; 26 | logToFile(`[IdUtils] Successfully extracted doc ID: ${docId}`); 27 | return docId; 28 | } 29 | logToFile(`[IdUtils] Could not extract doc ID from URL.`); 30 | return undefined; 31 | } 32 | -------------------------------------------------------------------------------- /workspace-server/src/utils/GaxiosConfig.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { GaxiosOptions } from 'gaxios'; 8 | import { logToFile } from './logger'; 9 | 10 | export const gaxiosOptions: GaxiosOptions = { 11 | retryConfig: { 12 | retry: 3, 13 | noResponseRetries: 3, 14 | retryDelay: 1000, 15 | httpMethodsToRetry: ['GET', 'HEAD', 'OPTIONS', 'DELETE', 'PUT'], 16 | statusCodesToRetry: [ 17 | [429, 429], 18 | [500, 599], 19 | ], 20 | onRetryAttempt: (err) => { 21 | const config = err.config as GaxiosOptions; 22 | logToFile(`Retrying request to ${config.url}, attempt #${config.retryConfig?.currentRetryAttempt}`); 23 | logToFile(`Error: ${err.message}`); 24 | } 25 | }, 26 | timeout: 30000, 27 | }; 28 | 29 | // Extended timeout for media upload operations 30 | export const mediaUploadOptions: GaxiosOptions = { 31 | ...gaxiosOptions, 32 | timeout: 60000, // 60 seconds for media uploads 33 | }; 34 | -------------------------------------------------------------------------------- /workspace-server/src/__tests__/utils/paths.test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * 
Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import path from 'node:path'; 8 | import * as fs from 'node:fs'; 9 | import { PROJECT_ROOT } from '../../utils/paths'; 10 | 11 | describe('paths utils', () => { 12 | describe('PROJECT_ROOT', () => { 13 | it('should resolve to the workspace root directory', () => { 14 | // The project root should contain gemini-extension.json 15 | // Since we are searching for gemini-extension.json which is in the root 'workspace', 16 | // not 'workspace-server', the path should NOT end with 'workspace-server'. 17 | const extensionConfigPath = path.join(PROJECT_ROOT, 'gemini-extension.json'); 18 | expect(fs.existsSync(extensionConfigPath)).toBe(true); 19 | 20 | // The root should be the parent of workspace-server in this monorepo setup 21 | // PROJECT_ROOT = .../workspace 22 | // __dirname = .../workspace/workspace-server/src/__tests__/utils 23 | expect(PROJECT_ROOT.endsWith('workspace-server')).toBe(false); 24 | }); 25 | }); 26 | }); 27 | -------------------------------------------------------------------------------- /workspace-server/src/auth/token-storage/types.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | /** 8 | * Interface for OAuth tokens. 9 | */ 10 | export interface OAuthToken { 11 | accessToken?: string; 12 | refreshToken?: string; 13 | expiresAt?: number; 14 | tokenType: string; 15 | scope?: string; 16 | } 17 | 18 | /** 19 | * Interface for stored OAuth credentials. 
20 | */ 21 | export interface OAuthCredentials { 22 | serverName: string; 23 | token: OAuthToken; 24 | clientId?: string; 25 | tokenUrl?: string; 26 | mcpServerUrl?: string; 27 | updatedAt: number; 28 | } 29 | 30 | export enum TokenStorageType { 31 | KEYCHAIN = 'keychain', 32 | ENCRYPTED_FILE = 'encrypted_file', 33 | } 34 | 35 | export interface TokenStorage { 36 | getCredentials(serverName: string): Promise; 37 | setCredentials(credentials: OAuthCredentials): Promise; 38 | deleteCredentials(serverName: string): Promise; 39 | listServers(): Promise; 40 | getAllCredentials(): Promise>; 41 | clearAll(): Promise; 42 | } -------------------------------------------------------------------------------- /workspace-server/esbuild.config.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | const esbuild = require('esbuild'); 8 | const path = require('node:path'); 9 | const fs = require('node:fs'); 10 | 11 | async function build() { 12 | try { 13 | await esbuild.build({ 14 | entryPoints: ['src/index.ts'], 15 | bundle: true, 16 | platform: 'node', 17 | target: 'node16', 18 | outfile: 'dist/index.js', 19 | minify: true, 20 | sourcemap: true, 21 | // Replace 'open' package with our wrapper 22 | alias: { 23 | 'open': path.resolve(__dirname, 'src/utils/open-wrapper.ts') 24 | }, 25 | // External packages that shouldn't be bundled 26 | external: [ 27 | 'jsdom' 28 | ], 29 | // Add a loader for .node files 30 | loader: { 31 | '.node': 'file' 32 | }, 33 | // Make sure CommonJS modules work properly 34 | format: 'cjs', 35 | logLevel: 'info', 36 | }); 37 | 38 | console.log('Build completed successfully!'); 39 | } catch (error) { 40 | console.error('Build failed:', error); 41 | process.exit(1); 42 | } 43 | } 44 | 45 | build(); -------------------------------------------------------------------------------- /GEMINI.md: 
-------------------------------------------------------------------------------- 1 | This is a Gemini extension that provides tools for interacting with Google Workspace services like Google Docs. 2 | 3 | ### Building and Running 4 | 5 | * **Install dependencies:** `npm install` 6 | * **Build the project:** `npm run build --prefix workspace-server` 7 | 8 | ### Development Conventions 9 | 10 | This project uses TypeScript and the Model Context Protocol (MCP) SDK to create a Gemini extension. The main entry point is `src/index.ts`, which initializes the MCP server and registers the available tools. 11 | 12 | The business logic for each service is separated into its own file in the `src/services` directory. For example, `src/services/DocsService.ts` contains the logic for interacting with the Google Docs API. 13 | 14 | Authentication is handled by the `src/auth/AuthManager.ts` file, which uses the `@google-cloud/local-auth` library to obtain and refresh OAuth 2.0 credentials. 15 | 16 | ### Adding New Tools 17 | 18 | To add a new tool, you need to: 19 | 20 | 1. Add a new method to the appropriate service file in `src/services`. 21 | 2. In `src/index.ts`, register the new tool with the MCP server by calling `server.registerTool()`. You will need to provide a name for the tool, a description, and the input schema using the `zod` library. 
22 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*' 7 | 8 | jobs: 9 | release: 10 | strategy: 11 | matrix: 12 | include: 13 | - os: ubuntu-latest 14 | platform: linux 15 | - os: macos-latest 16 | platform: darwin 17 | - os: windows-latest 18 | platform: win32 19 | runs-on: ${{ matrix.os }} 20 | permissions: 21 | contents: write 22 | steps: 23 | - name: Checkout 24 | uses: actions/checkout@v6 25 | with: 26 | fetch-depth: 0 27 | persist-credentials: false 28 | 29 | - name: Setup Node.js 30 | uses: actions/setup-node@v6 31 | with: 32 | node-version: '20' 33 | 34 | - name: Install dependencies 35 | run: npm ci 36 | 37 | - name: Build extension 38 | run: npm run build --workspace=workspace-server 39 | 40 | - name: Create release assets 41 | run: npm run release -- --platform=${{ matrix.platform }} 42 | 43 | - name: Release 44 | uses: softprops/action-gh-release@v2 45 | if: startsWith(github.ref, 'refs/tags/') 46 | with: 47 | files: release/${{ matrix.platform }}.google-workspace-extension.tar.gz -------------------------------------------------------------------------------- /scripts/start.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | const { spawn } = require('node:child_process'); 8 | const path = require('node:path'); 9 | 10 | function runCommand(command, args, options) { 11 | return new Promise((resolve, reject) => { 12 | const child = spawn(command, args, options); 13 | 14 | // Pipe stderr to the parent process's stderr if it's available. 15 | // This is more efficient than listening for 'data' events. 
16 | if (child.stderr) { 17 | child.stderr.pipe(process.stderr); 18 | } 19 | 20 | child.on('close', (code) => { 21 | if (code !== 0) { 22 | reject(new Error(`Command failed with code ${code}: ${command} ${args.join(' ')}`)); 23 | } else { 24 | resolve(); 25 | } 26 | }); 27 | child.on('error', (err) => { 28 | reject(err); 29 | }); 30 | }); 31 | } 32 | 33 | async function main() { 34 | try { 35 | await runCommand('npm', ['install'], { stdio: ['ignore', 'ignore', 'pipe'] }); 36 | 37 | const SERVER_PATH = path.join(__dirname, '..', 'workspace-server', 'dist', 'index.js'); 38 | await runCommand('node', [SERVER_PATH, '--debug'], { stdio: 'inherit' }); 39 | } catch (error) { 40 | console.error(error); 41 | process.exit(1); 42 | } 43 | } 44 | 45 | main(); -------------------------------------------------------------------------------- /workspace-server/src/utils/logger.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import * as fs from 'node:fs/promises'; 8 | import * as path from 'node:path'; 9 | import { PROJECT_ROOT } from './paths'; 10 | 11 | const logFilePath = path.join(PROJECT_ROOT, 'logs', 'server.log'); 12 | 13 | async function ensureLogDirectoryExists() { 14 | try { 15 | await fs.mkdir(path.dirname(logFilePath), { recursive: true }); 16 | } catch (error) { 17 | // If we can't create the log directory, log to console as a fallback. 18 | console.error('Could not create log directory:', error); 19 | } 20 | } 21 | 22 | // Ensure the directory exists when the module is loaded. 
23 | ensureLogDirectoryExists(); 24 | 25 | let isLoggingEnabled = false; 26 | 27 | export function setLoggingEnabled(enabled: boolean) { 28 | isLoggingEnabled = enabled; 29 | } 30 | 31 | export function logToFile(message: string) { 32 | if (!isLoggingEnabled) { 33 | return; 34 | } 35 | const timestamp = new Date().toISOString(); 36 | const logMessage = `${timestamp} - ${message}\n`; 37 | 38 | fs.appendFile(logFilePath, logMessage).catch(err => { 39 | // Fallback to console if file logging fails 40 | console.error('Failed to write to log file:', err); 41 | }); 42 | } 43 | -------------------------------------------------------------------------------- /.github/workflows/deploy-docs.yml: -------------------------------------------------------------------------------- 1 | name: Deploy Docs to GitHub Pages 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | build: 10 | runs-on: ubuntu-latest 11 | permissions: 12 | contents: write # For actions/checkout to fetch code 13 | pages: write # For uploading the artifact 14 | id-token: write # For OIDC authentication 15 | steps: 16 | - name: Checkout 17 | uses: actions/checkout@v4 18 | with: 19 | fetch-depth: 0 # Not strictly needed but good practice for some build tools 20 | 21 | - name: Setup Node 22 | uses: actions/setup-node@v4 23 | with: 24 | node-version: 20 25 | cache: npm # Cache npm dependencies 26 | 27 | - name: Install dependencies 28 | run: npm install 29 | 30 | - name: Build docs 31 | run: npm run docs:build 32 | 33 | - name: Upload artifact 34 | uses: actions/upload-pages-artifact@v3 35 | with: 36 | path: docs/.vitepress/dist # The directory where VitePress builds the docs 37 | 38 | deploy: 39 | environment: 40 | name: github-pages 41 | url: ${{ steps.deployment.outputs.page_url }} 42 | runs-on: ubuntu-latest 43 | needs: build 44 | permissions: 45 | pages: write 46 | id-token: write 47 | steps: 48 | - name: Deploy to GitHub Pages 49 | id: deployment 50 | uses: actions/deploy-pages@v4 51 | 
-------------------------------------------------------------------------------- /workspace-server/.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*' 7 | 8 | jobs: 9 | release: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - uses: actions/checkout@v4 14 | 15 | - name: Use Node.js 16 | uses: actions/setup-node@v4 17 | with: 18 | node-version: '20.x' 19 | cache: 'npm' 20 | cache-dependency-path: workspace-mcp-server/package-lock.json 21 | 22 | - name: Install dependencies 23 | run: npm ci 24 | working-directory: workspace-mcp-server 25 | 26 | - name: Run tests 27 | run: npm test 28 | working-directory: workspace-mcp-server 29 | 30 | - name: Build 31 | run: npm run build 32 | working-directory: workspace-mcp-server 33 | 34 | - name: Create Release 35 | id: create_release 36 | uses: actions/create-release@v1 37 | env: 38 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 39 | with: 40 | tag_name: ${{ github.ref }} 41 | release_name: Release ${{ github.ref }} 42 | draft: false 43 | prerelease: false 44 | 45 | - name: Upload Release Asset 46 | uses: actions/upload-release-asset@v1 47 | env: 48 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 49 | with: 50 | upload_url: ${{ steps.create_release.outputs.upload_url }} 51 | asset_path: ./workspace-mcp-server/dist/index.js 52 | asset_name: workspace-mcp-server.js 53 | asset_content_type: application/javascript -------------------------------------------------------------------------------- /workspace-server/src/auth/token-storage/base-token-storage.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { 8 | OAuthCredentials, 9 | TokenStorage 10 | } from './types'; 11 | 12 | export abstract class BaseTokenStorage implements TokenStorage { 13 | protected readonly 
serviceName: string; 14 | 15 | constructor(serviceName: string) { 16 | this.serviceName = serviceName; 17 | } 18 | 19 | abstract getCredentials(serverName: string): Promise; 20 | abstract setCredentials(credentials: OAuthCredentials): Promise; 21 | abstract deleteCredentials(serverName: string): Promise; 22 | abstract listServers(): Promise; 23 | abstract getAllCredentials(): Promise>; 24 | abstract clearAll(): Promise; 25 | 26 | protected validateCredentials(credentials: OAuthCredentials): void { 27 | if (!credentials.serverName) { 28 | throw new Error('Server name is required'); 29 | } 30 | if (!credentials.token) { 31 | throw new Error('Token is required'); 32 | } 33 | if (!credentials.token.accessToken && !credentials.token.refreshToken) { 34 | throw new Error('Access token or refresh token is required'); 35 | } 36 | if (!credentials.token.tokenType) { 37 | throw new Error('Token type is required'); 38 | } 39 | } 40 | 41 | 42 | 43 | protected sanitizeServerName(serverName: string): string { 44 | return serverName.replace(/[^a-zA-Z0-9-_.]/g, '_'); 45 | } 46 | } -------------------------------------------------------------------------------- /workspace-server/src/utils/open-wrapper.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | /** 8 | * This module acts as a drop-in replacement for the 'open' package. 9 | * It intercepts browser launch requests and either: 10 | * 1. Opens the browser securely using our secure-browser-launcher 11 | * 2. 
Prints the URL to console if browser launch should be skipped or fails 12 | */ 13 | 14 | import { openBrowserSecurely, shouldLaunchBrowser } from './secure-browser-launcher'; 15 | 16 | // Create a mock child process object that matches what open returns 17 | const createMockChildProcess = () => ({ 18 | unref: () => {}, 19 | ref: () => {}, 20 | pid: 123, 21 | stdout: null, 22 | stderr: null, 23 | stdin: null, 24 | channel: null, 25 | connected: false, 26 | exitCode: 0, 27 | killed: false, 28 | signalCode: null, 29 | spawnargs: [], 30 | spawnfile: '', 31 | }); 32 | 33 | const openWrapper = async (url: string): Promise => { 34 | // Check if we should launch the browser 35 | if (!shouldLaunchBrowser()) { 36 | console.log(`Browser launch not supported. Please open this URL in your browser: ${url}`); 37 | return createMockChildProcess(); 38 | } 39 | 40 | // Try to open the browser securely 41 | try { 42 | await openBrowserSecurely(url); 43 | return createMockChildProcess(); 44 | } catch { 45 | console.log(`Failed to open browser. Please open this URL in your browser: ${url}`); 46 | return createMockChildProcess(); 47 | } 48 | }; 49 | 50 | // Use standard ES Module export and let the compiler generate the CommonJS correct output. 
51 | export default openWrapper; -------------------------------------------------------------------------------- /scripts/set-version.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | const fs = require('node:fs'); 8 | const path = require('node:path'); 9 | 10 | const rootDir = path.join(__dirname, '..'); 11 | const packageJsonPath = path.join(rootDir, 'package.json'); 12 | const workspaceServerPackageJsonPath = path.join(rootDir, 'workspace-server', 'package.json'); 13 | const workspaceServerIndexPath = path.join(rootDir, 'workspace-server', 'src', 'index.ts'); 14 | 15 | const updateJsonFile = (filePath, version) => { 16 | try { 17 | const content = JSON.parse(fs.readFileSync(filePath, 'utf8')); 18 | content.version = version; 19 | fs.writeFileSync(filePath, JSON.stringify(content, null, 2) + '\n'); 20 | console.log(`Updated ${path.relative(rootDir, filePath)} to version ${version}`); 21 | } catch (error) { 22 | console.error(`Failed to update JSON file at ${path.relative(rootDir, filePath)}:`, error); 23 | process.exit(1); 24 | } 25 | }; 26 | 27 | 28 | const main = () => { 29 | let version = process.argv[2]; 30 | 31 | if (version) { 32 | // If version is provided as arg, update root package.json first 33 | updateJsonFile(packageJsonPath, version); 34 | } else { 35 | // Otherwise read from root package.json 36 | const packageJson = require(packageJsonPath); 37 | version = packageJson.version; 38 | console.log(`Using version from package.json: ${version}`); 39 | } 40 | 41 | if (!version) { 42 | console.error('No version specified and no version found in package.json'); 43 | process.exit(1); 44 | } 45 | 46 | updateJsonFile(workspaceServerPackageJsonPath, version); 47 | 48 | }; 49 | 50 | main(); 51 | -------------------------------------------------------------------------------- /scripts/utils/dependencies.js: 
-------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | const fs = require('node:fs'); 8 | const path = require('node:path'); 9 | 10 | /** 11 | * Gets the direct dependencies of a package from its package.json. 12 | * @param {string} rootDir - The root directory containing node_modules. 13 | * @param {string} pkgName - The name of the package. 14 | * @returns {string[]} - A list of dependency names. 15 | */ 16 | function getDependencies(rootDir, pkgName) { 17 | const pkgPath = path.join(rootDir, 'node_modules', pkgName, 'package.json'); 18 | if (!fs.existsSync(pkgPath)) { 19 | return []; 20 | } 21 | const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8')); 22 | return Object.keys(pkg.dependencies || {}); 23 | } 24 | 25 | /** 26 | * Recursively finds all transitive dependencies for a list of packages. 27 | * @param {string} rootDir - The root directory containing node_modules. 28 | * @param {string[]} startPkgs - The list of initial packages to resolve. 29 | * @returns {Set} - A set of all transitive dependencies (including startPkgs). 
30 | */ 31 | function getTransitiveDependencies(rootDir, startPkgs) { 32 | const visited = new Set(); 33 | const toVisit = [...startPkgs]; 34 | 35 | while (toVisit.length > 0) { 36 | const pkg = toVisit.pop(); 37 | if (visited.has(pkg)) continue; 38 | visited.add(pkg); 39 | 40 | const deps = getDependencies(rootDir, pkg); 41 | deps.forEach(dep => { 42 | if (!visited.has(dep)) { 43 | toVisit.push(dep); 44 | } 45 | }); 46 | } 47 | 48 | return visited; 49 | } 50 | 51 | module.exports = { 52 | getDependencies, 53 | getTransitiveDependencies 54 | }; 55 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('jest').Config} */ 2 | module.exports = { 3 | preset: 'ts-jest', 4 | testEnvironment: 'node', 5 | projects: [ 6 | { 7 | displayName: 'workspace-server', 8 | testMatch: ['/workspace-server/src/**/*.test.ts', '/workspace-server/src/**/*.spec.ts'], 9 | transform: { 10 | '^.+\\.ts$': ['ts-jest', { 11 | tsconfig: { 12 | strict: false 13 | } 14 | }], 15 | }, 16 | transformIgnorePatterns: [ 17 | 'node_modules/(?!(marked)/)', 18 | ], 19 | moduleNameMapper: { 20 | '^@/(.*)$': '/workspace-server/src/$1', 21 | '\\.wasm$': '/workspace-server/src/__tests__/mocks/wasm.js', 22 | '^marked$': '/workspace-server/src/__tests__/mocks/marked.js', 23 | '^jsdom$': '/workspace-server/src/__tests__/mocks/jsdom.ts', 24 | }, 25 | roots: ['/workspace-server/src'], 26 | setupFilesAfterEnv: ['/workspace-server/src/__tests__/setup.ts'], 27 | collectCoverageFrom: [ 28 | '/workspace-server/src/**/*.ts', 29 | '!/workspace-server/src/**/*.d.ts', 30 | '!/workspace-server/src/**/*.test.ts', 31 | '!/workspace-server/src/**/*.spec.ts', 32 | '!/workspace-server/src/index.ts', 33 | ], 34 | coverageDirectory: '/coverage', 35 | coverageThreshold: { 36 | global: { 37 | branches: 45, 38 | functions: 65, 39 | lines: 60, 40 | statements: 60, 41 | }, 42 | }, 43 | } 44 | 
], 45 | coverageReporters: ['text', 'lcov', 'html'], 46 | testTimeout: 10000, 47 | verbose: true, 48 | }; -------------------------------------------------------------------------------- /commands/calendar/clear-schedule.toml: -------------------------------------------------------------------------------- 1 | description = "Clear all events for a specific date or range by deleting or declining them" 2 | prompt = """ 3 | Please help me clear my schedule for a specific date or date range. Follow these steps carefully: 4 | 5 | 1. **Identify User & Context:** 6 | - Call `people.getMe` to get my email address and details. 7 | - Call `time.getTimeZone` to get my local time zone. 8 | 9 | 2. **Determine Date Range:** 10 | - Parse the date or date range from the arguments: "{{args}}". 11 | - If no arguments are provided, use `time.getCurrentDate` to default to today. 12 | 13 | 3. **Fetch Events:** 14 | - Call `calendar.listEvents` for the determined date range using my primary calendar. 15 | - **Important:** Include ALL events in the list, even those often filtered out like "Commute Time", "DNS", personal blocks, or events where I am the only attendee. 16 | 17 | 4. **Review & Confirm (CRITICAL):** 18 | - List all the events found for that period in a clear, numbered list showing the Time and Title. 19 | - **Specifically call out any all-day events** and ask if I want to include those in the clear operation. 20 | - **STOP** and ask me for explicit confirmation before proceeding. Example: "I found 5 events (including 1 all-day event). Do you want me to clear all of them?" 21 | 22 | 5. **Execute Clearing:** 23 | - Once I confirm, iterate through each event in the list: 24 | - **If I am the organizer** (check `organizer.self` is true or `organizer.email` matches mine): 25 | - Call `calendar.deleteEvent` to remove it. 26 | - **If I am NOT the organizer**: 27 | - Call `calendar.respondToEvent` with `responseStatus` set to "declined". 28 | 29 | 6. 
**Final Report:** 30 | - Summarize the actions taken (e.g., "Deleted 2 events and declined 3 events."). 31 | """ 32 | -------------------------------------------------------------------------------- /.github/workflows/weekly-preview.yml: -------------------------------------------------------------------------------- 1 | name: Weekly Preview Release 2 | 3 | on: 4 | schedule: 5 | # Every Monday at 09:00 UTC 6 | - cron: '0 9 * * 1' 7 | workflow_dispatch: 8 | 9 | jobs: 10 | prepare: 11 | runs-on: ubuntu-latest 12 | outputs: 13 | tag_name: ${{ steps.date.outputs.tag_name }} 14 | steps: 15 | - name: Get current date 16 | id: date 17 | run: echo "tag_name=preview-$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT 18 | 19 | release: 20 | needs: prepare 21 | name: Build and Release 22 | strategy: 23 | matrix: 24 | include: 25 | - os: ubuntu-latest 26 | platform: linux 27 | - os: macos-latest 28 | platform: darwin 29 | - os: windows-latest 30 | platform: win32 31 | runs-on: ${{ matrix.os }} 32 | permissions: 33 | contents: write 34 | steps: 35 | - name: Checkout 36 | uses: actions/checkout@v6 37 | with: 38 | fetch-depth: 0 39 | persist-credentials: false 40 | 41 | - name: Setup Node.js 42 | uses: actions/setup-node@v6 43 | with: 44 | node-version: '20' 45 | 46 | - name: Install dependencies 47 | run: npm ci 48 | 49 | - name: Build extension 50 | run: npm run build --workspace=workspace-server 51 | 52 | - name: Create release assets 53 | shell: bash 54 | env: 55 | GITHUB_REF_NAME: ${{ needs.prepare.outputs.tag_name }} 56 | run: npm run release -- --platform=${{ matrix.platform }} 57 | 58 | - name: Upload Release Assets 59 | uses: softprops/action-gh-release@v2 60 | with: 61 | tag_name: ${{ needs.prepare.outputs.tag_name }} 62 | name: Weekly Preview ${{ needs.prepare.outputs.tag_name }} 63 | prerelease: true 64 | files: release/${{ matrix.platform }}.google-workspace-extension.tar.gz 65 | -------------------------------------------------------------------------------- /docs/release.md: 
-------------------------------------------------------------------------------- 1 | # Release Process 2 | 3 | This project uses GitHub Actions to automate the release process. 4 | 5 | ## Prerequisites 6 | 7 | - [GitHub CLI](https://cli.github.com/) (`gh`) installed and authenticated. 8 | - Write permissions to the repository. 9 | 10 | ## Creating a Release 11 | 12 | To streamline the release process: 13 | 14 | 1. **Update Version**: Run the `set-version` script to update the version in `package.json` files. The `workspace-server` will now dynamically read its version from its `package.json`. 15 | ```bash 16 | npm run set-version #0.0.x for example 17 | ``` 18 | 19 | 2. **Commit Changes**: Commit the version bump and push the changes to `main` (either directly or via a PR). 20 | ```bash 21 | git commit -am "chore: bump version to " 22 | git push origin main 23 | ``` 24 | 25 | 3. **Create Release**: Use the `gh release create` command. This will trigger the GitHub Actions workflow to build the extension and attach the artifacts to the release. 26 | 27 | ```bash 28 | # Syntax: gh release create --generate-notes 29 | gh release create v --generate-notes 30 | ``` 31 | 32 | ### What happens next? 33 | 34 | 1. **GitHub Actions Trigger**: The `release.yml` workflow is triggered by the new tag. 35 | 2. **Build**: The workflow builds the project using `npm run build`. 36 | 3. **Package**: It creates a `workspace-server.tar.gz` file containing the extension. 37 | 4. **Upload**: The workflow uploads the tarball to the release you just created. 38 | 39 | ## Manual Release (Alternative) 40 | 41 | If you prefer not to use the CLI, you can also push a tag manually: 42 | 43 | ```bash 44 | git tag v1.0.0 45 | git push origin v1.0.0 46 | ``` 47 | 48 | This pushes the tag to GitHub, which triggers the release workflow to create a release and upload the artifacts. However, using `gh release create` is recommended as it allows you to easily generate release notes. 
49 | -------------------------------------------------------------------------------- /workspace-server/src/__tests__/mocks/marked.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | const markedMock = jest.fn((text) => { 8 | // Simple mock implementation that returns HTML 9 | return `

${text}

`; 10 | }); 11 | 12 | // Add parse method to the marked function 13 | markedMock.parse = jest.fn((text) => { 14 | // Return a promise that resolves to HTML 15 | return Promise.resolve(`

${text}

`); 16 | }); 17 | 18 | markedMock.parseInline = jest.fn((text) => { 19 | // Simple markdown to HTML conversion for testing 20 | return text 21 | .replace(/\*\*(.*?)\*\*/g, '$1') // **bold** -> bold 22 | .replace(/\*(.*?)\*/g, '$1') // *italic* -> italic 23 | .replace(/_(.*?)_/g, '$1') // _italic_ -> italic 24 | .replace(/`(.*?)`/g, '$1'); // `code` -> code 25 | }); 26 | markedMock.use = jest.fn(); 27 | markedMock.setOptions = jest.fn(); 28 | markedMock.getDefaults = jest.fn(); 29 | markedMock.defaults = {}; 30 | markedMock.Renderer = jest.fn(); 31 | markedMock.TextRenderer = jest.fn(); 32 | markedMock.Lexer = jest.fn(); 33 | markedMock.Parser = jest.fn(); 34 | markedMock.Tokenizer = jest.fn(); 35 | markedMock.Slugger = jest.fn(); 36 | markedMock.lexer = jest.fn(); 37 | markedMock.parser = jest.fn(); 38 | 39 | module.exports = { 40 | marked: markedMock, 41 | Marked: jest.fn(), 42 | lexer: markedMock.lexer, 43 | parser: markedMock.parser, 44 | Renderer: markedMock.Renderer, 45 | TextRenderer: markedMock.TextRenderer, 46 | Lexer: markedMock.Lexer, 47 | Parser: markedMock.Parser, 48 | Tokenizer: markedMock.Tokenizer, 49 | Slugger: markedMock.Slugger, 50 | parse: markedMock.parse, 51 | parseInline: markedMock.parseInline, 52 | use: markedMock.use, 53 | setOptions: markedMock.setOptions, 54 | getDefaults: markedMock.getDefaults, 55 | defaults: markedMock.defaults, 56 | }; -------------------------------------------------------------------------------- /workspace-server/src/__tests__/services/TimeService.test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { TimeService } from '../../services/TimeService'; 8 | 9 | describe('TimeService', () => { 10 | let timeService: TimeService; 11 | const mockDate = new Date('2025-08-19T12:34:56Z'); 12 | 13 | beforeEach(() => { 14 | timeService = new TimeService(); 15 | 
jest.useFakeTimers(); 16 | jest.setSystemTime(mockDate); 17 | }); 18 | 19 | afterEach(() => { 20 | jest.useRealTimers(); 21 | }); 22 | 23 | describe('getCurrentDate', () => { 24 | it('should return the current date with utc, local, and timeZone fields', async () => { 25 | const result = await timeService.getCurrentDate(); 26 | const parsed = JSON.parse(result.content[0].text); 27 | const expectedTimeZone = Intl.DateTimeFormat().resolvedOptions().timeZone; 28 | 29 | expect(parsed.utc).toEqual('2025-08-19'); 30 | expect(parsed.local).toMatch(/^\d{4}-\d{2}-\d{2}$/); 31 | expect(parsed.timeZone).toEqual(expectedTimeZone); 32 | }); 33 | }); 34 | 35 | describe('getCurrentTime', () => { 36 | it('should return the current time with utc, local, and timeZone fields', async () => { 37 | const result = await timeService.getCurrentTime(); 38 | const parsed = JSON.parse(result.content[0].text); 39 | const expectedTimeZone = Intl.DateTimeFormat().resolvedOptions().timeZone; 40 | 41 | expect(parsed.utc).toEqual('12:34:56'); 42 | expect(parsed.local).toMatch(/^\d{2}:\d{2}:\d{2}$/); 43 | expect(parsed.timeZone).toEqual(expectedTimeZone); 44 | }); 45 | }); 46 | 47 | describe('getTimeZone', () => { 48 | it('should return the local timezone', async () => { 49 | const result = await timeService.getTimeZone(); 50 | const expectedTimeZone = Intl.DateTimeFormat().resolvedOptions().timeZone; 51 | expect(result.content[0].text).toEqual(JSON.stringify({ timeZone: expectedTimeZone })); 52 | }); 53 | }); 54 | }); 55 | -------------------------------------------------------------------------------- /workspace-server/src/services/TimeService.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { logToFile } from '../utils/logger'; 8 | 9 | export class TimeService { 10 | constructor() { 11 | logToFile('TimeService initialized.'); 12 | } 13 | 14 | 
private async handleErrors(fn: () => Promise): Promise<{ content: [{ type: "text"; text: string; }] }> { 15 | try { 16 | const result = await fn(); 17 | return { 18 | content: [{ 19 | type: "text" as const, 20 | text: JSON.stringify(result) 21 | }] 22 | }; 23 | } catch (error) { 24 | const errorMessage = error instanceof Error ? error.message : String(error); 25 | logToFile(`Error in TimeService: ${errorMessage}`); 26 | return { 27 | content: [{ 28 | type: "text" as const, 29 | text: JSON.stringify({ error: errorMessage }) 30 | }] 31 | }; 32 | } 33 | } 34 | 35 | private getTimeContext() { 36 | return { 37 | now: new Date(), 38 | timeZone: Intl.DateTimeFormat().resolvedOptions().timeZone 39 | }; 40 | } 41 | 42 | getCurrentDate = async () => { 43 | logToFile('getCurrentDate called'); 44 | return this.handleErrors(async () => { 45 | const { now, timeZone } = this.getTimeContext(); 46 | return { 47 | utc: now.toISOString().slice(0, 10), 48 | local: now.toLocaleDateString('en-CA', { timeZone }), // YYYY-MM-DD format 49 | timeZone 50 | }; 51 | }); 52 | } 53 | 54 | getCurrentTime = async () => { 55 | logToFile('getCurrentTime called'); 56 | return this.handleErrors(async () => { 57 | const { now, timeZone } = this.getTimeContext(); 58 | return { 59 | utc: now.toISOString().slice(11, 19), 60 | local: now.toLocaleTimeString('en-GB', { hour12: false, timeZone }), // HH:MM:SS format 61 | timeZone 62 | }; 63 | }); 64 | } 65 | 66 | getTimeZone = async () => { 67 | logToFile('getTimeZone called'); 68 | return this.handleErrors(async () => { 69 | return { timeZone: Intl.DateTimeFormat().resolvedOptions().timeZone }; 70 | }); 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /workspace-server/src/utils/DriveQueryBuilder.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | /** 8 | * Utility 
for building Google Drive API search queries 9 | */ 10 | 11 | /** 12 | * Builds a Drive API search query for a specific MIME type with optional title filtering 13 | * @param mimeType The MIME type to search for (e.g., 'application/vnd.google-apps.document') 14 | * @param query The search query, may include 'title:' prefix for title-only searches 15 | * @returns The formatted Drive API query string 16 | */ 17 | export function buildDriveSearchQuery(mimeType: string, query: string): string { 18 | let searchTerm = query; 19 | const titlePrefix = 'title:'; 20 | let q: string; 21 | 22 | if (searchTerm.trim().startsWith(titlePrefix)) { 23 | // Extract search term after 'title:' prefix 24 | searchTerm = searchTerm.trim().substring(titlePrefix.length).trim(); 25 | 26 | // Remove surrounding quotes if present 27 | if ((searchTerm.startsWith("'") && searchTerm.endsWith("'")) || 28 | (searchTerm.startsWith('"') && searchTerm.endsWith('"'))) { 29 | searchTerm = searchTerm.substring(1, searchTerm.length - 1); 30 | } 31 | 32 | // Search by name (title) only 33 | q = `mimeType='${mimeType}' and name contains '${escapeQueryString(searchTerm)}'`; 34 | } else { 35 | // Search full text content 36 | q = `mimeType='${mimeType}' and fullText contains '${escapeQueryString(searchTerm)}'`; 37 | } 38 | 39 | return q; 40 | } 41 | 42 | /** 43 | * Escapes special characters in a query string for Drive API 44 | * @param str The string to escape 45 | * @returns The escaped string 46 | */ 47 | export function escapeQueryString(str: string): string { 48 | return str.replace(/\\/g, '\\\\').replace(/'/g, "\\'"); 49 | } 50 | 51 | // Export MIME type constants for convenience 52 | export const MIME_TYPES = { 53 | DOCUMENT: 'application/vnd.google-apps.document', 54 | PRESENTATION: 'application/vnd.google-apps.presentation', 55 | SPREADSHEET: 'application/vnd.google-apps.spreadsheet', 56 | FOLDER: 'application/vnd.google-apps.folder', 57 | } as const; 58 | 
-------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [ main, develop ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | test: 11 | runs-on: ${{ matrix.os }} 12 | 13 | strategy: 14 | matrix: 15 | node-version: [20.x, 22.x, 24.x] 16 | os: [ubuntu-latest, windows-latest, macos-latest] 17 | 18 | steps: 19 | - uses: actions/checkout@v6 20 | 21 | - name: Use Node.js ${{ matrix.node-version }} 22 | uses: actions/setup-node@v6 23 | with: 24 | node-version: ${{ matrix.node-version }} 25 | cache: 'npm' 26 | cache-dependency-path: package-lock.json 27 | 28 | - name: Install libsecret (Linux) 29 | if: runner.os == 'Linux' 30 | run: sudo apt-get update && sudo apt-get install -y libsecret-1-0 31 | 32 | - name: Install dependencies 33 | run: npm ci 34 | 35 | - name: Run linter 36 | run: npm run lint 37 | 38 | - name: Run type checking 39 | run: npx tsc --noEmit --project workspace-server 40 | 41 | - name: Run tests with coverage 42 | run: npm run test:ci 43 | 44 | - name: Upload coverage to Codecov 45 | uses: codecov/codecov-action@v5 46 | with: 47 | directory: ./workspace-server/coverage 48 | flags: unittests 49 | name: codecov-umbrella 50 | fail_ci_if_error: false 51 | 52 | build: 53 | runs-on: ubuntu-latest 54 | needs: test 55 | 56 | steps: 57 | - uses: actions/checkout@v6 58 | 59 | - name: Use Node.js 60 | uses: actions/setup-node@v6 61 | with: 62 | node-version: '20.x' 63 | cache: 'npm' 64 | cache-dependency-path: package-lock.json 65 | 66 | - name: Install dependencies 67 | run: npm ci 68 | 69 | - name: Build 70 | run: npm run build 71 | 72 | - name: Upload build artifacts 73 | uses: actions/upload-artifact@v6 74 | with: 75 | name: dist 76 | path: workspace-server/dist/ 77 | 78 | security: 79 | runs-on: ubuntu-latest 80 | 81 | steps: 82 | - uses: actions/checkout@v6 83 | 84 | - 
name: Run security audit 85 | run: npm audit --audit-level=moderate 86 | continue-on-error: true 87 | 88 | - name: Check for known vulnerabilities 89 | run: npx audit-ci --moderate 90 | continue-on-error: true -------------------------------------------------------------------------------- /workspace-server/src/auth/token-storage/oauth-credential-storage.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { type Credentials } from 'google-auth-library'; 8 | import { HybridTokenStorage } from './hybrid-token-storage'; 9 | import type { OAuthCredentials } from './types'; 10 | 11 | const KEYCHAIN_SERVICE_NAME = 'gemini-cli-workspace-oauth'; 12 | const MAIN_ACCOUNT_KEY = 'main-account'; 13 | 14 | export class OAuthCredentialStorage { 15 | private static storage: HybridTokenStorage = new HybridTokenStorage( 16 | KEYCHAIN_SERVICE_NAME, 17 | ); 18 | 19 | /** 20 | * Load cached OAuth credentials 21 | */ 22 | static async loadCredentials(): Promise { 23 | try { 24 | const credentials = await this.storage.getCredentials(MAIN_ACCOUNT_KEY); 25 | 26 | if (credentials?.token) { 27 | const { accessToken, refreshToken, expiresAt, tokenType, scope } = 28 | credentials.token; 29 | // Convert from OAuthCredentials format to Google Credentials format 30 | const googleCreds: Credentials = { 31 | access_token: accessToken, 32 | refresh_token: refreshToken || undefined, 33 | token_type: tokenType || undefined, 34 | scope: scope || undefined, 35 | }; 36 | 37 | if (expiresAt) { 38 | googleCreds.expiry_date = expiresAt; 39 | } 40 | 41 | return googleCreds; 42 | } 43 | 44 | return null; 45 | } catch (error: unknown) { 46 | throw error; 47 | } 48 | } 49 | 50 | /** 51 | * Save OAuth credentials 52 | */ 53 | static async saveCredentials(credentials: Credentials): Promise { 54 | // Convert Google Credentials to OAuthCredentials format 55 | const 
mcpCredentials: OAuthCredentials = { 56 | serverName: MAIN_ACCOUNT_KEY, 57 | token: { 58 | accessToken: credentials.access_token || undefined, 59 | refreshToken: credentials.refresh_token || undefined, 60 | tokenType: credentials.token_type || 'Bearer', 61 | scope: credentials.scope || undefined, 62 | expiresAt: credentials.expiry_date || undefined, 63 | }, 64 | updatedAt: Date.now(), 65 | }; 66 | 67 | await this.storage.setCredentials(mcpCredentials); 68 | } 69 | 70 | /** 71 | * Clear cached OAuth credentials 72 | */ 73 | static async clearCredentials(): Promise { 74 | try { 75 | await this.storage.deleteCredentials(MAIN_ACCOUNT_KEY); 76 | } catch (error: unknown) { 77 | throw error; 78 | } 79 | } 80 | } -------------------------------------------------------------------------------- /workspace-server/.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [ main, develop ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | test: 11 | runs-on: ubuntu-latest 12 | 13 | strategy: 14 | matrix: 15 | node-version: [18.x, 20.x] 16 | 17 | steps: 18 | - uses: actions/checkout@v4 19 | 20 | - name: Use Node.js ${{ matrix.node-version }} 21 | uses: actions/setup-node@v4 22 | with: 23 | node-version: ${{ matrix.node-version }} 24 | cache: 'npm' 25 | cache-dependency-path: workspace-mcp-server/package-lock.json 26 | 27 | - name: Install dependencies 28 | run: npm ci 29 | working-directory: workspace-mcp-server 30 | 31 | - name: Run linter 32 | run: npm run lint --if-present 33 | working-directory: workspace-mcp-server 34 | 35 | - name: Run type checking 36 | run: npx tsc --noEmit 37 | working-directory: workspace-mcp-server 38 | 39 | - name: Run tests 40 | run: npm test 41 | working-directory: workspace-mcp-server 42 | 43 | - name: Generate coverage report 44 | run: npm run test:coverage 45 | working-directory: workspace-mcp-server 46 | 47 | - name: Upload coverage to 
Codecov 48 | uses: codecov/codecov-action@v4 49 | with: 50 | directory: ./workspace-mcp-server/coverage 51 | flags: unittests 52 | name: codecov-umbrella 53 | fail_ci_if_error: false 54 | 55 | build: 56 | runs-on: ubuntu-latest 57 | needs: test 58 | 59 | steps: 60 | - uses: actions/checkout@v4 61 | 62 | - name: Use Node.js 63 | uses: actions/setup-node@v4 64 | with: 65 | node-version: '20.x' 66 | cache: 'npm' 67 | cache-dependency-path: workspace-mcp-server/package-lock.json 68 | 69 | - name: Install dependencies 70 | run: npm ci 71 | working-directory: workspace-mcp-server 72 | 73 | - name: Build 74 | run: npm run build 75 | working-directory: workspace-mcp-server 76 | 77 | - name: Upload build artifacts 78 | uses: actions/upload-artifact@v4 79 | with: 80 | name: dist 81 | path: workspace-mcp-server/dist/ 82 | 83 | security: 84 | runs-on: ubuntu-latest 85 | 86 | steps: 87 | - uses: actions/checkout@v4 88 | 89 | - name: Run security audit 90 | run: npm audit --audit-level=moderate 91 | working-directory: workspace-mcp-server 92 | continue-on-error: true 93 | 94 | - name: Check for known vulnerabilities 95 | run: npx audit-ci --moderate 96 | working-directory: workspace-mcp-server 97 | continue-on-error: true -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gemini-workspace-extension", 3 | "version": "0.0.3", 4 | "description": "Google Workspace Server Extension", 5 | "private": true, 6 | "bin": { 7 | "gemini-workspace-server": "workspace-server/dist/index.js" 8 | }, 9 | "workspaces": [ 10 | "workspace-server" 11 | ], 12 | "scripts": { 13 | "prepare": "npm run build", 14 | "build": "npm run build --workspaces --if-present", 15 | "test": "npm run test --workspaces --if-present", 16 | "test:watch": "npm run test:watch --workspaces --if-present", 17 | "test:coverage": "npm run test:coverage --workspaces --if-present", 18 | 
"test:ci": "npm run test:ci --workspaces --if-present", 19 | "start": "npm run start --workspaces --if-present", 20 | "auth-utils": "npm run build:auth-utils -w workspace-server && node scripts/auth-utils.js", 21 | "clean": "npm run clean --workspaces --if-present && rm -rf release node_modules logs docs/.vitepress/cache docs/.vitepress/dist", 22 | "lint": "eslint .", 23 | "lint:fix": "eslint . --fix", 24 | "release": "node scripts/release.js", 25 | "release:dev": "npm install && npm run build && node scripts/release.js", 26 | "set-version": "node scripts/set-version.js", 27 | "version": "node scripts/set-version.js && git add workspace-server/package.json", 28 | "docs:dev": "vitepress dev docs", 29 | "docs:build": "vitepress build docs", 30 | "docs:preview": "vitepress preview docs" 31 | }, 32 | "dependencies": { 33 | "@google-apps/chat": "^0.21.0", 34 | "@google-cloud/local-auth": "^3.0.1", 35 | "@googleapis/docs": "^9.0.0", 36 | "@googleapis/drive": "^19.2.1", 37 | "@modelcontextprotocol/sdk": "^1.24.3", 38 | "dompurify": "^3.3.1", 39 | "google-auth-library": "^10.5.0", 40 | "googleapis": "^168.0.0", 41 | "jsdom": "^27.3.0", 42 | "keytar": "^7.9.0", 43 | "marked": "^17.0.1" 44 | }, 45 | "devDependencies": { 46 | "@jest/globals": "^30.0.5", 47 | "@types/dompurify": "^3.2.0", 48 | "@types/jest": "^30.0.0", 49 | "@types/jsdom": "^27.0.0", 50 | "@types/node": "^25.0.2", 51 | "@typescript-eslint/eslint-plugin": "^8.49.0", 52 | "@typescript-eslint/parser": "^8.49.0", 53 | "@vercel/ncc": "^0.38.3", 54 | "archiver": "^7.0.1", 55 | "esbuild": "^0.27.1", 56 | "eslint": "^9.39.2", 57 | "eslint-plugin-import": "^2.32.0", 58 | "eslint-plugin-license-header": "^0.8.0", 59 | "jest": "^30.1.3", 60 | "minimist": "^1.2.8", 61 | "ts-jest": "^29.4.6", 62 | "ts-node": "^10.9.2", 63 | "typescript": "^5.9.2", 64 | "vitepress": "^1.6.4", 65 | "vue": "^3.5.25" 66 | }, 67 | "repository": { 68 | "type": "git", 69 | "url": "git+https://github.com/gemini-cli-extensions/workspace.git" 70 | 
}, 71 | "keywords": [ 72 | "google-workspace", 73 | "gmail", 74 | "google-docs", 75 | "google-drive", 76 | "google-calendar", 77 | "google-chat", 78 | "google-people", 79 | "google-sheets", 80 | "google-slides", 81 | "gemini-cli" 82 | ], 83 | "author": "Allen Hutchison", 84 | "license": "Apache-2.0" 85 | } 86 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Google Workspace Extension for Gemini CLI 2 | 3 | [![Build Status](https://github.com/gemini-cli-extensions/workspace/actions/workflows/ci.yml/badge.svg)](https://github.com/gemini-cli-extensions/workspace/actions/workflows/ci.yml) 4 | 5 | The Google Workspace extension for Gemini CLI brings the power of your Google Workspace apps to your command line. Manage your documents, spreadsheets, presentations, emails, chat, and calendar events without leaving your terminal. 6 | 7 | ## Prerequisites 8 | 9 | Before using the Google Workspace extension, you need to be logged into your Google account. 10 | 11 | ## Installation 12 | 13 | Install the Google Workspace extension by running the following command from your terminal: 14 | 15 | ```bash 16 | gemini extensions install https://github.com/gemini-cli-extensions/workspace 17 | ``` 18 | 19 | ## Usage 20 | 21 | Once the extension is installed, you can use it to interact with your Google Workspace apps. Here are a few examples: 22 | 23 | **Create a new Google Doc:** 24 | 25 | > "Create a new Google Doc with the title 'My New Doc' and the content '# My New Document\n\nThis is a new document created from the command line.'" 26 | 27 | **List your upcoming calendar events:** 28 | 29 | > "What's on my calendar for today?" 30 | 31 | **Search for a file in Google Drive:** 32 | 33 | > "Find the file named 'my-file.txt' in my Google Drive." 34 | 35 | ## Commands 36 | 37 | This extension provides a variety of commands. 
Here are a few examples: 38 | 39 | ### Get Schedule 40 | 41 | **Command:** `/calendar:get-schedule [date]` 42 | 43 | Shows your schedule for today or a specified date. 44 | 45 | ### Search Drive 46 | 47 | **Command:** `/drive:search ` 48 | 49 | Searches your Google Drive for files matching the given query. 50 | 51 | ## Resources 52 | 53 | - [Documentation](docs/index.md): Detailed documentation on all the available tools. 54 | - [GitHub Issues](https://github.com/gemini-cli-extensions/workspace/issues): Report bugs or request features. 55 | 56 | ## Important security consideration: Indirect Prompt Injection Risk 57 | 58 | When exposing any language model to untrusted data, there's a risk of an [indirect prompt injection attack](https://en.wikipedia.org/wiki/Prompt_injection). Agentic tools like Gemini CLI, connected to MCP servers, have access to a wide array of tools and APIs. 59 | 60 | This MCP server grants the agent the ability to read, modify, and delete your Google Account data, as well as other data shared with you. 61 | 62 | * Never use this with untrusted tools 63 | * Never include untrusted inputs into the model context. This includes asking Gemini CLI to process mail, documents, or other resources from unverified sources. 64 | * Untrusted inputs may contain hidden instructions that could hijack your CLI session. Attackers can then leverage this to modify, steal, or destroy your data. 65 | * Always carefully review actions taken by Gemini CLI on your behalf to ensure they are correct and align with your intentions. 66 | 67 | ## Contributing 68 | 69 | Contributions are welcome! Please read the [CONTRIBUTING.md](CONTRIBUTING.md) file for details on how to contribute to this project. 
70 | 71 | ## 📄 Legal 72 | 73 | - **License**: [Apache License 2.0](LICENSE) 74 | - **Terms of Service**: [Terms of Service](https://policies.google.com/terms) 75 | - **Privacy Policy**: [Privacy Policy](https://policies.google.com/privacy) 76 | - **Security**: [Security Policy](SECURITY.md) 77 | 78 | -------------------------------------------------------------------------------- /workspace-server/src/auth/token-storage/hybrid-token-storage.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { BaseTokenStorage } from './base-token-storage'; 8 | import { FileTokenStorage } from './file-token-storage'; 9 | import type { TokenStorage, OAuthCredentials } from './types'; 10 | import { TokenStorageType } from './types'; 11 | 12 | const FORCE_FILE_STORAGE_ENV_VAR = 'GEMINI_CLI_WORKSPACE_FORCE_FILE_STORAGE'; 13 | 14 | export class HybridTokenStorage extends BaseTokenStorage { 15 | private storage: TokenStorage | null = null; 16 | private storageType: TokenStorageType | null = null; 17 | private storageInitPromise: Promise | null = null; 18 | 19 | constructor(serviceName: string) { 20 | super(serviceName); 21 | } 22 | 23 | private async initializeStorage(): Promise { 24 | const forceFileStorage = process.env[FORCE_FILE_STORAGE_ENV_VAR] === 'true'; 25 | 26 | if (!forceFileStorage) { 27 | try { 28 | const { KeychainTokenStorage } = await import( 29 | './keychain-token-storage' 30 | ); 31 | const keychainStorage = new KeychainTokenStorage(this.serviceName); 32 | 33 | const isAvailable = await keychainStorage.isAvailable(); 34 | if (isAvailable) { 35 | this.storage = keychainStorage; 36 | this.storageType = TokenStorageType.KEYCHAIN; 37 | return this.storage; 38 | } 39 | } catch (e) { 40 | // Fallback to file storage if keychain fails to initialize. 
41 | console.warn('Keychain initialization failed, falling back to file storage:', e); 42 | } 43 | } 44 | 45 | this.storage = await FileTokenStorage.create(this.serviceName); 46 | this.storageType = TokenStorageType.ENCRYPTED_FILE; 47 | return this.storage; 48 | } 49 | 50 | private async getStorage(): Promise { 51 | if (this.storage !== null) { 52 | return this.storage; 53 | } 54 | 55 | // Use a single initialization promise to avoid race conditions 56 | if (!this.storageInitPromise) { 57 | this.storageInitPromise = this.initializeStorage(); 58 | } 59 | 60 | // Wait for initialization to complete 61 | return await this.storageInitPromise; 62 | } 63 | 64 | async getCredentials(serverName: string): Promise { 65 | const storage = await this.getStorage(); 66 | return storage.getCredentials(serverName); 67 | } 68 | 69 | async setCredentials(credentials: OAuthCredentials): Promise { 70 | const storage = await this.getStorage(); 71 | await storage.setCredentials(credentials); 72 | } 73 | 74 | async deleteCredentials(serverName: string): Promise { 75 | const storage = await this.getStorage(); 76 | await storage.deleteCredentials(serverName); 77 | } 78 | 79 | async listServers(): Promise { 80 | const storage = await this.getStorage(); 81 | return storage.listServers(); 82 | } 83 | 84 | async getAllCredentials(): Promise> { 85 | const storage = await this.getStorage(); 86 | return storage.getAllCredentials(); 87 | } 88 | 89 | async clearAll(): Promise { 90 | const storage = await this.getStorage(); 91 | await storage.clearAll(); 92 | } 93 | 94 | async getStorageType(): Promise { 95 | await this.getStorage(); 96 | return this.storageType!; 97 | } 98 | } -------------------------------------------------------------------------------- /eslint.config.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | const tseslint = 
require('@typescript-eslint/eslint-plugin'); 8 | const tsParser = require('@typescript-eslint/parser'); 9 | const licenseHeader = require('eslint-plugin-license-header'); 10 | const importPlugin = require('eslint-plugin-import'); 11 | 12 | module.exports = [ 13 | { 14 | ignores: ['**/dist/', '*.js', '**/node_modules/', '**/coverage/', '!eslint.config.js', '**/docs/.vitepress/cache/', '**/docs/.vitepress/dist/'], 15 | }, 16 | { 17 | files: ['workspace-server/src/**/*.ts'], 18 | ignores: ['**/*.test.ts', '**/*.spec.ts'], 19 | languageOptions: { 20 | parser: tsParser, 21 | parserOptions: { 22 | project: true, 23 | tsconfigRootDir: __dirname, 24 | ecmaVersion: 2020, 25 | sourceType: 'module', 26 | }, 27 | }, 28 | plugins: { 29 | '@typescript-eslint': tseslint, 30 | }, 31 | rules: { 32 | ...tseslint.configs.recommended.rules, 33 | '@typescript-eslint/no-explicit-any': 'off', 34 | '@typescript-eslint/explicit-function-return-type': 'off', 35 | '@typescript-eslint/no-unused-vars': [ 36 | 'warn', 37 | { 38 | argsIgnorePattern: '^_', 39 | varsIgnorePattern: '^_', 40 | caughtErrorsIgnorePattern: '^_', 41 | }, 42 | ], 43 | 'prefer-const': 'warn', 44 | }, 45 | }, 46 | { 47 | files: ['workspace-server/src/**/*.test.ts', 'workspace-server/src/**/*.spec.ts'], 48 | languageOptions: { 49 | parser: tsParser, 50 | parserOptions: { 51 | ecmaVersion: 2020, 52 | sourceType: 'module', 53 | }, 54 | }, 55 | plugins: { 56 | '@typescript-eslint': tseslint, 57 | }, 58 | rules: { 59 | ...tseslint.configs.recommended.rules, 60 | '@typescript-eslint/no-explicit-any': 'off', 61 | '@typescript-eslint/explicit-function-return-type': 'off', 62 | '@typescript-eslint/no-unused-vars': [ 63 | 'warn', 64 | { 65 | argsIgnorePattern: '^_', 66 | varsIgnorePattern: '^_', 67 | }, 68 | ], 69 | 'prefer-const': 'warn', 70 | }, 71 | }, 72 | { 73 | files: ['./**/*.{tsx,ts,js}'], 74 | ignores: ['workspace-server/src/index.ts'], // Has shebang which conflicts with license header 75 | plugins: { 76 | 
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

// Developer CLI for managing stored OAuth credentials: clear them, force
// the access token to expire (to exercise the refresh path), or show status.
const { OAuthCredentialStorage } = require('../workspace-server/dist/auth-utils.js');

/** Clears all stored authentication credentials; exits 1 on failure. */
async function clearAuth() {
  try {
    await OAuthCredentialStorage.clearCredentials();
    console.log('✅ Authentication credentials cleared successfully.');
  } catch (error) {
    console.error('❌ Failed to clear authentication credentials:', error);
    process.exit(1);
  }
}

/**
 * Forces the stored access token to be expired so the next API call
 * triggers a proactive refresh; exits 1 on failure.
 */
async function expireToken() {
  try {
    const credentials = await OAuthCredentialStorage.loadCredentials();
    if (!credentials) {
      console.log('ℹ️ No credentials found to expire.');
      return;
    }

    // Set expiry to 1 second ago
    credentials.expiry_date = Date.now() - 1000;
    await OAuthCredentialStorage.saveCredentials(credentials);
    console.log('✅ Access token expired successfully.');
    console.log(' Next API call will trigger proactive refresh.');
  } catch (error) {
    console.error('❌ Failed to expire token:', error);
    process.exit(1);
  }
}

/** Prints a human-readable summary of the stored credentials. */
async function showStatus() {
  try {
    const credentials = await OAuthCredentialStorage.loadCredentials();
    if (!credentials) {
      console.log('ℹ️ No credentials found.');
      return;
    }

    const now = Date.now();
    const expiry = credentials.expiry_date;
    const hasRefreshToken = !!credentials.refresh_token;
    const hasAccessToken = !!credentials.access_token;
    const isExpired = expiry ? expiry < now : false;

    console.log('📊 Auth Status:');
    console.log(` Access Token: ${hasAccessToken ? '✅ Present' : '❌ Missing'}`);
    console.log(` Refresh Token: ${hasRefreshToken ? '✅ Present' : '❌ Missing'}`);
    if (expiry) {
      console.log(` Expiry: ${new Date(expiry).toISOString()}`);
      console.log(` Status: ${isExpired ? '❌ EXPIRED' : '✅ Valid'}`);
      if (!isExpired) {
        const minutesLeft = Math.floor((expiry - now) / 1000 / 60);
        console.log(` Time left: ~${minutesLeft} minutes`);
      }
    } else {
      console.log(` Expiry: ⚠️ Unknown`);
    }
  } catch (error) {
    console.error('❌ Failed to get auth status:', error);
    process.exit(1);
  }
}

/** Prints usage information. */
function showHelp() {
  console.log(`
Auth Management CLI

Usage: node scripts/auth-utils.js <command>

Commands:
  clear    Clear all authentication credentials
  expire   Force the access token to expire (for testing refresh)
  status   Show current authentication status
  help     Show this help message

Examples:
  node scripts/auth-utils.js clear
  node scripts/auth-utils.js expire
  node scripts/auth-utils.js status
`);
}

/** Dispatches the first CLI argument to the matching command. */
async function main() {
  const command = process.argv[2];

  switch (command) {
    case 'clear':
      await clearAuth();
      break;
    case 'expire':
      await expireToken();
      break;
    case 'status':
      await showStatus();
      break;
    case 'help':
    case '--help':
    case '-h':
      showHelp();
      break;
    default:
      if (!command) {
        console.error('❌ No command specified.');
      } else {
        console.error(`❌ Unknown command: ${command}`);
      }
      showHelp();
      process.exit(1);
  }
}

// FIX: don't leave the top-level promise floating — an unexpected rejection
// inside main() would otherwise surface as an unhandled rejection instead of
// a clean non-zero exit.
main().catch((error) => {
  console.error('❌ Unexpected error:', error);
  process.exit(1);
});
33 | - `drive.downloadFile`: Downloads a file from Google Drive to a local path. 34 | 35 | ### Google Calendar 36 | - `calendar.list`: Lists all of the user's calendars. 37 | - `calendar.createEvent`: Creates a new event in a calendar. 38 | - `calendar.listEvents`: Lists events from a calendar. 39 | - `calendar.getEvent`: Gets the details of a specific calendar event. 40 | - `calendar.findFreeTime`: Finds a free time slot for multiple people to meet. 41 | - `calendar.updateEvent`: Updates an existing event in a calendar. 42 | - `calendar.respondToEvent`: Responds to a meeting invitation (accept, decline, or tentative). 43 | - `calendar.deleteEvent`: Deletes an event from a calendar. 44 | 45 | ### Google Chat 46 | - `chat.listSpaces`: Lists the spaces the user is a member of. 47 | - `chat.findSpaceByName`: Finds a Google Chat space by its display name. 48 | - `chat.sendMessage`: Sends a message to a Google Chat space. 49 | - `chat.getMessages`: Gets messages from a Google Chat space. 50 | - `chat.sendDm`: Sends a direct message to a user. 51 | - `chat.findDmByEmail`: Finds a Google Chat DM space by a user's email address. 52 | - `chat.listThreads`: Lists threads from a Google Chat space in reverse chronological order. 53 | - `chat.setUpSpace`: Sets up a new Google Chat space with a display name and a list of members. 54 | 55 | ### Gmail 56 | - `gmail.search`: Search for emails in Gmail using query parameters. 57 | - `gmail.get`: Get the full content of a specific email message. 58 | - `gmail.downloadAttachment`: Downloads an attachment from a Gmail message to a local file. 59 | - `gmail.modify`: Modify a Gmail message. 60 | - `gmail.send`: Send an email message. 61 | - `gmail.createDraft`: Create a draft email message. 62 | - `gmail.sendDraft`: Send a previously created draft email. 63 | - `gmail.listLabels`: List all Gmail labels in the user's mailbox. 64 | 65 | ### Time 66 | - `time.getCurrentDate`: Gets the current date. 
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

// Unit tests for OAuthCredentialStorage, the adapter that maps between
// google-auth-library `Credentials` and the MCP `OAuthCredentials` shape
// persisted via HybridTokenStorage.
import { describe, it, expect, beforeEach, jest } from '@jest/globals';
import { OAuthCredentialStorage } from '../../../auth/token-storage/oauth-credential-storage';
import { HybridTokenStorage } from '../../../auth/token-storage/hybrid-token-storage';
import { type Credentials } from 'google-auth-library';
import { type OAuthCredentials } from '../../../auth/token-storage/types';

// Mock the HybridTokenStorage dependency
jest.mock('../../../auth/token-storage/hybrid-token-storage');

describe('OAuthCredentialStorage', () => {
  // Credentials in the google-auth-library shape (what callers see).
  const mockGoogleCredentials: Credentials = {
    access_token: 'test-access-token',
    refresh_token: 'test-refresh-token',
    expiry_date: 1234567890,
    token_type: 'Bearer',
    scope: 'test-scope',
  };

  // The equivalent record in the storage-layer shape, keyed under the
  // fixed 'main-account' server name used by OAuthCredentialStorage.
  const mockMcpCredentials: OAuthCredentials = {
    serverName: 'main-account',
    token: {
      accessToken: 'test-access-token',
      refreshToken: 'test-refresh-token',
      expiresAt: 1234567890,
      tokenType: 'Bearer',
      scope: 'test-scope',
    },
    updatedAt: expect.any(Number) as any,
  };

  let getCredentialsMock: any;
  let setCredentialsMock: any;
  let deleteCredentialsMock: any;

  beforeEach(() => {
    jest.clearAllMocks();

    // Spy on the storage layer so no real keychain/file I/O happens.
    getCredentialsMock = jest
      .spyOn(HybridTokenStorage.prototype, 'getCredentials')
      .mockResolvedValue(null);
    setCredentialsMock = jest
      .spyOn(HybridTokenStorage.prototype, 'setCredentials')
      .mockResolvedValue(undefined);
    deleteCredentialsMock = jest
      .spyOn(HybridTokenStorage.prototype, 'deleteCredentials')
      .mockResolvedValue(undefined);
  });

  describe('loadCredentials', () => {
    it('should load credentials from HybridTokenStorage if available', async () => {
      getCredentialsMock.mockResolvedValue(mockMcpCredentials);

      const credentials = await OAuthCredentialStorage.loadCredentials();

      expect(getCredentialsMock).toHaveBeenCalledWith('main-account');
      // The storage shape must round-trip back to the google-auth shape.
      expect(credentials).toEqual(mockGoogleCredentials);
    });

    it('should return null if no credentials found', async () => {
      getCredentialsMock.mockResolvedValue(null);

      const credentials = await OAuthCredentialStorage.loadCredentials();

      expect(getCredentialsMock).toHaveBeenCalledWith('main-account');
      expect(credentials).toBeNull();
    });

    it('should throw an error if loading fails', async () => {
      // Storage errors are propagated, not swallowed.
      getCredentialsMock.mockRejectedValue(new Error('Storage error'));

      await expect(OAuthCredentialStorage.loadCredentials()).rejects.toThrow(
        'Storage error',
      );
    });
  });

  describe('saveCredentials', () => {
    it('should save credentials to HybridTokenStorage', async () => {
      setCredentialsMock.mockResolvedValue(undefined);

      await OAuthCredentialStorage.saveCredentials(mockGoogleCredentials);

      // Saved in the converted storage shape, under 'main-account'.
      expect(setCredentialsMock).toHaveBeenCalledWith(mockMcpCredentials);
    });
  });

  describe('clearCredentials', () => {
    it('should delete credentials from HybridTokenStorage', async () => {
      deleteCredentialsMock.mockResolvedValue(undefined);

      await OAuthCredentialStorage.clearCredentials();

      expect(deleteCredentialsMock).toHaveBeenCalledWith('main-account');
    });

    it('should throw an error if clearing from HybridTokenStorage fails', async () => {
      deleteCredentialsMock.mockRejectedValue(new Error('Clear error'));

      await expect(OAuthCredentialStorage.clearCredentials()).rejects.toThrow(
        'Clear error',
      );
    });
  });
});
33 | 34 | ### Self Assigning Issues 35 | 36 | If you're looking for an issue to work on, check out our list of issues that are labeled ["help wanted"](https://github.com/gemini-cli-extensions/workspace/issues?q=is%3Aissue+state%3Aopen+label%3A%22help+wanted%22). 37 | 38 | To assign an issue to yourself, simply add a comment with the text `/assign`. The comment must contain only that text and nothing else. This command will assign the issue to you, provided it is not already assigned. 39 | 40 | Please note that you can have a maximum of 3 issues assigned to you at any given time. 41 | 42 | ### Pull Request Guidelines 43 | 44 | To help us review and merge your PRs quickly, please follow these guidelines. PRs that do not meet these standards may be closed. 45 | 46 | #### 1. Link to an Existing Issue 47 | 48 | All PRs should be linked to an existing issue in our tracker. This ensures that every change has been discussed and is aligned with the project's goals before any code is written. 49 | 50 | - **For bug fixes:** The PR should be linked to the bug report issue. 51 | - **For features:** The PR should be linked to the feature request or proposal issue that has been approved by a maintainer. 52 | 53 | If an issue for your change doesn't exist, please **open one first** and wait for feedback before you start coding. 54 | 55 | #### 2. Keep It Small and Focused 56 | 57 | We favor small, atomic PRs that address a single issue or add a single, self-contained feature. 58 | 59 | - **Do:** Create a PR that fixes one specific bug or adds one specific feature. 60 | - **Don't:** Bundle multiple unrelated changes (e.g., a bug fix, a new feature, and a refactor) into a single PR. 61 | 62 | Large changes should be broken down into a series of smaller, logical PRs that can be reviewed and merged independently. 63 | 64 | #### 3. Use Draft PRs for Work in Progress 65 | 66 | If you'd like to get early feedback on your work, please use GitHub's **Draft Pull Request** feature. 
This signals to the maintainers that the PR is not yet ready for a formal review but is open for discussion and initial feedback. 67 | 68 | #### 4. Ensure All Checks Pass 69 | 70 | Before submitting your PR, ensure that all automated checks are passing by running `npm run test && npm run lint`. This command runs all tests, linting, and other style checks. 71 | 72 | #### 5. Write Clear Commit Messages and a Good PR Description 73 | 74 | Your PR should have a clear, descriptive title and a detailed description of the changes. Follow the [Conventional Commits](https://www.conventionalcommits.org/) standard for your commit messages. 75 | 76 | - **Good PR Title:** `feat(cli): Add --json flag to 'config get' command` 77 | - **Bad PR Title:** `Made some changes` 78 | 79 | In the PR description, explain the "why" behind your changes and link to the relevant issue (e.g., `Fixes #123`). 80 | 81 | ## Development Setup and Workflow 82 | 83 | For information on how to build, modify, and understand the development setup of this project, please see the [development documentation](docs/development.md). 
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { google, people_v1 } from 'googleapis';
import { AuthManager } from '../auth/AuthManager';
import { logToFile } from '../utils/logger';
import { gaxiosOptions } from '../utils/GaxiosConfig';

/**
 * MCP tool wrapper around the Google People API. Each public method returns
 * an MCP-style `{ content: [{ type: 'text', text }] }` payload; errors are
 * caught and returned in the same shape as `{ error }` JSON rather than thrown.
 */
export class PeopleService {
  constructor(private authManager: AuthManager) {
  }

  // Builds a People API client using a freshly authenticated OAuth client.
  private async getPeopleClient(): Promise<people_v1.People> {
    const auth = await this.authManager.getAuthenticatedClient();
    const options = { ...gaxiosOptions, auth };
    return google.people({ version: 'v1', ...options });
  }

  /**
   * Looks up a user profile. Exactly one lookup path is taken:
   * - `userId`: direct people.get on the resource name.
   * - `email` or `name`: directory search (domain contacts + profiles).
   * Throws (into the error payload) when none of the three is provided.
   */
  public getUserProfile = async ({ userId, email, name }: { userId?: string, email?: string, name?: string }) => {
    logToFile(`[PeopleService] Starting getUserProfile with: userId=${userId}, email=${email}, name=${name}`);
    try {
      if (!userId && !email && !name) {
        throw new Error('Either userId, email, or name must be provided.');
      }
      const people = await this.getPeopleClient();
      if (userId) {
        // Accept both bare IDs and full 'people/<id>' resource names.
        const resourceName = userId.startsWith('people/') ? userId : `people/${userId}`;
        const res = await people.people.get({
          resourceName,
          personFields: 'names,emailAddresses',
        });
        logToFile(`[PeopleService] Finished getUserProfile for user: ${userId}`);
        // Wrapped to match the search-result shape used by the other branch.
        return {
          content: [{
            type: "text" as const,
            text: JSON.stringify({ results: [{ person: res.data }] })
          }]
        };
      } else if (email || name) {
        const query = email || name;
        const res = await people.people.searchDirectoryPeople({
          query,
          readMask: 'names,emailAddresses',
          sources: ['DIRECTORY_SOURCE_TYPE_DOMAIN_CONTACT', 'DIRECTORY_SOURCE_TYPE_DOMAIN_PROFILE'],
        });
        logToFile(`[PeopleService] Finished getUserProfile search for: ${query}`);
        return {
          content: [{
            type: "text" as const,
            text: JSON.stringify(res.data)
          }]
        };
      } else {
        // Unreachable: the guard at the top already covers this case.
        // Kept as a defensive fallback.
        throw new Error('Either userId, email, or name must be provided.');
      }
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      logToFile(`[PeopleService] Error during people.getUserProfile: ${errorMessage}`);
      return {
        content: [{
          type: "text" as const,
          text: JSON.stringify({ error: errorMessage })
        }]
      };
    }
  }

  /** Returns names and email addresses of the authenticated user ('people/me'). */
  public getMe = async () => {
    logToFile(`[PeopleService] Starting getMe`);
    try {
      const people = await this.getPeopleClient();
      const res = await people.people.get({
        resourceName: 'people/me',
        personFields: 'names,emailAddresses',
      });
      logToFile(`[PeopleService] Finished getMe`);
      return {
        content: [{
          type: "text" as const,
          text: JSON.stringify(res.data)
        }]
      };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      logToFile(`[PeopleService] Error during people.getMe: ${errorMessage}`);
      return {
        content: [{
          type: "text" as const,
          text: JSON.stringify({ error: errorMessage })
        }]
      };
    }
  }
}
query = buildDriveSearchQuery(MIME_TYPES.DOCUMENT, "test's query\\path"); 34 | expect(query).toBe("mimeType='application/vnd.google-apps.document' and fullText contains 'test\\'s query\\\\path'"); 35 | }); 36 | 37 | it('should escape special characters in title search', () => { 38 | const query = buildDriveSearchQuery(MIME_TYPES.PRESENTATION, "title:John's Presentation\\2024"); 39 | expect(query).toBe("mimeType='application/vnd.google-apps.presentation' and name contains 'John\\'s Presentation\\\\2024'"); 40 | }); 41 | 42 | it('should handle empty strings', () => { 43 | const query = buildDriveSearchQuery(MIME_TYPES.SPREADSHEET, ''); 44 | expect(query).toBe("mimeType='application/vnd.google-apps.spreadsheet' and fullText contains ''"); 45 | }); 46 | 47 | it('should handle whitespace-only queries', () => { 48 | const query = buildDriveSearchQuery(MIME_TYPES.DOCUMENT, ' '); 49 | expect(query).toBe("mimeType='application/vnd.google-apps.document' and fullText contains ' '"); 50 | }); 51 | 52 | it('should handle title prefix with whitespace', () => { 53 | const query = buildDriveSearchQuery(MIME_TYPES.PRESENTATION, ' title: "My Doc" '); 54 | expect(query).toBe("mimeType='application/vnd.google-apps.presentation' and name contains 'My Doc'"); 55 | }); 56 | 57 | it('should work with all MIME types', () => { 58 | expect(buildDriveSearchQuery(MIME_TYPES.DOCUMENT, 'test')) 59 | .toContain('application/vnd.google-apps.document'); 60 | expect(buildDriveSearchQuery(MIME_TYPES.PRESENTATION, 'test')) 61 | .toContain('application/vnd.google-apps.presentation'); 62 | expect(buildDriveSearchQuery(MIME_TYPES.SPREADSHEET, 'test')) 63 | .toContain('application/vnd.google-apps.spreadsheet'); 64 | expect(buildDriveSearchQuery(MIME_TYPES.FOLDER, 'test')) 65 | .toContain('application/vnd.google-apps.folder'); 66 | }); 67 | }); 68 | 69 | describe('MIME_TYPES constants', () => { 70 | it('should have correct MIME type values', () => { 71 | 
expect(MIME_TYPES.DOCUMENT).toBe('application/vnd.google-apps.document'); 72 | expect(MIME_TYPES.PRESENTATION).toBe('application/vnd.google-apps.presentation'); 73 | expect(MIME_TYPES.SPREADSHEET).toBe('application/vnd.google-apps.spreadsheet'); 74 | expect(MIME_TYPES.FOLDER).toBe('application/vnd.google-apps.folder'); 75 | }); 76 | }); 77 | }); 78 | -------------------------------------------------------------------------------- /docs/development.md: -------------------------------------------------------------------------------- 1 | # Development 2 | 3 | This document provides instructions for developing the Google Workspace extension. 4 | 5 | ## Development Setup and Workflow 6 | 7 | This section guides contributors on how to build, modify, and understand the development setup of this project. 8 | 9 | ### Setting Up the Development Environment 10 | 11 | **Prerequisites:** 12 | 13 | 1. **Node.js**: 14 | - **Development:** Please use Node.js `~20.19.0`. This specific version is required due to an upstream development dependency issue. You can use a tool like [nvm](https://github.com/nvm-sh/nvm) to manage Node.js versions. 15 | - **Production:** For running the CLI in a production environment, any version of Node.js `>=20` is acceptable. 16 | 2. **Git** 17 | 18 | ### Build Process 19 | 20 | To clone the repository: 21 | 22 | ```bash 23 | git clone https://github.com/gemini-cli-extensions/workspace.git # Or your fork's URL 24 | cd workspace 25 | ``` 26 | 27 | To install dependencies defined in `package.json` as well as root dependencies: 28 | 29 | ```bash 30 | npm install 31 | ``` 32 | 33 | To build the entire project (all packages): 34 | 35 | ```bash 36 | npm run build 37 | ``` 38 | 39 | This command typically compiles TypeScript to JavaScript, bundles assets, and prepares the packages for execution. Refer to `scripts/build.js` and `package.json` scripts for more details on what happens during the build. 
40 | 41 | ### Running Tests 42 | 43 | This project contains unit tests. 44 | 45 | #### Unit Tests 46 | 47 | To execute the unit test suite for the project: 48 | 49 | ```bash 50 | npm run test 51 | ``` 52 | 53 | This will run tests located in the `workspace-server/src/__tests__` directory. Ensure tests pass before submitting any changes. For a more comprehensive check, it is recommended to run `npm run test && npm run lint`. 54 | 55 | To test a single file, you can pass its path from the project root as an argument. For example: 56 | 57 | ```bash 58 | npm run test -- workspace-server/src/__tests__/GmailService.test.ts 59 | ``` 60 | 61 | ### Linting and Style Checks 62 | 63 | To ensure code quality and formatting consistency, run the linter and tests: 64 | 65 | ```bash 66 | npm run test && npm run lint 67 | ``` 68 | 69 | This command will run ESLint, Prettier, all tests, and other checks as defined in the project's `package.json`. 70 | 71 | > [!TIP] 72 | > After cloning, create a git pre-commit hook file to ensure your commits are always clean. 73 | > 74 | > ```bash 75 | > cat <<'EOF' > .git/hooks/pre-commit 76 | > #!/bin/sh 77 | > # Run tests and linting before commit 78 | > if ! (npm run test && npm run lint); then 79 | > echo "Pre-commit checks failed. Commit aborted." 80 | > exit 1 81 | > fi 82 | > EOF 83 | > chmod +x .git/hooks/pre-commit 84 | > ``` 85 | 86 | #### Formatting 87 | 88 | To separately format the code in this project, run the following command from the root directory: 89 | 90 | ```bash 91 | npm run format 92 | ``` 93 | 94 | This command uses Prettier to format the code according to the project's style guidelines. 95 | 96 | #### Linting 97 | 98 | To separately lint the code in this project, run the following command from the root directory: 99 | 100 | ```bash 101 | npm run lint 102 | ``` 103 | 104 | ### Coding Conventions 105 | 106 | - Please adhere to the coding style, patterns, and conventions used throughout the existing codebase.
107 | - Consult [GEMINI.md](https://github.com/gemini-cli-extensions/workspace/blob/main/GEMINI.md) (typically found in the project root) for specific instructions related to AI-assisted development, including conventions for comments and Git usage. 108 | - **Imports:** Pay special attention to import paths. The project uses ESLint to enforce restrictions on relative imports between packages. 109 | 110 | ### Project Structure 111 | 112 | - `workspace-server/`: The main workspace for the MCP server. 113 | - `src/`: Contains the source code for the server. 114 | - `__tests__/`: Contains all the tests. 115 | - `auth/`: Handles authentication. 116 | - `services/`: Contains the business logic for each service. 117 | - `utils/`: Contains utility functions. 118 | - `config/`: Contains configuration files. 119 | - `scripts/`: Utility scripts for building, testing, and development tasks. 120 | 121 | ## Authentication 122 | 123 | The extension uses OAuth 2.0 to authenticate with Google Workspace APIs. The `scripts/auth-utils.js` script provides a command-line interface to manage authentication credentials. 124 | 125 | ### Usage 126 | 127 | To use the script, run the following command: 128 | 129 | ```bash 130 | node scripts/auth-utils.js 131 | ``` 132 | 133 | ### Commands 134 | 135 | - `clear`: Clear all authentication credentials. 136 | - `expire`: Force the access token to expire (for testing refresh). 137 | - `status`: Show current authentication status. 138 | - `help`: Show the help message.
138 | -------------------------------------------------------------------------------- /workspace-server/src/__tests__/utils/IdUtils.test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { describe, it, expect } from '@jest/globals'; 8 | import { extractDocId } from '../../utils/IdUtils'; 9 | 10 | describe('IdUtils', () => { 11 | describe('extractDocId', () => { 12 | it('should extract document ID from a full Google Docs URL', () => { 13 | const url = 'https://docs.google.com/document/d/1a2b3c4d5e6f7g8h9i0j/edit'; 14 | const result = extractDocId(url); 15 | expect(result).toBe('1a2b3c4d5e6f7g8h9i0j'); 16 | }); 17 | 18 | it('should extract document ID from URL with additional parameters', () => { 19 | const url = 'https://docs.google.com/document/d/abc123-XYZ_789/edit?usp=sharing'; 20 | const result = extractDocId(url); 21 | expect(result).toBe('abc123-XYZ_789'); 22 | }); 23 | 24 | it('should extract document ID from URL with preview path', () => { 25 | const url = 'https://docs.google.com/document/d/test-doc-id-123/preview'; 26 | const result = extractDocId(url); 27 | expect(result).toBe('test-doc-id-123'); 28 | }); 29 | 30 | it('should extract document ID from URL without protocol', () => { 31 | const url = 'docs.google.com/document/d/my_document_id/view'; 32 | const result = extractDocId(url); 33 | expect(result).toBe('my_document_id'); 34 | }); 35 | 36 | it('should return undefined when raw document ID is passed directly', () => { 37 | const docId = '1a2b3c4d5e6f7g8h9i0j'; 38 | const result = extractDocId(docId); 39 | expect(result).toBeUndefined(); 40 | }); 41 | 42 | it('should return undefined for document ID with underscores and hyphens', () => { 43 | const docId = 'doc_id-with-special_chars_123'; 44 | const result = extractDocId(docId); 45 | expect(result).toBeUndefined(); 46 | }); 47 | 48 | it('should 
return undefined if no pattern matches', () => { 49 | const randomString = 'not a doc id or url'; 50 | const result = extractDocId(randomString); 51 | expect(result).toBeUndefined(); 52 | }); 53 | 54 | it('should return undefined for empty string', () => { 55 | const result = extractDocId(''); 56 | expect(result).toBeUndefined(); 57 | }); 58 | 59 | it('should extract from partial URL path', () => { 60 | const partialPath = '/document/d/abc123xyz/'; 61 | const result = extractDocId(partialPath); 62 | expect(result).toBe('abc123xyz'); 63 | }); 64 | 65 | it('should handle URL with multiple document paths (edge case)', () => { 66 | // Should extract the first match 67 | const url = '/document/d/first123/document/d/second456/'; 68 | const result = extractDocId(url); 69 | expect(result).toBe('first123'); 70 | }); 71 | 72 | it('should handle very long document IDs', () => { 73 | const longId = 'a'.repeat(100) + '_' + 'b'.repeat(50); 74 | const url = `https://docs.google.com/document/d/${longId}/edit`; 75 | const result = extractDocId(url); 76 | expect(result).toBe(longId); 77 | }); 78 | 79 | it('should handle document ID with only numbers', () => { 80 | const url = 'https://docs.google.com/document/d/1234567890/edit'; 81 | const result = extractDocId(url); 82 | expect(result).toBe('1234567890'); 83 | }); 84 | 85 | it('should handle document ID with only letters', () => { 86 | const url = 'https://docs.google.com/document/d/abcdefghij/edit'; 87 | const result = extractDocId(url); 88 | expect(result).toBe('abcdefghij'); 89 | }); 90 | 91 | it('should handle malformed URLs gracefully', () => { 92 | const malformedUrl = 'https://docs.google.com/document/edit'; 93 | const result = extractDocId(malformedUrl); 94 | // Should return the input as-is when pattern doesn't match 95 | expect(result).toBeUndefined(); 96 | }); 97 | 98 | it('should be case sensitive for document IDs', () => { 99 | const url = 'https://docs.google.com/document/d/AbCdEfGhIj/edit'; 100 | const result = 
extractDocId(url); 101 | expect(result).toBe('AbCdEfGhIj'); 102 | }); 103 | 104 | it('should extract document ID from a complex URL with resourcekey', () => { 105 | const url = 'https://docs.google.com/document/d/1MGqTbt5joTs40QS-YZTP9QH1-TxQ5tij7RgXPFWMPiI/edit?resourcekey=0-X_p2TPxpk0visLTHHMF7Yg&tab=t.0'; 106 | const result = extractDocId(url); 107 | expect(result).toBe('1MGqTbt5joTs40QS-YZTP9QH1-TxQ5tij7RgXPFWMPiI'); 108 | }); 109 | 110 | it('should extract document ID from a URL without a trailing slash', () => { 111 | const url = 'https://docs.google.com/document/d/1MGqTbt5joTs40QS-YZTP9QH1-TxQ5tij7RgXPFWMPiI'; 112 | const result = extractDocId(url); 113 | expect(result).toBe('1MGqTbt5joTs40QS-YZTP9QH1-TxQ5tij7RgXPFWMPiI'); 114 | }); 115 | }); 116 | }); 117 | -------------------------------------------------------------------------------- /scripts/release.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | const fs = require('node:fs'); 8 | const path = require('node:path'); 9 | const archiver = require('archiver'); 10 | const argv = require('minimist')(process.argv.slice(2)); 11 | 12 | const deleteFilesByExtension = (dir, ext) => { 13 | if (!fs.existsSync(dir)) { 14 | return; 15 | } 16 | 17 | const files = fs.readdirSync(dir); 18 | for (const file of files) { 19 | const filePath = path.join(dir, file); 20 | const stat = fs.lstatSync(filePath); 21 | if (stat.isDirectory()) { 22 | deleteFilesByExtension(filePath, ext); 23 | } else if (filePath.endsWith(ext)) { 24 | fs.unlinkSync(filePath); 25 | } 26 | } 27 | }; 28 | 29 | const main = async () => { 30 | const platform = argv.platform; 31 | if (platform && typeof platform !== 'string') { 32 | console.error( 33 | 'Error: The --platform argument must be a string (e.g., --platform=linux).' 
34 | ); 35 | process.exit(1); 36 | } 37 | const baseName = 'google-workspace-extension'; 38 | const name = platform ? `${platform}.${baseName}` : baseName; 39 | const extension = 'tar.gz'; 40 | 41 | const rootDir = path.join(__dirname, '..'); 42 | const releaseDir = path.join(rootDir, 'release'); 43 | fs.rmSync(releaseDir, { recursive: true, force: true }); 44 | const archiveName = `${name}.${extension}`; 45 | const archiveDir = path.join(releaseDir, name); 46 | const workspaceMcpServerDir = path.join(rootDir, 'workspace-server'); 47 | 48 | // Create the release directory 49 | fs.mkdirSync(releaseDir, { recursive: true }); 50 | 51 | // Create the platform-specific directory 52 | fs.mkdirSync(archiveDir, { recursive: true }); 53 | 54 | // Copy the dist directory 55 | fs.cpSync( 56 | path.join(workspaceMcpServerDir, 'dist'), 57 | path.join(archiveDir, 'dist'), 58 | { recursive: true } 59 | ); 60 | 61 | // Clean up the dist directory 62 | const distDir = path.join(archiveDir, 'dist'); 63 | deleteFilesByExtension(distDir, '.d.ts'); 64 | deleteFilesByExtension(distDir, '.map'); 65 | fs.rmSync(path.join(distDir, '__tests__'), { recursive: true, force: true }); 66 | fs.rmSync(path.join(distDir, 'auth'), { recursive: true, force: true }); 67 | fs.rmSync(path.join(distDir, 'services'), { recursive: true, force: true }); 68 | fs.rmSync(path.join(distDir, 'utils'), { recursive: true, force: true }); 69 | 70 | // Copy native modules and dependencies (keytar, jsdom) 71 | const nodeModulesDir = path.join(archiveDir, 'node_modules'); 72 | fs.mkdirSync(nodeModulesDir, { recursive: true }); 73 | 74 | const { getTransitiveDependencies } = require('./utils/dependencies'); 75 | const visited = getTransitiveDependencies(rootDir, ['keytar', 'jsdom']); 76 | 77 | visited.forEach(pkg => { 78 | const source = path.join(rootDir, 'node_modules', pkg); 79 | const dest = path.join(nodeModulesDir, pkg); 80 | if (fs.existsSync(source)) { 81 | fs.cpSync(source, dest, { recursive: true }); 82 | } 
83 | }); 84 | 85 | const packageJson = require('../package.json'); 86 | const version = (process.env.GITHUB_REF_NAME || packageJson.version).replace(/^v/, ''); 87 | 88 | // Generate the gemini-extension.json file 89 | const geminiExtensionJson = { 90 | name: 'google-workspace', 91 | version, 92 | contextFileName: 'WORKSPACE-Context.md', 93 | mcpServers: { 94 | 'google-workspace': { 95 | command: 'node', 96 | args: ['dist/index.js'], 97 | cwd: '${extensionPath}', 98 | }, 99 | }, 100 | }; 101 | fs.writeFileSync( 102 | path.join(archiveDir, 'gemini-extension.json'), 103 | JSON.stringify(geminiExtensionJson, null, 2) 104 | ); 105 | 106 | // Copy the WORKSPACE-Context.md file 107 | fs.copyFileSync( 108 | path.join(workspaceMcpServerDir, 'WORKSPACE-Context.md'), 109 | path.join(archiveDir, 'WORKSPACE-Context.md') 110 | ); 111 | 112 | // Copy the commands directory 113 | const commandsDir = path.join(rootDir, 'commands'); 114 | if (fs.existsSync(commandsDir)) { 115 | fs.cpSync(commandsDir, path.join(archiveDir, 'commands'), { recursive: true }); 116 | } 117 | 118 | 119 | // Create the archive 120 | const output = fs.createWriteStream(path.join(releaseDir, archiveName)); 121 | const archive = archiver('tar', { 122 | gzip: true, 123 | }); 124 | 125 | const archivePromise = new Promise((resolve, reject) => { 126 | output.on('close', function () { 127 | console.log(archive.pointer() + ' total bytes'); 128 | console.log( 129 | 'archiver has been finalized and the output file descriptor has closed.' 
130 | ); 131 | resolve(); 132 | }); 133 | 134 | archive.on('error', function (err) { 135 | reject(err); 136 | }); 137 | }); 138 | 139 | archive.pipe(output); 140 | archive.directory(archiveDir, false); 141 | archive.finalize(); 142 | 143 | await archivePromise; 144 | }; 145 | 146 | main().catch(err => { 147 | console.error(err); 148 | process.exit(1); 149 | }); 150 | -------------------------------------------------------------------------------- /workspace-server/src/__tests__/auth/token-storage/base-token-storage.test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { describe, it, expect, beforeEach } from '@jest/globals'; 8 | import { BaseTokenStorage } from '../../../auth/token-storage/base-token-storage'; 9 | import type { OAuthCredentials, OAuthToken } from '../../../auth/token-storage/types'; 10 | 11 | class TestTokenStorage extends BaseTokenStorage { 12 | private storage = new Map(); 13 | 14 | async getCredentials(serverName: string): Promise { 15 | return this.storage.get(serverName) || null; 16 | } 17 | 18 | async setCredentials(credentials: OAuthCredentials): Promise { 19 | this.validateCredentials(credentials); 20 | this.storage.set(credentials.serverName, credentials); 21 | } 22 | 23 | async deleteCredentials(serverName: string): Promise { 24 | this.storage.delete(serverName); 25 | } 26 | 27 | async listServers(): Promise { 28 | return Array.from(this.storage.keys()); 29 | } 30 | 31 | async getAllCredentials(): Promise> { 32 | return new Map(this.storage); 33 | } 34 | 35 | async clearAll(): Promise { 36 | this.storage.clear(); 37 | } 38 | 39 | override validateCredentials(credentials: OAuthCredentials): void { 40 | super.validateCredentials(credentials); 41 | } 42 | 43 | 44 | 45 | override sanitizeServerName(serverName: string): string { 46 | return super.sanitizeServerName(serverName); 47 | } 48 | 
} 49 | 50 | describe('BaseTokenStorage', () => { 51 | let storage: TestTokenStorage; 52 | 53 | beforeEach(() => { 54 | storage = new TestTokenStorage('gemini-cli-mcp-oauth'); 55 | }); 56 | 57 | describe('validateCredentials', () => { 58 | it('should validate valid credentials with access token', () => { 59 | const credentials: OAuthCredentials = { 60 | serverName: 'test-server', 61 | token: { 62 | accessToken: 'access-token', 63 | tokenType: 'Bearer', 64 | }, 65 | updatedAt: Date.now(), 66 | }; 67 | 68 | expect(() => storage.validateCredentials(credentials)).not.toThrow(); 69 | }); 70 | 71 | it('should validate valid credentials with refresh token', () => { 72 | const credentials: OAuthCredentials = { 73 | serverName: 'test-server', 74 | token: { 75 | refreshToken: 'refresh-token', 76 | tokenType: 'Bearer', 77 | }, 78 | updatedAt: Date.now(), 79 | }; 80 | 81 | expect(() => storage.validateCredentials(credentials)).not.toThrow(); 82 | }); 83 | 84 | it('should throw for missing server name', () => { 85 | const credentials = { 86 | serverName: '', 87 | token: { 88 | accessToken: 'access-token', 89 | tokenType: 'Bearer', 90 | }, 91 | updatedAt: Date.now(), 92 | } as OAuthCredentials; 93 | 94 | expect(() => storage.validateCredentials(credentials)).toThrow( 95 | 'Server name is required', 96 | ); 97 | }); 98 | 99 | it('should throw for missing token', () => { 100 | const credentials = { 101 | serverName: 'test-server', 102 | token: null as unknown as OAuthToken, 103 | updatedAt: Date.now(), 104 | } as OAuthCredentials; 105 | 106 | expect(() => storage.validateCredentials(credentials)).toThrow( 107 | 'Token is required', 108 | ); 109 | }); 110 | 111 | it('should throw for missing access token and refresh token', () => { 112 | const credentials = { 113 | serverName: 'test-server', 114 | token: { 115 | accessToken: '', 116 | tokenType: 'Bearer', 117 | }, 118 | updatedAt: Date.now(), 119 | } as OAuthCredentials; 120 | 121 | expect(() => 
storage.validateCredentials(credentials)).toThrow( 122 | 'Access token or refresh token is required', 123 | ); 124 | }); 125 | 126 | it('should throw for missing token type', () => { 127 | const credentials = { 128 | serverName: 'test-server', 129 | token: { 130 | accessToken: 'access-token', 131 | tokenType: '', 132 | }, 133 | updatedAt: Date.now(), 134 | } as OAuthCredentials; 135 | 136 | expect(() => storage.validateCredentials(credentials)).toThrow( 137 | 'Token type is required', 138 | ); 139 | }); 140 | }); 141 | 142 | 143 | 144 | describe('sanitizeServerName', () => { 145 | it('should keep valid characters', () => { 146 | expect(storage.sanitizeServerName('test-server.example_123')).toBe( 147 | 'test-server.example_123', 148 | ); 149 | }); 150 | 151 | it('should replace invalid characters with underscore', () => { 152 | expect(storage.sanitizeServerName('test@server#example')).toBe( 153 | 'test_server_example', 154 | ); 155 | }); 156 | 157 | it('should handle special characters', () => { 158 | expect(storage.sanitizeServerName('test server/example:123')).toBe( 159 | 'test_server_example_123', 160 | ); 161 | }); 162 | }); 163 | }); -------------------------------------------------------------------------------- /workspace-server/src/__tests__/mocks/jsdom.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | /** 8 | * Mock implementation of jsdom for Jest tests 9 | * 10 | * This mock provides the minimal DOM functionality needed by our code: 11 | * 1. DocsService uses JSDOM to create a window object for DOMPurify 12 | * 2. 
markdownToDocsRequests uses JSDOM to parse HTML from marked 13 | */ 14 | 15 | class MockElement { 16 | tagName: string; 17 | nodeType: number; 18 | childNodes: MockNode[]; 19 | nextSibling: MockNode | null; 20 | attributes: { [key: string]: string }; 21 | 22 | constructor(tagName: string) { 23 | this.tagName = tagName; 24 | this.nodeType = 1; // Element node 25 | this.childNodes = []; 26 | this.nextSibling = null; 27 | this.attributes = {}; 28 | } 29 | 30 | get textContent(): string { 31 | return this.childNodes.map(child => child.textContent).join(''); 32 | } 33 | 34 | 35 | toLowerCase() { 36 | return this.tagName.toLowerCase(); 37 | } 38 | 39 | getAttribute(name: string): string | null { 40 | return this.attributes[name] || null; 41 | } 42 | } 43 | 44 | class MockTextNode { 45 | nodeType: number; 46 | textContent: string; 47 | childNodes: never[]; 48 | nextSibling: MockNode | null; 49 | 50 | constructor(text: string) { 51 | this.nodeType = 3; // Text node 52 | this.textContent = text; 53 | this.childNodes = []; 54 | this.nextSibling = null; 55 | } 56 | } 57 | 58 | type MockNode = MockElement | MockTextNode; 59 | 60 | class MockDocument { 61 | body: MockElement; 62 | 63 | constructor() { 64 | this.body = new MockElement('BODY'); 65 | } 66 | 67 | createElement(tagName: string): MockElement { 68 | return new MockElement(tagName.toUpperCase()); 69 | } 70 | 71 | querySelector(selector: string): MockElement | null { 72 | // Simple implementation for our use case (mostly just tag names) 73 | const tagName = selector.toUpperCase(); 74 | 75 | const queue: MockNode[] = [...this.body.childNodes]; 76 | while (queue.length > 0) { 77 | const node = queue.shift()!; 78 | if (node instanceof MockElement) { 79 | if (node.tagName === tagName) { 80 | return node; 81 | } 82 | queue.push(...node.childNodes); 83 | } 84 | } 85 | 86 | return null; 87 | } 88 | } 89 | 90 | class MockWindow { 91 | document: MockDocument; 92 | DOMParser: typeof MockDOMParser; 93 | 94 | constructor() { 95 | 
this.document = new MockDocument(); 96 | this.DOMParser = MockDOMParser; 97 | } 98 | } 99 | 100 | class MockDOMParser { 101 | parseFromString(html: string): { body: MockElement } { 102 | const body = new MockElement('BODY'); 103 | this.parseNodes(html, body); 104 | return { body }; 105 | } 106 | 107 | private parseNodes(html: string, parent: MockElement) { 108 | // Parse simple HTML tags and text 109 | // Note: This regex is very simple and won't handle attributes or self-closing tags well 110 | // but it's sufficient for the markdown output we're testing 111 | const tagRegex = /<(\w+)(?:\s+[^>]*)?>(.*?)<\/\1>|([^<]+)/gs; 112 | let match; 113 | 114 | while ((match = tagRegex.exec(html)) !== null) { 115 | if (match[1]) { 116 | // It's a tag 117 | const tagName = match[1].toUpperCase(); 118 | const element = new MockElement(tagName); 119 | const content = match[2]; 120 | 121 | // Handle attributes (simple href for links) 122 | const fullTag = match[0]; 123 | const hrefMatch = fullTag.match(/href=["']([^"']*)["']/); 124 | if (hrefMatch) { 125 | element.attributes['href'] = hrefMatch[1]; 126 | } 127 | 128 | // Recursively parse content 129 | this.parseNodes(content, element); 130 | 131 | parent.childNodes.push(element); 132 | } else if (match[3]) { 133 | // It's text content 134 | const text = match[3]; 135 | if (text) { 136 | const textNode = new MockTextNode(text); 137 | parent.childNodes.push(textNode); 138 | } 139 | } 140 | } 141 | 142 | // Set next sibling references 143 | for (let i = 0; i < parent.childNodes.length - 1; i++) { 144 | parent.childNodes[i].nextSibling = parent.childNodes[i + 1]; 145 | } 146 | } 147 | } 148 | 149 | export class JSDOM { 150 | window: MockWindow; 151 | 152 | constructor(html?: string) { 153 | this.window = new MockWindow(); 154 | 155 | if (html) { 156 | const parser = new MockDOMParser(); 157 | const parsed = parser.parseFromString(html); 158 | this.window.document.body = parsed.body; 159 | } 160 | } 161 | } 162 | 
-------------------------------------------------------------------------------- /workspace-server/src/utils/validation.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { z } from 'zod'; 8 | 9 | /** 10 | * Email validation schema 11 | * Validates email format according to RFC 5322 12 | */ 13 | export const emailSchema = z.string().email('Invalid email format'); 14 | 15 | /** 16 | * Validates multiple email addresses (for CC/BCC fields) 17 | */ 18 | export const emailArraySchema = z.union([ 19 | emailSchema, 20 | z.array(emailSchema) 21 | ]); 22 | 23 | /** 24 | * ISO 8601 datetime validation schema 25 | * Accepts formats like: 26 | * - 2024-01-15T10:30:00Z 27 | * - 2024-01-15T10:30:00-05:00 28 | * - 2024-01-15T10:30:00.000Z 29 | */ 30 | export const iso8601DateTimeSchema = z.string().refine( 31 | (val) => { 32 | const iso8601Regex = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{3})?(Z|[+-]\d{2}:\d{2})$/; 33 | if (!iso8601Regex.test(val)) return false; 34 | 35 | // Additional check: ensure it's a valid date 36 | const date = new Date(val); 37 | return !isNaN(date.getTime()); 38 | }, 39 | { 40 | message: 'Invalid ISO 8601 datetime format. Expected format: YYYY-MM-DDTHH:mm:ss[.sss][Z|±HH:mm]' 41 | } 42 | ); 43 | 44 | /** 45 | * Google Drive document/file ID validation 46 | * Google IDs are typically alphanumeric strings with hyphens and underscores 47 | */ 48 | export const googleDocumentIdSchema = z.string().regex( 49 | /^[a-zA-Z0-9_-]+$/, 50 | 'Invalid document ID format. 
Document IDs should only contain letters, numbers, hyphens, and underscores' 51 | ); 52 | 53 | /** 54 | * Google Drive URL validation 55 | * Accepts various Google Workspace URLs and extracts the document ID 56 | */ 57 | export const googleWorkspaceUrlSchema = z.string().regex( 58 | /^https:\/\/(docs|drive|sheets|slides)\.google\.com\/.+\/d\/([a-zA-Z0-9_-]+)/, 59 | 'Invalid Google Workspace URL format' 60 | ); 61 | 62 | /** 63 | * Folder name validation 64 | * Prevents problematic characters in folder names 65 | */ 66 | export const folderNameSchema = z.string() 67 | .min(1, 'Folder name cannot be empty') 68 | .max(255, 'Folder name too long (max 255 characters)') 69 | .refine( 70 | (val) => !(/[<>:"/\\|?*\x00-\x1F]/.test(val)), 71 | 'Folder name contains invalid characters' 72 | ); 73 | 74 | /** 75 | * Calendar ID validation 76 | * Can be 'primary' or an email address 77 | */ 78 | export const calendarIdSchema = z.union([ 79 | z.literal('primary'), 80 | emailSchema 81 | ]); 82 | 83 | /** 84 | * Search query sanitization 85 | * Escapes potentially dangerous characters from search queries 86 | * Preserves quotes for exact phrase searching 87 | */ 88 | export const searchQuerySchema = z.string() 89 | .transform((val) => { 90 | // Escape backslashes first, then escape quotes 91 | // This preserves the ability to search for exact phrases 92 | return val 93 | .replace(/\\/g, '\\\\') // Escape backslashes 94 | .replace(/'/g, "\\'") // Escape single quotes 95 | .replace(/"/g, '\\"'); // Escape double quotes 96 | }); 97 | 98 | /** 99 | * Page size validation for pagination 100 | */ 101 | export const pageSizeSchema = z.number() 102 | .int('Page size must be an integer') 103 | .min(1, 'Page size must be at least 1') 104 | .max(100, 'Page size cannot exceed 100'); 105 | 106 | 107 | /** 108 | * Helper function to create a validator from a Zod schema 109 | */ 110 | function createValidator( 111 | schema: z.ZodSchema, 112 | fallbackErrorMessage: string 113 | ) { 114 | return 
(value: unknown): { success: boolean; error?: string } => { 115 | try { 116 | schema.parse(value); 117 | return { success: true }; 118 | } catch (error) { 119 | if (error instanceof z.ZodError) { 120 | return { success: false, error: error.issues[0].message }; 121 | } 122 | return { success: false, error: fallbackErrorMessage }; 123 | } 124 | }; 125 | } 126 | 127 | /** 128 | * Helper function to validate email 129 | */ 130 | export const validateEmail = createValidator(emailSchema, 'Invalid email format'); 131 | 132 | /** 133 | * Helper function to validate ISO 8601 datetime 134 | */ 135 | export const validateDateTime = createValidator(iso8601DateTimeSchema, 'Invalid datetime format'); 136 | 137 | /** 138 | * Helper function to validate Google document ID 139 | */ 140 | export const validateDocumentId = createValidator(googleDocumentIdSchema, 'Invalid document ID'); 141 | 142 | /** 143 | * Helper function to extract document ID from URL or return the ID if already valid 144 | */ 145 | export function extractDocumentId(urlOrId: string): string { 146 | // First check if it's already a valid ID 147 | if (googleDocumentIdSchema.safeParse(urlOrId).success) { 148 | return urlOrId; 149 | } 150 | 151 | // Try to extract from URL 152 | const urlMatch = urlOrId.match(/\/d\/([a-zA-Z0-9_-]+)/); 153 | if (urlMatch && urlMatch[1]) { 154 | return urlMatch[1]; 155 | } 156 | 157 | throw new Error('Invalid document ID or URL'); 158 | } 159 | 160 | /** 161 | * Validation error class for consistent error handling 162 | */ 163 | export class ValidationError extends Error { 164 | constructor( 165 | message: string, 166 | public field: string, 167 | public value: unknown 168 | ) { 169 | super(message); 170 | this.name = 'ValidationError'; 171 | } 172 | } -------------------------------------------------------------------------------- /workspace-server/src/__tests__/services/PeopleService.test.ts: -------------------------------------------------------------------------------- 1 | /** 2 
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, jest, beforeEach, afterEach } from '@jest/globals';
import { PeopleService } from '../../services/PeopleService';
import { AuthManager } from '../../auth/AuthManager';
import { google } from 'googleapis';

// Mock the googleapis module so no real network calls are made.
jest.mock('googleapis');
jest.mock('../../utils/logger');

describe('PeopleService', () => {
  let peopleService: PeopleService;
  // NOTE(review): the generic parameter was lost in extraction; presumably
  // jest.Mocked<AuthManager> — confirm against the repository.
  let mockAuthManager: jest.Mocked<AuthManager>;
  let mockPeopleAPI: any;

  beforeEach(() => {
    // Clear all mocks before each test
    jest.clearAllMocks();

    // Create mock AuthManager
    mockAuthManager = {
      getAuthenticatedClient: jest.fn(),
    } as any;

    // Create mock People API surface used by PeopleService.
    mockPeopleAPI = {
      people: {
        get: jest.fn(),
        searchDirectoryPeople: jest.fn(),
      },
    };

    // Mock the google constructors
    (google.people as jest.Mock) = jest.fn().mockReturnValue(mockPeopleAPI);

    // Create PeopleService instance
    peopleService = new PeopleService(mockAuthManager);

    const mockAuthClient = { access_token: 'test-token' };
    mockAuthManager.getAuthenticatedClient.mockResolvedValue(mockAuthClient as any);
  });

  afterEach(() => {
    jest.restoreAllMocks();
  });

  describe('getUserProfile', () => {
    it('should return a user profile by userId', async () => {
      const mockUser = {
        data: {
          resourceName: 'people/110001608645105799644',
          names: [{
            displayName: 'Test User',
          }],
          emailAddresses: [{
            value: 'test@example.com',
          }],
        },
      };
      mockPeopleAPI.people.get.mockResolvedValue(mockUser);

      const result = await peopleService.getUserProfile({ userId: '110001608645105799644' });

      expect(mockPeopleAPI.people.get).toHaveBeenCalledWith({
        resourceName: 'people/110001608645105799644',
        personFields: 'names,emailAddresses',
      });
      // A userId lookup is wrapped into the same shape a directory search returns.
      expect(JSON.parse(result.content[0].text)).toEqual({ results: [{ person: mockUser.data }] });
    });

    it('should return a user profile by email', async () => {
      const mockUser = {
        data: {
          results: [
            {
              person: {
                resourceName: 'people/110001608645105799644',
                names: [{
                  displayName: 'Test User',
                }],
                emailAddresses: [{
                  value: 'test@example.com',
                }],
              }
            }
          ]
        },
      };
      mockPeopleAPI.people.searchDirectoryPeople.mockResolvedValue(mockUser);

      const result = await peopleService.getUserProfile({ email: 'test@example.com' });

      expect(mockPeopleAPI.people.searchDirectoryPeople).toHaveBeenCalledWith({
        query: 'test@example.com',
        readMask: 'names,emailAddresses',
        sources: ['DIRECTORY_SOURCE_TYPE_DOMAIN_CONTACT', 'DIRECTORY_SOURCE_TYPE_DOMAIN_PROFILE'],
      });
      expect(JSON.parse(result.content[0].text)).toEqual(mockUser.data);
    });

    it('should handle errors during getUserProfile', async () => {
      const apiError = new Error('API Error');
      mockPeopleAPI.people.get.mockRejectedValue(apiError);

      const result = await peopleService.getUserProfile({ userId: '110001608645105799644' });

      // Errors are surfaced as a JSON error payload rather than thrown.
      expect(JSON.parse(result.content[0].text)).toEqual({ error: 'API Error' });
    });
  });

  describe('getMe', () => {
    it('should return the authenticated user\'s profile', async () => {
      const mockMe = {
        data: {
          resourceName: 'people/me',
          names: [{
            displayName: 'Me',
          }],
          emailAddresses: [{
            value: 'me@example.com',
          }],
        },
      };
      mockPeopleAPI.people.get.mockResolvedValue(mockMe);

      const result = await peopleService.getMe();

      expect(mockPeopleAPI.people.get).toHaveBeenCalledWith({
        resourceName: 'people/me',
        personFields:
'names,emailAddresses',
      });
      expect(JSON.parse(result.content[0].text)).toEqual(mockMe.data);
    });

    it('should handle errors during getMe', async () => {
      const apiError = new Error('API Error');
      mockPeopleAPI.people.get.mockRejectedValue(apiError);

      const result = await peopleService.getMe();

      expect(JSON.parse(result.content[0].text)).toEqual({ error: 'API Error' });
    });
  });
});
--------------------------------------------------------------------------------
/workspace-server/src/utils/MimeHelper.ts:
--------------------------------------------------------------------------------
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * Helper class for creating RFC 2822 compliant MIME messages for Gmail API
 */
export class MimeHelper {
  /**
   * Creates a base64url-encoded MIME message for the Gmail API `raw` field.
   */
  public static createMimeMessage({
    to,
    subject,
    body,
    from,
    cc,
    bcc,
    replyTo,
    isHtml = false
  }: {
    to: string;
    subject: string;
    body: string;
    from?: string;
    cc?: string;
    bcc?: string;
    replyTo?: string;
    isHtml?: boolean;
  }): string {
    // Encode subject for UTF-8 support (RFC 2047 encoded-word)
    const utf8Subject = `=?utf-8?B?${Buffer.from(subject).toString('base64')}?=`;

    // Build message headers
    const messageParts: string[] = [];

    // Add From header if provided, otherwise Gmail will use the authenticated user
    if (from) {
      messageParts.push(`From: ${from}`);
    }

    messageParts.push(`To: ${to}`);

    if (cc) {
      messageParts.push(`Cc: ${cc}`);
    }

    if (bcc) {
      messageParts.push(`Bcc: ${bcc}`);
    }

    if (replyTo) {
      messageParts.push(`Reply-To: ${replyTo}`);
    }

    messageParts.push(`Subject: ${utf8Subject}`);

    // Add content type based on whether it's HTML or plain text
    if (isHtml) {
      messageParts.push('Content-Type: text/html; charset=utf-8');
    } else {
      messageParts.push('Content-Type: text/plain; charset=utf-8');
    }

    messageParts.push(''); // Empty line between headers and body
    messageParts.push(body);

    // Join all parts with CRLF as per RFC 2822
    const message = messageParts.join('\r\n');

    // Encode to base64url format required by Gmail API
    const encodedMessage = Buffer.from(message)
      .toString('base64')
      .replace(/\+/g, '-')
      .replace(/\//g, '_')
      .replace(/=+$/, '');

    return encodedMessage;
  }

  /**
   * Creates a MIME message with attachments.
   *
   * Changes vs. previous version:
   * - Delegates to `createMimeMessage` *before* building any headers when
   *   there are no attachments (previously a set of headers was built and
   *   then silently discarded by the early return).
   * - Supports an optional `replyTo` header for parity with
   *   `createMimeMessage` (backward-compatible: callers that omit it are
   *   unchanged).
   *
   * String attachment content is assumed to already be base64-encoded;
   * Buffer content is base64-encoded here.
   */
  public static createMimeMessageWithAttachments({
    to,
    subject,
    body,
    from,
    cc,
    bcc,
    replyTo,
    attachments,
    isHtml = false
  }: {
    to: string;
    subject: string;
    body: string;
    from?: string;
    cc?: string;
    bcc?: string;
    replyTo?: string;
    attachments?: Array<{
      filename: string;
      content: Buffer | string;
      contentType?: string;
    }>;
    isHtml?: boolean;
  }): string {
    if (!attachments || attachments.length === 0) {
      // Simple message without attachments
      return this.createMimeMessage({ to, subject, body, from, cc, bcc, replyTo, isHtml });
    }

    const boundary = `boundary_${Date.now()}_${Math.random().toString(36).substring(7)}`;
    const utf8Subject = `=?utf-8?B?${Buffer.from(subject).toString('base64')}?=`;

    const messageParts: string[] = [];

    // Headers
    if (from) {
      messageParts.push(`From: ${from}`);
    }
    messageParts.push(`To: ${to}`);
    if (cc) {
      messageParts.push(`Cc: ${cc}`);
    }
    if (bcc) {
      messageParts.push(`Bcc: ${bcc}`);
    }
    if (replyTo) {
      messageParts.push(`Reply-To: ${replyTo}`);
    }
    messageParts.push(`Subject: ${utf8Subject}`);
    messageParts.push('MIME-Version: 1.0');

    // Multipart message with attachments
    messageParts.push(`Content-Type: multipart/mixed; boundary="${boundary}"`);
    messageParts.push('');

    // Body part
    messageParts.push(`--${boundary}`);
    if (isHtml) {
      messageParts.push('Content-Type: text/html; charset=utf-8');
    } else {
      messageParts.push('Content-Type: text/plain; charset=utf-8');
    }
    messageParts.push('');
    messageParts.push(body);

    // Attachments
    for (const attachment of attachments) {
      messageParts.push(`--${boundary}`);
      messageParts.push(`Content-Type: ${attachment.contentType || 'application/octet-stream'}`);
      messageParts.push('Content-Transfer-Encoding: base64');
      messageParts.push(`Content-Disposition: attachment; filename="${attachment.filename}"`);
      messageParts.push('');

      // Strings are assumed to already be base64; Buffers are encoded here.
      const content = typeof attachment.content === 'string'
        ? attachment.content
        : attachment.content.toString('base64');

      // Add content in chunks of 76 characters as per MIME spec
      const chunks = content.match(/.{1,76}/g) || [];
      messageParts.push(...chunks);
    }

    // End boundary
    messageParts.push(`--${boundary}--`);

    const message = messageParts.join('\r\n');

    // Encode to base64url
    return Buffer.from(message)
      .toString('base64')
      .replace(/\+/g, '-')
      .replace(/\//g, '_')
      .replace(/=+$/, '');
  }

  /**
   * Decodes a base64url-encoded string (inverse of the encoding above).
   */
  public static decodeBase64Url(encoded: string): string {
    // Add back padding if needed
    let base64 = encoded.replace(/-/g, '+').replace(/_/g, '/');
    while (base64.length % 4) {
      base64 += '=';
    }
    return Buffer.from(base64, 'base64').toString('utf-8');
  }
}
--------------------------------------------------------------------------------
/workspace-server/src/auth/token-storage/file-token-storage.ts:
--------------------------------------------------------------------------------
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { promises as fs } from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import * as crypto from 'node:crypto';
import { BaseTokenStorage } from './base-token-storage';
import type { OAuthCredentials } from './types';
import { logToFile } from '../../utils/logger';
import {
  ENCRYPTED_TOKEN_PATH,
  ENCRYPTION_MASTER_KEY_PATH,
} from '../../utils/paths';

/**
 * Token storage backed by a single AES-256-GCM encrypted JSON file.
 *
 * A random 32-byte master key is persisted on disk (mode 0600) and the
 * actual encryption key is derived from it with scrypt, salted with the
 * hostname and username so the token file is bound to this machine/user.
 */
export class FileTokenStorage extends BaseTokenStorage {
  private readonly tokenFilePath: string;
  private readonly encryptionKey: Buffer;
  private readonly masterKey: Buffer;

  // Private: use the async `create()` factory so the master key can be
  // loaded (or generated) with async I/O before construction.
  private constructor(serviceName: string, masterKey: Buffer) {
    super(serviceName);
    this.tokenFilePath = ENCRYPTED_TOKEN_PATH;
    this.masterKey = masterKey;
    this.encryptionKey = this.deriveEncryptionKey();
  }

  /** Async factory: loads or generates the master key, then constructs. */
  static async create(serviceName: string): Promise<FileTokenStorage> {
    const masterKey = await this.loadMasterKey();
    return new FileTokenStorage(serviceName, masterKey);
  }

  /**
   * Reads the persisted master key, generating a new one on first run.
   *
   * Fix: ensure the key file's parent directory exists before writing —
   * previously, on a fresh machine with no config directory the initial
   * `writeFile` itself failed with ENOENT.
   */
  private static async loadMasterKey(): Promise<Buffer> {
    try {
      return await fs.readFile(ENCRYPTION_MASTER_KEY_PATH);
    } catch (error) {
      const err = error as NodeJS.ErrnoException;
      if (err.code === 'ENOENT') {
        const newKey = crypto.randomBytes(32);
        await fs.mkdir(path.dirname(ENCRYPTION_MASTER_KEY_PATH), {
          recursive: true,
          mode: 0o700,
        });
        await fs.writeFile(ENCRYPTION_MASTER_KEY_PATH, newKey, { mode: 0o600 });
        return newKey;
      }
      throw error;
    }
  }

  // Salted with hostname + username so a copied token file cannot be
  // decrypted under another account or on another machine.
  private deriveEncryptionKey(): Buffer {
    const salt = `${os.hostname()}-${
      os.userInfo().username
    }-gemini-cli-workspace`;
    return crypto.scryptSync(this.masterKey, salt, 32);
  }

  // Output format: "<iv hex>:<gcm auth tag hex>:<ciphertext hex>".
  private encrypt(text: string): string {
    const iv = crypto.randomBytes(16);
    const cipher = crypto.createCipheriv('aes-256-gcm', this.encryptionKey, iv);

    let encrypted = cipher.update(text, 'utf8', 'hex');
    encrypted += cipher.final('hex');

    const authTag = cipher.getAuthTag();

    return iv.toString('hex') + ':' + authTag.toString('hex') + ':' + encrypted;
  }

  private decrypt(encryptedData: string): string {
    const parts = encryptedData.split(':');
    if (parts.length !== 3) {
      throw new Error('Invalid encrypted data format');
    }

    const iv = Buffer.from(parts[0], 'hex');
    const authTag = Buffer.from(parts[1], 'hex');
    const encrypted = parts[2];

    const decipher = crypto.createDecipheriv(
      'aes-256-gcm',
      this.encryptionKey,
      iv,
    );
    decipher.setAuthTag(authTag);

    let decrypted = decipher.update(encrypted, 'hex', 'utf8');
    decrypted += decipher.final('utf8');

    return decrypted;
  }

  private async ensureDirectoryExists(): Promise<void> {
    const dir = path.dirname(this.tokenFilePath);
    await fs.mkdir(dir, { recursive: true, mode: 0o700 });
  }

  /**
   * Loads and decrypts the token map. A missing file, or one that fails
   * GCM authentication / format checks, is treated as empty rather than
   * fatal; any other error propagates.
   */
  private async loadTokens(): Promise<Map<string, OAuthCredentials>> {
    try {
      const data = await fs.readFile(this.tokenFilePath, 'utf-8');
      const decrypted = this.decrypt(data);
      const tokens = JSON.parse(decrypted) as Record<string, OAuthCredentials>;
      return new Map(Object.entries(tokens));
    } catch (error: unknown) {
      const err = error as NodeJS.ErrnoException & { message?: string };
      if (err.code === 'ENOENT') {
        logToFile('Token file does not exist');
        return new Map();
      }
      if (
        err.message?.includes('Invalid encrypted data format') ||
        err.message?.includes(
          'Unsupported state or unable to authenticate data',
        )
      ) {
        logToFile('Token file corrupted');
        return new Map();
      }
      throw error;
    }
  }

  private async saveTokens(
    tokens: Map<string, OAuthCredentials>,
  ): Promise<void> {
    await this.ensureDirectoryExists();

    const data = Object.fromEntries(tokens);
    const json = JSON.stringify(data, null, 2);
    const encrypted = this.encrypt(json);

    await fs.writeFile(this.tokenFilePath, encrypted, { mode: 0o600 });
  }

  async getCredentials(serverName: string): Promise<OAuthCredentials | null> {
    const tokens = await this.loadTokens();
    return tokens.get(serverName) ?? null;
  }

  async setCredentials(credentials: OAuthCredentials): Promise<void> {
    this.validateCredentials(credentials);

    const tokens = await this.loadTokens();
    const updatedCredentials: OAuthCredentials = {
      ...credentials,
      updatedAt: Date.now(),
    };

    tokens.set(credentials.serverName, updatedCredentials);
    await this.saveTokens(tokens);
  }

  async deleteCredentials(serverName: string): Promise<void> {
    const tokens = await this.loadTokens();

    if (!tokens.has(serverName)) {
      throw new Error(`No credentials found for ${serverName}`);
    }

    tokens.delete(serverName);

    // Remove the file entirely when the last entry is deleted.
    if (tokens.size === 0) {
      try {
        await fs.unlink(this.tokenFilePath);
      } catch (error: unknown) {
        const err = error as NodeJS.ErrnoException;
        if (err.code !== 'ENOENT') {
          throw error;
        }
      }
    } else {
      await this.saveTokens(tokens);
    }
  }

  async listServers(): Promise<string[]> {
    const tokens = await this.loadTokens();
    return Array.from(tokens.keys());
  }

  async getAllCredentials(): Promise<Map<string, OAuthCredentials>> {
    const tokens = await this.loadTokens();
    const result = new Map<string, OAuthCredentials>();

    for (const [serverName, credentials] of tokens) {
      try {
        this.validateCredentials(credentials);
        result.set(serverName, credentials);
      } catch (error) {
        console.error(`Skipping invalid credentials for ${serverName}:`, error);
      }
    }

    return result;
  }

  async clearAll(): Promise<void> {
    try {
      await fs.unlink(this.tokenFilePath);
    } catch (error: unknown) {
      const err = error as NodeJS.ErrnoException;
      if (err.code !== 'ENOENT') {
        throw error;
      }
    }
  }
}
--------------------------------------------------------------------------------
/workspace-server/src/__tests__/utils/validation.test.ts:
--------------------------------------------------------------------------------
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect } from '@jest/globals';
import {
  validateEmail,
  validateDateTime,
  validateDocumentId,
  extractDocumentId,
  emailSchema,
  emailArraySchema,
  searchQuerySchema,
  ValidationError
} from '../../utils/validation';

describe('Validation Utilities', () => {
  describe('Email Validation', () => {
    it('should validate correct email addresses', () => {
      expect(validateEmail('user@example.com')).toEqual({ success: true });
      expect(validateEmail('john.doe+tag@company.co.uk')).toEqual({ success: true });
    });

    it('should reject invalid email addresses', () => {
      expect(validateEmail('invalid')).toMatchObject({ success: false });
      expect(validateEmail('@example.com')).toMatchObject({ success: false });
      expect(validateEmail('user@')).toMatchObject({ success: false });
      expect(validateEmail('user @example.com')).toMatchObject({ success: false });
    });

    it('should handle email arrays', () => {
      const result1 = emailSchema.safeParse('user@example.com');
      expect(result1.success).toBe(true);

      const result2 = emailSchema.safeParse(['user1@example.com', 'user2@example.com']);
      expect(result2.success).toBe(false); // Single schema doesn't accept arrays
    });

    it('should validate emailArraySchema with single email', () => {
      const result = emailArraySchema.safeParse('user@example.com');
      expect(result.success).toBe(true);
    });

    it('should validate emailArraySchema with array of emails', () => {
      const result = emailArraySchema.safeParse(['user1@example.com', 'user2@example.com']);
      expect(result.success).toBe(true);
    });

    it('should reject emailArraySchema with invalid emails in array', () => {
      const result = emailArraySchema.safeParse(['valid@example.com', 'invalid-email']);
      expect(result.success).toBe(false);
    });
  });

  describe('DateTime Validation', () => {
    it('should validate correct ISO 8601 datetime formats', () => {
      // Z, fractional seconds, and explicit UTC offsets must all be accepted.
      expect(validateDateTime('2024-01-15T10:30:00Z')).toEqual({ success: true });
      expect(validateDateTime('2024-01-15T10:30:00.000Z')).toEqual({ success: true });
      expect(validateDateTime('2024-01-15T10:30:00-05:00')).toEqual({ success: true });
      expect(validateDateTime('2024-01-15T10:30:00+09:30')).toEqual({ success: true });
    });

    it('should reject invalid datetime formats', () => {
      expect(validateDateTime('2024-01-15')).toMatchObject({ success: false });
      expect(validateDateTime('10:30:00')).toMatchObject({ success: false });
      expect(validateDateTime('2024-01-15 10:30:00')).toMatchObject({ success: false });
      expect(validateDateTime('not a date')).toMatchObject({ success: false });
    });

    it('should reject invalid dates', () => {
      expect(validateDateTime('2024-13-01T10:30:00Z')).toMatchObject({ success: false }); // Invalid month
      // Note: JavaScript Date constructor accepts Feb 30 and converts it to March 1st or 2nd
      // So this test would pass as valid. We'd need more complex validation for this.
      expect(validateDateTime('2024-00-01T10:30:00Z')).toMatchObject({ success: false }); // Invalid month (0)
    });
  });

  describe('Document ID Validation', () => {
    it('should validate correct document IDs', () => {
      expect(validateDocumentId('1a2b3c4d5e6f7g8h9i0j')).toEqual({ success: true });
      expect(validateDocumentId('abc-123_XYZ')).toEqual({ success: true });
      expect(validateDocumentId('Document_ID-123')).toEqual({ success: true });
    });

    it('should reject invalid document IDs', () => {
      expect(validateDocumentId('doc id with spaces')).toMatchObject({ success: false });
      expect(validateDocumentId('doc#id')).toMatchObject({ success: false });
      expect(validateDocumentId('doc/id')).toMatchObject({ success: false });
      expect(validateDocumentId('')).toMatchObject({ success: false });
    });
  });

  describe('Document ID Extraction', () => {
    // extractDocumentId accepts either a bare ID or any URL containing "/d/<id>".
    it('should extract ID from Google Docs URLs', () => {
      const url = 'https://docs.google.com/document/d/1a2b3c4d5e6f/edit';
      expect(extractDocumentId(url)).toBe('1a2b3c4d5e6f');
    });

    it('should extract ID from Google Drive URLs', () => {
      const url = 'https://drive.google.com/file/d/abc123XYZ/view';
      expect(extractDocumentId(url)).toBe('abc123XYZ');
    });

    it('should extract ID from Google Sheets URLs', () => {
      const url = 'https://sheets.google.com/spreadsheets/d/sheet_id_123/edit';
      expect(extractDocumentId(url)).toBe('sheet_id_123');
    });

    it('should return ID if already valid', () => {
      const id = 'valid_document_id_123';
      expect(extractDocumentId(id)).toBe(id);
    });

    it('should throw error for invalid input', () => {
      expect(() => extractDocumentId('not a valid url or id')).toThrow();
      expect(() => extractDocumentId('https://example.com/doc')).toThrow();
    });
  });

  describe('Search Query Sanitization', () => {
it('should escape potentially dangerous characters', () => { 124 | const result = searchQuerySchema.parse("test' OR '1'='1"); 125 | expect(result).toBe("test\\' OR \\'1\\'=\\'1"); // Quotes are escaped 126 | }); 127 | 128 | it('should escape quotes while preserving search functionality', () => { 129 | const result = searchQuerySchema.parse('search for "exact phrase"'); 130 | expect(result).toBe('search for \\"exact phrase\\"'); 131 | }); 132 | 133 | it('should preserve safe characters', () => { 134 | const result = searchQuerySchema.parse('test query with spaces and-dashes'); 135 | expect(result).toBe('test query with spaces and-dashes'); 136 | }); 137 | }); 138 | 139 | describe('ValidationError', () => { 140 | it('should create proper error with field and value', () => { 141 | const error = new ValidationError('Invalid email', 'email', 'bad@'); 142 | expect(error.message).toBe('Invalid email'); 143 | expect(error.field).toBe('email'); 144 | expect(error.value).toBe('bad@'); 145 | expect(error.name).toBe('ValidationError'); 146 | }); 147 | }); 148 | }); -------------------------------------------------------------------------------- /workspace-server/src/utils/secure-browser-launcher.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { execFile, ExecFileOptions } from 'node:child_process'; 8 | import { platform } from 'node:os'; 9 | import { URL } from 'node:url'; 10 | 11 | 12 | function withTimeout(promise: Promise, ms: number): Promise { 13 | let timeoutId: NodeJS.Timeout; 14 | const timeout = new Promise((_, reject) => { 15 | timeoutId = setTimeout(() => reject(new Error('Timeout')), ms); 16 | }); 17 | return Promise.race([promise, timeout]).finally(() => clearTimeout(timeoutId)); 18 | } 19 | 20 | /** 21 | * Validates that a URL is safe to open in a browser. 
22 | * Only allows HTTP and HTTPS URLs to prevent command injection. 23 | * 24 | * @param url The URL to validate 25 | * @throws Error if the URL is invalid or uses an unsafe protocol 26 | */ 27 | function validateUrl(url: string): void { 28 | let parsedUrl: URL; 29 | 30 | try { 31 | parsedUrl = new URL(url); 32 | } catch { 33 | throw new Error('Invalid URL'); 34 | } 35 | 36 | // Only allow HTTP and HTTPS protocols 37 | if (parsedUrl.protocol !== 'http:' && parsedUrl.protocol !== 'https:') { 38 | throw new Error( 39 | `Unsafe protocol: ${parsedUrl.protocol}. Only HTTP and HTTPS are allowed.` 40 | ); 41 | } 42 | 43 | // Additional validation: ensure no newlines or control characters 44 | if (/[\r\n\x00-\x1F]/.test(url)) { 45 | throw new Error('URL contains invalid characters'); 46 | } 47 | } 48 | 49 | /** 50 | * Opens a URL in the default browser using platform-specific commands. 51 | * This implementation avoids shell injection vulnerabilities by: 52 | * 1. Validating the URL to ensure it's HTTP/HTTPS only 53 | * 2. Using execFile instead of exec to avoid shell interpretation 54 | * 3. Passing the URL as an argument rather than constructing a command string 55 | * 56 | * @param url The URL to open 57 | * @param execFileFn The function to execute a command. Defaults to node's execFile. 
58 | * @throws Error if the URL is invalid or if opening the browser fails 59 | */ 60 | export async function openBrowserSecurely( 61 | url: string, 62 | execFileFn: typeof execFile = execFile 63 | ): Promise { 64 | // Validate the URL first 65 | validateUrl(url); 66 | 67 | const platformName = platform(); 68 | let command: string; 69 | let args: string[]; 70 | 71 | switch (platformName) { 72 | case 'darwin': 73 | // macOS 74 | command = 'open'; 75 | args = [url]; 76 | break; 77 | 78 | case 'win32': 79 | // Windows - use PowerShell with Start-Process 80 | // This avoids the cmd.exe shell which is vulnerable to injection 81 | command = 'powershell.exe'; 82 | args = [ 83 | '-NoProfile', 84 | '-NonInteractive', 85 | '-WindowStyle', 86 | 'Hidden', 87 | '-Command', 88 | `Start-Process '${url.replace(/'/g, "''")}'`, 89 | ]; 90 | break; 91 | 92 | case 'linux': 93 | case 'freebsd': 94 | case 'openbsd': 95 | // Linux and BSD variants 96 | // Try xdg-open first, fall back to other options 97 | command = 'xdg-open'; 98 | args = [url]; 99 | break; 100 | 101 | default: 102 | throw new Error(`Unsupported platform: ${platformName}`); 103 | } 104 | 105 | const options: Record = { 106 | // Don't inherit parent's environment to avoid potential issues 107 | env: { 108 | ...process.env, 109 | // Ensure we're not in a shell that might interpret special characters 110 | SHELL: undefined, 111 | }, 112 | // Detach the browser process so it doesn't block 113 | detached: true, 114 | stdio: 'ignore', 115 | }; 116 | 117 | const tryCommand = (cmd: string, cmdArgs: string[]): Promise => { 118 | return new Promise((resolve, reject) => { 119 | const child = execFileFn( 120 | cmd, 121 | cmdArgs, 122 | options as ExecFileOptions, 123 | (error) => { 124 | if (error) { 125 | // This callback handles errors after the process has run, 126 | // but for our case, the 'error' event is more important for spawn failures. 
127 | reject(error); 128 | } 129 | } 130 | ); 131 | 132 | // The 'error' event is critical. It fires if the command cannot be found or spawned. 133 | child.on('error', (error) => { 134 | reject(error); 135 | }); 136 | 137 | // If the process spawns successfully, 'xdg-open' and similar commands 138 | // exit almost immediately. We don't need to wait for the browser to close. 139 | // We can consider the job done if the process exits with code 0. 140 | child.on('exit', (code) => { 141 | if (code === 0) { 142 | resolve(); 143 | } else { 144 | reject(new Error(`Process exited with code ${code}`)); 145 | } 146 | }); 147 | }); 148 | }; 149 | 150 | try { 151 | await withTimeout(tryCommand(command, args), 5000); 152 | } catch (error) { 153 | // For Linux, try fallback commands if xdg-open fails 154 | if ( 155 | (platformName === 'linux' || 156 | platformName === 'freebsd' || 157 | platformName === 'openbsd') && 158 | command === 'xdg-open' 159 | ) { 160 | const fallbackCommands = [ 161 | 'gnome-open', 162 | 'kde-open', 163 | 'firefox', 164 | 'chromium', 165 | 'google-chrome', 166 | ]; 167 | 168 | for (const fallbackCommand of fallbackCommands) { 169 | try { 170 | await withTimeout(tryCommand(fallbackCommand, [url]), 5000); 171 | return; // Success! 172 | } catch { 173 | // Try next command 174 | continue; 175 | } 176 | } 177 | } 178 | 179 | // Re-throw the error if all attempts failed 180 | throw new Error( 181 | `Failed to open browser: ${ 182 | error instanceof Error ? error.message : 'Unknown error' 183 | }` 184 | ); 185 | } 186 | } 187 | 188 | /** 189 | * Checks if the current environment should attempt to launch a browser. 190 | * This is the same logic as in browser.ts for consistency. 191 | * 192 | * @returns True if the tool should attempt to launch a browser 193 | */ 194 | export function shouldLaunchBrowser(): boolean { 195 | // A list of browser names that indicate we should not attempt to open a 196 | // web browser for the user. 
197 | const browserBlocklist = ['www-browser']; 198 | const browserEnv = process.env.BROWSER; 199 | if (browserEnv && browserBlocklist.includes(browserEnv)) { 200 | return false; 201 | } 202 | 203 | // Common environment variables used in CI/CD or other non-interactive shells. 204 | if (process.env.CI || process.env.DEBIAN_FRONTEND === 'noninteractive') { 205 | return false; 206 | } 207 | 208 | // The presence of SSH_CONNECTION indicates a remote session. 209 | // We should not attempt to launch a browser unless a display is explicitly available 210 | // (checked below for Linux). 211 | const isSSH = !!process.env.SSH_CONNECTION; 212 | 213 | // On Linux, the presence of a display server is a strong indicator of a GUI. 214 | if (platform() === 'linux') { 215 | // These are environment variables that can indicate a running compositor on Linux. 216 | const displayVariables = ['DISPLAY', 'WAYLAND_DISPLAY', 'MIR_SOCKET']; 217 | const hasDisplay = displayVariables.some((v) => !!process.env[v]); 218 | if (!hasDisplay) { 219 | return false; 220 | } 221 | } 222 | 223 | // If in an SSH session on a non-Linux OS (e.g., macOS), don't launch browser. 224 | // The Linux case is handled above (it's allowed if DISPLAY is set). 225 | if (isSSH && platform() !== 'linux') { 226 | return false; 227 | } 228 | 229 | // For non-Linux OSes, we generally assume a GUI is available 230 | // unless other signals (like SSH) suggest otherwise. 
  return true;
}
--------------------------------------------------------------------------------
/workspace-server/src/auth/token-storage/keychain-token-storage.ts:
--------------------------------------------------------------------------------
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import * as crypto from 'node:crypto';
import { BaseTokenStorage } from './base-token-storage';
import type { OAuthCredentials } from './types';

// Minimal surface of the optional `keytar` native module that this class uses.
// (Generic type parameters restored from extraction garbling — verify against repo.)
interface Keytar {
  getPassword(service: string, account: string): Promise<string | null>;
  setPassword(
    service: string,
    account: string,
    password: string,
  ): Promise<void>;
  deletePassword(service: string, account: string): Promise<boolean>;
  findCredentials(
    service: string,
  ): Promise<Array<{ account: string; password: string }>>;
}

// Accounts with this prefix are availability-probe artifacts, never real
// credentials; they are filtered out of listings.
const KEYCHAIN_TEST_PREFIX = '__keychain_test__';

/**
 * Token storage backed by the OS keychain via the optional `keytar` module.
 * All public operations first verify keychain availability with a real
 * set/get/delete probe (cached after the first check).
 */
export class KeychainTokenStorage extends BaseTokenStorage {
  // null = not yet probed; boolean = cached probe result.
  private keychainAvailable: boolean | null = null;
  private keytarModule: Keytar | null = null;
  private keytarLoadAttempted = false;

  /**
   * Lazily imports `keytar`. Both success and failure are cached so the
   * (potentially slow) native-module import happens at most once.
   */
  async getKeytar(): Promise<Keytar | null> {
    // If we've already tried loading (successfully or not), return the result
    if (this.keytarLoadAttempted) {
      return this.keytarModule;
    }

    this.keytarLoadAttempted = true;

    try {
      // Try to import keytar without any timeout - let the OS handle it
      const moduleName = 'keytar';
      const module = await import(moduleName);
      this.keytarModule = module.default || module;
    } catch (error) {
      console.error(error);
    }
    return this.keytarModule;
  }

  async getCredentials(serverName: string): Promise<OAuthCredentials | null> {
    if (!(await this.checkKeychainAvailability())) {
      throw new Error('Keychain is not available');
    }

    const keytar = await this.getKeytar();
    if (!keytar) {
      throw new Error('Keytar module not available');
    }

    try {
      const sanitizedName = this.sanitizeServerName(serverName);
      const data = await keytar.getPassword(this.serviceName, sanitizedName);

      if (!data) {
        return null;
      }

      const credentials = JSON.parse(data) as OAuthCredentials;

      return credentials;
    } catch (error) {
      // A SyntaxError here means the stored blob is not valid JSON.
      if (error instanceof SyntaxError) {
        throw new Error(`Failed to parse stored credentials for ${serverName}`);
      }
      throw error;
    }
  }

  async setCredentials(credentials: OAuthCredentials): Promise<void> {
    if (!(await this.checkKeychainAvailability())) {
      throw new Error('Keychain is not available');
    }

    const keytar = await this.getKeytar();
    if (!keytar) {
      throw new Error('Keytar module not available');
    }

    this.validateCredentials(credentials);

    const sanitizedName = this.sanitizeServerName(credentials.serverName);
    // Stamp updatedAt on every write.
    const updatedCredentials: OAuthCredentials = {
      ...credentials,
      updatedAt: Date.now(),
    };

    const data = JSON.stringify(updatedCredentials);
    await keytar.setPassword(this.serviceName, sanitizedName, data);
  }

  async deleteCredentials(serverName: string): Promise<void> {
    if (!(await this.checkKeychainAvailability())) {
      throw new Error('Keychain is not available');
    }

    const keytar = await this.getKeytar();
    if (!keytar) {
      throw new Error('Keytar module not available');
    }

    const sanitizedName = this.sanitizeServerName(serverName);
    const deleted = await keytar.deletePassword(
      this.serviceName,
      sanitizedName,
    );

    if (!deleted) {
      throw new Error(`No credentials found for ${serverName}`);
    }
  }

  async listServers(): Promise<string[]> {
    if (!(await this.checkKeychainAvailability())) {
      throw new Error('Keychain is not available');
    }

    const keytar = await this.getKeytar();
    if (!keytar) {
      throw new Error('Keytar module not available');
    }

    try {
      const credentials = await keytar.findCredentials(this.serviceName);
      // Exclude availability-probe artifacts from the listing.
      return credentials
        .filter((cred) => !cred.account.startsWith(KEYCHAIN_TEST_PREFIX))
        .map((cred: { account: string }) => cred.account);
    } catch (error) {
      console.error('Failed to list servers from keychain:', error);
      return [];
    }
  }

  async getAllCredentials(): Promise<Map<string, OAuthCredentials>> {
    if (!(await this.checkKeychainAvailability())) {
      throw new Error('Keychain is not available');
    }

    const keytar = await this.getKeytar();
    if (!keytar) {
      throw new Error('Keytar module not available');
    }

    const result = new Map<string, OAuthCredentials>();
    try {
      const credentials = (
        await keytar.findCredentials(this.serviceName)
      ).filter((c) => !c.account.startsWith(KEYCHAIN_TEST_PREFIX));

      for (const cred of credentials) {
        try {
          const data = JSON.parse(cred.password) as OAuthCredentials;
          this.validateCredentials(data);
          result.set(cred.account, data);
        } catch (error) {
          // Unparseable/invalid entries are skipped, not fatal.
          console.error(
            `Failed to parse credentials for ${cred.account}:`,
            error,
          );
        }
      }
    } catch (error) {
      console.error('Failed to get all credentials from keychain:', error);
    }

    return result;
  }

  async clearAll(): Promise<void> {
    if (!(await this.checkKeychainAvailability())) {
      throw new Error('Keychain is not available');
    }

    // Collect account names first, then delete one by one, accumulating
    // failures so one bad entry doesn't abort the rest.
    const servers = this.keytarModule
      ? await this.keytarModule
          .findCredentials(this.serviceName)
          .then((creds) => creds.map((c) => c.account))
          .catch((error: Error) => {
            throw new Error(
              `Failed to list servers for clearing: ${error.message}`,
            );
          })
      : [];
    const errors: Error[] = [];

    for (const server of servers) {
      try {
        await this.deleteCredentials(server);
      } catch (error) {
        errors.push(error as Error);
      }
    }

    if (errors.length > 0) {
      throw new Error(
        `Failed to clear some credentials: ${errors.map((e) => e.message).join(', ')}`,
      );
    }
  }

  // Checks whether or not a set-get-delete cycle with the keychain works.
  // Returns false if any operation fails. The result is cached for the
  // lifetime of this instance.
  async checkKeychainAvailability(): Promise<boolean> {
    if (this.keychainAvailable !== null) {
      return this.keychainAvailable;
    }

    try {
      const keytar = await this.getKeytar();
      if (!keytar) {
        this.keychainAvailable = false;
        return false;
      }

      // Random suffix avoids collisions between concurrent probes.
      const testAccount = `${KEYCHAIN_TEST_PREFIX}${crypto.randomBytes(8).toString('hex')}`;
      const testPassword = 'test';

      await keytar.setPassword(this.serviceName, testAccount, testPassword);
      const retrieved = await keytar.getPassword(this.serviceName, testAccount);
      const deleted = await keytar.deletePassword(
        this.serviceName,
        testAccount,
      );

      const success = deleted && retrieved === testPassword;
      this.keychainAvailable = success;
      return success;
    } catch (_error) {
      this.keychainAvailable = false;
      return false;
    }
  }

  async isAvailable(): Promise<boolean> {
    return this.checkKeychainAvailability();
  }
}
--------------------------------------------------------------------------------
/workspace-server/src/__tests__/utils/logger.test.ts:
11 | import { gaxiosOptions } from '../utils/GaxiosConfig'; 12 | import { buildDriveSearchQuery, MIME_TYPES } from '../utils/DriveQueryBuilder'; 13 | 14 | export class SlidesService { 15 | constructor(private authManager: AuthManager) { 16 | } 17 | 18 | private async getSlidesClient(): Promise { 19 | const auth = await this.authManager.getAuthenticatedClient(); 20 | const options = { ...gaxiosOptions, auth }; 21 | return google.slides({ version: 'v1', ...options }); 22 | } 23 | 24 | private async getDriveClient(): Promise { 25 | const auth = await this.authManager.getAuthenticatedClient(); 26 | const options = { ...gaxiosOptions, auth }; 27 | return google.drive({ version: 'v3', ...options }); 28 | } 29 | 30 | public getText = async ({ presentationId }: { presentationId: string }) => { 31 | logToFile(`[SlidesService] Starting getText for presentation: ${presentationId}`); 32 | try { 33 | const id = extractDocId(presentationId) || presentationId; 34 | 35 | const slides = await this.getSlidesClient(); 36 | // Get the presentation with all necessary fields 37 | const presentation = await slides.presentations.get({ 38 | presentationId: id, 39 | fields: 'title,slides(pageElements(shape(text,shapeProperties),table(tableRows(tableCells(text)))))', 40 | }); 41 | 42 | let content = ''; 43 | 44 | // Add presentation title 45 | if (presentation.data.title) { 46 | content += `Presentation Title: ${presentation.data.title}\n\n`; 47 | } 48 | 49 | // Process each slide 50 | if (presentation.data.slides) { 51 | presentation.data.slides.forEach((slide, slideIndex) => { 52 | content += `\n--- Slide ${slideIndex + 1} ---\n`; 53 | 54 | if (slide.pageElements) { 55 | slide.pageElements.forEach(element => { 56 | // Extract text from shapes 57 | if (element.shape && element.shape.text) { 58 | const shapeText = this.extractTextFromTextContent(element.shape.text); 59 | if (shapeText) { 60 | content += shapeText + '\n'; 61 | } 62 | } 63 | 64 | // Extract text from tables 65 | if 
(element.table && element.table.tableRows) { 66 | content += '\n--- Table Data ---\n'; 67 | element.table.tableRows.forEach(row => { 68 | const rowText: string[] = []; 69 | if (row.tableCells) { 70 | row.tableCells.forEach(cell => { 71 | const cellText = cell.text ? this.extractTextFromTextContent(cell.text) : ''; 72 | rowText.push(cellText.trim()); 73 | }); 74 | } 75 | content += rowText.join(' | ') + '\n'; 76 | }); 77 | content += '--- End Table Data ---\n'; 78 | } 79 | }); 80 | } 81 | content += '\n'; 82 | }); 83 | } 84 | 85 | logToFile(`[SlidesService] Finished getText for presentation: ${id}`); 86 | return { 87 | content: [{ 88 | type: "text" as const, 89 | text: content.trim() 90 | }] 91 | }; 92 | } catch (error) { 93 | const errorMessage = error instanceof Error ? error.message : String(error); 94 | logToFile(`[SlidesService] Error during slides.getText: ${errorMessage}`); 95 | return { 96 | content: [{ 97 | type: "text" as const, 98 | text: JSON.stringify({ error: errorMessage }) 99 | }] 100 | }; 101 | } 102 | } 103 | 104 | private extractTextFromTextContent(textContent: slides_v1.Schema$TextContent): string { 105 | let text = ''; 106 | if (textContent.textElements) { 107 | textContent.textElements.forEach(element => { 108 | if (element.textRun && element.textRun.content) { 109 | text += element.textRun.content; 110 | } else if (element.paragraphMarker) { 111 | // Add newline for paragraph markers 112 | text += '\n'; 113 | } 114 | }); 115 | } 116 | return text; 117 | } 118 | 119 | public find = async ({ query, pageToken, pageSize = 10 }: { query: string, pageToken?: string, pageSize?: number }) => { 120 | logToFile(`[SlidesService] Searching for presentations with query: ${query}`); 121 | try { 122 | const q = buildDriveSearchQuery(MIME_TYPES.PRESENTATION, query); 123 | logToFile(`[SlidesService] Executing Drive API query: ${q}`); 124 | 125 | const drive = await this.getDriveClient(); 126 | const res = await drive.files.list({ 127 | pageSize: pageSize, 128 
| fields: 'nextPageToken, files(id, name)', 129 | q: q, 130 | pageToken: pageToken, 131 | }); 132 | 133 | const files = res.data.files || []; 134 | const nextPageToken = res.data.nextPageToken; 135 | 136 | logToFile(`[SlidesService] Found ${files.length} presentations.`); 137 | 138 | return { 139 | content: [{ 140 | type: "text" as const, 141 | text: JSON.stringify({ 142 | files: files, 143 | nextPageToken: nextPageToken 144 | }) 145 | }] 146 | }; 147 | } catch (error) { 148 | const errorMessage = error instanceof Error ? error.message : String(error); 149 | logToFile(`[SlidesService] Error during slides.find: ${errorMessage}`); 150 | return { 151 | content: [{ 152 | type: "text" as const, 153 | text: JSON.stringify({ error: errorMessage }) 154 | }] 155 | }; 156 | } 157 | } 158 | 159 | public getMetadata = async ({ presentationId }: { presentationId: string }) => { 160 | logToFile(`[SlidesService] Starting getMetadata for presentation: ${presentationId}`); 161 | try { 162 | const id = extractDocId(presentationId) || presentationId; 163 | 164 | const slides = await this.getSlidesClient(); 165 | const presentation = await slides.presentations.get({ 166 | presentationId: id, 167 | fields: 'presentationId,title,slides(objectId),pageSize,notesMaster,masters,layouts', 168 | }); 169 | 170 | const metadata = { 171 | presentationId: presentation.data.presentationId, 172 | title: presentation.data.title, 173 | slideCount: presentation.data.slides?.length || 0, 174 | pageSize: presentation.data.pageSize, 175 | hasMasters: !!presentation.data.masters?.length, 176 | hasLayouts: !!presentation.data.layouts?.length, 177 | hasNotesMaster: !!presentation.data.notesMaster, 178 | }; 179 | 180 | logToFile(`[SlidesService] Finished getMetadata for presentation: ${id}`); 181 | return { 182 | content: [{ 183 | type: "text" as const, 184 | text: JSON.stringify(metadata) 185 | }] 186 | }; 187 | } catch (error) { 188 | const errorMessage = error instanceof Error ? 
error.message : String(error); 189 | logToFile(`[SlidesService] Error during slides.getMetadata: ${errorMessage}`); 190 | return { 191 | content: [{ 192 | type: "text" as const, 193 | text: JSON.stringify({ error: errorMessage }) 194 | }] 195 | }; 196 | } 197 | } 198 | } 199 | -------------------------------------------------------------------------------- /workspace-server/src/utils/markdownToDocsRequests.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2025 Google LLC 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { docs_v1 } from 'googleapis'; 8 | import { marked } from 'marked'; 9 | import { JSDOM } from 'jsdom'; 10 | 11 | interface FormatRange { 12 | start: number; 13 | end: number; 14 | type: 'bold' | 'italic' | 'code' | 'link' | 'heading'; 15 | url?: string; 16 | headingLevel?: number; 17 | isParagraph?: boolean; 18 | } 19 | 20 | interface ParsedMarkdown { 21 | plainText: string; 22 | formattingRequests: docs_v1.Schema$Request[]; 23 | } 24 | 25 | /** 26 | * Parses markdown text and generates Google Docs API requests for formatting. 27 | * Uses the marked library to convert to HTML, then parses the HTML to extract formatting. 
28 | */ 29 | export function parseMarkdownToDocsRequests(markdown: string, startIndex: number): ParsedMarkdown { 30 | // Split markdown into lines to handle block elements like headings 31 | const lines = markdown.split('\n'); 32 | const htmlParts: string[] = []; 33 | 34 | for (const line of lines) { 35 | // Check if this is a heading line 36 | const headingMatch = line.match(/^(#{1,6})\s+(.+)$/); 37 | if (headingMatch) { 38 | const level = headingMatch[1].length; 39 | const content = headingMatch[2]; 40 | // Parse inline content within the heading 41 | try { 42 | const inlineHtml = marked.parseInline(content) as string; 43 | htmlParts.push(`${inlineHtml}`); 44 | } catch (error) { 45 | console.error('Markdown parsing failed for heading, falling back to raw content:', error); 46 | htmlParts.push(`${content}`); 47 | } 48 | } else if (line.trim()) { 49 | // For non-heading, non-empty lines, use parseInline 50 | try { 51 | const inlineHtml = marked.parseInline(line) as string; 52 | htmlParts.push(`

${inlineHtml}

`); 53 | } catch (error) { 54 | console.error('Markdown parsing failed for line, falling back to raw content:', error); 55 | htmlParts.push(`

${line}

`); 56 | } 57 | } else { 58 | // Empty lines become paragraph breaks 59 | htmlParts.push(''); 60 | } 61 | } 62 | 63 | // Convert markdown to HTML - handle both block and inline elements 64 | const html = htmlParts.join('\n'); 65 | 66 | // If no conversion happened, return plain text 67 | if (!html || html === markdown) { 68 | return { 69 | plainText: markdown, 70 | formattingRequests: [] 71 | }; 72 | } 73 | 74 | // Parse HTML to extract text and formatting 75 | // Create a wrapper div to handle inline HTML that might not have a parent element 76 | const dom = new JSDOM(`
${html}
`); 77 | const document = dom.window.document; 78 | const wrapper = document.querySelector('div'); 79 | 80 | const formattingRanges: FormatRange[] = []; 81 | let plainText = ''; 82 | let currentPos = 0; 83 | 84 | // Recursive function to process nodes 85 | function processNode(node: Node) { 86 | if (node.nodeType === 3) { // Text node 87 | const text = node.textContent || ''; 88 | plainText += text; 89 | currentPos += text.length; 90 | } else if (node.nodeType === 1) { // Element node 91 | const element = node as HTMLElement; 92 | const tagName = element.tagName.toLowerCase(); 93 | 94 | const start = currentPos; 95 | 96 | // Process children first to get the text content 97 | for (const child of Array.from(node.childNodes)) { 98 | processNode(child); 99 | } 100 | 101 | const end = currentPos; 102 | 103 | // Record formatting based on tag 104 | if (tagName === 'strong' || tagName === 'b') { 105 | formattingRanges.push({ start, end, type: 'bold' }); 106 | } else if (tagName === 'em' || tagName === 'i') { 107 | formattingRanges.push({ start, end, type: 'italic' }); 108 | } else if (tagName === 'code') { 109 | formattingRanges.push({ start, end, type: 'code' }); 110 | } else if (tagName === 'a') { 111 | const href = element.getAttribute('href') || ''; 112 | formattingRanges.push({ start, end, type: 'link', url: href }); 113 | } else if (tagName.match(/^h[1-6]$/)) { 114 | const level = parseInt(tagName.charAt(1)); 115 | // Mark the entire paragraph range for heading style 116 | formattingRanges.push({ start, end, type: 'heading', headingLevel: level, isParagraph: true }); 117 | } else if (tagName === 'p') { 118 | // Add newline after paragraph content if not the last element 119 | const nextSibling = element.nextSibling; 120 | if (nextSibling && nextSibling.nodeType === 1) { 121 | plainText += '\n'; 122 | currentPos += 1; 123 | } 124 | } 125 | } 126 | } 127 | 128 | // Process all nodes 129 | if (wrapper) { 130 | for (const child of Array.from(wrapper.childNodes)) { 131 
| processNode(child); 132 | } 133 | } else { 134 | // If parsing failed, just use the plain markdown (no formatting) 135 | plainText = markdown; 136 | } 137 | 138 | // Generate formatting requests 139 | const formattingRequests: docs_v1.Schema$Request[] = []; 140 | 141 | for (const range of formattingRanges) { 142 | const textStyle: docs_v1.Schema$TextStyle = {}; 143 | const fields: string[] = []; 144 | 145 | if (range.type === 'bold') { 146 | textStyle.bold = true; 147 | fields.push('bold'); 148 | } else if (range.type === 'italic') { 149 | textStyle.italic = true; 150 | fields.push('italic'); 151 | } else if (range.type === 'code') { 152 | textStyle.weightedFontFamily = { 153 | fontFamily: 'Courier New', 154 | weight: 400 155 | }; 156 | textStyle.backgroundColor = { 157 | color: { 158 | rgbColor: { 159 | red: 0.95, 160 | green: 0.95, 161 | blue: 0.95 162 | } 163 | } 164 | }; 165 | fields.push('weightedFontFamily', 'backgroundColor'); 166 | } else if (range.type === 'link' && range.url) { 167 | textStyle.link = { 168 | url: range.url 169 | }; 170 | textStyle.foregroundColor = { 171 | color: { 172 | rgbColor: { 173 | red: 0.06, 174 | green: 0.33, 175 | blue: 0.80 176 | } 177 | } 178 | }; 179 | textStyle.underline = true; 180 | fields.push('link', 'foregroundColor', 'underline'); 181 | } else if (range.type === 'heading' && range.headingLevel && range.isParagraph) { 182 | // Use updateParagraphStyle for headings as per Google Docs API best practices 183 | const headingStyles: { [key: number]: string } = { 184 | 1: 'HEADING_1', 185 | 2: 'HEADING_2', 186 | 3: 'HEADING_3', 187 | 4: 'HEADING_4', 188 | 5: 'HEADING_5', 189 | 6: 'HEADING_6' 190 | }; 191 | 192 | const namedStyleType = headingStyles[range.headingLevel] || 'HEADING_1'; 193 | 194 | // Create a separate updateParagraphStyle request for headings 195 | formattingRequests.push({ 196 | updateParagraphStyle: { 197 | paragraphStyle: { 198 | namedStyleType: namedStyleType 199 | }, 200 | range: { 201 | startIndex: 
startIndex + range.start, 202 | endIndex: startIndex + range.end 203 | }, 204 | fields: 'namedStyleType' 205 | } 206 | }); 207 | 208 | // Skip the normal text style formatting for headings 209 | continue; 210 | } 211 | 212 | if (fields.length > 0) { 213 | formattingRequests.push({ 214 | updateTextStyle: { 215 | range: { 216 | startIndex: startIndex + range.start, 217 | endIndex: startIndex + range.end 218 | }, 219 | textStyle: textStyle, 220 | fields: fields.join(',') 221 | } 222 | }); 223 | } 224 | } 225 | 226 | return { 227 | plainText, 228 | formattingRequests 229 | }; 230 | } 231 | 232 | /** 233 | * Handles line breaks and paragraphs in markdown text 234 | */ 235 | export function processMarkdownLineBreaks(text: string): string { 236 | // Convert double line breaks to paragraph breaks 237 | // Single line breaks remain as-is 238 | return text.replace(/\n\n+/g, '\n\n'); 239 | } -------------------------------------------------------------------------------- /workspace-server/WORKSPACE-Context.md: -------------------------------------------------------------------------------- 1 | # Google Workspace Extension - Behavioral Guide 2 | 3 | This guide provides behavioral instructions for effectively using the Google Workspace Extension tools. For detailed parameter documentation, refer to the tool descriptions in the extension itself. 4 | 5 | ## 🎯 Core Principles 6 | 7 | ### 1. User Context First 8 | **Always establish user context at the beginning of interactions:** 9 | - Use `people.getMe()` to understand who the user is 10 | - Use `time.getTimeZone()` to get the user's local timezone 11 | - Apply this context throughout all interactions 12 | - All time-based operations should respect the user's timezone 13 | 14 | ### 2. 
Safety and Transparency 15 | **Never execute write operations without explicit confirmation:** 16 | - Preview all changes before executing 17 | - Show complete details in a readable format 18 | - Wait for clear user approval 19 | - Give users the opportunity to review and cancel 20 | 21 | ### 3. Smart Tool Usage 22 | **Choose the right approach for each task:** 23 | - Tools automatically handle URL-to-ID conversion - don't extract IDs manually 24 | - Batch related operations when possible 25 | - Use pagination for large result sets 26 | - Apply appropriate formats based on the use case 27 | 28 | ## 📋 Output Formatting Standards 29 | 30 | ### Lists and Search Results 31 | Always format multiple items as **numbered lists** for better readability: 32 | 33 | ✅ **Correct:** 34 | ``` 35 | Found 3 documents: 36 | 1. Budget Report 2024 37 | 2. Q3 Sales Presentation 38 | 3. Team Meeting Notes 39 | ``` 40 | 41 | ❌ **Incorrect:** 42 | ``` 43 | Found 3 documents: 44 | - Budget Report 2024 45 | - Q3 Sales Presentation 46 | - Team Meeting Notes 47 | ``` 48 | 49 | ### Write Operation Previews 50 | Before any write operation, show a clear preview: 51 | 52 | ``` 53 | I'll create this calendar event: 54 | 55 | Title: Team Standup 56 | Date: January 15, 2025 57 | Time: 10:00 AM - 10:30 AM (EST) 58 | Attendees: team@example.com 59 | 60 | Should I create this event? 61 | ``` 62 | 63 | ## 🔄 Multi-Tool Workflows 64 | 65 | ### Creating and Organizing Documents 66 | When creating documents in specific folders: 67 | 1. Create the document first 68 | 2. Then move it to the folder (if specified) 69 | 3. Confirm successful completion 70 | 71 | ### Calendar Scheduling Workflow 72 | 1. Get user's timezone with `time.getTimeZone()` 73 | 2. Check availability with `calendar.listEvents()` 74 | 3. Create event with proper timezone handling 75 | 4. Always show times in user's local timezone 76 | 77 | ### Email Search and Response 78 | 1. 
Search with `gmail.search()` using appropriate query syntax 79 | 2. Get full content with `gmail.get()` if needed 80 | 3. Preview any reply before sending 81 | 4. Use threading context when responding 82 | 83 | ### Adding/Removing Labels from Emails 84 | 1. For system labels, including "INBOX", "SPAM", "TRASH", "UNREAD", "STARRED", "IMPORTANT", the ID is the name itself. 85 | 2. For user created custom labels, retrieve label ID with `gmail.listLabels()`. 86 | 3. Use `gmail.modify()` to add or remove labels from emails with a single call using label IDs. 87 | 88 | ### Event Deletion 89 | When using `calendar.deleteEvent`: 90 | - This is a destructive action that permanently removes the event. 91 | - For organizers, this cancels the event for all attendees. 92 | - For attendees, this only removes it from their own calendar. 93 | - Always confirm with the user before executing a deletion. 94 | 95 | ## 📅 Calendar Best Practices 96 | 97 | ### Understanding "Next Meeting" 98 | When asked about "next meeting" or "today's schedule": 99 | 1. **Fetch the full day's context** - Use start of day (00:00:00) to end of day (23:59:59) 100 | 2. **Filter by response status** - Only show meetings where the user has: 101 | - Accepted the invitation 102 | - Not yet responded (needs to decide) 103 | - DO NOT show declined meetings unless explicitly requested 104 | 3. **Compare with current time** - Identify meetings relative to now 105 | 4. 
**Handle edge cases**: 106 | - If a meeting is in progress, mention it first 107 | - "Next" means the first meeting after current time 108 | - Keep full day context for follow-up questions 109 | 110 | ### Meeting Response Filtering 111 | - **Default behavior**: Show only accepted and pending meetings 112 | - **Declined meetings**: Exclude unless user asks "show me all meetings" or "including declined" 113 | - **Use `attendeeResponseStatus`** parameter to filter appropriately 114 | - This respects the user's time by not cluttering their schedule with irrelevant meetings 115 | 116 | ### Timezone Management 117 | - Always display times in the user's timezone 118 | - Convert all times appropriately before display 119 | - Include timezone abbreviation (EST, PST, etc.) for clarity 120 | 121 | ## 📧 Gmail & Chat Guidelines 122 | 123 | ### Search Strategies 124 | - Use Gmail search syntax: `from:email@example.com is:unread` 125 | - Combine multiple criteria for precise results 126 | - Include SPAM/TRASH only when explicitly needed 127 | 128 | ### Threading and Context 129 | - Maintain conversation context in replies 130 | - Reference previous messages when relevant 131 | - Use appropriate reply vs. new message based on context 132 | 133 | ### Downloading Attachments 134 | 1. **Find Attachment ID**: Use `gmail.get` with `format='full'` to retrieve message details, including `attachments` metadata (IDs and filenames). 135 | 2. **Download**: Use `gmail.downloadAttachment` with the specific `messageId` and `attachmentId`. 136 | 3. **Absolute Paths**: Always provide an **absolute path** for the `localPath` argument (e.g., `/Users/username/Downloads/file.pdf`). Relative paths will be rejected for security. 
137 | 138 | ## 📄 Docs, Sheets, and Slides 139 | 140 | ### Format Selection (Sheets) 141 | Choose output format based on use case: 142 | - **text**: Human-readable, good for quick review 143 | - **csv**: Data export, analysis in other tools 144 | - **json**: Programmatic processing, structured data 145 | 146 | ### Content Handling 147 | - Docs/Sheets/Slides tools accept URLs directly - no ID extraction needed 148 | - Use markdown for initial document creation when appropriate 149 | - Preserve formatting when reading/modifying content 150 | 151 | ## 🚫 Common Pitfalls to Avoid 152 | 153 | ### Don't Do This: 154 | - ❌ Use `extractIdFromUrl` when other tools accept URLs 155 | - ❌ Assume timezone without checking 156 | - ❌ Execute writes without preview and confirmation 157 | - ❌ Create files unless explicitly requested 158 | - ❌ Duplicate parameter documentation from tool descriptions 159 | - ❌ Use relative paths for file downloads (e.g., `downloads/file.txt`) 160 | 161 | ### Do This Instead: 162 | - ✅ Pass URLs directly to tools that accept them 163 | - ✅ Get user timezone at session start 164 | - ✅ Preview all changes and wait for approval 165 | - ✅ Only create what's requested 166 | - ✅ Focus on behavioral guidance and best practices 167 | - ✅ Always use **absolute paths** for file downloads (e.g., `/Users/me/Downloads/file.txt`) 168 | 169 | ## 🔍 Error Handling Patterns 170 | 171 | ### Authentication Errors 172 | - If any tool returns `{"error":"invalid_request"}`, it likely indicates an expired or invalid session. 173 | - **Action:** Call `auth.clear` to reset credentials and force a re-login. 174 | - Inform the user that you are resetting authentication due to an error. 
175 | 176 | ### Graceful Degradation 177 | - If a folder doesn't exist, offer to create it 178 | - If search returns no results, suggest alternatives 179 | - If permissions are insufficient, explain clearly 180 | 181 | ### Validation Before Action 182 | - Verify file/folder existence before moving 183 | - Check calendar availability before scheduling 184 | - Validate email addresses before sending 185 | 186 | ## ⚡ Performance Optimization 187 | 188 | ### Batch Operations 189 | - Group related API calls when possible 190 | - Use field masks to request only needed data 191 | - Implement pagination for large datasets 192 | 193 | ### Caching Strategy 194 | - Reuse user context throughout session 195 | - Cache frequently accessed metadata 196 | - Minimize redundant API calls 197 | 198 | ## 📝 Session Management 199 | 200 | ### Beginning of Session 201 | 1. Get user profile with `people.getMe()` 202 | 2. Get timezone with `time.getTimeZone()` 203 | 3. Establish any relevant context 204 | 205 | ### During Interaction 206 | - Maintain context awareness 207 | - Apply user preferences consistently 208 | - Handle follow-up questions efficiently 209 | 210 | ### End of Session 211 | - Confirm all requested tasks completed 212 | - Provide summary if multiple operations performed 213 | - Ensure no pending confirmations 214 | 215 | ## 🎨 Service-Specific Nuances 216 | 217 | ### Google Docs 218 | - Support for markdown content creation 219 | - Automatic HTML conversion from markdown 220 | - Position-based text insertion (index 1 for beginning) 221 | 222 | ### Google Sheets 223 | - Multiple output formats available 224 | - Range-based operations with A1 notation 225 | - Metadata includes sheet structure information 226 | 227 | ### Google Calendar 228 | - Event creation requires both start and end times 229 | - Support for attendee management 230 | - Response status filtering available 231 | 232 | ### Gmail 233 | - Full threading support 234 | - Label-based organization 235 | - Draft 
creation and management

### Google Chat
- Space vs. DM distinction
- Thread-aware messaging
- Unread message filtering

Remember: This guide focuses on **how to think** about using these tools effectively. For specific parameter details, refer to the tool descriptions themselves.
--------------------------------------------------------------------------------
/workspace-server/src/__tests__/auth/token-storage/hybrid-token-storage.test.ts:
--------------------------------------------------------------------------------
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, beforeEach, afterEach, jest } from '@jest/globals';
import { type OAuthCredentials, TokenStorageType } from '../../../auth/token-storage/types';

// Module paths registered with jest.doMock and loaded with require() below,
// so mocks can be re-installed after jest.resetModules() in every test.
const KEYCHAIN_TOKEN_STORAGE_PATH = '../../../auth/token-storage/keychain-token-storage';
const FILE_TOKEN_STORAGE_PATH = '../../../auth/token-storage/file-token-storage';
const HYBRID_TOKEN_STORAGE_PATH = '../../../auth/token-storage/hybrid-token-storage';

// Shape shared by the keychain and file-storage mocks. `isAvailable` is
// optional because only the keychain mock defines it in this suite.
interface MockStorage {
  isAvailable?: ReturnType<typeof jest.fn>;
  getCredentials: ReturnType<typeof jest.fn>;
  setCredentials: ReturnType<typeof jest.fn>;
  deleteCredentials: ReturnType<typeof jest.fn>;
  listServers: ReturnType<typeof jest.fn>;
  getAllCredentials: ReturnType<typeof jest.fn>;
  clearAll: ReturnType<typeof jest.fn>;
}

describe('HybridTokenStorage', () => {
  let HybridTokenStorage: typeof import('../../../auth/token-storage/hybrid-token-storage').HybridTokenStorage;
  let storage: import('../../../auth/token-storage/hybrid-token-storage').HybridTokenStorage;
  let mockKeychainStorage: MockStorage;
  let mockFileStorage: MockStorage;
  const originalEnv = process.env;

  beforeEach(() => {
    // Fresh module registry per test so the hybrid storage's cached backend
    // selection (see "should cache storage selection") starts clean.
    jest.resetModules();
    process.env = { ...originalEnv };
    process.env['GEMINI_CLI_WORKSPACE_FORCE_FILE_STORAGE'] = 'false';

    mockKeychainStorage = {
      isAvailable: jest.fn(),
      getCredentials: jest.fn(),
      setCredentials: jest.fn(),
      deleteCredentials: jest.fn(),
      listServers: jest.fn(),
      getAllCredentials: jest.fn(),
      clearAll: jest.fn(),
    };

    mockFileStorage = {
      getCredentials: jest.fn(),
      setCredentials: jest.fn(),
      deleteCredentials: jest.fn(),
      listServers: jest.fn(),
      getAllCredentials: jest.fn(),
      clearAll: jest.fn(),
    };

    jest.doMock(KEYCHAIN_TOKEN_STORAGE_PATH, () => ({
      KeychainTokenStorage: jest.fn().mockImplementation(() => mockKeychainStorage),
    }));

    // jest.doMock (not jest.mock): calls made inside beforeEach are not
    // hoisted, so doMock is the runtime-safe registration that the require()
    // below will pick up. Keeps both backend mocks registered the same way.
    jest.doMock(FILE_TOKEN_STORAGE_PATH, () => ({
      FileTokenStorage: {
        create: jest.fn().mockImplementation(() => {
          return Promise.resolve(mockFileStorage);
        }),
      },
    }));

    // eslint-disable-next-line @typescript-eslint/no-require-imports
    HybridTokenStorage = require(HYBRID_TOKEN_STORAGE_PATH).HybridTokenStorage;
    storage = new HybridTokenStorage('test-service');
  });

  afterEach(() => {
    // Restore the real environment object mutated in beforeEach.
    process.env = originalEnv;
  });

  describe('storage selection', () => {
    it('should use keychain when available', async () => {
      mockKeychainStorage.isAvailable!.mockResolvedValue(true);
      mockKeychainStorage.getCredentials.mockResolvedValue(null);

      await storage.getCredentials('test-server');

      expect(mockKeychainStorage.isAvailable).toHaveBeenCalled();
      expect(mockKeychainStorage.getCredentials).toHaveBeenCalledWith(
        'test-server',
      );
      expect(await storage.getStorageType()).toBe(TokenStorageType.KEYCHAIN);
    });

    it('should use file storage when GEMINI_CLI_WORKSPACE_FORCE_FILE_STORAGE is set', async () => {
      process.env['GEMINI_CLI_WORKSPACE_FORCE_FILE_STORAGE'] = 'true';
      mockFileStorage.getCredentials.mockResolvedValue(null);

      await storage.getCredentials('test-server');

      // Forcing file storage must short-circuit the keychain probe entirely.
      expect(mockKeychainStorage.isAvailable).not.toHaveBeenCalled();
      expect(mockFileStorage.getCredentials).toHaveBeenCalledWith(
        'test-server',
      );
      expect(await storage.getStorageType()).toBe(
        TokenStorageType.ENCRYPTED_FILE,
      );
    });

    it('should fall back to file storage when keychain is unavailable', async () => {
      mockKeychainStorage.isAvailable!.mockResolvedValue(false);
      mockFileStorage.getCredentials.mockResolvedValue(null);

      await storage.getCredentials('test-server');

      expect(mockKeychainStorage.isAvailable).toHaveBeenCalled();
      expect(mockFileStorage.getCredentials).toHaveBeenCalledWith(
        'test-server',
      );
      expect(await storage.getStorageType()).toBe(
        TokenStorageType.ENCRYPTED_FILE,
      );
    });

    it('should fall back to file storage when keychain throws error', async () => {
      // A throwing availability probe must degrade gracefully, not propagate.
      mockKeychainStorage.isAvailable!.mockRejectedValue(
        new Error('Keychain error'),
      );
      mockFileStorage.getCredentials.mockResolvedValue(null);

      await storage.getCredentials('test-server');

      expect(mockKeychainStorage.isAvailable).toHaveBeenCalled();
      expect(mockFileStorage.getCredentials).toHaveBeenCalledWith(
        'test-server',
      );
      expect(await storage.getStorageType()).toBe(
        TokenStorageType.ENCRYPTED_FILE,
      );
    });

    it('should cache storage selection', async () => {
      mockKeychainStorage.isAvailable!.mockResolvedValue(true);
      mockKeychainStorage.getCredentials.mockResolvedValue(null);

      await storage.getCredentials('test-server');
      await storage.getCredentials('another-server');

      // The backend probe is expensive; it must run once per instance.
      expect(mockKeychainStorage.isAvailable).toHaveBeenCalledTimes(1);
    });
  });

  describe('getCredentials', () => {
    it('should delegate to selected storage', async () => {
      const credentials: OAuthCredentials = {
        serverName: 'test-server',
        token: {
          accessToken: 'access-token',
          tokenType: 'Bearer',
        },
        updatedAt: Date.now(),
      };

      mockKeychainStorage.isAvailable!.mockResolvedValue(true);
      mockKeychainStorage.getCredentials.mockResolvedValue(credentials);

      const result = await storage.getCredentials('test-server');

      expect(result).toEqual(credentials);
      expect(mockKeychainStorage.getCredentials).toHaveBeenCalledWith(
        'test-server',
      );
    });
  });

  describe('setCredentials', () => {
    it('should delegate to selected storage', async () => {
      const credentials: OAuthCredentials = {
        serverName: 'test-server',
        token: {
          accessToken: 'access-token',
          tokenType: 'Bearer',
        },
        updatedAt: Date.now(),
      };

      mockKeychainStorage.isAvailable!.mockResolvedValue(true);
      mockKeychainStorage.setCredentials.mockResolvedValue(undefined);

      await storage.setCredentials(credentials);

      expect(mockKeychainStorage.setCredentials).toHaveBeenCalledWith(
        credentials,
      );
    });
  });

  describe('deleteCredentials', () => {
    it('should delegate to selected storage', async () => {
      mockKeychainStorage.isAvailable!.mockResolvedValue(true);
      mockKeychainStorage.deleteCredentials.mockResolvedValue(undefined);

      await storage.deleteCredentials('test-server');

      expect(mockKeychainStorage.deleteCredentials).toHaveBeenCalledWith(
        'test-server',
      );
    });
  });

  describe('listServers', () => {
    it('should delegate to selected storage', async () => {
      const servers = ['server1', 'server2'];
      mockKeychainStorage.isAvailable!.mockResolvedValue(true);
      mockKeychainStorage.listServers.mockResolvedValue(servers);

      const result = await storage.listServers();

      expect(result).toEqual(servers);
      expect(mockKeychainStorage.listServers).toHaveBeenCalled();
    });
  });

  describe('getAllCredentials', () => {
    it('should delegate to selected storage', async () => {
      const credentialsMap = new Map([
        [
          'server1',
          {
            serverName: 'server1',
            token: { accessToken: 'token1', tokenType: 'Bearer' },
            updatedAt: Date.now(),
          },
        ],
        [
          'server2',
          {
            serverName: 'server2',
            token: { accessToken: 'token2', tokenType: 'Bearer' },
            updatedAt: Date.now(),
          },
        ],
      ]);

      mockKeychainStorage.isAvailable!.mockResolvedValue(true);
      mockKeychainStorage.getAllCredentials.mockResolvedValue(credentialsMap);

      const result = await storage.getAllCredentials();

      expect(result).toEqual(credentialsMap);
      expect(mockKeychainStorage.getAllCredentials).toHaveBeenCalled();
    });
  });

  describe('clearAll', () => {
    it('should delegate to selected storage', async () => {
      mockKeychainStorage.isAvailable!.mockResolvedValue(true);
      mockKeychainStorage.clearAll.mockResolvedValue(undefined);

      await storage.clearAll();

      expect(mockKeychainStorage.clearAll).toHaveBeenCalled();
    });
  });
});
--------------------------------------------------------------------------------
/workspace-server/src/__tests__/auth/AuthManager.test.ts:
--------------------------------------------------------------------------------
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { AuthManager } from '../../auth/AuthManager';
import { OAuthCredentialStorage } from '../../auth/token-storage/oauth-credential-storage';
import { google } from 'googleapis';

// Mock dependencies
jest.mock('../../auth/token-storage/oauth-credential-storage');
jest.mock('googleapis');
jest.mock('../../utils/logger');
jest.mock('../../utils/secure-browser-launcher');

// Mock fetch globally for refreshToken tests
global.fetch = jest.fn();

// Cloud-function endpoint AuthManager posts refresh tokens to; hoisted so the
// URL asserted on below cannot drift between tests.
const REFRESH_TOKEN_ENDPOINT =
  'https://google-workspace-extension.geminicli.com/refreshToken';

describe('AuthManager', () => {
  let authManager: AuthManager;
  let mockOAuth2Client: any;

  beforeEach(() => {
    jest.clearAllMocks();

    // Minimal stand-in for google.auth.OAuth2: setCredentials mirrors the
    // real client by stashing whatever it receives on `credentials`.
    mockOAuth2Client = {
      setCredentials: jest.fn().mockImplementation((creds) => {
        mockOAuth2Client.credentials = creds;
      }),
      generateAuthUrl: jest.fn(),
      on: jest.fn(),
      refreshAccessToken: jest.fn(),
      credentials: {}
    };

    (google.auth.OAuth2 as unknown as jest.Mock).mockReturnValue(mockOAuth2Client);

    authManager = new AuthManager(['scope1']);
  });

  it('should set up tokens event listener on client creation', async () => {
    (OAuthCredentialStorage.loadCredentials as jest.Mock).mockResolvedValue({
      access_token: 'old_token',
      refresh_token: 'old_refresh',
      scope: 'scope1'
    });

    await authManager.getAuthenticatedClient();

    // Verify 'on' was called for 'tokens'
    expect(mockOAuth2Client.on).toHaveBeenCalledWith('tokens', expect.any(Function));
  });

  it('should save credentials when tokens event is emitted', async () => {
    (OAuthCredentialStorage.loadCredentials as jest.Mock).mockResolvedValue({
      access_token: 'old_token',
      refresh_token: 'old_refresh',
      scope: 'scope1'
    });

    await authManager.getAuthenticatedClient();

    // Get the registered callback
    const tokensCallback = mockOAuth2Client.on.mock.calls.find((call: any[]) => call[0] === 'tokens')[1];
    expect(tokensCallback).toBeDefined();

    // Simulate tokens event
    const newTokens = {
      access_token: 'new_token',
      expiry_date: 123456789
    };

    await tokensCallback(newTokens);

    // Verify saveCredentials was called with merged tokens
    // New tokens take precedence, but refresh_token is preserved from old credentials
    expect(OAuthCredentialStorage.saveCredentials).toHaveBeenCalledWith({
      access_token: 'new_token',
      refresh_token: 'old_refresh', // Preserved from old credentials
      expiry_date: 123456789
      // Note: scope is NOT preserved because newTokens didn't include it
    });
  });

  it('should preserve refresh token during manual refresh if not returned', async () => {
    // Setup initial state with a refresh token
    (OAuthCredentialStorage.loadCredentials as jest.Mock).mockResolvedValue({
      access_token: 'old_token',
      refresh_token: 'old_refresh_token',
      scope: 'scope1'
    });

    // Initialize client to populate this.client
    await authManager.getAuthenticatedClient();

    // Mock fetch to simulate cloud function returning new tokens without refresh_token
    (global.fetch as jest.Mock).mockResolvedValue({
      ok: true,
      json: async () => ({
        access_token: 'new_access_token',
        expiry_date: 999999999
      })
    });

    await authManager.refreshToken();

    // Verify saveCredentials was called with BOTH new access token AND old refresh token
    expect(OAuthCredentialStorage.saveCredentials).toHaveBeenCalledWith(expect.objectContaining({
      access_token: 'new_access_token',
      refresh_token: 'old_refresh_token'
    }));
  });

  it('should preserve refresh token when refreshAccessToken mutates credentials in-place', async () => {
    // Setup initial state with a refresh token
    (OAuthCredentialStorage.loadCredentials as jest.Mock).mockResolvedValue({
      access_token: 'old_token',
      refresh_token: 'old_refresh_token',
      scope: 'scope1'
    });

    // Initialize client to populate this.client
    await authManager.getAuthenticatedClient();

    // Mock fetch to simulate cloud function returning new tokens without refresh_token
    (global.fetch as jest.Mock).mockResolvedValue({
      ok: true,
      json: async () => ({
        access_token: 'new_access_token',
        expiry_date: 999999999
      })
    });

    await authManager.refreshToken();

    // This test verifies that the refresh_token is preserved even when
    // the cloud function doesn't return it in the response
    expect(OAuthCredentialStorage.saveCredentials).toHaveBeenCalledWith(expect.objectContaining({
      access_token: 'new_access_token',
      refresh_token: 'old_refresh_token'
    }));
  });

  it('should preserve refresh token in tokens event handler', async () => {
    // Setup initial state with a refresh token in storage
    (OAuthCredentialStorage.loadCredentials as jest.Mock).mockResolvedValue({
      access_token: 'old_token',
      refresh_token: 'stored_refresh_token',
      scope: 'scope1'
    });

    await authManager.getAuthenticatedClient();

    // Get the registered callback
    const tokensCallback = mockOAuth2Client.on.mock.calls.find((call: any[]) => call[0] === 'tokens')[1];

    // Simulate automatic refresh that doesn't include refresh_token
    const newTokens = {
      access_token: 'auto_refreshed_token',
      expiry_date: 999999999
      // Note: no refresh_token
    };

    await tokensCallback(newTokens);

    // Verify saveCredentials was called with BOTH new access token AND stored refresh token
    expect(OAuthCredentialStorage.saveCredentials).toHaveBeenCalledWith({
      access_token: 'auto_refreshed_token',
      expiry_date: 999999999,
      refresh_token: 'stored_refresh_token'
    });
  });

  it('should proactively refresh expired tokens before returning client', async () => {
    // Setup: Load credentials with expired token
    const expiredTime = Date.now() - 1000; // 1 second ago
    (OAuthCredentialStorage.loadCredentials as jest.Mock).mockResolvedValue({
      access_token: 'expired_token',
      refresh_token: 'valid_refresh',
      expiry_date: expiredTime,
      scope: 'scope1'
    });

    // Mock fetch to simulate cloud function returning fresh tokens
    (global.fetch as jest.Mock).mockResolvedValue({
      ok: true,
      json: async () => ({
        access_token: 'fresh_token',
        expiry_date: Date.now() + 3600000
      })
    });

    // First call: load expired credentials from storage, should trigger proactive refresh
    const firstClient = await authManager.getAuthenticatedClient();
    expect(firstClient).toBeDefined();

    // Verify fetch was called to refresh the token
    expect(global.fetch).toHaveBeenCalledWith(
      REFRESH_TOKEN_ENDPOINT,
      expect.objectContaining({
        method: 'POST',
        body: expect.stringContaining('valid_refresh')
      })
    );

    // Verify new token was saved with preserved refresh_token
    expect(OAuthCredentialStorage.saveCredentials).toHaveBeenCalledWith(
      expect.objectContaining({
        access_token: 'fresh_token',
        refresh_token: 'valid_refresh'
      })
    );
  });

  it('should proactively refresh tokens expiring within buffer (5 minutes)', async () => {
    // Setup: Load credentials with token expiring in 4 minutes (within 5 min buffer)
    const TEST_EXPIRY_WITHIN_BUFFER = 4 * 60 * 1000;
    const expiresIn4Minutes = Date.now() + TEST_EXPIRY_WITHIN_BUFFER;
    (OAuthCredentialStorage.loadCredentials as jest.Mock).mockResolvedValue({
      access_token: 'soon_expiring_token',
      refresh_token: 'valid_refresh',
      expiry_date: expiresIn4Minutes,
      scope: 'scope1'
    });

    // Mock fetch to simulate cloud function returning fresh tokens
    (global.fetch as jest.Mock).mockResolvedValue({
      ok: true,
      json: async () => ({
        access_token: 'fresh_token',
        expiry_date: Date.now() + 60 * 60 * 1000
      })
    });

    // Call getAuthenticatedClient
    const client = await authManager.getAuthenticatedClient();
    expect(client).toBeDefined();

    // Verify fetch was called to refresh the token because it was within buffer
    expect(global.fetch).toHaveBeenCalledWith(
      REFRESH_TOKEN_ENDPOINT,
      expect.objectContaining({
        method: 'POST',
        body: expect.stringContaining('valid_refresh')
      })
    );
  });
});
--------------------------------------------------------------------------------