├── .ai ├── auth-spec.md ├── db-migrations.md ├── diagrams │ ├── auth.md │ ├── journey.md │ └── ui.md ├── feature-flags.md ├── project-mvp.md ├── project-prd.md ├── tech-stack.md ├── test-plan-gemini.md ├── test-plan.md └── ui │ └── mobile-navigation.md ├── .all-contributorsrc ├── .cursor └── rules │ ├── astro.mdc │ ├── github-action.mdc │ ├── mermaid-diagram-auth.mdc │ ├── mermaid-diagram-journey.mdc │ ├── mermaid-diagram-ui.mdc │ ├── playwright-e2e-testing.mdc │ ├── react-development.mdc │ ├── supabase-migrations.mdc │ ├── supabase.mdc │ ├── test-plan.mdc │ └── vitest-unit-testing.mdc ├── .cursorignore ├── .editorconfig ├── .env.example ├── .eslintrc.cjs ├── .github ├── pull_request_template.md └── workflows │ ├── deploy-app.yml │ ├── deploy-mcp-on-merge.yml │ └── pull-request.yml ├── .gitignore ├── .husky └── pre-commit ├── .nvmrc ├── .prettierignore ├── .prettierrc ├── .vscode ├── extensions.json ├── launch.json ├── markdown.css └── settings.json ├── LICENSE ├── README.md ├── astro.config.mjs ├── e2e ├── auth.setup.ts ├── global.teardown.ts ├── home.spec.ts ├── page-objects │ ├── CollectionsSidebarPage.ts │ ├── HomePage.ts │ └── SaveCollectionDialog.ts └── tests │ └── collections.spec.ts ├── mcp-server ├── .gitignore ├── .vscode │ └── settings.json ├── README.md ├── biome.json ├── package-lock.json ├── package.json ├── src │ ├── data │ │ └── rulesProvider.ts │ ├── index.ts │ └── tools │ │ └── rulesTools.ts ├── tsconfig.json ├── worker-configuration.d.ts └── wrangler.jsonc ├── package-lock.json ├── package.json ├── playwright-report └── index.html ├── playwright.config.ts ├── public ├── demo.png └── favicon.svg ├── scripts └── generate-rules-json.mts ├── src ├── assets │ ├── 10xlogo.svg │ ├── demo.png │ └── privacy-policy │ │ ├── pp-13-04-2025-en.md │ │ └── pp-13-04-2025-pl.md ├── components │ ├── Footer.tsx │ ├── MobileNavigation.tsx │ ├── Topbar.tsx │ ├── TwoPane.tsx │ ├── auth │ │ ├── AuthInput.tsx │ │ ├── AuthLayout.tsx │ │ ├── LoginButton.tsx │ │ ├── LoginForm.tsx │ │ ├── ResetPasswordForm.tsx │ │ ├── SignupForm.tsx │ │ └── UpdatePasswordResetForm.tsx │ ├── cookie-banner │ │ └── CookieBanner.tsx │ ├── helpers │ │ └── tailwind-safelist.tsx │ ├── privacy │ │ └── PrivacyLayout.astro │ ├── rule-builder │ │ ├── LayerItem.tsx │ │ ├── LayerSelector.tsx │ │ ├── LibraryItem.tsx │ │ ├── LibrarySelector.tsx │ │ ├── RuleBuilder.tsx │ │ ├── SearchInput.tsx │ │ ├── SelectedRules.tsx │ │ ├── StackItem.tsx │ │ ├── StackSelector.tsx │ │ ├── hooks │ │ │ ├── useMCPDialog.ts │ │ │ └── useRuleBuilder.ts │ │ ├── index.ts │ │ └── modals │ │ │ └── MCPDialog.tsx │ ├── rule-collections │ │ ├── CollectionListEntry.tsx │ │ ├── CollectionsList.tsx │ │ ├── CollectionsSidebar.tsx │ │ ├── DeletionDialog.tsx │ │ ├── SaveCollectionDialog.tsx │ │ ├── SaveDefaultDialog.tsx │ │ └── UnsavedChangesDialog.tsx │ ├── rule-parser │ │ ├── DependencyUploader.tsx │ │ └── useDependencyUpload.ts │ ├── rule-preview │ │ ├── DependencyUpload.tsx │ │ ├── MarkdownContentRenderer.tsx │ │ ├── RulePreview.tsx │ │ ├── RulePreviewTopbar.tsx │ │ ├── RulesPath.tsx │ │ ├── RulesPreviewActions.tsx │ │ ├── RulesPreviewCopyDownloadActions.tsx │ │ └── index.ts │ └── ui │ │ ├── Accordion.tsx │ │ └── ConfirmDialog.tsx ├── data │ ├── ai-environments.ts │ ├── dictionaries.ts │ ├── rules.ts │ └── rules │ │ ├── accessibility.ts │ │ ├── backend.ts │ │ ├── coding.ts │ │ ├── database.ts │ │ ├── frontend.ts │ │ ├── helpers.ts │ │ ├── index.ts │ │ ├── infrastructure.ts │ │ ├── testing.ts │ │ └── types.ts ├── db │ ├── database.types.ts │ └── 
supabase.client.ts ├── env.d.ts ├── features │ └── featureFlags.ts ├── hooks │ ├── useAuth.ts │ ├── useCaptcha.ts │ ├── useCookieConsent.ts │ └── useTokenHashVerification.ts ├── i18n │ ├── translations.spec.ts │ └── translations.ts ├── layouts │ ├── Layout.astro │ └── partials │ │ ├── Fonts.astro │ │ ├── GTMContainer.astro │ │ └── SEO.astro ├── middleware │ └── index.ts ├── pages │ ├── api │ │ ├── auth │ │ │ ├── login.ts │ │ │ ├── logout.ts │ │ │ ├── reset-password.ts │ │ │ ├── signup.ts │ │ │ ├── update-password.ts │ │ │ └── verify-reset-token.ts │ │ ├── captcha │ │ │ └── verify.ts │ │ ├── collections.ts │ │ ├── collections │ │ │ └── [id].ts │ │ ├── dependencyMappers.ts │ │ └── upload-dependencies.ts │ ├── auth │ │ ├── login.astro │ │ ├── reset-password.astro │ │ ├── signup.astro │ │ └── update-password.astro │ ├── index.astro │ └── privacy │ │ ├── en │ │ └── index.astro │ │ ├── pl │ │ └── index.astro │ │ └── privacyPolicyVersion.ts ├── services │ ├── auth.ts │ ├── captcha.ts │ ├── rateLimiter.ts │ └── rules-builder │ │ ├── RulesBuilderService.ts │ │ ├── RulesBuilderTypes.ts │ │ ├── RulesGenerationStrategy.ts │ │ ├── __tests__ │ │ └── RulesBuilderService.test.ts │ │ └── rules-generation-strategies │ │ ├── MultiFileRulesStrategy.ts │ │ └── SingleFileRulesStrategy.ts ├── store │ ├── authStore.ts │ ├── collectionsStore.ts │ ├── navigationStore.ts │ ├── projectStore.ts │ ├── storage │ │ ├── urlStorage.test.ts │ │ └── urlStorage.ts │ └── techStackStore.ts ├── styles │ ├── global.css │ └── theme.ts ├── types │ ├── auth.ts │ └── collection.types.ts └── utils │ ├── __tests__ │ └── markdownStyling.test.tsx │ ├── cn.ts │ ├── markdownStyling.tsx │ └── slugify.ts ├── supabase ├── .gitignore ├── config.toml ├── emails │ ├── email-template.html │ └── recovery.html └── migrations │ ├── 20250328135512_collections.sql │ ├── 20250328201010_drop-rls.sql │ ├── 20250402082709_add_rls_to_collections.sql │ └── 20250411083417_create_user_consents.sql ├── tailwind.config.mjs ├── tests └── setup │ ├── types.d.ts │ └── vitest.setup.ts ├── tsconfig.json └── vitest.config.ts /.ai/db-migrations.md: -------------------------------------------------------------------------------- 1 | --- 2 | # Specify the following for Cursor rules 3 | description: Guidelines for writing Postgres migrations 4 | globs: 'supabase/migrations/**/*.sql' 5 | --- 6 | 7 | # Database: Create migration 8 | 9 | You are a Postgres Expert who loves creating secure database schemas. 10 | 11 | This project uses the migrations provided by the Supabase CLI. 12 | 13 | ## Creating a migration file 14 | 15 | Given the context of the user's message, create a database migration file inside the folder `supabase/migrations/`. 16 | 17 | The file MUST following this naming convention: 18 | 19 | The file MUST be named in the format `YYYYMMDDHHmmss_short_description.sql` with proper casing for months, minutes, and seconds in UTC time: 20 | 21 | 1. `YYYY` - Four digits for the year (e.g., `2024`). 22 | 2. `MM` - Two digits for the month (01 to 12). 23 | 3. `DD` - Two digits for the day of the month (01 to 31). 24 | 4. `HH` - Two digits for the hour in 24-hour format (00 to 23). 25 | 5. `mm` - Two digits for the minute (00 to 59). 26 | 6. `ss` - Two digits for the second (00 to 59). 27 | 7. Add an appropriate description for the migration. 
28 | 29 | For example: 30 | 31 | ``` 32 | 20240906123045_create_profiles.sql 33 | ``` 34 | 35 | ## SQL Guidelines 36 | 37 | Write Postgres-compatible SQL code for Supabase migration files that: 38 | 39 | - Includes a header comment with metadata about the migration, such as the purpose, affected tables/columns, and any special considerations. 40 | - Includes thorough comments explaining the purpose and expected behavior of each migration step. 41 | - Write all SQL in lowercase. 42 | - Add copious comments for any destructive SQL commands, including truncating, dropping, or column alterations. 43 | - When creating a new table, you MUST enable Row Level Security (RLS) even if the table is intended for public access. 44 | - When creating RLS Policies 45 | - Ensure the policies cover all relevant access scenarios (e.g. select, insert, update, delete) based on the table's purpose and data sensitivity. 46 | - If the table is intended for public access the policy can simply return `true`. 47 | - RLS Policies should be granular: one policy for `select`, one for `insert` etc) and for each supabase role (`anon` and `authenticated`). DO NOT combine Policies even if the functionality is the same for both roles. 48 | - Include comments explaining the rationale and intended behavior of each security policy 49 | 50 | The generated SQL code should be production-ready, well-documented, and aligned with Supabase's best practices. 51 | -------------------------------------------------------------------------------- /.ai/diagrams/auth.md: -------------------------------------------------------------------------------- 1 | ```mermaid 2 | sequenceDiagram 3 | autonumber 4 | participant Browser 5 | participant Middleware 6 | participant AstroAPI 7 | participant SupabaseAuth 8 | 9 | %% Rejestracja użytkownika 10 | Browser->>AstroAPI: POST /api/auth/signup (email, password) 11 | activate AstroAPI 12 | AstroAPI->>SupabaseAuth: supabase.auth.signUp() 13 | activate SupabaseAuth 14 | alt Rejestracja udana 15 | SupabaseAuth-->>AstroAPI: Utworzono konto 16 | AstroAPI-->>Browser: 200 OK + dane użytkownika 17 | Note over Browser: Przekierowanie do logowania 18 | else Błąd rejestracji 19 | SupabaseAuth-->>AstroAPI: Błąd (np. 
email zajęty) 20 | AstroAPI-->>Browser: 400 Bad Request + komunikat 21 | end 22 | deactivate SupabaseAuth 23 | deactivate AstroAPI 24 | 25 | %% Logowanie użytkownika 26 | Browser->>AstroAPI: POST /api/auth/login (email, password) 27 | activate AstroAPI 28 | AstroAPI->>SupabaseAuth: supabase.auth.signInWithPassword() 29 | activate SupabaseAuth 30 | alt Logowanie udane 31 | SupabaseAuth-->>AstroAPI: Token JWT + dane użytkownika 32 | AstroAPI-->>Browser: 200 OK + token w HttpOnly cookie 33 | Note over Browser: Inicjalizacja authStore (Zustand) 34 | else Błędne dane 35 | SupabaseAuth-->>AstroAPI: Błąd autentykacji 36 | AstroAPI-->>Browser: 400 Bad Request + komunikat 37 | end 38 | deactivate SupabaseAuth 39 | deactivate AstroAPI 40 | 41 | %% Reset hasła 42 | Browser->>AstroAPI: POST /api/auth/reset-password (email) 43 | activate AstroAPI 44 | AstroAPI->>SupabaseAuth: supabase.auth.resetPasswordForEmail() 45 | activate SupabaseAuth 46 | alt Email istnieje 47 | SupabaseAuth-->>AstroAPI: Email wysłany 48 | AstroAPI-->>Browser: 200 OK + komunikat 49 | Note over Browser: Informacja o wysłaniu linku 50 | else Email nie istnieje 51 | SupabaseAuth-->>AstroAPI: Błąd 52 | AstroAPI-->>Browser: 400 Bad Request + komunikat 53 | end 54 | deactivate SupabaseAuth 55 | deactivate AstroAPI 56 | 57 | %% Dostęp do chronionej zawartości 58 | Browser->>Middleware: Żądanie chronionego zasobu 59 | activate Middleware 60 | Middleware->>SupabaseAuth: Weryfikacja tokenu JWT 61 | activate SupabaseAuth 62 | alt Token ważny 63 | SupabaseAuth-->>Middleware: Token poprawny 64 | Middleware->>AstroAPI: Przekazanie żądania 65 | AstroAPI-->>Browser: Chroniona zawartość 66 | else Token nieważny/wygasły 67 | SupabaseAuth-->>Middleware: Błąd weryfikacji 68 | Middleware-->>Browser: Przekierowanie do /auth/login 69 | end 70 | deactivate SupabaseAuth 71 | deactivate Middleware 72 | 73 | %% Wylogowanie 74 | Browser->>AstroAPI: POST /api/auth/logout 75 | activate AstroAPI 76 | AstroAPI->>SupabaseAuth: supabase.auth.signOut() 77 | activate SupabaseAuth 78 | SupabaseAuth-->>AstroAPI: Sesja zakończona 79 | AstroAPI-->>Browser: 200 OK + usunięcie cookie 80 | Note over Browser: Reset authStore 81 | deactivate SupabaseAuth 82 | deactivate AstroAPI 83 | ``` 84 | -------------------------------------------------------------------------------- /.ai/diagrams/journey.md: -------------------------------------------------------------------------------- 1 | ```mermaid 2 | stateDiagram-v2 3 | [*] --> StronaGlowna 4 | 5 | state "Dostęp Niezalogowany" as Niezalogowany { 6 | StronaGlowna --> TworzenieRegul 7 | state "Tworzenie Reguł" as TworzenieRegul { 8 | [*] --> KreatorRegul 9 | KreatorRegul --> PodgladRegul 10 | PodgladRegul --> KreatorRegul 11 | } 12 | 13 | state "Ograniczony Dostęp" as OgraniczonyDostep { 14 | TworzenieRegul --> ProbaZapisuKolekcji 15 | ProbaZapisuKolekcji --> WymaganeLogowanie: Próba zapisu 16 | } 17 | } 18 | 19 | state "Proces Autentykacji" as Autentykacja { 20 | state "Logowanie" as Logowanie { 21 | [*] --> FormularzLogowania 22 | FormularzLogowania --> WalidacjaLogowania 23 | 24 | state if_logowanie <> 25 | WalidacjaLogowania --> if_logowanie 26 | if_logowanie --> PomyslneLogowanie: Dane poprawne 27 | if_logowanie --> BladLogowania: Błędne dane 28 | BladLogowania --> FormularzLogowania 29 | 30 | note right of FormularzLogowania 31 | Email i hasło wymagane 32 | Minimum 8 znaków dla hasła 33 | end note 34 | } 35 | 36 | state "Rejestracja" as Rejestracja { 37 | [*] --> FormularzRejestracji 38 | FormularzRejestracji --> 
WalidacjaRejestracji 39 | 40 | state if_rejestracja <> 41 | WalidacjaRejestracji --> if_rejestracja 42 | if_rejestracja --> PomyslnaRejestracja: Dane poprawne 43 | if_rejestracja --> BladRejestracji: Email zajęty 44 | BladRejestracji --> FormularzRejestracji 45 | 46 | note right of FormularzRejestracji 47 | Email i hasło wymagane 48 | Minimum 8 znaków dla hasła 49 | Potwierdzenie hasła 50 | end note 51 | } 52 | 53 | state "Odzyskiwanie Hasła" as OdzyskiwanieHasla { 54 | [*] --> FormularzResetu 55 | FormularzResetu --> WyslanieMaila 56 | WyslanieMaila --> OczekiwanieNaLink 57 | OczekiwanieNaLink --> NoweHaslo 58 | NoweHaslo --> PomyslneLogowanie 59 | } 60 | } 61 | 62 | state "Dostęp Zalogowany" as Zalogowany { 63 | PomyslneLogowanie --> PanelUzytkownika 64 | 65 | state "Panel Użytkownika" as PanelUzytkownika { 66 | [*] --> TworzenieRegulAuth 67 | 68 | state "Zarządzanie Kolekcjami" as Kolekcje { 69 | TworzenieRegulAuth --> ZapisKolekcji 70 | ZapisKolekcji --> EdycjaKolekcji 71 | EdycjaKolekcji --> ZapisKolekcji 72 | ZapisKolekcji --> UsuniecieKolekcji 73 | } 74 | } 75 | } 76 | 77 | %% Główne przejścia między stanami 78 | WymaganeLogowanie --> FormularzLogowania 79 | StronaGlowna --> FormularzLogowania: Przycisk Logowania 80 | StronaGlowna --> FormularzRejestracji: Przycisk Rejestracji 81 | FormularzLogowania --> FormularzRejestracji: Link do rejestracji 82 | FormularzLogowania --> FormularzResetu: Zapomniałem hasła 83 | PomyslnaRejestracja --> FormularzLogowania 84 | PanelUzytkownika --> [*]: Wylogowanie 85 | 86 | note right of TworzenieRegul 87 | Podstawowa funkcjonalność 88 | dostępna bez logowania 89 | (US-001) 90 | end note 91 | 92 | note right of Kolekcje 93 | Pełna funkcjonalność 94 | dostępna po zalogowaniu 95 | (US-003) 96 | end note 97 | ``` 98 | -------------------------------------------------------------------------------- /.ai/diagrams/ui.md: -------------------------------------------------------------------------------- 1 | ```mermaid 2 | flowchart TD 3 | %% Style definitions 4 | classDef astroPage fill:#2d374d,stroke:#64748b,color:#fff 5 | classDef reactComponent fill:#1e293b,stroke:#475569,color:#fff 6 | classDef store fill:#312e81,stroke:#4338ca,color:#fff 7 | classDef auth fill:#374151,stroke:#4b5563,color:#fff 8 | classDef shared fill:#1f2937,stroke:#374151,color:#fff 9 | 10 | %% Main Layout & Pages 11 | subgraph "Astro Pages" 12 | IndexPage["index.astro"]:::astroPage 13 | LoginPage["auth/login.astro"]:::astroPage 14 | SignupPage["auth/signup.astro"]:::astroPage 15 | ResetPage["auth/reset-password.astro"]:::astroPage 16 | end 17 | 18 | %% Auth Components 19 | subgraph "Komponenty Autoryzacji" 20 | LoginForm["LoginForm"]:::auth 21 | SignupForm["SignupForm"]:::auth 22 | ResetForm["ResetPasswordForm"]:::auth 23 | AuthValidation["FormValidation"]:::auth 24 | end 25 | 26 | %% Shared Components 27 | subgraph "Komponenty Współdzielone" 28 | Layout["Layout"]:::shared 29 | Topbar["Topbar"]:::shared 30 | Footer["Footer"]:::shared 31 | end 32 | 33 | %% Main App Components 34 | subgraph "Komponenty Aplikacji" 35 | TwoPane["TwoPane"]:::reactComponent 36 | RuleBuilder["RuleBuilder"]:::reactComponent 37 | RulePreview["RulePreview"]:::reactComponent 38 | CollectionsSidebar["CollectionsSidebar"]:::reactComponent 39 | end 40 | 41 | %% State Management 42 | subgraph "Zarządzanie Stanem" 43 | AuthStore["authStore\n(Zustand)"]:::store 44 | CollectionsStore["collectionsStore\n(Zustand)"]:::store 45 | end 46 | 47 | %% Connections - Layout Structure 48 | IndexPage --> Layout 49 | LoginPage --> 
Layout 50 | SignupPage --> Layout 51 | ResetPage --> Layout 52 | 53 | Layout --> Topbar 54 | Layout --> Footer 55 | 56 | %% Auth Flow 57 | LoginPage --> LoginForm 58 | SignupPage --> SignupForm 59 | ResetPage --> ResetForm 60 | 61 | LoginForm --> AuthValidation 62 | SignupForm --> AuthValidation 63 | ResetForm --> AuthValidation 64 | 65 | %% Main App Structure 66 | IndexPage --> TwoPane 67 | TwoPane --> RuleBuilder 68 | TwoPane --> RulePreview 69 | IndexPage --> CollectionsSidebar 70 | 71 | %% State Management 72 | LoginForm -.-> AuthStore 73 | SignupForm -.-> AuthStore 74 | Topbar -.-> AuthStore 75 | CollectionsSidebar -.-> AuthStore 76 | CollectionsSidebar -.-> CollectionsStore 77 | 78 | %% Data Flow 79 | AuthStore -.-> Topbar 80 | AuthStore -.-> CollectionsSidebar 81 | CollectionsStore -.-> CollectionsSidebar 82 | 83 | %% Props Flow 84 | IndexPage --> |"user props"| Topbar 85 | IndexPage --> |"user props"| CollectionsSidebar 86 | ``` 87 | -------------------------------------------------------------------------------- /.ai/feature-flags.md: -------------------------------------------------------------------------------- 1 | # Feature Flags Module Plan 2 | 3 | ## Overview 4 | 5 | Moduł flag funkcjonalności umożliwia oddzielenie deploymentów od release'ów poprzez wprowadzenie systemu flag, które pozwalają na kontrolowanie dostępności poszczególnych funkcjonalności w zależności od środowiska. System ten może być stosowany: 6 | 7 | - na poziomie endpointów API (np. dla kolekcji, auth) 8 | - na poziomie stron Astro (np. @login.astro, @signup.astro, @reset-password.astro) 9 | - na poziomie widoczności kolekcji i komponentów (np. TwoPane.tsx, MobileNavigation.tsx) 10 | 11 | ## Wymagania 12 | 13 | - **Środowiska:** modół obsługuje środowiska `local`, `integration` oraz `prod`. 14 | - **Flagi:** Na początek moduł obsługuje flagi dla `auth` i `collections` jako proste wartości boolowskie (`true`/`false`). 15 | - **Użycie:** W aplikacji można importować moduł i wykonywać `isFeatureEnabled('key')` w celu sprawdzenia, czy dana funkcjonalność jest aktywna. 16 | - **Logowanie:** Każde zapytanie o flagę loguje informacje diagnostyczne, takie jak bieżące środowisko oraz wynik flagi. 17 | - **Build Time:** Flagi są ustalane podczas kompilacji, wykorzystując zmienną środowiskową `import.meta.env.PUBLIC_ENV_NAME`, analogicznie do sposobu użycia w wytycznych @supabase.mdc. 18 | 19 | ## Implementacja 20 | 21 | Moduł znajduje się w `src/features/featureFlags.ts` i składa się z następujących głównych elementów: 22 | 23 | 1. **Wykrywanie środowiska:** 24 | Moduł korzysta z `import.meta.env.PUBLIC_ENV_NAME`, aby określić bieżące środowisko. Jeśli zmienna nie jest ustawiona, zwraca `null`. 25 | 26 | 2. **Konfiguracja flag:** 27 | Obiekt konfiguracji mapuje nazwy funkcji na obiekty określające, czy funkcja jest włączona dla danego środowiska. 28 | 29 | 3. **Funkcja sprawdzająca flagę:** 30 | Funkcja `isFeatureEnabled(feature: string): boolean` sprawdza, czy dana flaga jest zdefiniowana, loguje wynik i zwraca ustawioną wartość flagi dla bieżącego środowiska. 31 | 32 | 4. **Przykładowy kod:** 33 | 34 | `src/features/featureFlags.ts` 35 | 36 | ## Podsumowanie 37 | 38 | Ten projekt modułu flag funkcjonalności zapewnia elastyczny system zarządzania funkcjami oparty na środowisku, który można wykorzystywać zarówno na backendzie, jak i frontendzie. Podejście to umożliwia łatwą rozbudowę systemu o kolejne flagi oraz umożliwia diagnostykę dzięki logowaniu stanu flag w trakcie wywołań. 
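For reference, below is a minimal sketch of what `src/features/featureFlags.ts` could look like based on the description above. The per-environment flag values and the fallback behavior when `PUBLIC_ENV_NAME` is missing or unrecognized are illustrative assumptions, not the actual implementation.

```typescript
export type AppEnv = 'local' | 'integration' | 'prod';

// Flag configuration per environment. The concrete values below are
// illustrative only; the real module sets them per release.
const featureFlags: Record<string, Record<AppEnv, boolean>> = {
  auth: { local: true, integration: true, prod: true },
  collections: { local: true, integration: true, prod: true },
};

// Environment detection based on the build-time PUBLIC_ENV_NAME variable;
// returns null when the variable is missing or unrecognized.
function getCurrentEnv(): AppEnv | null {
  const name = import.meta.env.PUBLIC_ENV_NAME;
  return name === 'local' || name === 'integration' || name === 'prod' ? name : null;
}

export function isFeatureEnabled(feature: string): boolean {
  const env = getCurrentEnv();
  const flagConfig = featureFlags[feature];

  if (!env || !flagConfig) {
    // Assumed fallback: an unknown environment or an undefined flag disables the feature.
    console.warn(`[featureFlags] env=${env ?? 'unknown'} feature=${feature} -> disabled`);
    return false;
  }

  const enabled = flagConfig[env];
  // Diagnostic logging of the current environment and the resolved flag value.
  console.debug(`[featureFlags] env=${env} feature=${feature} enabled=${enabled}`);
  return enabled;
}
```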
39 | -------------------------------------------------------------------------------- /.ai/project-mvp.md: -------------------------------------------------------------------------------- 1 | # MVP dla 10xRules.ai 2 | 3 | Główny problem: Aby podnieść jakość współpracy programisty z AI, konieczne jest zdefiniowanie projektowych reguł dla sztucznej inteligencji. Niestety, współczesne edytory zintegrowane z modelami językowymi używają różnych konwencji do definiowania tych reguł, a bez tego dokumentu jakość współpracy z AI jest niska. Do tego, precyzyjne definiowanie reguł wymaga doświadczenia i wiedzy o AI. 4 | 5 | ## Najmniejszy zestaw funkcjonalności: 6 | 7 | - Katalog reguł dla AI w formacie Markdown na 3 poziomach - warstwy aplikacji, stacku i bibliotek (np. Frontend, React, Zustand) 8 | - Udostępnienie konwencji do definiowania reguł dla AI w danym edytorze 9 | - Pobieranie reguł w formacie Markdown 10 | - Kopiowanie reguł do schowka 11 | - Generowanie reguł na podstawie "dep-files" (np. package.json czy requirements.txt) 12 | 13 | ## Co NIE wchodzi w zakres MVP: 14 | 15 | - Zaawansowane funkcje udostępniania reguł między użytkownikami 16 | - Eksport do formatów innych niż Markdown 17 | - Edycja reguł w aplikacji - użytkownik dostosowuje treść już na poziomie swojego projektu 18 | 19 | ## Kryteria sukcesu: 20 | 21 | - Użytkownik może wygenerować zestaw reguł dla najpopularniejszych technologii webowych korzystając z gotowego katalogu 22 | - Użytkownik może pobrać gotowy zestaw reguł lub skopiować do schowka 23 | -------------------------------------------------------------------------------- /.ai/tech-stack.md: -------------------------------------------------------------------------------- 1 | # 10xRules - Tech Stack 2 | 3 | ### Frontend - Astro z React dla komponentów interaktywnych: 4 | 5 | - Astro 5 z nastawieniem na routing server-side 6 | - React 18.3 dla interaktywnych komponentów 7 | - TypeScript 5 dla lepszej jakości kodu i wsparcia IDE 8 | - Tailwind CSS 4 dla szybkiego stylowania 9 | - Zustand dla zarządzania stanem aplikacji 10 | - Lucide React (ikony aplikacji) 11 | 12 | ### Backend - Astro z Supabase jako kompleksowe rozwiązanie backendowe: 13 | 14 | - Wbudowana autentykacja użytkowników oparta o JWT i Supabase Auth 15 | - Baza danych PostgreSQL w oparciu o Supabase 16 | 17 | ### AI - Komunikacja z modelami przez usługę Openrouter.ai: 18 | 19 | - Dostęp do szerokiej gamy modeli (OpenAI, Anthropic, Google i wiele innych), które pozwolą nam znaleźć rozwiązanie zapewniające wysoką efektywność i niskie koszta 20 | 21 | ### CI/CD i Hosting: 22 | 23 | - Github Actions do tworzenia pipeline'ów CI/CD 24 | - Cloudflare Pages do hostingu - workflow `master.yml` 25 | 26 | ### Testing: 27 | 28 | - Testy jednostkowe - Vitest z React Testing Library dla komponentów UI: 29 | 30 | - Vitest jako nowoczesny i szybki runner testów zoptymalizowany dla Vite/Astro 31 | - React Testing Library do testowania interaktywnych komponentów React 32 | - @testing-library/dom do testowania statycznych komponentów Astro 33 | - MSW (Mock Service Worker) do mockowania API w testach 34 | 35 | - Testy end-to-end - Playwright: 36 | 37 | - Symulacja pełnych ścieżek użytkownika z lepszą wieloprzeglądarkowością 38 | - Testowanie kluczowych funkcjonalności: kreator reguł, generowanie reguł na podstawie plików, zarządzanie kolekcjami 39 | - Automatyczne uruchamianie testów w ramach pipeline CI/CD GitHub Actions 40 | 41 | - Formatowanie i lintowanie kodu 42 | 43 | - ESLint dla lintowania kodu 44 | - Prettier dla 
formatowania kodu 45 | 46 | - Zależności: `package.json` 47 | -------------------------------------------------------------------------------- /.ai/ui/mobile-navigation.md: -------------------------------------------------------------------------------- 1 | # Mobile Navigation Specification 2 | 3 | ## Overview 4 | 5 | This specification outlines the changes required to improve the mobile experience of the 10xRules.ai application while maintaining the current desktop behavior. The changes focus on implementing a mobile-first bottom navigation pattern and reorganizing the main panels for better accessibility. 6 | 7 | ## Components Affected 8 | 9 | - `TwoPane.tsx` (main layout component) 10 | - `Footer.tsx` (global footer) 11 | - `CollectionsSidebar.tsx` (collections panel) 12 | - `RuleBuilder.tsx` (builder panel) 13 | - `RulePreview.tsx` (preview panel) 14 | 15 | ## Desktop Behavior (>= md breakpoint) 16 | 17 | - Maintain current three-panel layout 18 | - Keep the existing sidebar toggle functionality 19 | - Preserve current footer visibility and positioning 20 | - No changes to current panel distribution and sizing 21 | 22 | ## Mobile Behavior (< md breakpoint) 23 | 24 | ### Layout Changes 25 | 26 | - Transform into a single-panel view with bottom navigation 27 | - Hide the classic footer component 28 | - Each panel (Collections, Builder, Preview) becomes a full-width view 29 | - Remove the current top-right toggle button 30 | - Make collections panel full width and height 31 | 32 | ### Bottom Navigation 33 | 34 | - Fixed position at the bottom of the viewport 35 | - Three equal-width navigation items: 36 | 1. Collections 37 | 2. Builder 38 | 3. Preview 39 | - Active state indication for current view 40 | - Consistent with Fluent 2.0 design system 41 | - Dark theme by default 42 | 43 | ### Panel Transitions 44 | 45 | - Smooth transitions between panels (300ms duration) 46 | - Maintain scroll position for each panel independently 47 | - No content reflow during transitions 48 | 49 | ### Accessibility Requirements 50 | 51 | - Minimum touch target size: 44x44px 52 | - Clear visual feedback on active states 53 | - Proper ARIA labels and roles 54 | - Keyboard navigation support 55 | - Screen reader compatibility 56 | 57 | ## Technical Constraints 58 | 59 | - Built with React 18.3 60 | - Styled with Tailwind CSS 4 61 | - State management via Zustand 62 | - Icons from lucide-react 63 | - Responsive breakpoints follow Tailwind defaults 64 | - Dark mode as default theme 65 | 66 | ## Success Metrics 67 | 68 | - Improved mobile usability score 69 | - Reduced cognitive load for mobile users 70 | - Maintained desktop experience quality 71 | - Seamless responsive behavior across breakpoints 72 | -------------------------------------------------------------------------------- /.all-contributorsrc: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | "README.md" 4 | ], 5 | "imageSize": 100, 6 | "commit": false, 7 | "commitType": "docs", 8 | "commitConvention": "angular", 9 | "contributors": [ 10 | { 11 | "login": "damianidczak", 12 | "name": "Damian", 13 | "avatar_url": "https://avatars.githubusercontent.com/u/21343496?v=4", 14 | "profile": "https://github.com/damianidczak", 15 | "contributions": [ 16 | "code" 17 | ] 18 | }, 19 | { 20 | "login": "pawel-twardziak", 21 | "name": "pawel-twardziak", 22 | "avatar_url": "https://avatars.githubusercontent.com/u/180847852?v=4", 23 | "profile": "https://github.com/pawel-twardziak", 24 | "contributions": [ 25 | 
"code" 26 | ] 27 | }, 28 | { 29 | "login": "dudziakm", 30 | "name": "Michal Dudziak", 31 | "avatar_url": "https://avatars.githubusercontent.com/u/10773170?v=4", 32 | "profile": "https://github.com/dudziakm", 33 | "contributions": [ 34 | "maintenance" 35 | ] 36 | }, 37 | { 38 | "login": "arturlaskowski", 39 | "name": "Artur Laskowski", 40 | "avatar_url": "https://avatars.githubusercontent.com/u/92392161?v=4", 41 | "profile": "https://www.linkedin.com/in/artur-laskowski94", 42 | "contributions": [ 43 | "code" 44 | ] 45 | }, 46 | { 47 | "login": "Michaelzag", 48 | "name": "Michaelzag", 49 | "avatar_url": "https://avatars.githubusercontent.com/u/4809030?v=4", 50 | "profile": "https://github.com/Michaelzag", 51 | "contributions": [ 52 | "code" 53 | ] 54 | }, 55 | { 56 | "login": "PeterPorzuczek", 57 | "name": "Piotr Porzuczek", 58 | "avatar_url": "https://avatars.githubusercontent.com/u/24259570?v=4", 59 | "profile": "https://github.com/PeterPorzuczek", 60 | "contributions": [ 61 | "code" 62 | ] 63 | }, 64 | { 65 | "login": "michalczukm", 66 | "name": "Michał Michalczuk", 67 | "avatar_url": "https://avatars.githubusercontent.com/u/6861120?v=4", 68 | "profile": "https://michalczukm.xyz", 69 | "contributions": [ 70 | "code" 71 | ] 72 | } 73 | ], 74 | "contributorsPerLine": 7, 75 | "skipCi": true, 76 | "repoType": "github", 77 | "repoHost": "https://github.com", 78 | "projectName": "ai-rules-builder", 79 | "projectOwner": "przeprogramowani" 80 | } 81 | -------------------------------------------------------------------------------- /.cursor/rules/astro.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Building server-side Astro pages 3 | globs: 4 | alwaysApply: false 5 | --- 6 | ### Guidelines for Astro 7 | 8 | - Use content collections with type safety for blog posts, documentation, etc. 
9 | - Leverage Server Endpoints for API routes 10 | - Use POST, GET - uppercase format for endpoint handlers 11 | - Use `export const prerender = false` for API routes 12 | - Use zod for input validation in API routes 13 | - Implement or reuse middleware for request/response modification 14 | - Use image optimization with the Astro Image integration 15 | - Implement hybrid rendering with server-side rendering where needed 16 | - Use Astro.cookies for server-side cookie management 17 | - Leverage import.meta.env for environment variables 18 | - Always check if you're asked to create public or private pages (if public, update `src/middleware/index.ts`) to allow non-auth browsing -------------------------------------------------------------------------------- /.cursor/rules/github-action.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: 3 | globs: 4 | alwaysApply: false 5 | --- 6 | 7 | ## Github Action Rules 8 | 9 | - Check if `package.json` exists in project root and summarize key scripts 10 | - Check if `.nvmrc` exists in project root 11 | - Check if `.env.example` exists in project root to identify key `env:` variables 12 | - Always use `git branch -a | cat` to verify whether we use `main` or `master` branch 13 | - Always use `env:` variables and secrets attached to jobs instead of global workflows 14 | - Always use `npm ci` for Node-based dependency setup 15 | - Extract common steps into composite actions in separate files 16 | - Once you're done, as a final step conduct the following: 17 | 18 | 1) For each public action always use "Run Terminal" to see what is the most up-to-date version (use only major version): 19 | 20 | ```bash 21 | curl -s https://api.github.com/repos/{owner}/{repo}/releases/latest | grep '"tag_name":' | sed -E 's/.*"v([0-9]+).*/\1/' 22 | ``` 23 | 24 | 2) (Ask if needed) Use "Run Terminal" to fetch README.md and see if we're not using any deprecated actions by mistake: 25 | 26 | ```bash 27 | curl -s https://raw.githubusercontent.com/{owner}/{repo}/refs/tags/v{TAG_VERSION}/README.md 28 | ``` 29 | 30 | 3) (Ask if needed) Use "Run Terminal" to fetch repo metadata and see if we're not using any deprecated actions by mistake: 31 | 32 | ```bash 33 | curl -s https://api.github.com/repos/{owner}/{repo} | grep '"archived":' 34 | ``` 35 | 36 | 4) (Ask if needed) In case of linter issues related to action parameters, try to fetch action description directly from GitHub and use the following command: 37 | 38 | ```bash 39 | curl -s https://raw.githubusercontent.com/{owner}/{repo}/refs/heads/{main/master}/action.yml 40 | ``` -------------------------------------------------------------------------------- /.cursor/rules/playwright-e2e-testing.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: E2E testing with Playwright 3 | globs: 4 | alwaysApply: false 5 | --- 6 | 7 | ## TESTING 8 | 9 | ### Guidelines for E2E 10 | 11 | #### PLAYWRIGHT 12 | 13 | - Initialize configuration only with Chromium/Desktop Chrome browser 14 | - Use browser contexts for isolating test environments 15 | - Implement the Page Object Model for maintainable tests in ./e2e/page-objects 16 | - Use locators for resilient element selection 17 | - Leverage API testing for backend validation 18 | - Implement visual comparison with expect(page).toHaveScreenshot() 19 | - Use the codegen tool for test recording 20 | - Leverage trace viewer for debugging test failures 21 | - Implement test hooks for 
setup and teardown 22 | - Use expect assertions with specific matchers 23 | - Leverage parallel execution for faster test runs 24 | - Follow 'Arrange', 'Act', 'Assert' approach to test structure for simplicity and readability. 25 | 26 | 27 | -------------------------------------------------------------------------------- /.cursor/rules/react-development.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Developing React components in Astro application 3 | globs: 4 | alwaysApply: false 5 | --- 6 | ## Frontend 7 | 8 | ### Guidelines for React 9 | 10 | #### React Coding Standards 11 | 12 | - Use functional components with hooks instead of class components 13 | - Implement React.memo() for expensive components that render often with the same props 14 | - Utilize React.lazy() and Suspense for code-splitting and performance optimization 15 | - Use the useCallback hook for event handlers passed to child components to prevent unnecessary re-renders 16 | - Prefer useMemo for expensive calculations to avoid recomputation on every render 17 | - Prefer relying on Tailwind media queries instead of manual style recalculations 18 | - If there's a need to attach React components to Astro pages and make them browser-first (i.e. using window), use client:only directive to make the component exclusively run on the client. 19 | - Use Tailwind responsive variants (sm:, md:, lg:, etc.) for adaptive designs - under no circumstances calculate this manually -------------------------------------------------------------------------------- /.cursor/rules/supabase-migrations.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: 3 | globs: supabase/migrations/**/*.sql 4 | alwaysApply: false 5 | --- 6 | # Database: Create migration 7 | 8 | You are a Postgres Expert who loves creating secure database schemas. 9 | 10 | This project uses the migrations provided by the Supabase CLI. 11 | 12 | ## Creating a migration file 13 | 14 | Given the context of the user's message, create a database migration file inside the folder `supabase/migrations/`. 15 | 16 | The file MUST following this naming convention: 17 | 18 | The file MUST be named in the format `YYYYMMDDHHmmss_short_description.sql` with proper casing for months, minutes, and seconds in UTC time: 19 | 20 | 1. `YYYY` - Four digits for the year (e.g., `2024`). 21 | 2. `MM` - Two digits for the month (01 to 12). 22 | 3. `DD` - Two digits for the day of the month (01 to 31). 23 | 4. `HH` - Two digits for the hour in 24-hour format (00 to 23). 24 | 5. `mm` - Two digits for the minute (00 to 59). 25 | 6. `ss` - Two digits for the second (00 to 59). 26 | 7. Add an appropriate description for the migration. 27 | 28 | For example: 29 | 30 | ``` 31 | 20240906123045_create_profiles.sql 32 | ``` 33 | 34 | 35 | ## SQL Guidelines 36 | 37 | Write Postgres-compatible SQL code for Supabase migration files that: 38 | 39 | - Includes a header comment with metadata about the migration, such as the purpose, affected tables/columns, and any special considerations. 40 | - Includes thorough comments explaining the purpose and expected behavior of each migration step. 41 | - Write all SQL in lowercase. 42 | - Add copious comments for any destructive SQL commands, including truncating, dropping, or column alterations. 43 | - When creating a new table, you MUST enable Row Level Security (RLS) even if the table is intended for public access. 
44 | - When creating RLS Policies 45 | - Ensure the policies cover all relevant access scenarios (e.g. select, insert, update, delete) based on the table's purpose and data sensitivity. 46 | - If the table is intended for public access the policy can simply return `true`. 47 | - RLS Policies should be granular: one policy for `select`, one for `insert` etc) and for each supabase role (`anon` and `authenticated`). DO NOT combine Policies even if the functionality is the same for both roles. 48 | - Include comments explaining the rationale and intended behavior of each security policy 49 | 50 | The generated SQL code should be production-ready, well-documented, and aligned with Supabase's best practices. 51 | -------------------------------------------------------------------------------- /.cursor/rules/test-plan.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: 3 | globs: 4 | alwaysApply: false 5 | --- 6 | Jesteś doświadczonym inżynierem QA, którego zadaniem jest stworzenie kompleksowego planu testów dla projektu programistycznego. Przeanalizuj poniższe informacje o projekcie: 7 | 8 | 9 | 10 | 11 | 12 | 13 | [tech-stack.md](mdc:.ai/tech-stack.md) 14 | 15 | 16 | Twoim zadaniem jest wygenerowanie szczegółowego planu testów, który będzie dostosowany do specyfiki projektu, uwzględniając wykorzystywane technologie, strukturę kodu oraz kluczowe elementy repozytorium. Plan testów powinien być napisany w języku polskim. 17 | 18 | Przed stworzeniem planu testów, przeprowadź dogłębną analizę projektu wewnątrz bloku w swoim bloku myślowym. W analizie uwzględnij: 19 | 20 | 1. Kluczowe komponenty projektu wynikające z analizy kodu: 21 | - Wymień i opisz główne komponenty projektu 22 | 2. Specyfikę stosu technologicznego i jego wpływ na strategię testowania: 23 | - Przeanalizuj każdy element stosu technologicznego i jego implikacje dla testowania 24 | 3. Priorytety testowe bazujące na strukturze repozytorium: 25 | - Zidentyfikuj i uszereguj obszary testowe według ważności 26 | 4. Potencjalne obszary ryzyka wymagające szczególnej uwagi w testach: 27 | - Wymień potencjalne ryzyka i uzasadnij, dlaczego wymagają specjalnej uwagi 28 | 29 | Po zakończeniu analizy, stwórz plan testów wewnątrz bloku . Plan powinien zawierać: 30 | 31 | 1. Wprowadzenie i cele testowania 32 | 2. Zakres testów 33 | 3. Typy testów do przeprowadzenia (np. testy jednostkowe, integracyjne, wydajnościowe) 34 | 4. Scenariusze testowe dla kluczowych funkcjonalności 35 | 5. Środowisko testowe 36 | 6. Narzędzia do testowania 37 | 7. Harmonogram testów 38 | 8. Kryteria akceptacji testów 39 | 9. Role i odpowiedzialności w procesie testowania 40 | 10. Procedury raportowania błędów 41 | 42 | Pamiętaj, aby plan testów był: 43 | - Dokładnie dostosowany do kontekstu projektu 44 | - Uwzględniał specyfikę wykorzystywanych technologii 45 | - Priorytetyzował kluczowe elementy repozytorium 46 | - Był napisany w języku polskim 47 | - Prezentował wysoką jakość i profesjonalizm 48 | 49 | Rozpocznij od analizy, a następnie przejdź do tworzenia planu testów. Twój końcowy wynik powinien składać się tylko z planu testów i nie powinien powielać ani streszczać żadnej pracy wykonanej w bloku analizy projektu. 50 | 51 | Przedstaw ten plan w formacie Markdown. 
-------------------------------------------------------------------------------- /.cursor/rules/vitest-unit-testing.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Unit testing with Vitest and React Testing Library 3 | globs: 4 | alwaysApply: false 5 | --- 6 | 7 | ## TESTING 8 | 9 | ### Guidelines for UNIT TESTING 10 | 11 | #### VITEST 12 | 13 | - Leverage the `vi` object for test doubles - Use `vi.fn()` for function mocks, `vi.spyOn()` to monitor existing functions, and `vi.stubGlobal()` for global mocks. Prefer spies over mocks when you only need to verify interactions without changing behavior. 14 | 15 | - Master `vi.mock()` factory patterns - Place mock factory functions at the top level of your test file, return typed mock implementations, and use `mockImplementation()` or `mockReturnValue()` for dynamic control during tests. Remember the factory runs before imports are processed. 16 | 17 | - Create setup files for reusable configuration - Define global mocks, custom matchers, and environment setup in dedicated files referenced in your `vitest.config.ts`. This keeps your test files clean while ensuring consistent test environments. 18 | 19 | - Use inline snapshots for readable assertions - Replace complex equality checks with `expect(value).toMatchInlineSnapshot()` to capture expected output directly in your test file, making changes more visible in code reviews. 20 | 21 | - Monitor coverage with purpose - Configure coverage thresholds in `vitest.config.ts` to ensure critical code paths are tested, but focus on meaningful tests rather than arbitrary coverage percentages. 22 | 23 | - Make watch mode part of your workflow - Run `vitest --watch` during development for instant feedback as you modify code, filtering tests with `-t` to focus on specific areas under development. 24 | 25 | - Explore UI mode for complex test suites - Use `vitest --ui` to visually navigate large test suites, inspect test results, and debug failures more efficiently during development. 26 | 27 | - Handle optional dependencies with smart mocking - Use conditional mocking to test code with optional dependencies by implementing `vi.mock()` with the factory pattern for modules that might not be available in all environments. 28 | 29 | - Configure jsdom for DOM testing - Set `environment: 'jsdom'` in your configuration for frontend component tests and combine with testing-library utilities for realistic user interaction simulation. 30 | 31 | - Structure tests for maintainability - Group related tests with descriptive `describe` blocks, use explicit assertion messages, and follow the Arrange-Act-Assert pattern to make tests self-documenting. 32 | 33 | - Leverage TypeScript type checking in tests - Enable strict typing in your tests to catch type errors early, use `expectTypeOf()` for type-level assertions, and ensure mocks preserve the original type signatures. 34 | 35 | - Follow 'Arrange', 'Act', 'Assert' approach to test structure for simplicity and readability. 
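For illustration, here is a minimal example of how several of the guidelines above (Arrange-Act-Assert structure, `vi.fn()` for test doubles, jsdom with React Testing Library) might look in practice. The `SaveButton` component is hypothetical and not part of this repository.

```tsx
import { describe, it, expect, vi } from 'vitest';
import { fireEvent, render, screen } from '@testing-library/react';

// Hypothetical component, used only to illustrate the testing guidelines above.
function SaveButton({ onSave }: { onSave: () => void }) {
  return (
    <button type="button" onClick={onSave}>
      Save
    </button>
  );
}

describe('SaveButton', () => {
  it('calls the onSave handler exactly once when clicked', () => {
    // Arrange
    const onSave = vi.fn();
    render(<SaveButton onSave={onSave} />);

    // Act
    fireEvent.click(screen.getByRole('button', { name: 'Save' }));

    // Assert
    expect(onSave).toHaveBeenCalledTimes(1);
  });
});
```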
36 | 37 | -------------------------------------------------------------------------------- /.cursorignore: -------------------------------------------------------------------------------- 1 | .env 2 | .env.test 3 | .env.local 4 | .env.integration 5 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = crlf 5 | insert_final_newline = true 6 | indent_style = space 7 | indent_size = 2 8 | max_line_length = 120 9 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | # Include in .env & .env.test 2 | PUBLIC_ENV_NAME=### 3 | SUPABASE_URL=https://PROJECT_ID.supabase.co 4 | SUPABASE_PUBLIC_KEY=### 5 | 6 | CF_CAPTCHA_SITE_KEY=### 7 | CF_CAPTCHA_SECRET_KEY=### 8 | 9 | # Include in .env.test 10 | E2E_USERNAME_ID=### 11 | E2E_USERNAME=### 12 | E2E_PASSWORD=### -------------------------------------------------------------------------------- /.eslintrc.cjs: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | extends: [ 3 | 'eslint:recommended', 4 | 'plugin:@typescript-eslint/recommended', 5 | 'plugin:astro/recommended', 6 | ], 7 | parser: '@typescript-eslint/parser', 8 | plugins: ['@typescript-eslint'], 9 | overrides: [ 10 | { 11 | files: ['*.astro'], 12 | parser: 'astro-eslint-parser', 13 | parserOptions: { 14 | parser: '@typescript-eslint/parser', 15 | extraFileExtensions: ['.astro'], 16 | }, 17 | }, 18 | ], 19 | env: { 20 | browser: true, 21 | node: true, 22 | }, 23 | root: true, 24 | }; 25 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | # Description 2 | 3 | Please include a summary of the changes and the related issue. Please also include relevant motivation and context. 4 | 5 | ## Roadmap alignment 6 | 7 | To maintain project quality and avoid wasted effort, all major changes must start with a GitHub issue discussion. Please create an issue describing your proposed changes, wait for maintainer feedback and approval, and only then proceed with coding. This helps us ensure alignment with project goals before you invest time in implementation. 8 | 9 | - [ ] I have opened an issue first and received approval before working on this PR. 10 | 11 | ## Type of change 12 | 13 | Please delete options that are not relevant. 14 | 15 | - [ ] Bug fix (non-breaking change which fixes an issue) 16 | - [ ] New feature (non-breaking change which adds functionality) 17 | - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) 18 | - [ ] Documentation update 19 | 20 | # How Has This Been Tested? 21 | 22 | Please describe the tests that you ran to verify your changes. 23 | 24 | - [ ] Manual Tests 25 | - [ ] Unit Tests 26 | - [ ] E2E Tests 27 | 28 | ## Important Note for Fork-Based PRs 29 | 30 | If you're submitting a PR from a forked repository, please note that E2E tests require specific repository-level environment (`integration`) and secrets to be set up. 
These are described in `.env.example` and include: 31 | 32 | ```bash 33 | PUBLIC_ENV_NAME=integration 34 | SUPABASE_URL=### 35 | SUPABASE_PUBLIC_KEY=### 36 | 37 | E2E_USERNAME_ID=### 38 | E2E_USERNAME=### 39 | E2E_PASSWORD=### 40 | ``` 41 | 42 | Please ensure these are properly configured in your fork's repository settings under "Secrets and variables" → "Actions" before running E2E tests. 43 | -------------------------------------------------------------------------------- /.github/workflows/deploy-mcp-on-merge.yml: -------------------------------------------------------------------------------- 1 | name: Deploy MCP Server (Production) 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | paths: 8 | - 'mcp-server/**' 9 | 10 | jobs: 11 | deploy-mcp-worker: 12 | name: Deploy Worker (mcp-server) 13 | runs-on: ubuntu-latest 14 | environment: production 15 | steps: 16 | - name: Checkout repository 17 | uses: actions/checkout@v4 18 | 19 | # We need Node.js to generate rules and run wrangler/npm 20 | - name: Set up Node.js 21 | uses: actions/setup-node@v4 22 | with: 23 | node-version-file: '.nvmrc' # Assuming .nvmrc is in the root 24 | cache: 'npm' 25 | cache-dependency-path: '**/package-lock.json' # Cache npm deps for root and worker 26 | 27 | - name: Install root dependencies 28 | run: npm ci 29 | 30 | - name: Generate preparedRules.json 31 | run: npm run generate-rules # Assuming direct execution works 32 | 33 | - name: Install worker dependencies 34 | run: cd mcp-server && npm ci 35 | 36 | - name: Deploy Worker (mcp-server) 37 | uses: cloudflare/wrangler-action@v3 38 | with: 39 | apiToken: ${{ secrets.CLOUDFLARE_WORKER_TOKEN }} 40 | accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} 41 | workingDirectory: './mcp-server' 42 | command: deploy 43 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # build output 2 | dist/ 3 | 4 | # generated types 5 | .astro/ 6 | 7 | # dependencies 8 | node_modules/ 9 | 10 | # logs 11 | npm-debug.log* 12 | yarn-debug.log* 13 | yarn-error.log* 14 | pnpm-debug.log* 15 | 16 | # environment variables 17 | .env 18 | .env.production 19 | .env.test 20 | .env.integration 21 | .env.local 22 | 23 | # macOS-specific files 24 | .DS_Store 25 | 26 | # jetbrains setting folder 27 | .idea/ 28 | 29 | playwright/.auth 30 | test-results 31 | coverage 32 | playwright-report/* 33 | playwright-report/index.html 34 | 35 | # Generated data files 36 | src/data/preparedRules.json 37 | mcp-server/src/preparedRules.json 38 | -------------------------------------------------------------------------------- /.husky/pre-commit: -------------------------------------------------------------------------------- 1 | npx lint-staged 2 | -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | 22 -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | src/layouts/partials/GTMContainer.astro -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true, 3 | "trailingComma": "all", 4 | "astroAllowShorthand": false, 5 | "astroSkipFrontmatter": false, 6 | "printWidth": 100, 7 | "plugins": 
["prettier-plugin-astro"], 8 | "overrides": [ 9 | { 10 | "files": "*.astro", 11 | "options": { 12 | "parser": "astro" 13 | } 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": ["astro-build.astro-vscode", "esbenp.prettier-vscode"], 3 | "unwantedRecommendations": [] 4 | } 5 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "command": "./node_modules/.bin/astro dev", 6 | "name": "Development server", 7 | "request": "launch", 8 | "type": "node-terminal" 9 | } 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /.vscode/markdown.css: -------------------------------------------------------------------------------- 1 | code { 2 | font-family: 'FiraCode Nerd Font Mono', 'Consolas', 'Source Code Pro', monospace; 3 | font-size: 12px; 4 | -webkit-font-smoothing: antialiased; 5 | } 6 | 7 | pre code { 8 | font-family: 'FiraCode Nerd Font Mono', 'Consolas', 'Source Code Pro', monospace; 9 | font-size: 12px; 10 | -webkit-font-smoothing: antialiased; 11 | } 12 | 13 | .vscode-body pre code { 14 | font-family: 'FiraCode Nerd Font Mono', 'Consolas', 'Source Code Pro', monospace !important; 15 | } 16 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "typescript.tsdk": "node_modules/typescript/lib" 3 | } 4 | -------------------------------------------------------------------------------- /astro.config.mjs: -------------------------------------------------------------------------------- 1 | // @ts-check 2 | import { defineConfig, envField } from 'astro/config'; 3 | import tailwindcss from '@tailwindcss/vite'; 4 | import react from '@astrojs/react'; 5 | 6 | import cloudflare from '@astrojs/cloudflare'; 7 | 8 | // https://astro.build/config 9 | export default defineConfig({ 10 | output: 'server', 11 | server: { 12 | port: 3000, 13 | }, 14 | env: { 15 | schema: { 16 | PUBLIC_ENV_NAME: envField.string({ context: 'server', access: 'secret' }), 17 | SUPABASE_URL: envField.string({ context: 'server', access: 'secret' }), 18 | SUPABASE_PUBLIC_KEY: envField.string({ context: 'server', access: 'secret' }), 19 | SUPABASE_SERVICE_ROLE_KEY: envField.string({ context: 'server', access: 'secret' }), 20 | CF_CAPTCHA_SITE_KEY: envField.string({ 21 | context: 'server', 22 | access: 'secret', 23 | }), 24 | CF_CAPTCHA_SECRET_KEY: envField.string({ 25 | context: 'server', 26 | access: 'secret', 27 | }), 28 | }, 29 | }, 30 | vite: { 31 | plugins: [tailwindcss()], 32 | }, 33 | devToolbar: { 34 | enabled: false, 35 | }, 36 | integrations: [react()], 37 | adapter: cloudflare(), 38 | }); 39 | -------------------------------------------------------------------------------- /e2e/auth.setup.ts: -------------------------------------------------------------------------------- 1 | import { test as setup, expect } from '@playwright/test'; 2 | import path from 'path'; 3 | import { fileURLToPath } from 'url'; 4 | 5 | const __filename = fileURLToPath(import.meta.url); 6 | const __dirname = path.dirname(__filename); 7 | const authFile = path.join(__dirname, 
'../playwright/.auth/user.json'); 8 | 9 | const E2E_USERNAME = process.env.E2E_USERNAME; 10 | const E2E_PASSWORD = process.env.E2E_PASSWORD; 11 | 12 | if (!E2E_USERNAME || !E2E_PASSWORD) { 13 | throw new Error('E2E_USERNAME and E2E_PASSWORD must be set'); 14 | } 15 | 16 | setup('authenticate', async ({ page, baseURL }) => { 17 | // Navigate to login page and wait for it to load 18 | await page.goto(`${baseURL}/auth/login`); 19 | 20 | // Wait for and fill email input 21 | const emailInput = page.locator('input[data-testid="auth-input-email"]'); 22 | await emailInput.waitFor({ state: 'visible' }); 23 | await emailInput.click(); 24 | await emailInput.fill(E2E_USERNAME); 25 | await expect(emailInput).toHaveValue(E2E_USERNAME); 26 | 27 | // Wait for and fill password input 28 | const passwordInput = page.locator('input[data-testid="auth-input-password"]'); 29 | await passwordInput.waitFor({ state: 'visible' }); 30 | await passwordInput.click(); 31 | await passwordInput.fill(E2E_PASSWORD); 32 | await expect(passwordInput).toHaveValue(E2E_PASSWORD); 33 | 34 | // Wait for and click submit button 35 | const submitButton = page.locator('button[type="submit"]'); 36 | await submitButton.waitFor({ state: 'visible' }); 37 | await Promise.all([page.waitForNavigation({ waitUntil: 'networkidle' }), submitButton.click()]); 38 | 39 | // Wait for successful navigation and verify we're logged in 40 | await expect(page.getByRole('heading', { name: 'Rule Builder', level: 2 })).toBeVisible({ 41 | timeout: 10000, 42 | }); 43 | 44 | // Store authentication state 45 | await page.context().storageState({ path: authFile }); 46 | }); 47 | -------------------------------------------------------------------------------- /e2e/global.teardown.ts: -------------------------------------------------------------------------------- 1 | import { test as teardown } from '@playwright/test'; 2 | import { createClient } from '@supabase/supabase-js'; 3 | 4 | teardown('cleanup database', async () => { 5 | console.log('Cleaning up test database...'); 6 | 7 | if (!process.env.SUPABASE_URL!.includes('ueardaqpl')) { 8 | throw new Error('Cannot run teardown on non-test database!'); 9 | } 10 | 11 | const supabase = createClient(process.env.SUPABASE_URL!, process.env.SUPABASE_PUBLIC_KEY!); 12 | 13 | try { 14 | // Sign in with test user credentials to avoid issues with RLS 15 | const { error: signInError } = await supabase.auth.signInWithPassword({ 16 | email: process.env.E2E_USERNAME!, 17 | password: process.env.E2E_PASSWORD!, 18 | }); 19 | 20 | if (signInError) { 21 | console.error('Error signing in:', signInError); 22 | throw signInError; 23 | } 24 | 25 | const { error } = await supabase 26 | .from('collections') 27 | .delete() 28 | .eq('user_id', process.env.E2E_USERNAME_ID); 29 | 30 | if (error) { 31 | console.error('Error cleaning up collections:', error); 32 | throw error; 33 | } 34 | 35 | console.log('Successfully cleaned up collections for E2E test user'); 36 | } catch (error) { 37 | console.error('Failed to clean up database:', error); 38 | throw error; 39 | } 40 | }); 41 | -------------------------------------------------------------------------------- /e2e/home.spec.ts: -------------------------------------------------------------------------------- 1 | import { test, expect } from '@playwright/test'; 2 | import { HomePage } from './page-objects/HomePage'; 3 | 4 | test.describe('Home Page', () => { 5 | test('should have the correct title', async ({ page }) => { 6 | const homePage = new HomePage(page); 7 | await homePage.goto(); 8 | 
9 | const title = await homePage.getTitle(); 10 | expect(title).toContain('10xRules.ai'); 11 | }); 12 | 13 | test('should have a heading', async ({ page }) => { 14 | const homePage = new HomePage(page); 15 | await homePage.goto(); 16 | 17 | await expect(homePage.heading).toBeVisible(); 18 | }); 19 | }); 20 | -------------------------------------------------------------------------------- /e2e/page-objects/HomePage.ts: -------------------------------------------------------------------------------- 1 | import type { Locator, Page } from '@playwright/test'; 2 | 3 | export class HomePage { 4 | readonly page: Page; 5 | readonly heading: Locator; 6 | 7 | constructor(page: Page) { 8 | this.page = page; 9 | this.heading = page.getByRole('heading', { level: 1 }); 10 | } 11 | 12 | async goto() { 13 | await this.page.goto('/'); 14 | } 15 | 16 | async getTitle(): Promise { 17 | return this.page.title(); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /e2e/tests/collections.spec.ts: -------------------------------------------------------------------------------- 1 | import { test, expect } from '@playwright/test'; 2 | import { CollectionsSidebarPage } from '../page-objects/CollectionsSidebarPage'; 3 | import { SaveCollectionDialog } from '../page-objects/SaveCollectionDialog'; 4 | 5 | test.describe('Collections Management', () => { 6 | /** 7 | * Generates a unique collection name for testing 8 | */ 9 | const generateUniqueName = () => { 10 | const timestamp = new Date().toISOString().replace(/[^0-9]/g, ''); 11 | return `Test Collection ${timestamp}`; 12 | }; 13 | 14 | test('should create a new collection', async ({ page }) => { 15 | // Arrange 16 | const sidebarPage = new CollectionsSidebarPage(page); 17 | const saveDialog = new SaveCollectionDialog(page); 18 | const testData = { 19 | name: generateUniqueName(), 20 | description: 'This is a test collection created by E2E test', 21 | }; 22 | 23 | // Navigate to the main page 24 | await page.goto('/'); 25 | 26 | // Act 27 | await sidebarPage.open(); 28 | await sidebarPage.waitForLoad(); 29 | await sidebarPage.clickCreateCollection(); 30 | 31 | const saveResult = await saveDialog.createCollection(testData); 32 | expect(saveResult).toBe(true); 33 | 34 | // Assert 35 | // Wait for the collection to appear in the list and verify its content 36 | const collection = sidebarPage.getCollectionByName(testData.name); 37 | await expect(collection).toBeVisible({ timeout: 5000 }); 38 | 39 | // Verify collection details 40 | await expect(collection.locator('[data-test-id="collection-entry-name"]')).toHaveText( 41 | testData.name, 42 | ); 43 | await expect(collection.locator('[data-test-id="collection-entry-description"]')).toHaveText( 44 | testData.description, 45 | ); 46 | }); 47 | 48 | test('should show error when creating collection without name', async ({ page }) => { 49 | // Arrange 50 | const sidebarPage = new CollectionsSidebarPage(page); 51 | const saveDialog = new SaveCollectionDialog(page); 52 | 53 | // Navigate to the main page 54 | await page.goto('/'); 55 | 56 | // Act 57 | await sidebarPage.open(); 58 | await sidebarPage.waitForLoad(); 59 | await sidebarPage.clickCreateCollection(); 60 | 61 | await saveDialog.waitForOpen(); 62 | const saveResult = await saveDialog.save(); 63 | 64 | // Assert 65 | expect(saveResult).toBe(false); 66 | await expect(saveDialog.form).toBeVisible(); 67 | await expect(saveDialog.errorMessage).toBeVisible(); 68 | expect(await saveDialog.getErrorMessage()).toBe('Name is 
required'); 69 | 70 | // Verify that the dialog is still open and can be cancelled 71 | await saveDialog.cancel(); 72 | await expect(saveDialog.form).toBeHidden(); 73 | }); 74 | }); 75 | -------------------------------------------------------------------------------- /mcp-server/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | 3 | # wrangler files 4 | .wrangler 5 | .dev.vars* 6 | -------------------------------------------------------------------------------- /mcp-server/.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.associations": { 3 | "wrangler.json": "jsonc" 4 | } 5 | } -------------------------------------------------------------------------------- /mcp-server/biome.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://biomejs.dev/schemas/1.6.2/schema.json", 3 | "organizeImports": { 4 | "enabled": true 5 | }, 6 | "files": { 7 | "ignore": ["worker-configuration.d.ts"] 8 | }, 9 | "vcs": { 10 | "enabled": true, 11 | "clientKind": "git", 12 | "useIgnoreFile": true 13 | }, 14 | "linter": { 15 | "enabled": true, 16 | "rules": { 17 | "recommended": true, 18 | "suspicious": { 19 | "noExplicitAny": "off", 20 | "noDebugger": "off", 21 | "noConsoleLog": "off", 22 | "noConfusingVoidType": "off" 23 | }, 24 | "style": { 25 | "noNonNullAssertion": "off" 26 | } 27 | } 28 | }, 29 | "formatter": { 30 | "enabled": true, 31 | "indentWidth": 4, 32 | "lineWidth": 100 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /mcp-server/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "10x-rules-mcp-server", 3 | "version": "0.0.1", 4 | "private": true, 5 | "scripts": { 6 | "deploy": "wrangler deploy", 7 | "dev": "wrangler dev", 8 | "format": "biome format --write", 9 | "lint:fix": "biome lint --fix", 10 | "start": "wrangler dev", 11 | "cf-typegen": "wrangler types" 12 | }, 13 | "devDependencies": { 14 | "@cloudflare/workers-types": "^4.20250427.0", 15 | "typescript": "^5.5.2", 16 | "wrangler": "^4.13.2" 17 | }, 18 | "dependencies": { 19 | "@modelcontextprotocol/sdk": "^1.7.0", 20 | "agents": "^0.0.65", 21 | "zod": "^3.24.2" 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /mcp-server/src/data/rulesProvider.ts: -------------------------------------------------------------------------------- 1 | import preparedRulesData from '../preparedRules.json'; 2 | // Import type from the tools directory 3 | import type { HierarchyNode } from '../tools/rulesTools'; 4 | 5 | interface PreparedRules { 6 | hierarchy: HierarchyNode[]; 7 | rules: Record; 8 | } 9 | 10 | // Type assertion for JSON import. 11 | // Ensure 'resolveJsonModule' is true in your tsconfig.json 12 | const preparedRules = preparedRulesData as PreparedRules; 13 | 14 | /** 15 | * Returns the hierarchical structure of available rules. 16 | */ 17 | export function getRuleHierarchy(): HierarchyNode[] { 18 | // Add basic check in case the json is malformed or empty 19 | if (!preparedRules || !preparedRules.hierarchy) { 20 | console.error('Error: preparedRules.json missing or hierarchy property not found.'); 21 | return []; 22 | } 23 | return preparedRules.hierarchy; 24 | } 25 | 26 | /** 27 | * Returns the rules for a specific library identifier. 
28 | * @param libraryIdentifier The unique identifier for the library (e.g., 'REACT', 'NEXT_JS'). 29 | * @returns An array of rules strings, or undefined if the library identifier is not found. 30 | */ 31 | export function getRulesForLibrary(libraryIdentifier: string): string[] | undefined { 32 | // Add basic check 33 | if (!preparedRules || !preparedRules.rules) { 34 | console.error('Error: preparedRules.json missing or rules property not found.'); 35 | return undefined; 36 | } 37 | return preparedRules.rules[libraryIdentifier]; 38 | } -------------------------------------------------------------------------------- /mcp-server/src/index.ts: -------------------------------------------------------------------------------- 1 | import { McpAgent } from "agents/mcp"; 2 | import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; 3 | import { listAvailableRulesTool, getRuleContentTool } from "./tools/rulesTools"; 4 | import { z } from 'zod'; 5 | 6 | // Define our MCP agent with tools 7 | export class MyMCP extends McpAgent { 8 | server = new McpServer({ 9 | name: "MCP Rules Server", 10 | version: "1.0.0", 11 | }); 12 | 13 | async init() { 14 | // Register listAvailableRulesTool 15 | this.server.tool( 16 | listAvailableRulesTool.name, 17 | listAvailableRulesTool.description, 18 | async () => { 19 | const result = await listAvailableRulesTool.execute(); 20 | return { content: [{ type: 'text', text: JSON.stringify(result) }] }; 21 | } 22 | ); 23 | 24 | const inputSchemaShape = getRuleContentTool.inputSchema instanceof z.ZodObject 25 | ? getRuleContentTool.inputSchema.shape 26 | : {}; 27 | 28 | this.server.tool( 29 | getRuleContentTool.name, 30 | inputSchemaShape, 31 | async (args: unknown) => { 32 | const parsedArgs = getRuleContentTool.inputSchema.safeParse(args); 33 | if (!parsedArgs.success) { 34 | return { content: [{ type: 'text', text: `Invalid input: ${parsedArgs.error.message}`}], isError: true }; 35 | } 36 | const result = await getRuleContentTool.execute(parsedArgs.data); 37 | return { content: [{ type: 'text', text: JSON.stringify(result) }] }; 38 | } 39 | ); 40 | } 41 | } 42 | 43 | // Define more specific types for Env and ExecutionContext if known for the environment 44 | // Example for Cloudflare Workers: 45 | // interface Env { /* ... bindings ... 
*/ } 46 | // interface ExecutionContext { waitUntil(promise: Promise): void; passThroughOnException(): void; } 47 | export default { 48 | fetch(request: Request, env: Env, ctx: ExecutionContext) { 49 | const url = new URL(request.url); 50 | 51 | if (url.pathname === "/sse" || url.pathname === "/sse/message") { 52 | // @ts-expect-error - env is not typed 53 | return MyMCP.serveSSE("/sse").fetch(request, env, ctx); 54 | } 55 | 56 | if (url.pathname === "/mcp") { 57 | // @ts-expect-error - env is not typed 58 | return MyMCP.serve("/mcp").fetch(request, env, ctx); 59 | } 60 | 61 | return new Response("Not found", { status: 404 }); 62 | }, 63 | }; 64 | -------------------------------------------------------------------------------- /mcp-server/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2021", 4 | "lib": ["es2021"], 5 | "jsx": "react-jsx", 6 | "module": "es2022", 7 | "moduleResolution": "Bundler", 8 | "resolveJsonModule": true, 9 | "allowJs": true, 10 | "checkJs": false, 11 | "noEmit": true, 12 | "isolatedModules": true, 13 | "allowSyntheticDefaultImports": true, 14 | "forceConsistentCasingInFileNames": true, 15 | "strict": true, 16 | "skipLibCheck": true, 17 | "types": [ 18 | "@cloudflare/workers-types/2023-07-01" 19 | ] 20 | }, 21 | "include": ["worker-configuration.d.ts", "src/**/*.ts"] 22 | } 23 | -------------------------------------------------------------------------------- /mcp-server/wrangler.jsonc: -------------------------------------------------------------------------------- 1 | /** 2 | * For more details on how to configure Wrangler, refer to: 3 | * https://developers.cloudflare.com/workers/wrangler/configuration/ 4 | */ 5 | { 6 | "$schema": "node_modules/wrangler/config-schema.json", 7 | "name": "10x-rules-mcp-server", 8 | "main": "src/index.ts", 9 | "compatibility_date": "2025-03-10", 10 | "compatibility_flags": [ 11 | "nodejs_compat" 12 | ], 13 | "migrations": [ 14 | { 15 | "new_sqlite_classes": [ 16 | "MyMCP" 17 | ], 18 | "tag": "v1" 19 | } 20 | ], 21 | "durable_objects": { 22 | "bindings": [ 23 | { 24 | "class_name": "MyMCP", 25 | "name": "MCP_OBJECT" 26 | } 27 | ] 28 | }, 29 | "observability": { 30 | "enabled": true 31 | } 32 | /** 33 | * Smart Placement 34 | * Docs: https://developers.cloudflare.com/workers/configuration/smart-placement/#smart-placement 35 | */ 36 | // "placement": { "mode": "smart" }, 37 | 38 | /** 39 | * Bindings 40 | * Bindings allow your Worker to interact with resources on the Cloudflare Developer Platform, including 41 | * databases, object storage, AI inference, real-time communication and more. 42 | * https://developers.cloudflare.com/workers/runtime-apis/bindings/ 43 | */ 44 | 45 | /** 46 | * Environment Variables 47 | * https://developers.cloudflare.com/workers/wrangler/configuration/#environment-variables 48 | */ 49 | // "vars": { "MY_VARIABLE": "production_value" }, 50 | /** 51 | * Note: Use secrets to store sensitive data. 
52 | * https://developers.cloudflare.com/workers/configuration/secrets/ 53 | */ 54 | 55 | /** 56 | * Static Assets 57 | * https://developers.cloudflare.com/workers/static-assets/binding/ 58 | */ 59 | // "assets": { "directory": "./public/", "binding": "ASSETS" }, 60 | 61 | /** 62 | * Service Bindings (communicate between multiple Workers) 63 | * https://developers.cloudflare.com/workers/wrangler/configuration/#service-bindings 64 | */ 65 | // "services": [{ "binding": "MY_SERVICE", "service": "my-service" }] 66 | } 67 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ai-rules-builder", 3 | "type": "module", 4 | "version": "0.0.1", 5 | "scripts": { 6 | "generate-rules": "tsx ./scripts/generate-rules-json.mts", 7 | "dev": "astro dev -- --mode local", 8 | "dev:e2e": "npm run astro dev -- --mode integration", 9 | "build": "astro build", 10 | "preview": "astro preview", 11 | "astro": "astro", 12 | "lint": "eslint \"src/**/*.{ts,tsx,astro}\" --fix", 13 | "lint:check": "eslint \"src/**/*.{ts,tsx,astro}\"", 14 | "format": "prettier \"src/**/*.{astro,ts,tsx,css}\" --write", 15 | "format:check": "prettier \"src/**/*.{astro,ts,tsx,css}\" --check", 16 | "prepare": "husky", 17 | "test": "vitest run", 18 | "test:watch": "vitest", 19 | "test:ui": "vitest --ui", 20 | "test:coverage": "vitest run --coverage", 21 | "test:e2e": "playwright test", 22 | "test:e2e:ui": "playwright test --ui", 23 | "test:e2e:codegen": "playwright codegen" 24 | }, 25 | "dependencies": { 26 | "@astrojs/cloudflare": "12.4.0", 27 | "@astrojs/node": "9.1.3", 28 | "@astrojs/react": "4.2.1", 29 | "@hookform/resolvers": "5.0.1", 30 | "@supabase/ssr": "0.6.1", 31 | "@supabase/supabase-js": "2.49.3", 32 | "@tailwindcss/vite": "4.0.14", 33 | "@types/react-dom": "18.3.5", 34 | "astro": "^5.5.2", 35 | "axios": "1.9.0", 36 | "clsx": "^2.1.1", 37 | "fflate": "^0.8.2", 38 | "lucide-react": "0.479.0", 39 | "lz-string": "^1.5.0", 40 | "react": "18.3.0", 41 | "react-dom": "18.3.0", 42 | "react-hook-form": "7.55.0", 43 | "tailwind-merge": "^3.0.2", 44 | "tailwindcss": "4.0.14", 45 | "typescript": "5.8.2", 46 | "zod": "3.24.2", 47 | "zustand": "5.0.3" 48 | }, 49 | "devDependencies": { 50 | "@playwright/test": "1.51.1", 51 | "@tailwindcss/typography": "0.5.16", 52 | "@testing-library/dom": "10.4.0", 53 | "@testing-library/jest-dom": "6.6.3", 54 | "@testing-library/react": "16.2.0", 55 | "@types/cloudflare-turnstile": "0.2.2", 56 | "@types/react": "18.3.18", 57 | "@typescript-eslint/eslint-plugin": "8.27.0", 58 | "@typescript-eslint/parser": "8.27.0", 59 | "@vitest/coverage-v8": "3.1.1", 60 | "@vitest/ui": "3.1.1", 61 | "astro-eslint-parser": "1.2.2", 62 | "dotenv": "16.4.7", 63 | "eslint": "^8.57.0", 64 | "eslint-plugin-astro": "1.3.1", 65 | "happy-dom": "17.4.4", 66 | "husky": "^9.0.11", 67 | "jsdom": "26.0.0", 68 | "lint-staged": "^15.2.2", 69 | "msw": "2.7.3", 70 | "prettier": "3.5.3", 71 | "prettier-plugin-astro": "0.14.1", 72 | "tsx": "^4.19.3", 73 | "vitest": "3.1.1" 74 | }, 75 | "optionalDependencies": { 76 | "@rollup/rollup-linux-x64-gnu": "4.37.0", 77 | "@tailwindcss/oxide-linux-x64-gnu": "4.0.14", 78 | "lightningcss-linux-x64-gnu": "1.29.2" 79 | }, 80 | "lint-staged": { 81 | "src/**/*.{ts,tsx,astro}": [ 82 | "eslint --fix", 83 | "prettier --write" 84 | ], 85 | "src/**/*.{json,css,md}": [ 86 | "prettier --write" 87 | ] 88 | } 89 | } 90 | 
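A note on how the e2e pieces above fit the Playwright config that follows: the `setup` project runs `e2e/auth.setup.ts` and writes the logged-in session to `playwright/.auth/user.json`, the `chromium` project reuses that storage state, and the `cleanup` project runs `e2e/global.teardown.ts` against the test Supabase project. A minimal sketch of a spec that relies on the stored session is shown below; the file name, test title, and route are illustrative assumptions, not code from the repository.

```ts
// e2e/authenticated.spec.ts (hypothetical name, for illustration only)
import { test, expect } from '@playwright/test';

test('authenticated user lands on the Rule Builder', async ({ page }) => {
  // The `chromium` project loads playwright/.auth/user.json, so no login steps are needed here.
  await page.goto('/');
  await expect(page.getByRole('heading', { name: 'Rule Builder', level: 2 })).toBeVisible();
});
```

Locally, `npm run test:e2e` should be enough to drive the whole chain, since the `webServer` entry starts the app via `npm run dev:e2e`; the setup and teardown steps additionally expect `E2E_USERNAME`, `E2E_PASSWORD`, `E2E_USERNAME_ID`, `SUPABASE_URL`, and `SUPABASE_PUBLIC_KEY` to be defined in `.env.integration`.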
-------------------------------------------------------------------------------- /playwright.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig, devices } from '@playwright/test'; 2 | import path from 'path'; 3 | import { fileURLToPath } from 'url'; 4 | import dotenv from 'dotenv'; 5 | 6 | const __filename = fileURLToPath(import.meta.url); 7 | const __dirname = path.dirname(__filename); 8 | 9 | dotenv.config({ path: path.resolve(__dirname, '.env.integration') }); 10 | 11 | export default defineConfig({ 12 | testDir: './e2e', 13 | fullyParallel: true, 14 | forbidOnly: !!process.env.CI, 15 | retries: process.env.CI ? 2 : 0, 16 | workers: process.env.CI ? 1 : undefined, 17 | reporter: [['html', { open: 'never' }], ['list']], 18 | use: { 19 | baseURL: 'http://localhost:3000', 20 | trace: 'on-first-retry', 21 | screenshot: 'only-on-failure', 22 | }, 23 | projects: [ 24 | // Setup project 25 | { 26 | name: 'setup', 27 | testMatch: /.*\.setup\.ts/, 28 | teardown: 'cleanup', 29 | }, 30 | // Cleanup project 31 | { 32 | name: 'cleanup', 33 | testMatch: /global\.teardown\.ts/, 34 | }, 35 | { 36 | name: 'chromium', 37 | use: { 38 | ...devices['Desktop Chrome'], 39 | storageState: 'playwright/.auth/user.json', 40 | }, 41 | dependencies: ['setup'], 42 | }, 43 | ], 44 | webServer: { 45 | command: 'npm run dev:e2e', 46 | url: 'http://localhost:3000', 47 | reuseExistingServer: !process.env.CI, 48 | timeout: 120000, 49 | }, 50 | }); 51 | -------------------------------------------------------------------------------- /public/demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/przeprogramowani/ai-rules-builder/dcb5841ebdb5679ed7d789b0d8a94cd8cacdc40a/public/demo.png -------------------------------------------------------------------------------- /public/favicon.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 9 | 10 | -------------------------------------------------------------------------------- /scripts/generate-rules-json.mts: -------------------------------------------------------------------------------- 1 | import fs from 'fs/promises'; 2 | import path from 'path'; 3 | import { fileURLToPath } from 'url'; 4 | 5 | // Assuming these paths are correct relative to the script location 6 | const dataDir = path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../src/data'); 7 | const i18nDir = path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../src/i18n'); 8 | const mcpServerSourceDir = path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../mcp-server/src'); 9 | const outputFilePath = path.join(mcpServerSourceDir, 'preparedRules.json'); 10 | 11 | interface TranslationObject { 12 | [key: string]: string; 13 | } 14 | 15 | interface HierarchyNode { 16 | id: string; 17 | name: string; 18 | children?: HierarchyNode[]; 19 | } 20 | 21 | async function generateRulesJson() { 22 | console.log('Starting generation of preparedRules.json...'); 23 | 24 | try { 25 | // Dynamically import modules - adjust paths if necessary 26 | const { Layer, layerToStackMap, stackToLibraryMap } = await import(path.join(dataDir, 'dictionaries.ts')); 27 | const { layerTranslations, stackTranslations, libraryTranslations } = await import(path.join(i18nDir, 'translations.ts')); 28 | // Assuming rules.ts exports a default or named export called libraryRules 29 | const { libraryRules } = await import(path.join(dataDir, 'rules.ts')); 
// Adjust import based on actual export 30 | 31 | if (!libraryRules) { 32 | throw new Error('libraryRules export not found in src/data/rules.ts'); 33 | } 34 | 35 | const hierarchy: HierarchyNode[] = []; 36 | 37 | // Build hierarchy 38 | for (const layerKey of Object.keys(Layer)) { 39 | const layerId = Layer[layerKey as keyof typeof Layer]; 40 | const layerName = (layerTranslations as TranslationObject)[layerId] || layerId; // Use ID as fallback 41 | const layerNode: HierarchyNode = { id: layerId, name: layerName, children: [] }; 42 | 43 | const stacks = layerToStackMap[layerId] || []; 44 | for (const stackId of stacks) { 45 | const stackName = (stackTranslations as TranslationObject)[stackId] || stackId; 46 | const stackNode: HierarchyNode = { id: stackId, name: stackName, children: [] }; 47 | 48 | const libraries = stackToLibraryMap[stackId] || []; 49 | for (const libraryId of libraries) { 50 | const libraryName = (libraryTranslations as TranslationObject)[libraryId] || libraryId; 51 | const libraryNode: HierarchyNode = { id: libraryId, name: libraryName }; 52 | stackNode.children!.push(libraryNode); 53 | } 54 | 55 | // Sort libraries alphabetically by name 56 | stackNode.children!.sort((a, b) => a.name.localeCompare(b.name)); 57 | layerNode.children!.push(stackNode); 58 | } 59 | 60 | // Sort stacks alphabetically by name 61 | layerNode.children!.sort((a, b) => a.name.localeCompare(b.name)); 62 | hierarchy.push(layerNode); 63 | } 64 | 65 | // Sort layers alphabetically by name 66 | hierarchy.sort((a, b) => a.name.localeCompare(b.name)); 67 | 68 | const finalOutput = { 69 | hierarchy, 70 | rules: libraryRules, // Use the imported rules directly 71 | }; 72 | 73 | // Write to file 74 | await fs.writeFile(outputFilePath, JSON.stringify(finalOutput, null, 2)); 75 | console.log(`Successfully generated ${outputFilePath}`); 76 | 77 | } catch (error) { 78 | console.error('Error generating preparedRules.json:', error); 79 | process.exit(1); // Exit with error code 80 | } 81 | } 82 | 83 | generateRulesJson(); -------------------------------------------------------------------------------- /src/assets/demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/przeprogramowani/ai-rules-builder/dcb5841ebdb5679ed7d789b0d8a94cd8cacdc40a/src/assets/demo.png -------------------------------------------------------------------------------- /src/components/Footer.tsx: -------------------------------------------------------------------------------- 1 | import { GitBranch, Rocket, BadgeInfo } from 'lucide-react'; 2 | 3 | export default function Footer() { 4 | return ( 5 | 39 | ); 40 | } 41 | -------------------------------------------------------------------------------- /src/components/MobileNavigation.tsx: -------------------------------------------------------------------------------- 1 | import React, { useCallback, useMemo } from 'react'; 2 | import { Album, Blocks, Eye } from 'lucide-react'; 3 | import { useNavigationStore } from '../store/navigationStore'; 4 | import { isFeatureEnabled } from '../features/featureFlags'; 5 | 6 | interface NavigationItemProps { 7 | icon: React.ReactNode; 8 | label: string; 9 | isActive: boolean; 10 | onClick: () => void; 11 | } 12 | 13 | const NavigationItem = React.memo(({ icon, label, isActive, onClick }) => { 14 | return ( 15 | 25 | ); 26 | }); 27 | 28 | NavigationItem.displayName = 'NavigationItem'; 29 | 30 | export const MobileNavigation: React.FC = () => { 31 | const { activePanel, setActivePanel } = 
useNavigationStore(); 32 | const isCollectionsEnabled = isFeatureEnabled('authOnUI'); 33 | 34 | // Memoize panel change handlers to prevent unnecessary re-renders 35 | const handleCollectionsClick = useCallback(() => setActivePanel('collections'), [setActivePanel]); 36 | const handleBuilderClick = useCallback(() => setActivePanel('builder'), [setActivePanel]); 37 | const handlePreviewClick = useCallback(() => setActivePanel('preview'), [setActivePanel]); 38 | 39 | // Memoize navigation items configuration 40 | const navigationItems = useMemo(() => { 41 | const items: NavigationItemProps[] = [ 42 | { 43 | icon: , 44 | label: 'Builder', 45 | isActive: activePanel === 'builder', 46 | onClick: handleBuilderClick, 47 | }, 48 | { 49 | icon: , 50 | label: 'Preview', 51 | isActive: activePanel === 'preview', 52 | onClick: handlePreviewClick, 53 | }, 54 | ]; 55 | 56 | if (isCollectionsEnabled) { 57 | items.unshift({ 58 | icon: , 59 | label: 'Collections', 60 | isActive: activePanel === 'collections', 61 | onClick: handleCollectionsClick, 62 | }); 63 | } 64 | 65 | return items; 66 | }, [ 67 | isCollectionsEnabled, 68 | activePanel, 69 | handleCollectionsClick, 70 | handleBuilderClick, 71 | handlePreviewClick, 72 | ]); 73 | 74 | return ( 75 | 92 | ); 93 | }; 94 | -------------------------------------------------------------------------------- /src/components/Topbar.tsx: -------------------------------------------------------------------------------- 1 | import { WandSparkles } from 'lucide-react'; 2 | import DependencyUploader from './rule-parser/DependencyUploader'; 3 | import { useAuthStore } from '../store/authStore'; 4 | import { useEffect } from 'react'; 5 | import LoginButton from './auth/LoginButton'; 6 | 7 | interface TopbarProps { 8 | title?: string; 9 | initialUser?: { 10 | id: string; 11 | email: string | null; 12 | }; 13 | } 14 | 15 | export default function Topbar({ title = '10xRules.ai', initialUser }: TopbarProps) { 16 | const { setUser } = useAuthStore(); 17 | 18 | // Initialize auth store with user data from server 19 | useEffect(() => { 20 | if (initialUser) { 21 | setUser(initialUser); 22 | } 23 | }, [initialUser, setUser]); 24 | 25 | return ( 26 |
27 |
28 | 29 |
30 | 31 |

32 | {title} 33 |

34 |
35 |
36 | 37 |
38 |
39 | 40 |
41 | 42 |
43 |
44 |
45 | ); 46 | } 47 | -------------------------------------------------------------------------------- /src/components/TwoPane.tsx: -------------------------------------------------------------------------------- 1 | import { useEffect, useState } from 'react'; 2 | import { RuleBuilder } from './rule-builder'; 3 | import { RulePreview } from './rule-preview'; 4 | import CollectionsSidebar from './rule-collections/CollectionsSidebar'; 5 | import { MobileNavigation } from './MobileNavigation'; 6 | import { useNavigationStore } from '../store/navigationStore'; 7 | import { isFeatureEnabled } from '../features/featureFlags'; 8 | import { useTechStackStore } from '../store/techStackStore'; 9 | 10 | function RulesPane() { 11 | const { activePanel, isSidebarOpen, toggleSidebar, setSidebarOpen } = useNavigationStore(); 12 | const isCollectionsEnabled = isFeatureEnabled('authOnUI'); 13 | 14 | // Sync the local state with the store on component mount 15 | useEffect(() => { 16 | // If on desktop, default sidebar to closed 17 | const isDesktop = window.matchMedia('(min-width: 768px)').matches; 18 | if (isDesktop) { 19 | setSidebarOpen(false); 20 | } 21 | }, [setSidebarOpen]); 22 | 23 | return ( 24 |
25 | {isCollectionsEnabled && ( 26 | 27 | )} 28 | 29 |
30 |
37 | 38 |
39 | 40 |
47 | 48 |
49 |
50 | 51 | 52 |
53 | ); 54 | } 55 | 56 | type TwoPaneProps = { 57 | initialUrl: URL; 58 | }; 59 | 60 | export default function TwoPane({ initialUrl }: TwoPaneProps) { 61 | const { anyLibrariesToLoad } = useTechStackStore(); 62 | const [isHydrated, setIsHydrated] = useState(false); 63 | 64 | const shouldWaitForHydration = anyLibrariesToLoad(initialUrl) && !isHydrated; 65 | 66 | useEffect(() => { 67 | setIsHydrated(true); 68 | }, []); 69 | 70 | if (shouldWaitForHydration) { 71 | return
Loading...
; 72 | } 73 | 74 | return ; 75 | } 76 | -------------------------------------------------------------------------------- /src/components/auth/AuthInput.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { transitions } from '../../styles/theme'; 3 | 4 | interface AuthInputProps extends React.InputHTMLAttributes { 5 | label: string; 6 | error?: string; 7 | } 8 | 9 | const AuthInput = React.forwardRef( 10 | ({ label, error, id, ...inputProps }, ref) => { 11 | return ( 12 |
13 | 16 | 29 | {error &&

{error}

} 30 |
31 | ); 32 | }, 33 | ); 34 | AuthInput.displayName = 'AuthInput'; 35 | export default AuthInput; 36 | -------------------------------------------------------------------------------- /src/components/auth/AuthLayout.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | interface AuthLayoutProps { 3 | children: React.ReactNode; 4 | title: string; 5 | subtitle?: string; 6 | } 7 | 8 | export const AuthLayout: React.FC = ({ children, title, subtitle }) => { 9 | return ( 10 |
11 |
12 |
13 |

{title}

14 | {subtitle &&

{subtitle}

} 15 |
16 |
17 | {children} 18 |
19 |
20 |
21 | 22 |
23 | ); 24 | }; 25 | 26 | export default AuthLayout; 27 | -------------------------------------------------------------------------------- /src/components/auth/LoginButton.tsx: -------------------------------------------------------------------------------- 1 | import { LogIn, LogOut } from 'lucide-react'; 2 | import { useAuthStore } from '../../store/authStore'; 3 | import { isFeatureEnabled } from '../../features/featureFlags'; 4 | 5 | export default function LoginButton() { 6 | const { user, logout } = useAuthStore(); 7 | 8 | // If auth feature is disabled, render nothing 9 | if (!isFeatureEnabled('authOnUI')) { 10 | return null; 11 | } 12 | 13 | const handleLogout = async () => { 14 | try { 15 | await fetch('/api/auth/logout', { method: 'POST' }); 16 | logout(); 17 | window.location.href = '/auth/login'; 18 | } catch (error) { 19 | console.error('Logout failed:', error); 20 | } 21 | }; 22 | 23 | if (user) { 24 | return ( 25 |
26 | 27 | {user.email} 28 | 29 | 37 |
38 | ); 39 | } 40 | 41 | return ( 42 | 47 | 48 | Login 49 | 50 | ); 51 | } 52 | -------------------------------------------------------------------------------- /src/components/cookie-banner/CookieBanner.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { useCookieConsent } from '../../hooks/useCookieConsent'; 3 | 4 | interface CookieBannerProps { 5 | message?: string; 6 | acceptLabel?: string; 7 | declineLabel?: string; 8 | } 9 | 10 | const CookieBanner: React.FC = ({ 11 | message = 'Allow cookies from tools like Google Analytics to help us improve this website.', 12 | acceptLabel = 'Accept', 13 | declineLabel = 'Decline', 14 | }) => { 15 | const { isConsentGiven, setConsent } = useCookieConsent(); 16 | 17 | if (isConsentGiven === true || isConsentGiven === null) { 18 | return null; 19 | } 20 | 21 | const bannerBaseClasses = 'z-50 bottom-0 w-full p-2 pb-16'; 22 | const bannerVisibilityClasses = isConsentGiven === false ? 'fixed' : 'hidden'; 23 | 24 | return ( 25 |
26 |
27 |

{message}

28 |
29 | 35 | 41 |
42 |
43 |
44 | ); 45 | }; 46 | 47 | export default CookieBanner; 48 | -------------------------------------------------------------------------------- /src/components/privacy/PrivacyLayout.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import Footer from '../Footer.tsx'; 3 | import Topbar from '../Topbar.tsx'; 4 | 5 | const user = Astro.locals.user; 6 | 7 | interface Props { 8 | title: string; 9 | lang: 'en' | 'pl'; 10 | } 11 | 12 | const { title, lang } = Astro.props; 13 | const otherLang = lang === 'en' ? 'pl' : 'en'; 14 | const otherLangPath = `/privacy/${otherLang}`; 15 | --- 16 | 17 |
18 | 19 |
20 | 29 |
30 |
31 | 32 |
33 |
34 |
35 |
36 |
37 | -------------------------------------------------------------------------------- /src/components/rule-builder/LayerSelector.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Layer, getStacksByLayer, getLibrariesByStack } from '../../data/dictionaries'; 3 | import { useTechStackStore } from '../../store/techStackStore'; 4 | 5 | interface LayerSelectorProps { 6 | onSelectLayer: (layer: Layer) => void; 7 | } 8 | 9 | export const LayerSelector: React.FC = ({ onSelectLayer }) => { 10 | const { selectedLibraries, selectLayer } = useTechStackStore(); 11 | 12 | // Get all available layers 13 | const layers = Object.values(Layer); 14 | 15 | const handleLayerClick = (layer: Layer) => { 16 | selectLayer(layer); 17 | onSelectLayer(layer); 18 | }; 19 | 20 | // Check if any libraries from this layer are selected 21 | const hasSelectedLibraries = (layer: Layer): boolean => { 22 | // Get all stacks for this layer 23 | const stacks = getStacksByLayer(layer); 24 | 25 | // Check if any libraries from these stacks are selected 26 | return stacks.some((stack) => { 27 | const libraries = getLibrariesByStack(stack); 28 | return libraries.some((library) => selectedLibraries.includes(library)); 29 | }); 30 | }; 31 | 32 | return ( 33 |
34 |

Select category

35 |
36 | {layers.map((layer) => ( 37 | 53 | ))} 54 |
55 |
56 | ); 57 | }; 58 | 59 | export default LayerSelector; 60 | -------------------------------------------------------------------------------- /src/components/rule-builder/LibraryItem.tsx: -------------------------------------------------------------------------------- 1 | import { Check } from 'lucide-react'; 2 | import type { KeyboardEvent } from 'react'; 3 | import React from 'react'; 4 | import { Library } from '../../data/dictionaries'; 5 | import { getLibraryTranslation } from '../../i18n/translations'; 6 | import type { LayerType } from '../../styles/theme'; 7 | import { getLayerClasses } from '../../styles/theme'; 8 | import { useAccordionContentOpen } from '../ui/Accordion'; 9 | 10 | interface LibraryItemProps { 11 | library: Library; 12 | isSelected: boolean; 13 | onToggle: (library: Library) => void; 14 | layerType: LayerType; 15 | } 16 | 17 | export const LibraryItem: React.FC = React.memo( 18 | ({ library, isSelected, onToggle, layerType }) => { 19 | const isParentAccordionOpen = useAccordionContentOpen(); 20 | const itemClasses = getLayerClasses.libraryItem(layerType, isSelected); 21 | 22 | const handleKeyDown = (e: KeyboardEvent) => { 23 | if (e.key === 'Enter' || e.key === ' ') { 24 | e.preventDefault(); 25 | onToggle(library); 26 | } 27 | }; 28 | 29 | return ( 30 | 43 | ); 44 | }, 45 | ); 46 | 47 | LibraryItem.displayName = 'LibraryItem'; 48 | -------------------------------------------------------------------------------- /src/components/rule-builder/LibrarySelector.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Stack, Library, getLibrariesByStack } from '../../data/dictionaries'; 3 | import { useTechStackStore } from '../../store/techStackStore'; 4 | 5 | interface LibrarySelectorProps { 6 | selectedStack: Stack; 7 | onBackToStacks: () => void; 8 | } 9 | 10 | export const LibrarySelector: React.FC = ({ 11 | selectedStack, 12 | onBackToStacks, 13 | }) => { 14 | const { selectLibrary, unselectLibrary, isLibrarySelected } = useTechStackStore(); 15 | 16 | // Get libraries for the selected stack 17 | const libraries = getLibrariesByStack(selectedStack); 18 | 19 | const handleLibraryToggle = (library: Library) => { 20 | if (isLibrarySelected(library)) { 21 | unselectLibrary(library); 22 | } else { 23 | selectLibrary(library); 24 | } 25 | }; 26 | 27 | return ( 28 |
29 |
30 | 47 |

Select tools

48 |
49 | 50 |
51 | {libraries.map((library) => ( 52 |
58 |
59 | {library} 60 | 69 |
70 |
71 | ))} 72 |
73 |
74 | ); 75 | }; 76 | 77 | export default LibrarySelector; 78 | -------------------------------------------------------------------------------- /src/components/rule-builder/SearchInput.tsx: -------------------------------------------------------------------------------- 1 | import { Search, X } from 'lucide-react'; 2 | import type { ChangeEvent, KeyboardEvent } from 'react'; 3 | import React, { useCallback, useRef, useState } from 'react'; 4 | 5 | interface SearchInputProps { 6 | searchQuery: string; 7 | setSearchQuery: (query: string) => void; 8 | matchCount?: number; 9 | totalCount?: number; 10 | className?: string; 11 | } 12 | 13 | export const SearchInput: React.FC = ({ 14 | searchQuery, 15 | setSearchQuery, 16 | matchCount, 17 | totalCount, 18 | className = '', 19 | }) => { 20 | const inputRef = useRef(null); 21 | const [hasFocus, setHasFocus] = useState(false); 22 | 23 | const handleChange = useCallback( 24 | (e: ChangeEvent) => { 25 | setSearchQuery(e.target.value); 26 | }, 27 | [setSearchQuery], 28 | ); 29 | 30 | const handleClear = useCallback(() => { 31 | setSearchQuery(''); 32 | inputRef.current?.focus(); 33 | }, [setSearchQuery]); 34 | 35 | const handleKeyDown = useCallback( 36 | (e: KeyboardEvent) => { 37 | if (e.key === 'Enter' || e.key === ' ') { 38 | e.preventDefault(); 39 | handleClear(); 40 | } 41 | }, 42 | [handleClear], 43 | ); 44 | 45 | return ( 46 |
51 |
52 | 53 |
54 | setHasFocus(true)} 63 | onBlur={() => setHasFocus(false)} 64 | tabIndex={0} 65 | /> 66 | {searchQuery && ( 67 | 76 | )} 77 | {searchQuery && matchCount !== undefined && totalCount !== undefined && ( 78 |
79 | {matchCount} / {totalCount} 80 |
81 | )} 82 |
83 | ); 84 | }; 85 | 86 | export default React.memo(SearchInput); 87 | -------------------------------------------------------------------------------- /src/components/rule-builder/SelectedRules.tsx: -------------------------------------------------------------------------------- 1 | import { X } from 'lucide-react'; 2 | import React from 'react'; 3 | import type { KeyboardEvent } from 'react'; 4 | import { Library } from '../../data/dictionaries'; 5 | import type { LayerType } from '../../styles/theme'; 6 | import { getLayerClasses } from '../../styles/theme'; 7 | 8 | interface SelectedRulesProps { 9 | selectedLibraries: Library[]; 10 | unselectLibrary: (library: Library) => void; 11 | getLibraryLayerType: (library: Library) => LayerType; 12 | } 13 | 14 | export const SelectedRules: React.FC = React.memo( 15 | ({ selectedLibraries, unselectLibrary, getLibraryLayerType }) => { 16 | const handleKeyDown = (e: KeyboardEvent, library: Library) => { 17 | if (e.key === 'Enter' || e.key === ' ') { 18 | e.preventDefault(); 19 | unselectLibrary(library); 20 | } 21 | }; 22 | 23 | if (selectedLibraries.length === 0) { 24 | return null; 25 | } 26 | 27 | return ( 28 |
29 |

30 | Selected Rules ({selectedLibraries.length}) 31 |

32 |
33 | {selectedLibraries.map((library) => { 34 | const layerType = getLibraryLayerType(library); 35 | return ( 36 |
43 | {library} 44 | 53 |
54 | ); 55 | })} 56 |
57 |
58 | ); 59 | }, 60 | ); 61 | 62 | SelectedRules.displayName = 'SelectedRules'; 63 | -------------------------------------------------------------------------------- /src/components/rule-builder/StackItem.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Library, Stack, getLibrariesByStack } from '../../data/dictionaries'; 3 | import { getStackTranslation } from '../../i18n/translations'; 4 | import type { LayerType } from '../../styles/theme'; 5 | import { getLayerClasses } from '../../styles/theme'; 6 | import { AccordionContent, AccordionItem, AccordionTrigger } from '../ui/Accordion'; 7 | import { LibraryItem } from './LibraryItem'; 8 | 9 | interface StackItemProps { 10 | stack: Stack; 11 | isOpen: boolean; 12 | hasSelected: boolean; 13 | selectedCount: number; 14 | toggleStack: (stack: Stack) => void; 15 | handleLibraryToggle: (library: Library) => void; 16 | isLibrarySelected: (library: Library) => boolean; 17 | layerType: LayerType; 18 | filteredLibraries?: Library[]; 19 | isNested?: boolean; 20 | } 21 | 22 | export const StackItem: React.FC = React.memo( 23 | ({ 24 | stack, 25 | isOpen, 26 | hasSelected, 27 | toggleStack, 28 | handleLibraryToggle, 29 | isLibrarySelected, 30 | layerType, 31 | filteredLibraries, 32 | isNested = false, 33 | }) => { 34 | const containerClasses = getLayerClasses.stackContainer(layerType, hasSelected, isOpen); 35 | 36 | // Use the filtered libraries if provided, otherwise use all libraries for this stack 37 | const libraries = filteredLibraries || getLibrariesByStack(stack); 38 | 39 | return ( 40 | 41 |
42 | toggleStack(stack)} 44 | isOpen={isOpen} 45 | className={`text-white ${getLayerClasses.focusRing(layerType)}`} 46 | > 47 |
48 | {getStackTranslation(stack)} 49 |
50 |
51 | 52 | 53 |
54 | {libraries.map((library) => ( 55 | 62 | ))} 63 |
64 |
65 |
66 |
67 | ); 68 | }, 69 | ); 70 | 71 | StackItem.displayName = 'StackItem'; 72 | -------------------------------------------------------------------------------- /src/components/rule-builder/StackSelector.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Layer, Stack, getStacksByLayer, getLibrariesByStack } from '../../data/dictionaries'; 3 | import { useTechStackStore } from '../../store/techStackStore'; 4 | 5 | interface StackSelectorProps { 6 | selectedLayer: Layer; 7 | onSelectStack: (stack: Stack) => void; 8 | onBackToLayers: () => void; 9 | } 10 | 11 | export const StackSelector: React.FC = ({ 12 | selectedLayer, 13 | onSelectStack, 14 | onBackToLayers, 15 | }) => { 16 | const { selectStack, selectedLibraries } = useTechStackStore(); 17 | 18 | // Get stacks for the selected layer 19 | const stacks = getStacksByLayer(selectedLayer); 20 | 21 | const handleStackClick = (stack: Stack) => { 22 | selectStack(stack); 23 | onSelectStack(stack); 24 | }; 25 | 26 | // Check if any libraries from this stack are selected 27 | const hasSelectedLibraries = (stack: Stack): boolean => { 28 | const libraries = getLibrariesByStack(stack); 29 | return libraries.some((library) => selectedLibraries.includes(library)); 30 | }; 31 | 32 | return ( 33 |
34 |
35 | 52 |

Select rule group

53 |
54 | 55 |
56 | {stacks.map((stack) => ( 57 | 73 | ))} 74 |
75 |
76 | ); 77 | }; 78 | 79 | export default StackSelector; 80 | -------------------------------------------------------------------------------- /src/components/rule-builder/hooks/useMCPDialog.ts: -------------------------------------------------------------------------------- 1 | import { useState } from 'react'; 2 | 3 | export const useMCPDialog = () => { 4 | const [isMCPDialogOpen, setIsMCPDialogOpen] = useState(false); 5 | 6 | const showMCPInstructions = () => { 7 | setIsMCPDialogOpen(true); 8 | }; 9 | 10 | const hideMCPInstructions = () => { 11 | setIsMCPDialogOpen(false); 12 | }; 13 | 14 | return { 15 | isMCPDialogOpen, 16 | showMCPInstructions, 17 | hideMCPInstructions, 18 | }; 19 | }; 20 | -------------------------------------------------------------------------------- /src/components/rule-builder/index.ts: -------------------------------------------------------------------------------- 1 | export { default as RuleBuilder } from './RuleBuilder'; 2 | export { default as LayerSelector } from './LayerSelector'; 3 | export { default as StackSelector } from './StackSelector'; 4 | export { default as LibrarySelector } from './LibrarySelector'; 5 | -------------------------------------------------------------------------------- /src/components/rule-collections/CollectionsSidebar.tsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect } from 'react'; 2 | import { Album, ChevronLeft, LogIn } from 'lucide-react'; 3 | import { transitions } from '../../styles/theme'; 4 | import { CollectionsList } from './CollectionsList'; 5 | import { useCollectionsStore } from '../../store/collectionsStore'; 6 | import { useAuthStore } from '../../store/authStore'; 7 | import { useNavigationStore } from '../../store/navigationStore'; 8 | 9 | interface CollectionsSidebarProps { 10 | isOpen: boolean; 11 | onToggle: () => void; 12 | } 13 | 14 | export const CollectionsSidebar: React.FC = ({ isOpen, onToggle }) => { 15 | const fetchCollections = useCollectionsStore((state) => state.fetchCollections); 16 | const isAuthenticated = useAuthStore((state) => state.isAuthenticated); 17 | const { activePanel } = useNavigationStore(); 18 | const isMobileCollectionsActive = activePanel === 'collections'; 19 | 20 | useEffect(() => { 21 | if (isAuthenticated) { 22 | fetchCollections(); 23 | } 24 | }, [fetchCollections, isAuthenticated]); 25 | 26 | return ( 27 |
35 | {/* Main content area */} 36 |
39 |
40 |

Collections

41 | {isAuthenticated ? ( 42 | 43 | ) : ( 44 |
45 | 46 |

Please log in to view and manage your collections

47 | 48 | Go to login page 49 | 50 |
51 | )} 52 |
53 |
54 | 55 | {/* Toggle button - positioned absolutely at the top-right */} 56 |
59 | 67 |
68 |
69 | ); 70 | }; 71 | 72 | export default CollectionsSidebar; 73 | -------------------------------------------------------------------------------- /src/components/rule-collections/DeletionDialog.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | import { 3 | ConfirmDialog, 4 | ConfirmDialogHeader, 5 | ConfirmDialogContent, 6 | ConfirmDialogActions, 7 | } from '../ui/ConfirmDialog'; 8 | 9 | interface DeletionDialogProps { 10 | isOpen: boolean; 11 | onClose: () => void; 12 | onConfirm: () => Promise; 13 | itemName: string; 14 | title?: string; 15 | } 16 | 17 | export const DeletionDialog: React.FC = ({ 18 | isOpen, 19 | onClose, 20 | onConfirm, 21 | itemName, 22 | title = 'Delete Item', 23 | }) => { 24 | const [isDeleting, setIsDeleting] = useState(false); 25 | 26 | const handleConfirmDelete = async () => { 27 | try { 28 | setIsDeleting(true); 29 | await onConfirm(); 30 | } catch (error) { 31 | console.error('Failed to delete item:', error); 32 | } finally { 33 | setIsDeleting(false); 34 | } 35 | }; 36 | 37 | return ( 38 | 39 | {title} 40 | 41 | Are you sure you want to delete "{itemName}"? This action cannot be undone. 42 | 43 | 44 | 50 | 59 | 60 | 61 | ); 62 | }; 63 | 64 | export default DeletionDialog; 65 | -------------------------------------------------------------------------------- /src/components/rule-collections/UnsavedChangesDialog.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | import { 3 | ConfirmDialog, 4 | ConfirmDialogHeader, 5 | ConfirmDialogContent, 6 | ConfirmDialogActions, 7 | } from '../ui/ConfirmDialog'; 8 | 9 | interface UnsavedChangesDialogProps { 10 | isOpen: boolean; 11 | onClose: () => void; 12 | onSave: () => Promise; 13 | onSkip: () => void; 14 | collectionName: string; 15 | } 16 | 17 | export const UnsavedChangesDialog: React.FC = ({ 18 | isOpen, 19 | onClose, 20 | onSave, 21 | onSkip, 22 | collectionName, 23 | }) => { 24 | const [isSaving, setIsSaving] = useState(false); 25 | 26 | const handleSave = async () => { 27 | try { 28 | setIsSaving(true); 29 | await onSave(); 30 | } catch (error) { 31 | console.error('Failed to save changes:', error); 32 | } finally { 33 | setIsSaving(false); 34 | } 35 | }; 36 | 37 | return ( 38 | 39 | Unsaved Changes 40 | 41 | You have unsaved changes in "{collectionName}". Would you like to save them before 42 | proceeding? 
43 | 44 | 45 | 51 | 57 | 66 | 67 | 68 | ); 69 | }; 70 | 71 | export default UnsavedChangesDialog; 72 | -------------------------------------------------------------------------------- /src/components/rule-parser/DependencyUploader.tsx: -------------------------------------------------------------------------------- 1 | import { Upload } from 'lucide-react'; 2 | import { useDependencyUpload } from './useDependencyUpload'; 3 | 4 | export default function DependencyUploader() { 5 | const { isUploading, uploadStatus, uploadDependencyFile } = useDependencyUpload(); 6 | 7 | const handleSubmit = async (event: React.FormEvent) => { 8 | event.preventDefault(); 9 | 10 | const form = event.currentTarget; 11 | const fileInput = form.querySelector('input[type="file"]') as HTMLInputElement; 12 | const file = fileInput?.files?.[0]; 13 | 14 | if (file) { 15 | await uploadDependencyFile(file); 16 | // Reset the form 17 | form.reset(); 18 | } 19 | }; 20 | 21 | const handleFileChange = (event: React.ChangeEvent) => { 22 | const file = event.target.files?.[0]; 23 | if (file) { 24 | // Automatically submit the form when a file is selected 25 | const form = event.target.closest('form'); 26 | if (form) { 27 | form.requestSubmit(); 28 | } 29 | } 30 | }; 31 | 32 | return ( 33 |
34 |
35 | 59 | 62 |
63 | 64 | {uploadStatus.message && ( 65 | 70 | )} 71 |
72 | ); 73 | } 74 | -------------------------------------------------------------------------------- /src/components/rule-parser/useDependencyUpload.ts: -------------------------------------------------------------------------------- 1 | import { useState, useEffect } from 'react'; 2 | import { useTechStackStore } from '../../store/techStackStore'; 3 | import { Library } from '../../data/dictionaries'; 4 | 5 | export interface UploadStatus { 6 | success?: boolean; 7 | message?: string; 8 | } 9 | 10 | export function useDependencyUpload() { 11 | const [isUploading, setIsUploading] = useState(false); 12 | const [uploadStatus, setUploadStatus] = useState({}); 13 | const { selectLibrary } = useTechStackStore(); 14 | 15 | // Effect to clear the message after 5 seconds 16 | useEffect(() => { 17 | if (uploadStatus.message) { 18 | const timer = setTimeout(() => { 19 | setUploadStatus({}); 20 | }, 5000); 21 | 22 | // Cleanup timer on component unmount or when message changes 23 | return () => clearTimeout(timer); 24 | } 25 | }, [uploadStatus.message]); 26 | 27 | const uploadDependencyFile = async (file: File) => { 28 | if (!file) { 29 | setUploadStatus({ 30 | success: false, 31 | message: 'Please select a file', 32 | }); 33 | return; 34 | } 35 | 36 | try { 37 | setIsUploading(true); 38 | setUploadStatus({}); 39 | 40 | console.log('Uploading file:', file.name, file.type, file.size); 41 | 42 | const formData = new FormData(); 43 | formData.set('file', file); 44 | 45 | const response = await fetch('/api/upload-dependencies', { 46 | method: 'POST', 47 | body: formData, 48 | }); 49 | 50 | const data = await response.json(); 51 | 52 | if (!response.ok) { 53 | throw new Error(data.error || 'Failed to upload file'); 54 | } 55 | 56 | // Update the tech stack store with identified libraries 57 | if (data.libraries && Array.isArray(data.libraries)) { 58 | data.libraries.forEach((lib: Library) => { 59 | selectLibrary(lib); 60 | }); 61 | 62 | setUploadStatus({ 63 | success: true, 64 | message: `Found ${data.mappedLibraries} libraries from ${data.identifiedDependencies} dependencies`, 65 | }); 66 | } else { 67 | setUploadStatus({ 68 | success: true, 69 | message: 'No libraries were identified in the file', 70 | }); 71 | } 72 | } catch (error) { 73 | console.error('Error uploading file:', error); 74 | setUploadStatus({ 75 | success: false, 76 | message: error instanceof Error ? error.message : 'Failed to process file', 77 | }); 78 | } finally { 79 | setIsUploading(false); 80 | } 81 | }; 82 | 83 | return { 84 | isUploading, 85 | uploadStatus, 86 | uploadDependencyFile, 87 | }; 88 | } 89 | -------------------------------------------------------------------------------- /src/components/rule-preview/DependencyUpload.tsx: -------------------------------------------------------------------------------- 1 | import { FileUp } from 'lucide-react'; 2 | import React, { Fragment } from 'react'; 3 | 4 | import type { UploadStatus } from '../rule-parser/useDependencyUpload.ts'; 5 | 6 | // Component for the dependency dropzone overlay 7 | export const DependencyUpload: React.FC<{ uploadStatus: UploadStatus; isDragging: boolean }> = ({ 8 | uploadStatus, 9 | isDragging, 10 | }) => { 11 | if (!isDragging) return null; 12 | 13 | return ( 14 | 15 |
16 | 17 |

18 | Drop dependency file to identify libraries 19 |

20 |

Supported: package.json, requirements.txt

21 |
22 | {/* Upload status message */} 23 | {uploadStatus.message && ( 24 |
25 | {uploadStatus.message} 26 |
27 | )} 28 |
29 | ); 30 | }; 31 | -------------------------------------------------------------------------------- /src/components/rule-preview/MarkdownContentRenderer.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import type { RulesContent } from '../../services/rules-builder/RulesBuilderTypes.ts'; 3 | import RulesPreviewCopyDownloadActions from './RulesPreviewCopyDownloadActions.tsx'; 4 | import { processRulesContentMarkdown } from '../../utils/markdownStyling.tsx'; 5 | 6 | // Component for rendering markdown content 7 | export const MarkdownContentRenderer: React.FC<{ markdownContent: RulesContent[] }> = ({ 8 | markdownContent, 9 | }) => { 10 | return ( 11 |
12 | {markdownContent.map((rule, index) => ( 13 |
17 | {markdownContent.length > 1 && ( 18 |
19 | 20 |
21 | )} 22 | 23 |
24 |             {processRulesContentMarkdown(rule.markdown)}
25 |           
26 |
27 | ))} 28 |
29 | ); 30 | }; 31 | -------------------------------------------------------------------------------- /src/components/rule-preview/RulePreview.tsx: -------------------------------------------------------------------------------- 1 | import React, { useCallback, useEffect, useState } from 'react'; 2 | import { RulesBuilderService } from '../../services/rules-builder/RulesBuilderService.ts'; 3 | import { useProjectStore } from '../../store/projectStore'; 4 | import { useTechStackStore } from '../../store/techStackStore'; 5 | import { useDependencyUpload } from '../rule-parser/useDependencyUpload'; 6 | import { RulePreviewTopbar } from './RulePreviewTopbar'; 7 | import { DependencyUpload } from './DependencyUpload.tsx'; 8 | import { MarkdownContentRenderer } from './MarkdownContentRenderer.tsx'; 9 | import type { RulesContent } from '../../services/rules-builder/RulesBuilderTypes.ts'; 10 | 11 | export const RulePreview: React.FC = () => { 12 | const { selectedLibraries } = useTechStackStore(); 13 | const { projectName, projectDescription, isMultiFileEnvironment } = useProjectStore(); 14 | const [markdownContent, setMarkdownContent] = useState([]); 15 | const [isDragging, setIsDragging] = useState(false); 16 | const { uploadStatus, uploadDependencyFile } = useDependencyUpload(); 17 | 18 | useEffect(() => { 19 | const markdowns = RulesBuilderService.generateRulesContent( 20 | projectName, 21 | projectDescription, 22 | selectedLibraries, 23 | isMultiFileEnvironment, 24 | ); 25 | setMarkdownContent(markdowns); 26 | }, [selectedLibraries, projectName, projectDescription, isMultiFileEnvironment]); 27 | 28 | // Handle drag events 29 | const handleDragOver = useCallback((e: React.DragEvent) => { 30 | e.preventDefault(); 31 | e.stopPropagation(); 32 | setIsDragging(true); 33 | }, []); 34 | 35 | const handleDragLeave = useCallback((e: React.DragEvent) => { 36 | e.preventDefault(); 37 | e.stopPropagation(); 38 | setIsDragging(false); 39 | }, []); 40 | 41 | const handleDrop = useCallback( 42 | async (e: React.DragEvent) => { 43 | e.preventDefault(); 44 | e.stopPropagation(); 45 | setIsDragging(false); 46 | 47 | const files = Array.from(e.dataTransfer.files); 48 | if (files.length > 0) { 49 | const file = files[0]; 50 | // Check if it's a valid dependency file (json or txt) 51 | if (file.name.endsWith('.json') || file.name.endsWith('.txt')) { 52 | await uploadDependencyFile(file); 53 | } else { 54 | console.warn('Invalid file type. Please drop a package.json or requirements.txt file.'); 55 | } 56 | } 57 | }, 58 | [uploadDependencyFile], 59 | ); 60 | 61 | return ( 62 |
68 | 69 | {/* Dropzone overlay */} 70 | 71 | {/* Markdown content */} 72 | 73 |
74 | ); 75 | }; 76 | 77 | export default RulePreview; 78 | -------------------------------------------------------------------------------- /src/components/rule-preview/RulesPath.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { useProjectStore } from '../../store/projectStore'; 3 | import { aiEnvironmentConfig } from '../../data/ai-environments.ts'; 4 | 5 | export const RulesPath: React.FC = () => { 6 | const { selectedEnvironment } = useProjectStore(); 7 | 8 | // Get the appropriate file path based on the selected format 9 | const getFilePath = (): string => aiEnvironmentConfig[selectedEnvironment].filePath; 10 | 11 | return ( 12 |
13 | Path: {getFilePath()} 14 |
15 | ); 16 | }; 17 | 18 | export default RulesPath; 19 | -------------------------------------------------------------------------------- /src/components/rule-preview/RulesPreviewActions.tsx: -------------------------------------------------------------------------------- 1 | import { ExternalLink } from 'lucide-react'; 2 | import React from 'react'; 3 | import { aiEnvironmentConfig } from '../../data/ai-environments.ts'; 4 | import { useProjectStore } from '../../store/projectStore'; 5 | 6 | export const RulesPreviewActions: React.FC = () => { 7 | const { selectedEnvironment } = useProjectStore(); 8 | 9 | return ( 10 | 16 | 17 | 18 | ); 19 | }; 20 | 21 | export default RulesPreviewActions; 22 | -------------------------------------------------------------------------------- /src/components/rule-preview/index.ts: -------------------------------------------------------------------------------- 1 | export { default as RulePreview } from './RulePreview'; 2 | export { default as RulePreviewControls } from './RulePreviewTopbar'; 3 | -------------------------------------------------------------------------------- /src/data/ai-environments.ts: -------------------------------------------------------------------------------- 1 | export enum AIEnvironmentName { 2 | GitHub = 'github', 3 | Cursor = 'cursor', 4 | Windsurf = 'windsurf', 5 | Aider = 'aider', 6 | Cline = 'cline', 7 | Junie = 'junie', 8 | RooCode = 'roocode', 9 | Zed = 'zed', 10 | } 11 | 12 | // Define the AI environment types for easier maintenance 13 | export type AIEnvironment = `${AIEnvironmentName}`; 14 | 15 | type AIEnvironmentConfig = { 16 | [key in AIEnvironmentName]: { 17 | filePath: string; 18 | docsUrl: string; 19 | }; 20 | }; 21 | 22 | export const aiEnvironmentConfig: AIEnvironmentConfig = { 23 | github: { 24 | filePath: '.github/copilot-instructions.md', 25 | docsUrl: 26 | 'https://docs.github.com/en/copilot/customizing-copilot/adding-repository-custom-instructions-for-github-copilot', 27 | }, 28 | cursor: { 29 | filePath: '.cursor/rules/{rule}.mdc', 30 | docsUrl: 'https://docs.cursor.com/context/rules-for-ai', 31 | }, 32 | windsurf: { 33 | filePath: '.windsurfrules', 34 | docsUrl: 'https://docs.codeium.com/windsurf/memories#windsurfrules', 35 | }, 36 | cline: { 37 | filePath: '.clinerules/{rule}.md', 38 | docsUrl: 'https://docs.cline.bot/improving-your-prompting-skills/prompting#clinerules-file', 39 | }, 40 | aider: { 41 | filePath: 'CONVENTIONS.md', 42 | docsUrl: 'https://aider.chat/docs/usage/conventions.html', 43 | }, 44 | junie: { 45 | filePath: '.junie/guidelines.md', 46 | docsUrl: 'https://www.jetbrains.com/guide/ai/article/junie/intellij-idea/', 47 | }, 48 | roocode: { 49 | filePath: '.roo/rules/{rule}.md', 50 | docsUrl: 51 | 'https://docs.roocode.com/features/custom-instructions?_highlight=rules#rules-about-rules-files', 52 | }, 53 | zed: { 54 | filePath: '.rules', 55 | docsUrl: 'https://zed.dev/docs/ai/rules', 56 | }, 57 | }; 58 | -------------------------------------------------------------------------------- /src/data/rules.ts: -------------------------------------------------------------------------------- 1 | import { Library } from './dictionaries'; 2 | import { 3 | frontendRules, 4 | backendRules, 5 | databaseRules, 6 | infrastructureRules, 7 | testingRules, 8 | codingRules, 9 | accessibilityRules, 10 | getRulesForLibrary as getLibraryRules, 11 | getRulesForLibraries as getLibrariesRules, 12 | } from './rules/index'; 13 | 14 | /** 15 | * Combined library rules from all categories 16 | * Cast to Record to 
ensure all libraries are covered 17 | */ 18 | export const libraryRules: Record = { 19 | ...frontendRules, 20 | ...backendRules, 21 | ...databaseRules, 22 | ...infrastructureRules, 23 | ...testingRules, 24 | ...codingRules, 25 | ...accessibilityRules, 26 | } as Record; 27 | 28 | /** 29 | * Get rules for a specific library 30 | * @param library The library to get rules for 31 | * @returns Array of rules for the library 32 | */ 33 | export const getRulesForLibrary = (library: Library): string[] => { 34 | return getLibraryRules(libraryRules, library); 35 | }; 36 | 37 | /** 38 | * Get rules for multiple libraries 39 | * @param libraries Array of libraries to get rules for 40 | * @returns Record with libraries as keys and arrays of rules as values 41 | */ 42 | export const getRulesForLibraries = (libraries: Library[]): Record => { 43 | // Convert the partial record to a full record by filtering only requested libraries 44 | const partialResult = getLibrariesRules(libraryRules, libraries); 45 | 46 | // Create a new record with only the requested libraries 47 | const result: Record = {} as Record; 48 | 49 | // Only include libraries that were requested 50 | libraries.forEach((library) => { 51 | result[library] = partialResult[library] || []; 52 | }); 53 | 54 | return result; 55 | }; 56 | -------------------------------------------------------------------------------- /src/data/rules/database.ts: -------------------------------------------------------------------------------- 1 | import { Library } from '../dictionaries'; 2 | import { type LibraryRulesMap } from './types'; 3 | 4 | /** 5 | * Database library rules (SQL, NoSQL, Graph) 6 | */ 7 | export const databaseRules: LibraryRulesMap = { 8 | // SQL libraries 9 | [Library.POSTGRES]: [ 10 | 'Use connection pooling to manage database connections efficiently', 11 | 'Implement JSONB columns for semi-structured data instead of creating many tables for {{flexible_data}}', 12 | 'Use materialized views for complex, frequently accessed read-only data', 13 | ], 14 | [Library.MYSQL]: [ 15 | 'Use InnoDB storage engine for transactions and foreign key constraints', 16 | 'Implement proper indexing strategies based on {{query_patterns}}', 17 | 'Use connection pooling for better performance', 18 | ], 19 | [Library.SQLSERVER]: [ 20 | 'Use parameterized queries to prevent SQL injection', 21 | 'Implement proper indexing strategies based on query patterns', 22 | 'Use stored procedures for complex business logic that requires database access to {{business_entities}}', 23 | ], 24 | 25 | // NoSQL libraries 26 | [Library.MONGODB]: [ 27 | 'Use the aggregation framework for complex queries instead of multiple queries', 28 | 'Implement schema validation to ensure data consistency for {{document_types}}', 29 | 'Use indexes for frequently queried fields to improve performance', 30 | ], 31 | [Library.DYNAMODB]: [ 32 | 'Design access patterns first, then create tables and indexes to support {{query_requirements}}', 33 | 'Implement single-table design for related entities to minimize RCU/WCU costs', 34 | 'Use sparse indexes and composite keys for efficient querying', 35 | ], 36 | [Library.FIREBASE]: [ 37 | 'Use security rules to enforce access control at the database level for {{user_roles}}', 38 | 'Implement shallow queries to minimize bandwidth usage', 39 | 'Use offline capabilities for better user experience in mobile apps', 40 | ], 41 | 42 | // Graph libraries 43 | [Library.NEO4J]: [ 44 | 'Use parameterized Cypher queries to prevent injection attacks', 45 | 'Implement 
proper indexing on node properties used in WHERE clauses for {{node_types}}', 46 | 'Use the APOC library for common operations not covered by Cypher', 47 | ], 48 | [Library.DGRAPH]: [ 49 | 'Use GraphQL+-/DQL for complex queries instead of multiple simple queries', 50 | 'Implement proper indexing based on {{query_patterns}}', 51 | 'Use transactions for maintaining data consistency', 52 | ], 53 | }; 54 | -------------------------------------------------------------------------------- /src/data/rules/helpers.ts: -------------------------------------------------------------------------------- 1 | import { Library } from '../dictionaries'; 2 | import { type LibraryRulesMap } from './types'; 3 | 4 | /** 5 | * Get rules for a specific library 6 | * @param rulesMap The rules map to get rules from 7 | * @param library The library to get rules for 8 | * @returns Array of rules for the library 9 | */ 10 | export const getRulesForLibrary = (rulesMap: LibraryRulesMap, library: Library): string[] => { 11 | return rulesMap[library] || []; 12 | }; 13 | 14 | /** 15 | * Get rules for multiple libraries 16 | * @param rulesMap The rules map to get rules from 17 | * @param libraries Array of libraries to get rules for 18 | * @returns Record with libraries as keys and arrays of rules as values 19 | */ 20 | export const getRulesForLibraries = ( 21 | rulesMap: LibraryRulesMap, 22 | libraries: Library[], 23 | ): Partial> => { 24 | const result: Partial> = {}; 25 | 26 | libraries.forEach((library) => { 27 | if (rulesMap[library]) { 28 | result[library] = getRulesForLibrary(rulesMap, library); 29 | } 30 | }); 31 | 32 | return result; 33 | }; 34 | -------------------------------------------------------------------------------- /src/data/rules/index.ts: -------------------------------------------------------------------------------- 1 | export * from './types'; 2 | export * from './frontend'; 3 | export * from './backend'; 4 | export * from './database'; 5 | export * from './infrastructure'; 6 | export * from './testing'; 7 | export * from './coding'; 8 | export * from './helpers'; 9 | export * from './accessibility'; 10 | -------------------------------------------------------------------------------- /src/data/rules/infrastructure.ts: -------------------------------------------------------------------------------- 1 | import { Library } from '../dictionaries'; 2 | import { type LibraryRulesMap } from './types'; 3 | 4 | /** 5 | * Infrastructure library rules (CI/CD, Containerization, Cloud) 6 | */ 7 | export const infrastructureRules: LibraryRulesMap = { 8 | // CI/CD libraries 9 | [Library.GITHUB_ACTIONS]: [ 10 | 'Check if `package.json` exists in project root and summarize key scripts', 11 | 'Check if `.nvmrc` exists in project root', 12 | 'Check if `.env.example` exists in project root to identify key `env:` variables', 13 | 'Always use terminal command: `git branch -a | cat` to verify whether we use `main` or `master` branch', 14 | 'Always use `env:` variables and secrets attached to jobs instead of global workflows', 15 | 'Always use `npm ci` for Node-based dependency setup', 16 | 'Extract common steps into composite actions in separate files', 17 | 'Once you\'re done, as a final step conduct the following: for each public action always use "Run Terminal" to see what is the most up-to-date version (use only major version) - extract tag_name from the response:', 18 | '```bash curl -s https://api.github.com/repos/{owner}/{repo}/releases/latest ```', 19 | ], 20 | [Library.JENKINS]: [ 21 | 'Use declarative pipelines 
with Jenkinsfile instead of freestyle jobs', 22 | 'Implement shared libraries for common pipeline steps', 23 | 'Use agents and labels for distributing builds across {{environment_types}}', 24 | ], 25 | [Library.GITLAB_CI]: [ 26 | 'Use includes to reuse configuration across multiple pipelines', 27 | 'Implement caching for dependencies to speed up builds', 28 | 'Use rules and only/except to control when jobs run based on {{branch_patterns}}', 29 | ], 30 | 31 | // Containerization libraries 32 | [Library.DOCKER]: [ 33 | 'Use multi-stage builds to create smaller production images', 34 | 'Implement layer caching strategies to speed up builds for {{dependency_types}}', 35 | 'Use non-root users in containers for better security', 36 | ], 37 | [Library.KUBERNETES]: [ 38 | 'Use Helm charts for packaging and deploying applications', 39 | 'Implement resource requests and limits for all containers based on {{workload_characteristics}}', 40 | 'Use namespaces to organize and isolate resources', 41 | ], 42 | 43 | // Cloud libraries 44 | [Library.AWS]: [ 45 | 'Use Infrastructure as Code (IaC) with AWS CDK or CloudFormation', 46 | 'Implement the principle of least privilege for IAM roles and policies', 47 | 'Use managed services when possible instead of maintaining your own infrastructure for {{service_types}}', 48 | ], 49 | [Library.AZURE]: [ 50 | 'Use Azure Resource Manager (ARM) templates or Bicep for infrastructure as code', 51 | 'Implement Azure AD for authentication and authorization of {{user_types}}', 52 | 'Use managed identities instead of service principals when possible', 53 | ], 54 | [Library.GCP]: [ 55 | 'Use Terraform or Deployment Manager for infrastructure as code', 56 | 'Implement VPC Service Controls for network security around {{sensitive_services}}', 57 | 'Use workload identity for service-to-service authentication', 58 | ], 59 | }; 60 | -------------------------------------------------------------------------------- /src/data/rules/types.ts: -------------------------------------------------------------------------------- 1 | import { Library } from '../dictionaries'; 2 | 3 | /** 4 | * Type definition for library rules 5 | * Using Partial to allow each module to contain only a subset of libraries 6 | */ 7 | export type LibraryRulesMap = Partial>; 8 | -------------------------------------------------------------------------------- /src/db/supabase.client.ts: -------------------------------------------------------------------------------- 1 | import type { AstroCookies } from 'astro'; 2 | import { createServerClient, parseCookieHeader, type CookieOptionsWithName } from '@supabase/ssr'; 3 | import { SUPABASE_URL, SUPABASE_PUBLIC_KEY, SUPABASE_SERVICE_ROLE_KEY } from 'astro:env/server'; 4 | 5 | export const cookieOptions: CookieOptionsWithName = { 6 | path: '/', 7 | secure: true, 8 | httpOnly: true, 9 | sameSite: 'lax', 10 | }; 11 | 12 | type SupabaseContext = { 13 | headers: Headers; 14 | cookies: AstroCookies; 15 | }; 16 | 17 | const createSupabaseInstance = (apiKey: string, context: SupabaseContext) => { 18 | return createServerClient(SUPABASE_URL, apiKey, { 19 | cookieOptions, 20 | cookies: { 21 | // @ts-expect-error - correct implementation per Supabase docs 22 | getAll() { 23 | const cookieHeader = context.headers.get('Cookie') ?? 
''; 24 | return parseCookieHeader(cookieHeader); 25 | }, 26 | setAll(cookiesToSet) { 27 | cookiesToSet.forEach(({ name, value, options }) => 28 | context.cookies.set(name, value, options), 29 | ); 30 | }, 31 | }, 32 | }); 33 | }; 34 | 35 | export const createSupabaseServerInstance = (context: SupabaseContext) => { 36 | return createSupabaseInstance(SUPABASE_PUBLIC_KEY, context); 37 | }; 38 | 39 | export const createSupabaseAdminInstance = (context: SupabaseContext) => { 40 | return createSupabaseInstance(SUPABASE_SERVICE_ROLE_KEY, context); 41 | }; 42 | -------------------------------------------------------------------------------- /src/env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | 3 | import type { SupabaseClient } from '@supabase/supabase-js'; 4 | import type { Database } from './db/database.types.ts'; 5 | import type { Env } from './features/featureFlags'; 6 | 7 | declare global { 8 | interface Window { 9 | onloadTurnstileCallback: () => void; 10 | dataLayer: unknown[]; 11 | gtag: (...args: unknown[]) => void; 12 | clarity: (event: string, enabled?: boolean) => void; 13 | } 14 | namespace App { 15 | interface Locals { 16 | supabase: SupabaseClient; 17 | user?: { 18 | email: string | null; 19 | id: string; 20 | }; 21 | } 22 | } 23 | } 24 | 25 | interface ImportMetaEnv { 26 | readonly PUBLIC_ENV_NAME: Env; 27 | readonly SUPABASE_URL: string; 28 | readonly SUPABASE_PUBLIC_KEY: string; 29 | readonly SUPABASE_SERVICE_ROLE_KEY: string; 30 | readonly CF_CAPTCHA_SITE_KEY: string; 31 | readonly CF_CAPTCHA_SECRET_KEY: string; 32 | } 33 | 34 | interface ImportMeta { 35 | readonly env: ImportMetaEnv; 36 | } 37 | -------------------------------------------------------------------------------- /src/features/featureFlags.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Feature Flags Module 3 | * 4 | * This module provides functionality to control feature availability based on the current environment. 5 | * It supports 'local', 'integration', and 'prod' environments and allows for feature toggling 6 | * at various levels of the application. 
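 * Example (illustrative): gate a route or component with `isFeatureEnabled('collections')`; every flag resolves to false when PUBLIC_ENV_NAME is not set.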
7 | */ 8 | 9 | export type Env = 'local' | 'integration' | 'prod'; 10 | 11 | /** 12 | * Available feature flags in the application 13 | */ 14 | export type FeatureFlag = 'auth' | 'collections' | 'authOnUI'; 15 | type FeatureConfig = { 16 | [E in Env]: { 17 | [K in FeatureFlag]: boolean; 18 | }; 19 | }; 20 | 21 | const featureFlags: FeatureConfig = { 22 | local: { 23 | auth: true, 24 | collections: true, 25 | authOnUI: true, 26 | }, 27 | integration: { 28 | auth: true, 29 | collections: true, 30 | authOnUI: true, 31 | }, 32 | prod: { 33 | auth: true, 34 | collections: true, 35 | authOnUI: true, 36 | }, 37 | }; 38 | 39 | /** 40 | * Gets the current environment from import.meta.env.PUBLIC_ENV_NAME 41 | * Returns null if not set, which disables features 42 | */ 43 | function getCurrentEnv(): Env | null { 44 | return (import.meta.env.PUBLIC_ENV_NAME as Env) || null; 45 | } 46 | 47 | /** 48 | * Checks if a given feature is enabled for the current environment 49 | * 50 | * @param feature - The feature flag to check 51 | * @returns boolean indicating if the feature is enabled 52 | */ 53 | export function isFeatureEnabled(feature: FeatureFlag): boolean { 54 | const env = getCurrentEnv(); 55 | if (!env) { 56 | return false; 57 | } 58 | return featureFlags[env][feature]; 59 | } 60 | 61 | /** 62 | * Gets all available feature flags 63 | * 64 | * @returns Record of all feature flags and their states for the current environment 65 | */ 66 | export function getAllFeatureFlags(): Record<FeatureFlag, boolean> { 67 | const flags = {} as Record<FeatureFlag, boolean>; 68 | // Derive the flag keys from one environment entry; every environment defines the same set 69 | (Object.keys(featureFlags.local) as FeatureFlag[]).forEach((feature) => { 70 | flags[feature] = isFeatureEnabled(feature); 71 | }); 72 | 73 | return flags; 74 | } 75 | -------------------------------------------------------------------------------- /src/hooks/useAuth.ts: -------------------------------------------------------------------------------- 1 | import { useState } from 'react'; 2 | import type { 3 | LoginFormData, 4 | SignupFormData, 5 | ResetPasswordFormData, 6 | UpdatePasswordFormData, 7 | } from '../types/auth'; 8 | import { authService } from '../services/auth'; 9 | 10 | interface User { 11 | id: string; 12 | email: string | null; 13 | } 14 | 15 | export const useAuth = () => { 16 | const [error, setError] = useState<string | null>(null); 17 | const [isLoading, setIsLoading] = useState(false); 18 | 19 | const handleAuthAction = async <T>(action: (data: T) => Promise<{ user: User }>, data: T) => { 20 | try { 21 | setIsLoading(true); 22 | setError(null); 23 | return await action(data); 24 | } catch (err) { 25 | const message = err instanceof Error ?
err.message : 'An unexpected error occurred'; 26 | setError(message); 27 | throw err; 28 | } finally { 29 | setIsLoading(false); 30 | } 31 | }; 32 | 33 | const login = (data: LoginFormData) => handleAuthAction(authService.login, data); 34 | 35 | const signup = (data: SignupFormData) => handleAuthAction(authService.signup, data); 36 | 37 | const resetPassword = (data: ResetPasswordFormData) => 38 | handleAuthAction(authService.resetPassword, data); 39 | 40 | const updatePassword = (data: UpdatePasswordFormData) => 41 | handleAuthAction(authService.updatePassword, data); 42 | 43 | return { 44 | login, 45 | signup, 46 | resetPassword, 47 | updatePassword, 48 | error, 49 | isLoading, 50 | }; 51 | }; 52 | -------------------------------------------------------------------------------- /src/hooks/useCaptcha.ts: -------------------------------------------------------------------------------- 1 | import axios from 'axios'; 2 | import type { CaptchaResponse } from '../services/captcha'; 3 | import { useState, useEffect } from 'react'; 4 | 5 | export const useCaptcha = (cfCaptchaSiteKey: string) => { 6 | const [isCaptchaVerified, setIsCaptchaVerified] = useState(false); 7 | 8 | useEffect(() => { 9 | window.onloadTurnstileCallback = function () { 10 | turnstile.render('#cf-captcha-container', { 11 | theme: 'dark', 12 | sitekey: cfCaptchaSiteKey, 13 | callback: async function (captchaToken) { 14 | try { 15 | const captchaResult = await axios.post('/api/captcha/verify', { 16 | captchaToken, 17 | }); 18 | setIsCaptchaVerified(captchaResult.data.success); 19 | } catch (error) { 20 | console.error('Captcha verification error:', error); 21 | setIsCaptchaVerified(false); 22 | } 23 | }, 24 | }); 25 | }; 26 | }, []); 27 | 28 | return { isCaptchaVerified }; 29 | }; 30 | -------------------------------------------------------------------------------- /src/hooks/useCookieConsent.ts: -------------------------------------------------------------------------------- 1 | import { useState, useEffect, useCallback } from 'react'; 2 | 3 | const COOKIE_CONSENT_KEY = '10xrules-consent'; 4 | 5 | interface ConsentState { 6 | isGiven: boolean | null; 7 | value: boolean | null; 8 | } 9 | 10 | function activateFrozenScripts(): void { 11 | const gtmContainer = document.querySelector('.gtm-container'); 12 | const frozenScripts = document.querySelectorAll( 13 | 'script[type="text/plain"][data-required-consent="true"]', 14 | ); 15 | frozenScripts.forEach((script: HTMLScriptElement) => { 16 | const newScript = document.createElement('script'); 17 | newScript.type = 'text/javascript'; 18 | if (script.src) { 19 | newScript.src = script.src; 20 | } else { 21 | newScript.textContent = script.textContent; 22 | } 23 | gtmContainer?.removeChild(script); 24 | gtmContainer?.appendChild(newScript); 25 | }); 26 | } 27 | 28 | function enableGTagConsent(): void { 29 | if (typeof window.gtag === 'function') { 30 | window.gtag('consent', 'update', { 31 | ad_storage: 'granted', 32 | ad_user_data: 'granted', 33 | ad_personalization: 'granted', 34 | analytics_storage: 'granted', 35 | }); 36 | window.gtag('event', 'custom_consent_initialized'); 37 | } 38 | if (typeof window.clarity === 'function') { 39 | window.clarity('consent'); 40 | } 41 | } 42 | 43 | function onConsentEnabled(): void { 44 | activateFrozenScripts(); 45 | setTimeout(() => { 46 | enableGTagConsent(); 47 | }, 2000); 48 | } 49 | 50 | export function useCookieConsent() { 51 | const [consentState, setConsentState] = useState({ 52 | isGiven: null, 53 | value: null, 54 | }); 55 | 56 | 
useEffect(() => { 57 | const storedValue = window.localStorage.getItem(COOKIE_CONSENT_KEY); 58 | const initialIsGiven = storedValue !== null; 59 | const initialValue = storedValue === 'true'; 60 | 61 | setConsentState({ 62 | isGiven: initialIsGiven, 63 | value: initialValue, 64 | }); 65 | 66 | if (initialValue) { 67 | onConsentEnabled(); 68 | } 69 | }, []); 70 | 71 | const setConsent = useCallback((consent: boolean) => { 72 | setConsentState({ isGiven: true, value: consent }); 73 | window.localStorage.setItem(COOKIE_CONSENT_KEY, consent.toString()); 74 | 75 | if (consent) { 76 | onConsentEnabled(); 77 | } else { 78 | if (typeof window.gtag === 'function') { 79 | window.gtag('consent', 'update', { 80 | ad_storage: 'denied', 81 | ad_user_data: 'denied', 82 | ad_personalization: 'denied', 83 | analytics_storage: 'denied', 84 | }); 85 | } 86 | } 87 | }, []); 88 | 89 | return { 90 | isConsentGiven: consentState.isGiven, 91 | consentValue: consentState.value, 92 | setConsent, 93 | }; 94 | } 95 | -------------------------------------------------------------------------------- /src/hooks/useTokenHashVerification.ts: -------------------------------------------------------------------------------- 1 | import { useState, useEffect } from 'react'; 2 | 3 | export const useTokenHashVerification = () => { 4 | const [verificationError, setVerificationError] = useState(null); 5 | const [isVerified, setIsVerified] = useState(false); 6 | 7 | useEffect(() => { 8 | const verifyToken = async () => { 9 | const tokenHash = new URLSearchParams(window.location.search).get('token_hash'); 10 | if (!tokenHash) { 11 | setVerificationError('No reset token found'); 12 | return; 13 | } 14 | 15 | try { 16 | const response = await fetch('/api/auth/verify-reset-token', { 17 | method: 'POST', 18 | headers: { 'Content-Type': 'application/json' }, 19 | body: JSON.stringify({ token_hash: tokenHash }), 20 | }); 21 | 22 | if (!response.ok) { 23 | const data = (await response.json()) as { error: string }; 24 | throw new Error(data.error || 'Failed to verify token'); 25 | } 26 | 27 | setIsVerified(true); 28 | } catch (error) { 29 | setVerificationError(error instanceof Error ? 
error.message : 'Failed to verify token'); 30 | console.error('Token verification error:', error); 31 | } 32 | }; 33 | 34 | verifyToken(); 35 | }, []); 36 | 37 | return { verificationError, isVerified }; 38 | }; 39 | -------------------------------------------------------------------------------- /src/i18n/translations.spec.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect } from 'vitest'; 2 | import { Layer, Stack, Library, layerToStackMap, stackToLibraryMap } from '../data/dictionaries'; 3 | import { layerTranslations, stackTranslations, libraryTranslations } from './translations'; 4 | 5 | describe('Translation Coverage', () => { 6 | describe('Layer Translation Coverage', () => { 7 | it('should have translations for all Layer enum values', () => { 8 | // Arrange 9 | const layerEnumValues = Object.values(Layer); 10 | const translationKeys = Object.keys(layerTranslations); 11 | 12 | // Act & Assert 13 | // Check if all enum values have translations 14 | layerEnumValues.forEach((layer) => { 15 | expect(layerTranslations[layer as Layer]).toBeDefined(); 16 | expect(typeof layerTranslations[layer as Layer]).toBe('string'); 17 | expect(layerTranslations[layer as Layer].length).toBeGreaterThan(0); 18 | }); 19 | 20 | // Check if there are no extra translations 21 | expect(translationKeys.length).toBe(layerEnumValues.length); 22 | }); 23 | }); 24 | 25 | describe('Stack Translation Mapping', () => { 26 | it('should have valid translations for all stacks referenced in layerToStackMap', () => { 27 | // Arrange 28 | const usedStacks = new Set(); 29 | Object.values(layerToStackMap).forEach((stacks) => { 30 | stacks.forEach((stack) => usedStacks.add(stack)); 31 | }); 32 | 33 | // Act & Assert 34 | // Check if all used stacks have translations 35 | usedStacks.forEach((stack) => { 36 | expect(stackTranslations[stack]).toBeDefined(); 37 | expect(typeof stackTranslations[stack]).toBe('string'); 38 | expect(stackTranslations[stack].length).toBeGreaterThan(0); 39 | }); 40 | 41 | // Check if there are no orphaned translations 42 | Object.keys(stackTranslations).forEach((translationKey) => { 43 | expect(usedStacks.has(translationKey as Stack)).toBe(true); 44 | }); 45 | }); 46 | }); 47 | 48 | describe('Library Translation Chain', () => { 49 | it('should have translations for all libraries referenced in stackToLibraryMap', () => { 50 | // Arrange 51 | const usedLibraries = new Set(); 52 | Object.values(stackToLibraryMap).forEach((libraries) => { 53 | libraries.forEach((library) => usedLibraries.add(library)); 54 | }); 55 | const translationValues = new Set(Object.values(libraryTranslations)); 56 | 57 | // Act & Assert 58 | // Check if all used libraries have translations 59 | usedLibraries.forEach((library) => { 60 | expect(libraryTranslations[library]).toBeDefined(); 61 | expect(typeof libraryTranslations[library]).toBe('string'); 62 | expect(libraryTranslations[library].length).toBeGreaterThan(0); 63 | }); 64 | 65 | // Check if there are no orphaned translations 66 | Object.keys(libraryTranslations).forEach((translationKey) => { 67 | expect(usedLibraries.has(translationKey as Library)).toBe(true); 68 | }); 69 | 70 | // Check for duplicate translations 71 | expect(translationValues.size).toBe(Object.keys(libraryTranslations).length); 72 | }); 73 | }); 74 | }); 75 | -------------------------------------------------------------------------------- /src/layouts/Layout.astro: 
-------------------------------------------------------------------------------- 1 | --- 2 | import '../styles/global.css'; 3 | import SEO from './partials/SEO.astro'; 4 | import Fonts from './partials/Fonts.astro'; 5 | import GTMContainer from './partials/GTMContainer.astro'; 6 | import CookieBanner from '../components/cookie-banner/CookieBanner.tsx'; 7 | --- 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 19 | 20 | 21 | 22 | 10xRules.ai 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /src/layouts/partials/Fonts.astro: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | --- 4 | 5 | 6 | 7 | 11 | -------------------------------------------------------------------------------- /src/layouts/partials/GTMContainer.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import { PUBLIC_ENV_NAME } from 'astro:env/server'; 3 | 4 | const isProd = PUBLIC_ENV_NAME === 'prod'; 5 | const {tagId} = Astro.props; 6 | --- 7 | {isProd && 8 |
9 | 32 |
33 | } -------------------------------------------------------------------------------- /src/layouts/partials/SEO.astro: -------------------------------------------------------------------------------- 1 | --- 2 | const siteUrl = 'https://10xrules.ai'; 3 | const siteDescription = 4 | 'Create and manage rules for best-in-class AI tools like GitHub Copilot, Cursor & Windsurf. Make AI Agents aware of your preferences and coding style.'; 5 | const siteTitle = '10xRules.ai'; 6 | --- 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /src/pages/api/auth/login.ts: -------------------------------------------------------------------------------- 1 | import type { APIRoute } from 'astro'; 2 | import { isFeatureEnabled } from '@/features/featureFlags'; 3 | import { createSupabaseServerInstance } from '@/db/supabase.client'; 4 | 5 | export const POST: APIRoute = async ({ request, cookies }) => { 6 | // Check if auth feature is enabled 7 | if (!isFeatureEnabled('auth')) { 8 | return new Response(JSON.stringify({ error: 'Authentication is currently disabled' }), { 9 | status: 403, 10 | }); 11 | } 12 | 13 | try { 14 | const { email, password } = (await request.json()) as { email: string; password: string }; 15 | 16 | if (!email || !password) { 17 | return new Response(JSON.stringify({ error: 'Email and password are required' }), { 18 | status: 400, 19 | }); 20 | } 21 | 22 | const supabase = createSupabaseServerInstance({ cookies, headers: request.headers }); 23 | 24 | const { data, error } = await supabase.auth.signInWithPassword({ 25 | email, 26 | password, 27 | }); 28 | 29 | if (error) { 30 | return new Response(JSON.stringify({ error: error.message }), { status: 400 }); 31 | } 32 | 33 | return new Response(JSON.stringify({ user: data.user }), { status: 200 }); 34 | } catch (err) { 35 | console.error('Login error:', err); 36 | return new Response(JSON.stringify({ error: 'An unexpected error occurred' }), { status: 500 }); 37 | } 38 | }; 39 | -------------------------------------------------------------------------------- /src/pages/api/auth/logout.ts: -------------------------------------------------------------------------------- 1 | import type { APIRoute } from 'astro'; 2 | import { isFeatureEnabled } from '../../../features/featureFlags'; 3 | 4 | export const POST: APIRoute = async ({ locals }) => { 5 | // Check if auth feature is enabled 6 | if (!isFeatureEnabled('auth')) { 7 | return new Response(JSON.stringify({ error: 'Authentication is currently disabled' }), { 8 | status: 403, 9 | }); 10 | } 11 | 12 | try { 13 | const { error } = await locals.supabase.auth.signOut(); 14 | 15 | if (error) { 16 | return new Response(JSON.stringify({ error: error.message }), { 17 | status: 400, 18 | }); 19 | } 20 | 21 | return new Response(JSON.stringify({ success: true }), { 22 | status: 200, 23 | }); 24 | } catch (err) { 25 | console.error('Logout error:', err); 26 | return new Response(JSON.stringify({ error: 'An unexpected error occurred' }), { 27 | status: 500, 28 | }); 29 | } 30 | }; 31 | -------------------------------------------------------------------------------- /src/pages/api/auth/reset-password.ts: -------------------------------------------------------------------------------- 1 | import type { APIRoute } from 'astro'; 2 | import { createSupabaseAdminInstance } from '@/db/supabase.client'; 3 | 4 | export const POST: 
APIRoute = async ({ request, url, cookies }) => { 5 | try { 6 | const { email } = (await request.json()) as { email: string }; 7 | 8 | if (!email) { 9 | return new Response(JSON.stringify({ error: 'Email is required' }), { 10 | status: 400, 11 | }); 12 | } 13 | 14 | const supabase = createSupabaseAdminInstance({ cookies, headers: request.headers }); 15 | 16 | const { error } = await supabase.auth.resetPasswordForEmail(email, { 17 | redirectTo: `${url.origin}/auth/update-password`, 18 | }); 19 | 20 | // Don't disclose whether the email exists or not for security reasons. 21 | // Always return a success response. 22 | if (error) { 23 | console.error('Password reset error:', error.message); 24 | } 25 | 26 | return new Response( 27 | JSON.stringify({ message: 'Password reset instructions sent if email is valid' }), 28 | { status: 200 }, 29 | ); 30 | } catch (err) { 31 | console.error('Reset password endpoint error:', err); 32 | // Handle JSON parsing errors or other unexpected issues 33 | if (err instanceof SyntaxError) { 34 | return new Response(JSON.stringify({ error: 'Invalid request body' }), { status: 400 }); 35 | } 36 | return new Response(JSON.stringify({ error: 'An unexpected error occurred' }), { status: 500 }); 37 | } 38 | }; 39 | -------------------------------------------------------------------------------- /src/pages/api/auth/signup.ts: -------------------------------------------------------------------------------- 1 | import type { APIRoute } from 'astro'; 2 | import { createSupabaseAdminInstance } from '../../../db/supabase.client'; 3 | import { isFeatureEnabled } from '../../../features/featureFlags'; 4 | import { PRIVACY_POLICY_VERSION } from '../../../pages/privacy/privacyPolicyVersion'; 5 | 6 | export const POST: APIRoute = async ({ request, cookies }) => { 7 | // Check if auth feature is enabled 8 | if (!isFeatureEnabled('auth')) { 9 | return new Response(JSON.stringify({ error: 'Authentication is currently disabled' }), { 10 | status: 403, 11 | }); 12 | } 13 | 14 | try { 15 | const { email, password, privacyPolicyConsent } = (await request.json()) as { 16 | email: string; 17 | password: string; 18 | privacyPolicyConsent: boolean; 19 | }; 20 | 21 | if (!email || !password || !privacyPolicyConsent) { 22 | return new Response( 23 | JSON.stringify({ error: 'Email, password, and privacy policy consent are required' }), 24 | { status: 400 }, 25 | ); 26 | } 27 | 28 | const supabase = createSupabaseAdminInstance({ cookies, headers: request.headers }); 29 | 30 | const { data: authData, error: authError } = await supabase.auth.signUp({ 31 | email, 32 | password, 33 | options: { 34 | emailRedirectTo: `${new URL(request.url).origin}/auth/login`, 35 | }, 36 | }); 37 | 38 | if (authError) { 39 | return new Response(JSON.stringify({ error: authError.message }), { status: 400 }); 40 | } 41 | 42 | const { error: consentError } = await supabase.from('user_consents').insert({ 43 | user_id: authData.user?.id, 44 | privacy_policy_version: PRIVACY_POLICY_VERSION, 45 | }); 46 | 47 | if (consentError) { 48 | console.error('Error storing consent:', consentError); 49 | } 50 | 51 | return new Response(JSON.stringify({ user: authData.user }), { status: 200 }); 52 | } catch (err) { 53 | console.error('Signup error:', err); 54 | return new Response(JSON.stringify({ error: 'An unexpected error occurred' }), { status: 500 }); 55 | } 56 | }; 57 | -------------------------------------------------------------------------------- /src/pages/api/auth/update-password.ts: 
-------------------------------------------------------------------------------- 1 | import type { APIRoute } from 'astro'; 2 | 3 | export const POST: APIRoute = async ({ request, locals }) => { 4 | try { 5 | const { password, confirmPassword } = (await request.json()) as { 6 | password: string; 7 | confirmPassword: string; 8 | }; 9 | 10 | if (!password || !confirmPassword || password !== confirmPassword) { 11 | return new Response(JSON.stringify({ error: 'Password and confirm password must match' }), { 12 | status: 400, 13 | }); 14 | } 15 | 16 | const { error } = await locals.supabase.auth.updateUser({ 17 | password, 18 | }); 19 | 20 | if (error) { 21 | return new Response(JSON.stringify({ error: error.message }), { status: 400 }); 22 | } 23 | 24 | return new Response(JSON.stringify({ message: 'Password updated successfully' }), { 25 | status: 200, 26 | }); 27 | } catch (err) { 28 | console.error('Update password endpoint error:', err instanceof Error ? err.message : err); 29 | if (err instanceof SyntaxError) { 30 | return new Response(JSON.stringify({ error: 'Invalid request body' }), { status: 400 }); 31 | } 32 | return new Response(JSON.stringify({ error: 'An unexpected error occurred' }), { status: 500 }); 33 | } 34 | }; 35 | -------------------------------------------------------------------------------- /src/pages/api/auth/verify-reset-token.ts: -------------------------------------------------------------------------------- 1 | import type { APIRoute } from 'astro'; 2 | import { createSupabaseAdminInstance } from '@/db/supabase.client'; 3 | 4 | export const POST: APIRoute = async ({ request, cookies }) => { 5 | try { 6 | const { token_hash } = (await request.json()) as { token_hash: string }; 7 | 8 | if (!token_hash) { 9 | return new Response(JSON.stringify({ error: 'Token hash is required' }), { 10 | status: 400, 11 | }); 12 | } 13 | 14 | const supabase = createSupabaseAdminInstance({ cookies, headers: request.headers }); 15 | 16 | const { 17 | error, 18 | data: { user }, 19 | } = await supabase.auth.verifyOtp({ 20 | token_hash, 21 | type: 'recovery', 22 | }); 23 | 24 | if (error) { 25 | console.error('Error verifying OTP:', error.message); 26 | return new Response(JSON.stringify({ error: 'Invalid or expired token' }), { status: 400 }); 27 | } 28 | 29 | return new Response(JSON.stringify({ user }), { status: 200 }); 30 | } catch (err) { 31 | console.error('Verify reset token error:', err instanceof Error ? 
err.message : err); 32 | if (err instanceof SyntaxError) { 33 | return new Response(JSON.stringify({ error: 'Invalid request body' }), { status: 400 }); 34 | } 35 | return new Response(JSON.stringify({ error: 'An unexpected error occurred' }), { status: 500 }); 36 | } 37 | }; 38 | -------------------------------------------------------------------------------- /src/pages/api/captcha/verify.ts: -------------------------------------------------------------------------------- 1 | import type { APIRoute } from 'astro'; 2 | import { verifyCaptcha } from '../../../services/captcha'; 3 | import { CF_CAPTCHA_SECRET_KEY } from 'astro:env/server'; 4 | 5 | export const prerender = false; 6 | 7 | export const POST: APIRoute = async ({ request }) => { 8 | const { captchaToken } = (await request.json()) as { captchaToken: string }; 9 | try { 10 | const requestorIp = request.headers.get('cf-connecting-ip'); 11 | 12 | const captchaVerificationResult = await verifyCaptcha( 13 | CF_CAPTCHA_SECRET_KEY, 14 | captchaToken, 15 | requestorIp || '', 16 | ); 17 | 18 | if (captchaVerificationResult.success) { 19 | return new Response( 20 | JSON.stringify({ 21 | success: captchaVerificationResult.success, 22 | challenge_ts: captchaVerificationResult.challenge_ts, 23 | hostname: captchaVerificationResult.hostname, 24 | }), 25 | { 26 | status: 200, 27 | headers: { 'Content-Type': 'application/json' }, 28 | }, 29 | ); 30 | } else { 31 | return new Response( 32 | JSON.stringify({ 33 | success: false, 34 | 'error-codes': captchaVerificationResult['error-codes'] || ['unknown-error'], 35 | }), 36 | { 37 | status: 400, // Or an appropriate error status 38 | headers: { 'Content-Type': 'application/json' }, 39 | }, 40 | ); 41 | } 42 | } catch (error) { 43 | console.error('Captcha verification error:', error); 44 | return new Response( 45 | JSON.stringify({ 46 | success: false, 47 | 'error-codes': ['captcha-verification-failed'], 48 | }), 49 | { 50 | status: 500, 51 | headers: { 'Content-Type': 'application/json' }, 52 | }, 53 | ); 54 | } 55 | }; 56 | -------------------------------------------------------------------------------- /src/pages/auth/login.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import Layout from '../../layouts/Layout.astro'; 3 | import AuthLayout from '../../components/auth/AuthLayout'; 4 | import LoginForm from '../../components/auth/LoginForm'; 5 | import { createSupabaseServerInstance } from '../../db/supabase.client'; 6 | import { isFeatureEnabled } from '../../features/featureFlags'; 7 | 8 | import { CF_CAPTCHA_SITE_KEY } from 'astro:env/server'; 9 | 10 | // Check if auth feature is enabled 11 | if (!isFeatureEnabled('auth')) { 12 | return Astro.redirect('/'); 13 | } 14 | 15 | // Check if user is already logged in 16 | const supabase = createSupabaseServerInstance({ 17 | cookies: Astro.cookies, 18 | headers: Astro.request.headers, 19 | }); 20 | 21 | const { 22 | data: { user }, 23 | } = await supabase.auth.getUser(); 24 | 25 | // If user is already logged in, redirect to home page 26 | if (user) { 27 | return Astro.redirect('/'); 28 | } 29 | --- 30 | 31 | 32 |
33 |
34 | 35 | 36 | 37 |
38 |
39 |
40 | -------------------------------------------------------------------------------- /src/pages/auth/reset-password.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import Layout from '../../layouts/Layout.astro'; 3 | import AuthLayout from '../../components/auth/AuthLayout'; 4 | import ResetPasswordForm from '../../components/auth/ResetPasswordForm'; 5 | 6 | import { CF_CAPTCHA_SITE_KEY } from 'astro:env/server'; 7 | --- 8 | 9 | 10 |
11 |
12 | 16 | 17 | 18 |
19 |
20 |
21 | -------------------------------------------------------------------------------- /src/pages/auth/signup.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import Layout from '../../layouts/Layout.astro'; 3 | import AuthLayout from '../../components/auth/AuthLayout'; 4 | import SignupForm from '../../components/auth/SignupForm'; 5 | import { createSupabaseServerInstance } from '../../db/supabase.client'; 6 | import { isFeatureEnabled } from '../../features/featureFlags'; 7 | 8 | import { CF_CAPTCHA_SITE_KEY } from 'astro:env/server'; 9 | 10 | // Check if auth feature is enabled 11 | if (!isFeatureEnabled('auth')) { 12 | return Astro.redirect('/'); 13 | } 14 | 15 | // Check if user is already logged in 16 | const supabase = createSupabaseServerInstance({ 17 | cookies: Astro.cookies, 18 | headers: Astro.request.headers, 19 | }); 20 | 21 | const { 22 | data: { user }, 23 | } = await supabase.auth.getUser(); 24 | 25 | // If user is already logged in, redirect to home page 26 | if (user) { 27 | return Astro.redirect('/'); 28 | } 29 | --- 30 | 31 | 32 |
33 |
34 | 35 | 36 | 37 |
38 |
39 |
40 | -------------------------------------------------------------------------------- /src/pages/auth/update-password.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import Layout from '../../layouts/Layout.astro'; 3 | import AuthLayout from '../../components/auth/AuthLayout'; 4 | import UpdatePasswordResetForm from '../../components/auth/UpdatePasswordResetForm'; 5 | 6 | import { CF_CAPTCHA_SITE_KEY } from 'astro:env/server'; 7 | --- 8 | 9 | 10 |
11 |
12 | 13 | 14 | 15 |
16 |
17 |
18 | -------------------------------------------------------------------------------- /src/pages/index.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import Layout from '../layouts/Layout.astro'; 3 | import Topbar from '../components/Topbar'; 4 | import TwoPane from '../components/TwoPane'; 5 | import Footer from '../components/Footer'; 6 | 7 | const user = Astro.locals.user; 8 | const initialUrl = Astro.url; 9 | --- 10 | 11 | 12 |
13 | 14 |
15 | 16 |
17 |
18 |
19 |
20 | -------------------------------------------------------------------------------- /src/pages/privacy/en/index.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import Layout from '../../../layouts/Layout.astro'; 3 | import PrivacyLayout from '../../../components/privacy/PrivacyLayout.astro'; 4 | 5 | const privacyContent = await import('../../../assets/privacy-policy/pp-13-04-2025-en.md'); 6 | const Content = privacyContent.Content; 7 | --- 8 | 9 | 10 |
11 |
12 | 13 | 14 | 15 |
16 |
17 |
18 | -------------------------------------------------------------------------------- /src/pages/privacy/pl/index.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import Layout from '../../../layouts/Layout.astro'; 3 | import PrivacyLayout from '../../../components/privacy/PrivacyLayout.astro'; 4 | 5 | const privacyContent = await import('../../../assets/privacy-policy/pp-13-04-2025-pl.md'); 6 | const Content = privacyContent.Content; 7 | --- 8 | 9 | 10 |
11 |
12 | 13 | 14 | 15 |
16 |
17 |
18 | -------------------------------------------------------------------------------- /src/pages/privacy/privacyPolicyVersion.ts: -------------------------------------------------------------------------------- 1 | export const PRIVACY_POLICY_VERSION = '13.04.2024'; 2 | -------------------------------------------------------------------------------- /src/services/auth.ts: -------------------------------------------------------------------------------- 1 | import type { 2 | LoginFormData, 3 | SignupFormData, 4 | ResetPasswordFormData, 5 | UpdatePasswordFormData, 6 | } from '../types/auth'; 7 | 8 | // Define User interface (ideally import from a shared location) 9 | interface User { 10 | id: string; 11 | email: string | null; 12 | } 13 | 14 | interface AuthErrorResponse { 15 | error?: string; 16 | // Add other potential error properties if known 17 | } 18 | 19 | class AuthError extends Error { 20 | constructor( 21 | public status: number, 22 | message: string, 23 | ) { 24 | super(message); 25 | this.name = 'AuthError'; 26 | } 27 | } 28 | 29 | async function handleResponse(response: Response): Promise<{ user: User }> { 30 | if (!response.ok) { 31 | const errorData = (await response.json()) as AuthErrorResponse; 32 | throw new AuthError(response.status, errorData.error || 'Authentication failed'); 33 | } 34 | return response.json() as Promise<{ user: User }>; 35 | } 36 | 37 | export const authService = { 38 | login: async (data: LoginFormData): Promise<{ user: User }> => { 39 | const response = await fetch('/api/auth/login', { 40 | method: 'POST', 41 | headers: { 'Content-Type': 'application/json' }, 42 | body: JSON.stringify(data), 43 | }); 44 | return handleResponse(response); 45 | }, 46 | 47 | signup: async (formData: SignupFormData): Promise<{ user: User }> => { 48 | const { email, password, privacyPolicyConsent } = formData; 49 | const response = await fetch('/api/auth/signup', { 50 | method: 'POST', 51 | headers: { 'Content-Type': 'application/json' }, 52 | body: JSON.stringify({ email, password, privacyPolicyConsent }), 53 | }); 54 | return handleResponse(response); 55 | }, 56 | 57 | resetPassword: async (data: ResetPasswordFormData): Promise<{ user: User }> => { 58 | const response = await fetch('/api/auth/reset-password', { 59 | method: 'POST', 60 | headers: { 'Content-Type': 'application/json' }, 61 | body: JSON.stringify(data), 62 | }); 63 | return handleResponse(response); 64 | }, 65 | 66 | updatePassword: async (data: UpdatePasswordFormData): Promise<{ user: User }> => { 67 | const response = await fetch('/api/auth/update-password', { 68 | method: 'POST', 69 | headers: { 'Content-Type': 'application/json' }, 70 | body: JSON.stringify(data), 71 | }); 72 | return handleResponse(response); 73 | }, 74 | }; 75 | -------------------------------------------------------------------------------- /src/services/captcha.ts: -------------------------------------------------------------------------------- 1 | import axios from 'axios'; 2 | 3 | export interface CaptchaResponse { 4 | success: boolean; 5 | challenge_ts: string; 6 | hostname: string; 7 | 'error-codes': string[]; 8 | } 9 | 10 | export async function verifyCaptcha( 11 | captchaSecret: string, 12 | captchaToken: string, 13 | requestorIp: string, 14 | ): Promise { 15 | const response = await axios.post( 16 | `https://challenges.cloudflare.com/turnstile/v0/siteverify`, 17 | { 18 | secret: captchaSecret, 19 | response: captchaToken, 20 | remoteip: requestorIp, 21 | }, 22 | ); 23 | const captchaResult = response.data; 24 | return 
captchaResult; 25 | } 26 | -------------------------------------------------------------------------------- /src/services/rateLimiter.ts: -------------------------------------------------------------------------------- 1 | import { type AstroCookies } from 'astro'; 2 | 3 | const RATE_LIMIT_COOKIE_NAME = 'app_rl_v1'; 4 | const DEFAULT_RATE_LIMIT_SECONDS = 10; 5 | 6 | // Type for the cookie value: a map of routes to their last access timestamps 7 | type RateLimitData = { 8 | [route: string]: number; // route: timestamp 9 | }; 10 | 11 | export function checkRateLimit( 12 | cookies: AstroCookies, 13 | route: string, 14 | rateLimitSeconds: number = DEFAULT_RATE_LIMIT_SECONDS, 15 | ): boolean { 16 | const cookie = cookies.get(RATE_LIMIT_COOKIE_NAME); 17 | if (!cookie?.value) { 18 | return true; // No cookie, or empty cookie value, allow 19 | } 20 | 21 | try { 22 | const decodedValue = atob(cookie.value); 23 | const rateLimitData = JSON.parse(decodedValue) as RateLimitData; 24 | const lastAccessTime = rateLimitData[route]; 25 | 26 | if (!lastAccessTime) { 27 | return true; // No timestamp for this route yet, allow 28 | } 29 | 30 | const timeSinceLastAccess = Date.now() - lastAccessTime; 31 | if (timeSinceLastAccess < rateLimitSeconds * 1000) { 32 | console.log( 33 | `Rate limit exceeded for route: ${route}. Time since last access: ${timeSinceLastAccess}ms, Limit: ${rateLimitSeconds * 1000}ms`, 34 | ); 35 | return false; // Rate limit exceeded 36 | } 37 | return true; // Rate limit not exceeded 38 | } catch (error) { 39 | console.error('Error decoding or parsing rate limit cookie:', error); 40 | // If there's an error (e.g., malformed cookie), allow the request and overwrite the cookie later. 41 | // Alternatively, you could block to be safer, depending on requirements. 42 | return true; 43 | } 44 | } 45 | 46 | export function setRateLimitCookie( 47 | cookies: AstroCookies, 48 | route: string, 49 | rateLimitSeconds: number = DEFAULT_RATE_LIMIT_SECONDS, 50 | ): void { 51 | const currentTime = Date.now(); 52 | let rateLimitData: RateLimitData = {}; 53 | 54 | const existingCookie = cookies.get(RATE_LIMIT_COOKIE_NAME); 55 | if (existingCookie?.value) { 56 | try { 57 | const decodedValue = atob(existingCookie.value); 58 | rateLimitData = JSON.parse(decodedValue) as RateLimitData; 59 | } catch (error) { 60 | console.error('Error decoding or parsing existing rate limit cookie:', error); 61 | // If cookie is malformed, start fresh 62 | rateLimitData = {}; 63 | } 64 | } 65 | 66 | rateLimitData[route] = currentTime; 67 | 68 | // Clean up old entries (optional, but good for cookie size management) 69 | // This example doesn't include cleanup, but you might want to add it if routes are dynamic or numerous. 
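// A possible cleanup sketch (illustrative only; `staleAfterMs` is a hypothetical local, mirroring the cookie maxAge set below):
//   const staleAfterMs = Math.max(rateLimitSeconds, DEFAULT_RATE_LIMIT_SECONDS) * 2 * 1000;
//   Object.keys(rateLimitData).forEach((storedRoute) => {
//     if (currentTime - rateLimitData[storedRoute] > staleAfterMs) {
//       delete rateLimitData[storedRoute];
//     }
//   });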
70 | 71 | try { 72 | const newCookieValue = btoa(JSON.stringify(rateLimitData)); 73 | cookies.set(RATE_LIMIT_COOKIE_NAME, newCookieValue, { 74 | path: '/', 75 | maxAge: Math.max(rateLimitSeconds, DEFAULT_RATE_LIMIT_SECONDS) * 2, // Cookie should last longer than the longest rate limit 76 | sameSite: 'lax', 77 | httpOnly: true, // Make cookie httpOnly for security 78 | }); 79 | } catch (error) { 80 | console.error('Error encoding or setting rate limit cookie:', error); 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /src/services/rules-builder/RulesBuilderService.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Layer, 3 | Library, 4 | Stack, 5 | getLayerByStack, 6 | getStacksByLibrary, 7 | } from '../../data/dictionaries.ts'; 8 | import type { RulesContent } from './RulesBuilderTypes.ts'; 9 | import type { RulesGenerationStrategy } from './RulesGenerationStrategy.ts'; 10 | import { MultiFileRulesStrategy } from './rules-generation-strategies/MultiFileRulesStrategy.ts'; 11 | import { SingleFileRulesStrategy } from './rules-generation-strategies/SingleFileRulesStrategy.ts'; 12 | 13 | /** 14 | * Service for building AI rules based on selected libraries 15 | */ 16 | export class RulesBuilderService { 17 | /** 18 | * Generates markdown content for AI rules based on project metadata and selected libraries 19 | * 20 | * @param projectName - The name of the project 21 | * @param projectDescription - The description of the project 22 | * @param selectedLibraries - Array of selected libraries 23 | * @param multiFile - Whether to generate multiple files per each rule content 24 | * @returns The generated markdown content 25 | */ 26 | static generateRulesContent( 27 | projectName: string, 28 | projectDescription: string, 29 | selectedLibraries: Library[], 30 | multiFile?: boolean, 31 | ): RulesContent[] { 32 | // Group libraries by stack and layer 33 | const librariesByStack = this.groupLibrariesByStack(selectedLibraries); 34 | const stacksByLayer = this.groupStacksByLayer(Object.keys(librariesByStack) as Stack[]); 35 | 36 | const strategy: RulesGenerationStrategy = multiFile 37 | ? 
new MultiFileRulesStrategy() 38 | : new SingleFileRulesStrategy(); 39 | 40 | return strategy.generateRules( 41 | projectName, 42 | projectDescription, 43 | selectedLibraries, 44 | stacksByLayer, 45 | librariesByStack, 46 | ); 47 | } 48 | 49 | /** 50 | * Groups libraries by their stack 51 | * 52 | * @param libraries - Array of libraries to group 53 | * @returns Record with stacks as keys and arrays of libraries as values 54 | */ 55 | private static groupLibrariesByStack(libraries: Library[]): Record { 56 | const result: Record = {} as Record; 57 | 58 | libraries.forEach((library) => { 59 | const stacks = getStacksByLibrary(library); 60 | 61 | stacks.forEach((stack) => { 62 | if (!result[stack]) { 63 | result[stack] = []; 64 | } 65 | 66 | if (!result[stack].includes(library)) { 67 | result[stack].push(library); 68 | } 69 | }); 70 | }); 71 | 72 | return result; 73 | } 74 | 75 | /** 76 | * Groups stacks by their layer 77 | * 78 | * @param stacks - Array of stacks to group 79 | * @returns Record with layers as keys and arrays of stacks as values 80 | */ 81 | private static groupStacksByLayer(stacks: Stack[]): Record { 82 | const result: Record = {} as Record; 83 | 84 | stacks.forEach((stack) => { 85 | const layer = getLayerByStack(stack); 86 | 87 | if (layer) { 88 | if (!result[layer]) { 89 | result[layer] = []; 90 | } 91 | 92 | if (!result[layer].includes(stack)) { 93 | result[layer].push(stack); 94 | } 95 | } 96 | }); 97 | 98 | return result; 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /src/services/rules-builder/RulesBuilderTypes.ts: -------------------------------------------------------------------------------- 1 | export interface RulesContent { 2 | markdown: string; 3 | label: string; 4 | fileName: `${string}.mdc`; 5 | } 6 | -------------------------------------------------------------------------------- /src/services/rules-builder/RulesGenerationStrategy.ts: -------------------------------------------------------------------------------- 1 | import { Layer, Library, Stack } from '../../data/dictionaries.ts'; 2 | import type { RulesContent } from './RulesBuilderTypes.ts'; 3 | 4 | /** 5 | * Strategy interface for rules generation 6 | */ 7 | export interface RulesGenerationStrategy { 8 | generateRules( 9 | projectName: string, 10 | projectDescription: string, 11 | selectedLibraries: Library[], 12 | stacksByLayer: Record, 13 | librariesByStack: Record, 14 | ): RulesContent[]; 15 | } 16 | -------------------------------------------------------------------------------- /src/services/rules-builder/rules-generation-strategies/MultiFileRulesStrategy.ts: -------------------------------------------------------------------------------- 1 | import type { RulesGenerationStrategy } from '../RulesGenerationStrategy.ts'; 2 | import { Layer, type Library, Stack } from '../../../data/dictionaries.ts'; 3 | import type { RulesContent } from '../RulesBuilderTypes.ts'; 4 | import { getRulesForLibrary } from '../../../data/rules'; 5 | import { slugify } from '../../../utils/slugify.ts'; 6 | 7 | /** 8 | * Strategy for multi-file rules generation 9 | */ 10 | export class MultiFileRulesStrategy implements RulesGenerationStrategy { 11 | generateRules( 12 | projectName: string, 13 | projectDescription: string, 14 | selectedLibraries: Library[], 15 | stacksByLayer: Record, 16 | librariesByStack: Record, 17 | ): RulesContent[] { 18 | const projectMarkdown = `# AI Rules for ${projectName}\n\n${projectDescription}\n\n`; 19 | const noSelectedLibrariesMarkdown = 
`---\n\n👈 Use the Rule Builder on the left or drop dependency file here`; 20 | const projectLabel = 'Project', 21 | projectFileName = 'project.mdc'; 22 | 23 | const markdowns: RulesContent[] = []; 24 | 25 | markdowns.push({ markdown: projectMarkdown, label: projectLabel, fileName: projectFileName }); 26 | 27 | if (selectedLibraries.length === 0) { 28 | markdowns[0].markdown += noSelectedLibrariesMarkdown; 29 | return markdowns; 30 | } 31 | 32 | Object.entries(stacksByLayer).forEach(([layer, stacks]) => { 33 | stacks.forEach((stack) => { 34 | librariesByStack[stack].forEach((library) => { 35 | markdowns.push( 36 | this.buildRulesContent({ 37 | layer, 38 | stack, 39 | library, 40 | libraryRules: getRulesForLibrary(library), 41 | }), 42 | ); 43 | }); 44 | }); 45 | }); 46 | 47 | return markdowns; 48 | } 49 | 50 | private buildRulesContent({ 51 | libraryRules, 52 | layer, 53 | stack, 54 | library, 55 | }: { 56 | libraryRules: string[]; 57 | layer: string; 58 | stack: string; 59 | library: string; 60 | }): RulesContent { 61 | const label = `${layer} - ${stack} - ${library}`; 62 | const fileName: RulesContent['fileName'] = `${slugify(`${layer}-${stack}-${library}`)}.mdc`; 63 | const content = 64 | libraryRules.length > 0 65 | ? `${libraryRules.map((rule) => `- ${rule}`).join('\n')}` 66 | : `- Use ${library} according to best practices`; 67 | const markdown = this.renderRuleMarkdown({ content, layer, stack, library }); 68 | return { markdown, label, fileName }; 69 | } 70 | 71 | private renderRuleMarkdown = ({ 72 | content, 73 | layer, 74 | stack, 75 | library, 76 | }: { 77 | content: string; 78 | layer: string; 79 | stack: string; 80 | library: string; 81 | }) => 82 | `## ${layer}\n\n### Guidelines for ${stack}\n\n#### ${library}\n\n{{content}}\n\n`.replace( 83 | '{{content}}', 84 | content, 85 | ); 86 | } 87 | -------------------------------------------------------------------------------- /src/services/rules-builder/rules-generation-strategies/SingleFileRulesStrategy.ts: -------------------------------------------------------------------------------- 1 | import type { RulesGenerationStrategy } from '../RulesGenerationStrategy.ts'; 2 | import { Layer, Library, Stack } from '../../../data/dictionaries.ts'; 3 | import type { RulesContent } from '../RulesBuilderTypes.ts'; 4 | import { getRulesForLibrary } from '../../../data/rules.ts'; 5 | 6 | /** 7 | * Strategy for single-file rules generation 8 | */ 9 | export class SingleFileRulesStrategy implements RulesGenerationStrategy { 10 | generateRules( 11 | projectName: string, 12 | projectDescription: string, 13 | selectedLibraries: Library[], 14 | stacksByLayer: Record, 15 | librariesByStack: Record, 16 | ): RulesContent[] { 17 | const projectMarkdown = `# AI Rules for ${projectName}\n\n${projectDescription}\n\n`; 18 | const noSelectedLibrariesMarkdown = `---\n\n👈 Use the Rule Builder on the left or drop dependency file here`; 19 | const projectLabel = 'Project', 20 | projectFileName = 'project.mdc'; 21 | 22 | let markdown = projectMarkdown; 23 | 24 | if (selectedLibraries.length === 0) { 25 | markdown += noSelectedLibrariesMarkdown; 26 | return [{ markdown, label: projectLabel, fileName: projectFileName }]; 27 | } 28 | 29 | markdown += this.generateLibraryMarkdown(stacksByLayer, librariesByStack); 30 | return [{ markdown, label: 'All Rules', fileName: 'rules.mdc' }]; 31 | } 32 | 33 | private generateLibraryMarkdown( 34 | stacksByLayer: Record, 35 | librariesByStack: Record, 36 | ): string { 37 | let markdown = ''; 38 | 39 | // Generate content for 
each layer and its stacks 40 | Object.entries(stacksByLayer).forEach(([layer, stacks]) => { 41 | markdown += `## ${layer}\n\n`; 42 | 43 | stacks.forEach((stack) => { 44 | markdown += `### Guidelines for ${stack}\n\n`; 45 | 46 | const libraries = librariesByStack[stack]; 47 | if (libraries) { 48 | libraries.forEach((library) => { 49 | markdown += `#### ${library}\n\n`; 50 | 51 | // Get specific rules for this library 52 | const libraryRules = getRulesForLibrary(library); 53 | if (libraryRules.length > 0) { 54 | libraryRules.forEach((rule) => { 55 | markdown += `- ${rule}\n`; 56 | }); 57 | } else { 58 | markdown += `- Use ${library} according to best practices\n`; 59 | } 60 | 61 | markdown += '\n'; 62 | }); 63 | } 64 | 65 | markdown += '\n'; 66 | }); 67 | }); 68 | 69 | return markdown; 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/store/authStore.ts: -------------------------------------------------------------------------------- 1 | import { create } from 'zustand'; 2 | 3 | interface User { 4 | id: string; 5 | email: string | null; 6 | } 7 | 8 | interface AuthState { 9 | user: User | null; 10 | isAuthenticated: boolean; 11 | setUser: (user: User | null) => void; 12 | logout: () => void; 13 | } 14 | 15 | export const useAuthStore = create((set) => ({ 16 | user: null, 17 | isAuthenticated: false, 18 | setUser: (user) => set({ user, isAuthenticated: !!user }), 19 | logout: () => set({ user: null, isAuthenticated: false }), 20 | })); 21 | -------------------------------------------------------------------------------- /src/store/navigationStore.ts: -------------------------------------------------------------------------------- 1 | import { create } from 'zustand'; 2 | 3 | export type Panel = 'collections' | 'builder' | 'preview'; 4 | 5 | interface NavigationState { 6 | activePanel: Panel; 7 | isSidebarOpen: boolean; 8 | setActivePanel: (panel: Panel) => void; 9 | toggleSidebar: () => void; 10 | setSidebarOpen: (isOpen: boolean) => void; 11 | } 12 | 13 | export const useNavigationStore = create((set) => ({ 14 | activePanel: 'builder', 15 | isSidebarOpen: false, 16 | setActivePanel: (panel) => set({ activePanel: panel }), 17 | toggleSidebar: () => set((state) => ({ isSidebarOpen: !state.isSidebarOpen })), 18 | setSidebarOpen: (isOpen) => set({ isSidebarOpen: isOpen }), 19 | })); 20 | -------------------------------------------------------------------------------- /src/store/projectStore.ts: -------------------------------------------------------------------------------- 1 | import { create } from 'zustand'; 2 | import { createJSONStorage, persist } from 'zustand/middleware'; 3 | import { type AIEnvironment, AIEnvironmentName } from '../data/ai-environments.ts'; 4 | 5 | interface ProjectState { 6 | // Project metadata 7 | projectName: string; 8 | projectDescription: string; 9 | selectedEnvironment: AIEnvironment; 10 | isMultiFileEnvironment: boolean; 11 | 12 | // Hydration state 13 | isHydrated: boolean; 14 | 15 | // Actions 16 | setProjectName: (name: string) => void; 17 | setProjectDescription: (description: string) => void; 18 | setSelectedEnvironment: (environment: AIEnvironment) => void; 19 | setHydrated: () => void; 20 | } 21 | 22 | export const multiFileEnvironments: ReadonlySet = new Set([ 23 | AIEnvironmentName.Cline, 24 | AIEnvironmentName.Cursor, 25 | ]); 26 | export const initialEnvironment: Readonly = AIEnvironmentName.Cursor; 27 | 28 | // Create a store with persistence 29 | export const useProjectStore = create()( 30 | 
persist( 31 | (set) => ({ 32 | // Initial state 33 | projectName: '{{project-name}}', 34 | projectDescription: '{{project-description}}', 35 | selectedEnvironment: initialEnvironment, 36 | isMultiFileEnvironment: multiFileEnvironments.has(initialEnvironment), 37 | isHydrated: false, 38 | 39 | // Actions 40 | setProjectName: (name: string) => set({ projectName: name }), 41 | setProjectDescription: (description: string) => set({ projectDescription: description }), 42 | setSelectedEnvironment: (environment: AIEnvironment) => 43 | set({ 44 | selectedEnvironment: environment, 45 | isMultiFileEnvironment: multiFileEnvironments.has(environment), 46 | }), 47 | setHydrated: () => set({ isHydrated: true }), 48 | }), 49 | { 50 | name: 'ai-rules-project-storage', 51 | storage: createJSONStorage(() => localStorage), 52 | partialize: (state) => ({ 53 | projectName: state.projectName, 54 | projectDescription: state.projectDescription, 55 | selectedEnvironment: state.selectedEnvironment, 56 | }), 57 | // Set hydration flag when storage is hydrated 58 | onRehydrateStorage: () => (state) => { 59 | if (state) { 60 | state.setHydrated(); 61 | } 62 | }, 63 | }, 64 | ), 65 | ); 66 | -------------------------------------------------------------------------------- /src/store/storage/urlStorage.ts: -------------------------------------------------------------------------------- 1 | import type { PersistStorage, StorageValue } from 'zustand/middleware'; 2 | import * as LZString from 'lz-string'; 3 | 4 | const { compressToEncodedURIComponent, decompressFromEncodedURIComponent } = LZString; 5 | 6 | class UrlStorage<T> implements PersistStorage<T> { 7 | private queryParams: URLSearchParams; 8 | 9 | constructor() { 10 | if (this.isServerSide()) { 11 | this.queryParams = new URLSearchParams(''); 12 | } else { 13 | this.queryParams = new URLSearchParams(window.location.search); 14 | } 15 | } 16 | 17 | getItem(name: string): StorageValue<T> | Promise<StorageValue<T> | null> | null { 18 | const value = this.queryParams.get(name); 19 | if (!value) return null; 20 | return JSON.parse(decompressFromEncodedURIComponent(value)); 21 | } 22 | 23 | setItem(name: string, value: StorageValue<T>): unknown | Promise<unknown> { 24 | this.queryParams.set(name, compressToEncodedURIComponent(JSON.stringify(value))); 25 | this.updateUrl(); 26 | 27 | return Promise.resolve(); 28 | } 29 | 30 | removeItem(name: string): unknown | Promise<unknown> { 31 | this.queryParams.delete(name); 32 | this.updateUrl(); 33 | 34 | return Promise.resolve(); 35 | } 36 | 37 | private updateUrl() { 38 | if (this.isServerSide()) return; 39 | window.history.pushState({}, '', window.location.pathname + '?'
+ this.queryParams.toString()); 40 | } 41 | 42 | private isServerSide() { 43 | return typeof window === 'undefined'; 44 | } 45 | } 46 | 47 | export const createUrlStorage = () => new UrlStorage(); 48 | export const doesUrlContainState = (url: URL, name: string): boolean => 49 | url.searchParams.get(name) !== null; 50 | -------------------------------------------------------------------------------- /src/styles/global.css: -------------------------------------------------------------------------------- 1 | @import 'tailwindcss'; 2 | @plugin "@tailwindcss/typography"; 3 | 4 | @theme { 5 | --font-mono: 'Noto Sans Mono', monospace; 6 | } 7 | 8 | @layer utilities { 9 | @keyframes accordion-down { 10 | from { 11 | height: 0; 12 | opacity: 0; 13 | } 14 | to { 15 | height: var(--radix-accordion-content-height); 16 | opacity: 1; 17 | } 18 | } 19 | 20 | @keyframes accordion-up { 21 | from { 22 | height: var(--radix-accordion-content-height); 23 | opacity: 1; 24 | } 25 | to { 26 | height: 0; 27 | opacity: 0; 28 | } 29 | } 30 | 31 | @keyframes fadeIn { 32 | from { 33 | opacity: 0; 34 | transform: scale(0.95); 35 | } 36 | to { 37 | opacity: 1; 38 | transform: scale(1); 39 | } 40 | } 41 | 42 | @keyframes fadeOut { 43 | from { 44 | opacity: 1; 45 | transform: scale(1); 46 | } 47 | to { 48 | opacity: 0; 49 | transform: scale(0.95); 50 | } 51 | } 52 | 53 | .animate-accordion-down { 54 | animation: accordion-down 0.2s ease-out; 55 | } 56 | 57 | .animate-accordion-up { 58 | animation: accordion-up 0.2s ease-out; 59 | } 60 | 61 | .animate-fade-in { 62 | animation: fadeIn 0.2s ease-out forwards; 63 | } 64 | 65 | .animate-fade-out { 66 | animation: fadeOut 0.2s ease-out forwards; 67 | } 68 | } 69 | 70 | /* Dark mode scrollbar styles to match the dark theme UI */ 71 | @layer utilities { 72 | /* Firefox */ 73 | * { 74 | scrollbar-width: thin; 75 | scrollbar-color: #4b5563 #1f2937; 76 | } 77 | 78 | /* Chrome, Edge, Safari */ 79 | ::-webkit-scrollbar { 80 | width: 8px; 81 | height: 8px; 82 | } 83 | 84 | ::-webkit-scrollbar-track { 85 | background: #1f2937; /* gray-800 */ 86 | } 87 | 88 | ::-webkit-scrollbar-thumb { 89 | background: #4b5563; /* gray-600 */ 90 | border-radius: 4px; 91 | } 92 | 93 | ::-webkit-scrollbar-thumb:hover { 94 | background: #6b7280; /* gray-500 */ 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /src/types/auth.ts: -------------------------------------------------------------------------------- 1 | import * as z from 'zod'; 2 | 3 | export const loginSchema = z.object({ 4 | email: z.string().email('Invalid email format'), 5 | password: z.string().min(8, 'Password must be at least 8 characters long'), 6 | }); 7 | 8 | export const signupSchema = z 9 | .object({ 10 | email: z.string().email('Invalid email address'), 11 | password: z 12 | .string() 13 | .min(8, 'Password must be at least 8 characters') 14 | .regex(/[A-Z]/, 'Password must contain at least one uppercase letter') 15 | .regex(/[a-z]/, 'Password must contain at least one lowercase letter') 16 | .regex(/[0-9]/, 'Password must contain at least one number'), 17 | confirmPassword: z.string(), 18 | privacyPolicyConsent: z.boolean().refine((val) => val === true, { 19 | message: 'You must accept the Privacy Policy', 20 | }), 21 | }) 22 | .refine((data) => data.password === data.confirmPassword, { 23 | message: "Passwords don't match", 24 | path: ['confirmPassword'], 25 | }); 26 | 27 | export const resetPasswordSchema = z.object({ 28 | email: z.string().email('Invalid email format'), 29 | }); 
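// Illustrative usage sketch: these schemas are typically run against untrusted form input with
// zod's safeParse; the payload below is a made-up example, not taken from this codebase.
//
//   const parsed = loginSchema.safeParse({ email: 'user@example.com', password: 'short' });
//   if (!parsed.success) {
//     // parsed.error.flatten().fieldErrors.password -> ['Password must be at least 8 characters long']
//   }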
30 | 31 | export const updatePasswordSchema = z 32 | .object({ 33 | password: z 34 | .string() 35 | .min(8, 'Password must be at least 8 characters') 36 | .regex(/[A-Z]/, 'Password must contain at least one uppercase letter') 37 | .regex(/[a-z]/, 'Password must contain at least one lowercase letter') 38 | .regex(/[0-9]/, 'Password must contain at least one number'), 39 | confirmPassword: z.string(), 40 | }) 41 | .refine((data) => data.password === data.confirmPassword, { 42 | message: "Passwords don't match", 43 | path: ['confirmPassword'], 44 | }); 45 | 46 | export type LoginFormData = z.infer<typeof loginSchema>; 47 | export type SignupFormData = z.infer<typeof signupSchema>; 48 | export type ResetPasswordFormData = z.infer<typeof resetPasswordSchema>; 49 | export type UpdatePasswordFormData = z.infer<typeof updatePasswordSchema>; 50 | -------------------------------------------------------------------------------- /src/types/collection.types.ts: -------------------------------------------------------------------------------- 1 | import { type Library } from '../data/dictionaries'; 2 | import { type Database } from '../db/database.types'; 3 | 4 | export interface Collection { 5 | id: string; 6 | userId: string; 7 | name: string; 8 | description: string | null; 9 | libraries: Library[]; 10 | createdAt: string; 11 | updatedAt: string; 12 | } 13 | 14 | export function collectionMapper( 15 | collection: Database['public']['Tables']['collections']['Row'], 16 | ): Collection { 17 | return { 18 | id: collection.id, 19 | userId: collection.user_id, 20 | name: collection.name, 21 | description: collection.description, 22 | libraries: collection.libraries as Library[], 23 | createdAt: collection.created_at, 24 | updatedAt: collection.updated_at, 25 | }; 26 | } 27 | 28 | export const DEFAULT_USER_ID = '899b93b3-f661-4471-9995-1165701d9f51'; 29 | -------------------------------------------------------------------------------- /src/utils/cn.ts: -------------------------------------------------------------------------------- 1 | import { clsx, type ClassValue } from 'clsx'; 2 | import { twMerge } from 'tailwind-merge'; 3 | 4 | /** 5 | * A utility function that merges multiple class names together 6 | * and ensures that Tailwind classes are properly merged 7 | */ 8 | export function cn(...inputs: ClassValue[]) { 9 | return twMerge(clsx(inputs)); 10 | } 11 | -------------------------------------------------------------------------------- /src/utils/slugify.ts: -------------------------------------------------------------------------------- 1 | export const slugify = (text: string) => 2 | text 3 | .toString() 4 | .normalize('NFD') // removes accents (diacritics) 5 | .replace(/[\u0300-\u036f]/g, '') // removes leftover combining diacritic marks 6 | .toLowerCase() 7 | .replace(/\s+/g, '-') // replaces spaces with hyphens 8 | .replace(/[^\w-]+/g, '') // removes special characters 9 | .replace(/--+/g, '-') // removes repeated hyphens 10 | .replace(/^-+/, '') // removes leading hyphens 11 | .replace(/-+$/, ''); // removes trailing hyphens 12 | -------------------------------------------------------------------------------- /supabase/.gitignore: -------------------------------------------------------------------------------- 1 | # Supabase 2 | .branches 3 | .temp 4 | 5 | # dotenvx 6 | .env.keys 7 | .env.local 8 | .env.*.local 9 | -------------------------------------------------------------------------------- /supabase/emails/email-template.html: -------------------------------------------------------------------------------- 6 | Email Template
-------------------------------------------------------------------------------- /supabase/emails/recovery.html: -------------------------------------------------------------------------------- 6 | ✨ 10xRules.ai | Confirm email -------------------------------------------------------------------------------- /supabase/migrations/20250328135512_collections.sql: -------------------------------------------------------------------------------- 1 | -- Create collections table 2 | CREATE TABLE collections ( 3 | id UUID PRIMARY KEY DEFAULT gen_random_uuid(), 4 | user_id UUID NOT NULL REFERENCES auth.users(id) ON DELETE CASCADE, 5 | name VARCHAR(255) NOT NULL, 6 | description TEXT, 7 | libraries TEXT [] NOT NULL DEFAULT '{}', 8 | created_at TIMESTAMP WITH TIME ZONE DEFAULT TIMEZONE('utc', NOW()) NOT NULL, 9 | updated_at TIMESTAMP WITH TIME ZONE DEFAULT TIMEZONE('utc', NOW()) NOT NULL 10 | ); 11 | -- Create index on user_id for faster lookups 12 | CREATE INDEX collections_user_id_idx ON collections(user_id); 13 | -- Enable Row Level Security 14 | ALTER TABLE collections ENABLE ROW LEVEL SECURITY; 15 | -- Create policy to allow users to manage their own collections 16 | CREATE POLICY "Users can manage their own collections" ON collections FOR ALL USING (auth.uid() = user_id); 17 | -- Function to automatically update updated_at timestamp 18 | CREATE OR REPLACE FUNCTION update_updated_at_column() RETURNS TRIGGER AS $$ BEGIN NEW.updated_at = TIMEZONE('utc', NOW()); 19 | RETURN NEW; 20 | END; 21 | $$ language 'plpgsql'; 22 | -- Trigger to call the update function 23 | CREATE TRIGGER update_collections_updated_at BEFORE 24 | UPDATE ON collections FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); -------------------------------------------------------------------------------- /supabase/migrations/20250328201010_drop-rls.sql: -------------------------------------------------------------------------------- 1 | -- Disable Row Level Security 2 | ALTER TABLE collections DISABLE ROW LEVEL SECURITY; 3 | -- Remove policy to allow users to manage their own collections 4 | DROP POLICY "Users can manage their own collections" ON collections; -------------------------------------------------------------------------------- /supabase/migrations/20250402082709_add_rls_to_collections.sql: -------------------------------------------------------------------------------- 1 | -- Migration: Add Row Level Security (RLS) to collections table 2 | -- Description: Enables RLS and adds granular policies for collections table access control 3 | -- Author: AI Rules Builder Team 4 | -- Date: 2024-04-02 5 | -- Enable RLS on collections table 6 | alter table public.collections enable row level security; 7 | -- Comment on table to document RLS implementation 8 | comment on table public.collections is 'Collections table with RLS enabled.
Access controlled by user_id for authenticated users.'; 9 | -- Policy for authenticated users to select their own collections 10 | create policy "Users can view their own collections" on public.collections for 11 | select to authenticated using (auth.uid() = user_id); 12 | -- Policy for authenticated users to insert their own collections 13 | create policy "Users can insert their own collections" on public.collections for 14 | insert to authenticated with check (auth.uid() = user_id); 15 | -- Policy for authenticated users to update their own collections 16 | create policy "Users can update their own collections" on public.collections for 17 | update to authenticated using (auth.uid() = user_id) with check (auth.uid() = user_id); 18 | -- Policy for authenticated users to delete their own collections 19 | create policy "Users can delete their own collections" on public.collections for delete to authenticated using (auth.uid() = user_id); 20 | -- Policy for anonymous users to view collections (if needed in the future, currently disabled) 21 | create policy "Anonymous users cannot access collections" on public.collections for all to anon using (false); -------------------------------------------------------------------------------- /supabase/migrations/20250411083417_create_user_consents.sql: -------------------------------------------------------------------------------- 1 | -- Migration: Create user consents table 2 | -- Description: Stores user consent records for privacy policy acceptance 3 | -- Tables affected: user_consents 4 | -- Special considerations: Implements RLS for data protection 5 | -- Create user consents table 6 | create table user_consents ( 7 | id uuid default gen_random_uuid() primary key, 8 | user_id uuid references auth.users(id) on delete cascade not null, 9 | privacy_policy_version text not null, 10 | consented_at timestamptz default now() not null, 11 | created_at timestamptz default now() not null 12 | ); 13 | -- Enable RLS 14 | alter table user_consents enable row level security; 15 | -- Create indexes 16 | create index user_consents_user_id_idx on user_consents(user_id); 17 | -- Comments 18 | comment on table user_consents is 'Stores user consent records for privacy policy and other terms'; 19 | comment on column user_consents.privacy_policy_version is 'Version or date of the privacy policy that was accepted'; 20 | -- RLS Policies for authenticated users 21 | create policy "Users can view their own consents" on user_consents for 22 | select to authenticated using (auth.uid() = user_id); 23 | create policy "Users can insert their own consents" on user_consents for 24 | insert to authenticated with check (auth.uid() = user_id); 25 | -- RLS Policies for anonymous users (view only for verification purposes) 26 | create policy "Anonymous users cannot view consents" on user_consents for 27 | select to anon using (false); 28 | create policy "Anonymous users cannot insert consents" on user_consents for 29 | insert to anon with check (false); -------------------------------------------------------------------------------- /tests/setup/types.d.ts: -------------------------------------------------------------------------------- 1 | import '@testing-library/jest-dom'; 2 | 3 | declare module 'vitest' { 4 | // interface Assertion extends jest.Matchers {} 5 | // interface AsymmetricMatchersContaining extends jest.Matchers {} 6 | } 7 | -------------------------------------------------------------------------------- /tests/setup/vitest.setup.ts: 
-------------------------------------------------------------------------------- 1 | import '@testing-library/jest-dom'; 2 | import { afterAll, afterEach, beforeAll, vi } from 'vitest'; 3 | import { setupServer } from 'msw/node'; 4 | import { cleanup } from '@testing-library/react'; 5 | 6 | // Add any global mocks here that should be available for all tests 7 | const server = setupServer(); 8 | 9 | // Start MSW server before all tests 10 | beforeAll(() => server.listen({ onUnhandledRequest: 'warn' })); 11 | 12 | // Clean up after each test 13 | afterEach(() => { 14 | cleanup(); 15 | server.resetHandlers(); 16 | }); 17 | 18 | // Close MSW server after all tests 19 | afterAll(() => server.close()); 20 | 21 | // Global mock for window.matchMedia 22 | Object.defineProperty(window, 'matchMedia', { 23 | writable: true, 24 | value: vi.fn().mockImplementation((query: string) => ({ 25 | matches: false, 26 | media: query, 27 | onchange: null, 28 | addListener: vi.fn(), 29 | removeListener: vi.fn(), 30 | addEventListener: vi.fn(), 31 | removeEventListener: vi.fn(), 32 | dispatchEvent: vi.fn(), 33 | })), 34 | }); 35 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "astro/tsconfigs/strict", 3 | "include": [".astro/types.d.ts", "**/*"], 4 | "exclude": ["dist"], 5 | "compilerOptions": { 6 | "jsx": "react-jsx", 7 | "jsxImportSource": "react", 8 | "baseUrl": ".", 9 | "paths": { 10 | "@/*": ["./src/*"] 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /vitest.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vitest/config'; 2 | import react from '@astrojs/react'; 3 | import { configDefaults } from 'vitest/config'; 4 | import path from 'path'; 5 | 6 | export default defineConfig({ 7 | plugins: [react()], 8 | test: { 9 | globals: true, 10 | environment: 'jsdom', 11 | setupFiles: ['./tests/setup/vitest.setup.ts'], 12 | exclude: [...configDefaults.exclude, 'e2e/**'], 13 | coverage: { 14 | provider: 'v8', 15 | reporter: ['text', 'json', 'html'], 16 | exclude: ['**/node_modules/**', '**/tests/**', '**/dist/**'], 17 | }, 18 | }, 19 | resolve: { 20 | alias: { 21 | '@': path.resolve(__dirname, './src'), 22 | }, 23 | }, 24 | }); 25 | --------------------------------------------------------------------------------
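For reference, a minimal unit test that the Vitest configuration above would discover (jsdom environment, `./tests/setup/vitest.setup.ts` as setup file, `@` aliased to `./src`). The file path and the input strings are illustrative examples; only `slugify` itself comes from `src/utils/slugify.ts`.

// Hypothetical file: src/utils/__tests__/slugify.test.ts
import { describe, expect, it } from 'vitest';
import { slugify } from '@/utils/slugify'; // '@' resolves to ./src via the alias in vitest.config.ts

describe('slugify', () => {
  it('lowercases, strips diacritics and collapses separators', () => {
    // Spaces become hyphens, then runs of hyphens collapse to one
    expect(slugify('Frontend - React - React Router')).toBe('frontend-react-react-router');
    // Accented characters are decomposed, combining marks removed, and '!' dropped
    expect(slugify('Świetny Projekt!')).toBe('swietny-projekt');
  });
});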