├── .cursor └── rules │ ├── error-handling.mdc │ ├── logging.mdc │ ├── naming-conventions.mdc │ └── project-structure.mdc ├── .env.example ├── .gitignore ├── .shops.example.json ├── CLAUDE.md ├── README.md ├── REFACTORING.md ├── cli.js ├── codehawk.json ├── constants.js ├── eslint.config.cjs ├── graphql ├── CollectionCreate.graphql.js ├── CollectionDelete.graphql.js ├── CollectionFetchAll.graphql.js ├── CollectionFetchByHandle.graphql.js ├── CollectionFetchById.graphql.js ├── CollectionUpdate.graphql.js ├── MetafieldDefinitionCreate.graphql.js ├── MetafieldDefinitionDelete.graphql.js ├── MetafieldDefinitionUpdate.graphql.js ├── MetafieldDefinitionsFetch.graphql.js ├── MetaobjectCreate.graphql.js ├── MetaobjectCreateDefinition.graphql.js ├── MetaobjectDefinitionIdFetch.graphql.js ├── MetaobjectDefinitionTypeFetch.graphql.js ├── MetaobjectFetch.graphql.js ├── MetaobjectFetchAllDefinitions.graphql.js ├── MetaobjectFetchById.graphql.js ├── MetaobjectFetchDefinitionById.graphql.js ├── MetaobjectUpdate.graphql.js ├── MetaobjectUpdateDefinition.graphql.js ├── PageCreate.graphql.js ├── PageFetchAll.graphql.js ├── PageUpdate.graphql.js ├── ProductDelete.graphql.js ├── ProductFetchAll.graphql.js ├── ProductFetchByHandle.graphql.js └── index.js ├── jest.config.js ├── package-lock.json ├── package.json ├── strategies ├── AllResourcesSyncStrategy.js ├── BaseMetafieldSyncStrategy.js ├── CollectionMetafieldSyncStrategy.js ├── CollectionSyncStrategy.js ├── CompanyMetafieldSyncStrategy.js ├── CustomerMetafieldSyncStrategy.js ├── EverythingSyncStrategy.js ├── MetaobjectSyncStrategy.js ├── OrderMetafieldSyncStrategy.js ├── PageSyncStrategy.js ├── ProductMetafieldSyncStrategy.js ├── ProductSyncStrategy.js └── VariantMetafieldSyncStrategy.js ├── test ├── cli.test.js ├── data-product.test.js └── define-metafields.test.js └── utils ├── CollectionRuleSetHandler.js ├── CommandSetup.js ├── ErrorHandler.js ├── Logger.js ├── MetafieldFilterUtils.js ├── MetafieldHandler.js ├── 
MetafieldReferenceHandler.js ├── MetaobjectDataHandler.js ├── MetaobjectDefinitionHandler.js ├── ProductBaseHandler.js ├── ProductBatchProcessor.js ├── ProductImageHandler.js ├── ProductMetafieldProcessor.js ├── ProductOperationHandler.js ├── ProductPublicationHandler.js ├── ProductVariantHandler.js ├── ShopConfig.js ├── ShopifyClient.js ├── ShopifyIDUtils.js ├── StrategyLoader.js ├── SyncResultTracker.js ├── Validators.js └── collection ├── CollectionFetchHandler.js ├── CollectionMetafieldHandler.js ├── CollectionOperationHandler.js ├── CollectionProductHandler.js └── CollectionPublicationHandler.js /.cursor/rules/error-handling.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: 3 | globs: 4 | alwaysApply: true 5 | --- 6 | # Error Handling 7 | 8 | This project follows specific error handling patterns to maintain clean code and predictable error flows. 9 | 10 | ## Core Principles 11 | 12 | - **Don't add try/catch blocks** unless specifically requested 13 | - Let errors bubble up to be handled by higher-level error handlers 14 | - The main error handling occurs in the CLI entry point [cli.js](mdc:cli.js) 15 | - Use the centralized logger for error reporting [utils/Logger.js](mdc:utils/Logger.js) 16 | 17 | ## Error Flow 18 | 19 | 1. Low-level utility functions should throw errors rather than catching them 20 | 2. Strategy classes should only catch errors they can specifically handle 21 | 3. The main CLI entry point has try/catch blocks that: 22 | - Log the error using the logger 23 | - Close log files properly 24 | - Exit with appropriate error codes 25 | 26 | ## Logging Errors 27 | 28 | When reporting errors, use the logger's error function: 29 | 30 | ```javascript 31 | logger.error(`Error message: ${error.message}`); 32 | ``` 33 | 34 | ## API Error Handling 35 | 36 | When dealing with Shopify API errors, use the existing utilities in ShopifyClient to handle and log them appropriately. 
Let API errors bubble up unless there's a specific recovery action. 37 | -------------------------------------------------------------------------------- /.cursor/rules/logging.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: 3 | globs: 4 | alwaysApply: true 5 | --- 6 | You can indent sections by using logger.startSection() and logger.endSection() with [Logger.js](mdc:utils/Logger.js) 7 | To log a newline without a bullet next to it use logger.newline() 8 | Don't use `if (this.debug)` when adding in debug logging. 9 | -------------------------------------------------------------------------------- /.cursor/rules/naming-conventions.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: 3 | globs: 4 | alwaysApply: true 5 | --- 6 | # Naming Conventions 7 | 8 | ## File Naming 9 | 10 | - **Utility Files**: All files in the `utils/` directory must follow `CamelCase.js` naming format 11 | - Examples: `MetafieldHandler.js`, `ProductImageHandler.js`, `ShopifyIDUtils.js` 12 | 13 | - **Strategy Files**: Strategy classes in the `strategies/` directory should follow `CamelCase.js` naming format 14 | - Examples: `MetaobjectSyncStrategy.js`, `ProductMetafieldSyncStrategy.js` 15 | 16 | ## Class Naming 17 | 18 | Classes should match their filename: 19 | - A file named `MetafieldHandler.js` should export a class called `MetafieldHandler` 20 | 21 | ## Method Naming 22 | 23 | - Use camelCase for method names (e.g., `createMetafield()`, `updateProduct()`) 24 | - Getter methods should begin with "get" (e.g., `getProductById()`) 25 | -------------------------------------------------------------------------------- /.cursor/rules/project-structure.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: 3 | globs: 4 | alwaysApply: true 5 | --- 6 | # Project Structure 7 | 8 | This project syncs Shopify Metaobject and 
- **`cli.js`**: [cli.js](mdc:cli.js) is the main entry point for the command-line tool. It handles:
32 | - **`ProductSyncStrategy.js`**: [strategies/ProductSyncStrategy.js](mdc:strategies/ProductSyncStrategy.js) orchestrates syncing Products between Shopify stores, coordinating specialized utility classes for batch processing, CRUD operations, metafield handling, and result tracking. 33 | - **`PageSyncStrategy.js`**: [strategies/PageSyncStrategy.js](mdc:strategies/PageSyncStrategy.js) handles syncing Pages between Shopify stores. 34 | - Additional metafield sync strategies for various resources (Customers, Variants, Orders, Companies). 35 | 36 | Each strategy class typically: 37 | - Takes the wrapped source/target clients and options in its constructor. 38 | - Implements a `sync()` method called by `cli.js`. 39 | - Contains methods for fetching, creating, and updating the specific resource type. 40 | - May contain helper methods specific to that resource type. 41 | - Includes logic for listing available definitions if required options are missing. 42 | 43 | ## Utility Classes 44 | 45 | The project uses specialized utility classes to handle specific aspects of Shopify synchronization: 46 | 47 | - **`utils/`**: Contains utility classes that provide focused functionality. 48 | - **`MetafieldHandler.js`**: [utils/MetafieldHandler.js](mdc:utils/MetafieldHandler.js) manages batched metafield operations, respecting Shopify's 25-metafield limit per API call. 49 | - **`ProductImageHandler.js`**: [utils/ProductImageHandler.js](mdc:utils/ProductImageHandler.js) handles image uploads and variant image associations. 50 | - **`ProductPublicationHandler.js`**: [utils/ProductPublicationHandler.js](mdc:utils/ProductPublicationHandler.js) manages publication channels and publishing products to Shopify sales channels. 51 | - **`ProductBaseHandler.js`**: [utils/ProductBaseHandler.js](mdc:utils/ProductBaseHandler.js) provides core product CRUD operations (create, read, update, delete). 
Use the chalk package for coloring text in the CLI.
68 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | LOG_TO_FILE=true 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .shops.json 2 | node_modules 3 | logs/ 4 | .env 5 | -------------------------------------------------------------------------------- /.shops.example.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "test", 4 | "domain": "kalen-test-store.myshopify.com", 5 | "protected": false, 6 | "accessToken": "shpat_example_token_for_dev_shop" 7 | }, 8 | { 9 | "name": "demo", 10 | "domain": "metasync-demo.myshopify.com", 11 | "accessToken": "shpat_example_token_for_test_shop" 12 | } 13 | ] 14 | -------------------------------------------------------------------------------- /CLAUDE.md: -------------------------------------------------------------------------------- 1 | # CLAUDE.md 2 | 3 | This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 
- **Run tool**: `node cli.js [options]`
Create a `.shops.json` file in the root directory based on the provided `.shops.example.json` file:
53 | 54 | ## Usage 55 | 56 | MetaSync has two top‑level commands: 57 | 58 | - `definitions` – sync only the definitions (metaobjects or metafield definitions) 59 | - `data` – sync resource data only 60 | 61 | ### Common Options 62 | 63 | All commands accept the following options: 64 | 65 | - `--source ` – Source shop name (required) 66 | - `--target ` – Target shop name (defaults to source if not specified) 67 | - `--live` – Make actual changes (default is dry run) 68 | - `--debug` – Enable debug logging 69 | - `--limit ` – Limit the number of items to process (default: 3) 70 | - `--batch-size ` – Batch size for pagination (default: 25) 71 | 72 | ### Definition Commands 73 | 74 | ``` 75 | metasync definitions metafields --resource --namespace [options] 76 | metasync definitions metaobjects --type [options] 77 | ``` 78 | 79 | Options for `metafields`: 80 | 81 | - `--resource ` – Resource type (products, companies, orders, variants, customers, collections or `all`) 82 | - `--namespace ` – Namespace to sync (`all` or comma separated list) 83 | - `--key ` – Specific definition key (`namespace.key`) 84 | - `--delete` – Delete mode, remove definitions from the target store 85 | 86 | Options for `metaobjects`: 87 | 88 | - `--type ` – Metaobject definition type to sync 89 | 90 | ### Data Commands 91 | 92 | ``` 93 | metasync data [options] 94 | ``` 95 | 96 | Supported resources: `products`, `metaobjects`, `pages`, `collections`, `customers`, `orders`, `variants`, or `all` to sync everything in one run. 
97 | 98 | Resource‑specific options: 99 | 100 | **products** 101 | - `--handle ` – Sync a single product by handle 102 | - `--id ` – Sync a single product by ID 103 | - `--namespace ` – Sync only metafields in this namespace 104 | - `--key ` – Sync only metafields with this key 105 | - `--force-recreate` – Delete and recreate products instead of updating 106 | - `--delete` – Remove matching products from the target store 107 | - `--batch-size ` – Number of products per batch (default: 25) 108 | - `--start-cursor ` – Pagination cursor for resuming interrupted syncs 109 | 110 | **metaobjects** 111 | - `--type ` – Metaobject definition type to sync (required) 112 | - `--handle ` – Sync a single metaobject by handle 113 | - `--delete` – Remove matching metaobjects from the target store 114 | 115 | **pages**, **collections**, **customers**, **orders**, **variants** 116 | - `--handle ` / `--id ` (where applicable) 117 | - `--type ` for collections (`manual` or `smart`) 118 | - `--delete` – Remove matching resources from the target store 119 | 120 | The `data all` command accepts `--batch-size` to control pagination across all resources. 
121 | 122 | ### Examples 123 | 124 | ```sh 125 | # Dry run product sync between shops 126 | metasync data products --source my-dev-shop --target my-test-shop 127 | 128 | # Apply changes 129 | metasync data products --source my-dev-shop --target my-test-shop --live 130 | 131 | # Sync multiple namespaces at once 132 | metasync definitions metafields --resource products --namespace custom1,custom2 --source my-dev-shop --target my-test-shop 133 | 134 | # Delete metafield definitions from target 135 | metasync definitions metafields --resource products --namespace custom --delete --live --source my-dev-shop --target my-test-shop 136 | 137 | # Sync a single metaobject by handle 138 | metasync data metaobjects --type blog --handle introduction --source my-dev-shop --target my-test-shop --live 139 | 140 | # Limit number of products and enable debug logging 141 | metasync data products --source my-dev-shop --target my-test-shop --limit 10 --debug 142 | 143 | # Resume product sync from a saved cursor 144 | metasync data products --source my-dev-shop --target my-test-shop --start-cursor --live 145 | ``` 146 | 147 | For additional examples and the most up‑to‑date options run `metasync --help`. 148 | 149 | ## Safety Features 150 | 151 | - Runs in dry run mode by default 152 | - Shops are protected by default and require `"protected": false` to allow writes 153 | - Full logging of all synchronization actions 154 | 155 | ## ISSUES 156 | 157 | Variant option images aren't being uploaded properly upon create, but upon update they are. 158 | -------------------------------------------------------------------------------- /REFACTORING.md: -------------------------------------------------------------------------------- 1 | # Product Sync Strategy Refactoring 2 | 3 | ## Overview 4 | 5 | The `ProductSyncStrategy.js` file was refactored to reduce complexity and improve maintainability by: 6 | 7 | 1. Extracting specialized concerns into separate utility classes 8 | 2. 
Breaking down large methods into smaller, focused methods 9 | 3. Separating data processing from business logic 10 | 4. Implementing better patterns for tracking results 11 | 12 | ## New Utility Classes 13 | 14 | The following utility classes were created to handle specific concerns: 15 | 16 | ### 1. ProductBatchProcessor (utils/ProductBatchProcessor.js) 17 | 18 | Handles product batch fetching and pagination: 19 | - Fetches products in batches with pagination support 20 | - Handles product retrieval by handle 21 | - Processes GraphQL responses to normalize the data structure 22 | 23 | ### 2. SyncResultTracker (utils/SyncResultTracker.js) 24 | 25 | Manages tracking of operation results: 26 | - Tracks creation, update, deletion, and failure counts 27 | - Merges metafield statistics from various operations 28 | - Provides summary logging and result formatting 29 | 30 | ### 3. MetafieldFilterUtils (utils/MetafieldFilterUtils.js) 31 | 32 | Utility for metafield filtering operations: 33 | - Filters metafields based on namespace and key constraints 34 | - Handles special cases like 'all' namespace 35 | - Supports various filtering patterns including namespace.key format 36 | 37 | ### 4. ProductMetafieldProcessor (utils/ProductMetafieldProcessor.js) 38 | 39 | Processes metafields for products: 40 | - Filters metafields using MetafieldFilterUtils 41 | - Transforms reference metafields 42 | - Logs detailed metafield processing information 43 | - Syncs metafields to the target shop 44 | 45 | ### 5. 
ProductOperationHandler (utils/ProductOperationHandler.js) 46 | 47 | Orchestrates product CRUD operations: 48 | - Creates products with their associated data (variants, images, metafields) 49 | - Updates products and their associated data 50 | - Handles dry run logging 51 | - Returns structured operation results 52 | 53 | ## Refactored ProductSyncStrategy 54 | 55 | The ProductSyncStrategy class was simplified to: 56 | - Initialize and coordinate utility classes 57 | - Orchestrate the overall sync process 58 | - Process products based on their existence and configured options 59 | - Track and report overall results 60 | 61 | ## Benefits 62 | 63 | 1. **Improved Maintainability**: Each class now has a clear, single responsibility 64 | 2. **Reduced File Size**: The strategy file is now much smaller (reduced from 1000+ lines to <250) 65 | 3. **Better Testability**: Individual components can be tested in isolation 66 | 4. **Enhanced Reusability**: Utility classes can be used by other strategies 67 | 5. **Clearer Code Organization**: Functionality is grouped by purpose rather than mixed together 68 | 69 | ## Usage 70 | 71 | To use the refactored code, no changes are necessary to external interfaces. The ProductSyncStrategy 72 | class maintains the same constructor signature and public methods, ensuring backward compatibility. 
73 | -------------------------------------------------------------------------------- /codehawk.json: -------------------------------------------------------------------------------- 1 | { 2 | "skipDirectories": ["/node_modules"], 3 | "minimumThreshold": "4" 4 | } 5 | -------------------------------------------------------------------------------- /constants.js: -------------------------------------------------------------------------------- 1 | const logger = require("./utils/Logger"); 2 | /** 3 | * Centralized constants for the application 4 | */ 5 | 6 | // Shopify API version to use throughout the application 7 | const SHOPIFY_API_VERSION = '2025-04'; 8 | 9 | module.exports = { 10 | SHOPIFY_API_VERSION 11 | }; 12 | -------------------------------------------------------------------------------- /eslint.config.cjs: -------------------------------------------------------------------------------- 1 | module.exports = [ 2 | { 3 | files: ['**/*.js'], 4 | languageOptions: { 5 | ecmaVersion: 2021, 6 | sourceType: 'script', // or 'module' if using `import` 7 | }, 8 | rules: { 9 | 'no-unused-vars': 'warn', 10 | 'no-console': 'off', 11 | }, 12 | }, 13 | ]; 14 | -------------------------------------------------------------------------------- /graphql/CollectionCreate.graphql.js: -------------------------------------------------------------------------------- 1 | const logger = require("../utils/Logger"); 2 | /** 3 | * GraphQL mutation to create a collection 4 | */ 5 | module.exports = `#graphql 6 | mutation CreateCollection($input: CollectionInput!) 
{ 7 | collectionCreate(input: $input) { 8 | collection { 9 | id 10 | title 11 | handle 12 | ruleSet { 13 | rules { 14 | column 15 | condition 16 | relation 17 | } 18 | appliedDisjunctively 19 | } 20 | } 21 | userErrors { 22 | field 23 | message 24 | } 25 | } 26 | } 27 | `; 28 | -------------------------------------------------------------------------------- /graphql/CollectionDelete.graphql.js: -------------------------------------------------------------------------------- 1 | module.exports = /* GraphQL */ ` 2 | mutation DeleteCollection($input: CollectionDeleteInput!) { 3 | collectionDelete(input: $input) { 4 | deletedCollectionId 5 | shop { 6 | id 7 | } 8 | userErrors { 9 | field 10 | message 11 | } 12 | } 13 | } 14 | `; 15 | -------------------------------------------------------------------------------- /graphql/CollectionFetchAll.graphql.js: -------------------------------------------------------------------------------- 1 | const logger = require("../utils/Logger"); 2 | /** 3 | * GraphQL query to fetch all collections with pagination support 4 | */ 5 | module.exports = `#graphql 6 | query GetCollections($first: Int!, $after: String, $query: String) { 7 | collections(first: $first, after: $after, query: $query) { 8 | pageInfo { 9 | hasNextPage 10 | endCursor 11 | } 12 | edges { 13 | node { 14 | id 15 | title 16 | handle 17 | description 18 | descriptionHtml 19 | seo { 20 | title 21 | description 22 | } 23 | image { 24 | id 25 | url 26 | altText 27 | } 28 | templateSuffix 29 | sortOrder 30 | updatedAt 31 | ruleSet { 32 | rules { 33 | column 34 | condition 35 | relation 36 | conditionObject { 37 | ... 
on CollectionRuleMetafieldCondition { 38 | metafieldDefinition { 39 | id 40 | namespace 41 | key 42 | ownerType 43 | } 44 | } 45 | } 46 | } 47 | appliedDisjunctively 48 | } 49 | publications(first: 25) { 50 | edges { 51 | node { 52 | channel { 53 | id 54 | handle 55 | name 56 | } 57 | isPublished 58 | publishDate 59 | } 60 | } 61 | } 62 | metafields(first: 50) { 63 | edges { 64 | node { 65 | id 66 | namespace 67 | key 68 | type 69 | value 70 | definition { 71 | id 72 | namespace 73 | key 74 | ownerType 75 | type { 76 | name 77 | } 78 | } 79 | } 80 | } 81 | } 82 | } 83 | } 84 | } 85 | } 86 | `; 87 | -------------------------------------------------------------------------------- /graphql/CollectionFetchByHandle.graphql.js: -------------------------------------------------------------------------------- 1 | const logger = require("../utils/Logger"); 2 | /** 3 | * GraphQL query to fetch a collection by its handle 4 | */ 5 | module.exports = `#graphql 6 | query GetCollectionByHandle($handle: String!) { 7 | collectionByHandle(handle: $handle) { 8 | id 9 | title 10 | handle 11 | description 12 | descriptionHtml 13 | seo { 14 | title 15 | description 16 | } 17 | image { 18 | id 19 | url 20 | altText 21 | } 22 | templateSuffix 23 | sortOrder 24 | updatedAt 25 | ruleSet { 26 | rules { 27 | column 28 | condition 29 | relation 30 | conditionObject { 31 | ... 
on CollectionRuleMetafieldCondition { 32 | metafieldDefinition { 33 | id 34 | namespace 35 | key 36 | ownerType 37 | } 38 | } 39 | } 40 | } 41 | appliedDisjunctively 42 | } 43 | publications(first: 25) { 44 | edges { 45 | node { 46 | channel { 47 | id 48 | handle 49 | name 50 | } 51 | isPublished 52 | publishDate 53 | } 54 | } 55 | } 56 | metafields(first: 50) { 57 | edges { 58 | node { 59 | id 60 | namespace 61 | key 62 | type 63 | value 64 | definition { 65 | id 66 | namespace 67 | key 68 | ownerType 69 | type { 70 | name 71 | } 72 | } 73 | } 74 | } 75 | } 76 | } 77 | } 78 | `; 79 | -------------------------------------------------------------------------------- /graphql/CollectionFetchById.graphql.js: -------------------------------------------------------------------------------- 1 | const logger = require("../utils/Logger"); 2 | /** 3 | * GraphQL query to fetch a collection by its ID 4 | */ 5 | module.exports = `#graphql 6 | query GetCollectionById($id: ID!) { 7 | collection(id: $id) { 8 | handle 9 | } 10 | } 11 | `; 12 | -------------------------------------------------------------------------------- /graphql/CollectionUpdate.graphql.js: -------------------------------------------------------------------------------- 1 | const logger = require("../utils/Logger"); 2 | /** 3 | * GraphQL mutation to update a collection 4 | */ 5 | module.exports = `#graphql 6 | mutation UpdateCollection($input: CollectionInput!) 
{ 7 | collectionUpdate(input: $input) { 8 | collection { 9 | id 10 | title 11 | handle 12 | ruleSet { 13 | rules { 14 | column 15 | condition 16 | relation 17 | } 18 | appliedDisjunctively 19 | } 20 | } 21 | userErrors { 22 | field 23 | message 24 | } 25 | } 26 | } 27 | `; 28 | -------------------------------------------------------------------------------- /graphql/MetafieldDefinitionCreate.graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * GraphQL mutation to create a metafield definition 3 | */ 4 | module.exports = `#graphql 5 | mutation CreateMetafieldDefinition($definition: MetafieldDefinitionInput!) { 6 | metafieldDefinitionCreate(definition: $definition) { 7 | createdDefinition { id namespace key } 8 | userErrors { field message code } 9 | } 10 | } 11 | `; 12 | -------------------------------------------------------------------------------- /graphql/MetafieldDefinitionDelete.graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * GraphQL mutation to delete a metafield definition 3 | */ 4 | module.exports = `#graphql 5 | mutation DeleteMetafieldDefinition($id: ID!) { 6 | metafieldDefinitionDelete(id: $id) { 7 | deletedDefinitionId 8 | userErrors { field message code } 9 | } 10 | } 11 | `; 12 | -------------------------------------------------------------------------------- /graphql/MetafieldDefinitionUpdate.graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * GraphQL mutation to update a metafield definition 3 | */ 4 | module.exports = `#graphql 5 | mutation UpdateMetafieldDefinition($definition: MetafieldDefinitionUpdateInput!) 
{ 6 | metafieldDefinitionUpdate(definition: $definition) { 7 | updatedDefinition { id namespace key } 8 | userErrors { field message code } 9 | } 10 | } 11 | `; 12 | -------------------------------------------------------------------------------- /graphql/MetafieldDefinitionsFetch.graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * GraphQL query to fetch metafield definitions for a specific owner type 3 | * Supports filtering by namespace and key 4 | */ 5 | module.exports = `#graphql 6 | query FetchMetafieldDefinitions($ownerType: MetafieldOwnerType!, $namespace: String, $key: String) { 7 | metafieldDefinitions(first: 100, ownerType: $ownerType, namespace: $namespace, key: $key) { 8 | nodes { 9 | id 10 | namespace 11 | key 12 | name 13 | description 14 | type { name } 15 | validations { name value } 16 | access { admin storefront } 17 | pinnedPosition 18 | capabilities { 19 | smartCollectionCondition { enabled } 20 | adminFilterable { enabled } 21 | uniqueValues { enabled } 22 | } 23 | } 24 | } 25 | } 26 | `; 27 | -------------------------------------------------------------------------------- /graphql/MetaobjectCreate.graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * GraphQL mutation for creating a metaobject 3 | */ 4 | 5 | const CREATE_METAOBJECT = `#graphql 6 | mutation createMetaobject($metaobject: MetaobjectCreateInput!) 
{ 7 | metaobjectCreate(metaobject: $metaobject) { 8 | metaobject { 9 | id 10 | handle 11 | } 12 | userErrors { 13 | field 14 | message 15 | code 16 | } 17 | } 18 | } 19 | `; 20 | 21 | module.exports = CREATE_METAOBJECT; 22 | -------------------------------------------------------------------------------- /graphql/MetaobjectCreateDefinition.graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * GraphQL mutation for creating a metaobject definition 3 | */ 4 | 5 | const CREATE_METAOBJECT_DEFINITION = `#graphql 6 | mutation createMetaobjectDefinition($definition: MetaobjectDefinitionCreateInput!) { 7 | metaobjectDefinitionCreate(definition: $definition) { 8 | metaobjectDefinition { 9 | id 10 | type 11 | } 12 | userErrors { 13 | field 14 | message 15 | code 16 | } 17 | } 18 | } 19 | `; 20 | 21 | module.exports = CREATE_METAOBJECT_DEFINITION; 22 | -------------------------------------------------------------------------------- /graphql/MetaobjectDefinitionIdFetch.graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * GraphQL query to get a metaobject definition's ID by its type 3 | */ 4 | module.exports = `#graphql 5 | query GetMetaobjectDefinitionId($type: String!) { 6 | metaobjectDefinitionByType(type: $type) { 7 | id 8 | } 9 | } 10 | `; 11 | -------------------------------------------------------------------------------- /graphql/MetaobjectDefinitionTypeFetch.graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * GraphQL query to get the type of a metaobject definition by its ID 3 | */ 4 | module.exports = `#graphql 5 | query GetMetaobjectDefinitionType($id: ID!) 
{ 6 | metaobjectDefinition(id: $id) { 7 | type 8 | } 9 | } 10 | `; 11 | -------------------------------------------------------------------------------- /graphql/MetaobjectFetch.graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * GraphQL query for fetching metaobjects for a specific type 3 | */ 4 | 5 | const FETCH_METAOBJECTS = `#graphql 6 | query GetMetaobjects($type: String!) { 7 | metaobjects(type: $type, first: 100) { 8 | edges { 9 | node { 10 | id 11 | handle 12 | type 13 | displayName 14 | fields { 15 | key 16 | value 17 | type 18 | reference { 19 | ... on MediaImage { 20 | image { 21 | url 22 | } 23 | } 24 | ... on Metaobject { 25 | id 26 | handle 27 | type 28 | } 29 | } 30 | } 31 | capabilities { 32 | publishable { 33 | status 34 | } 35 | } 36 | } 37 | } 38 | } 39 | } 40 | `; 41 | 42 | module.exports = FETCH_METAOBJECTS; 43 | -------------------------------------------------------------------------------- /graphql/MetaobjectFetchAllDefinitions.graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * GraphQL query for fetching all metaobject definitions 3 | */ 4 | 5 | const FETCH_ALL_METAOBJECT_DEFINITIONS = `#graphql 6 | query FetchAllMetaobjectDefinitions { 7 | metaobjectDefinitions(first: 100) { 8 | nodes { 9 | id 10 | type 11 | name 12 | description 13 | fieldDefinitions { 14 | key 15 | name 16 | description 17 | required 18 | type { 19 | name 20 | } 21 | validations { 22 | name 23 | value 24 | } 25 | } 26 | capabilities { 27 | publishable { 28 | enabled 29 | } 30 | } 31 | access { 32 | admin 33 | storefront 34 | } 35 | } 36 | } 37 | } 38 | `; 39 | 40 | module.exports = FETCH_ALL_METAOBJECT_DEFINITIONS; 41 | -------------------------------------------------------------------------------- /graphql/MetaobjectFetchById.graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * GraphQL query for fetching a 
single metaobject by ID 3 | */ 4 | 5 | const FETCH_METAOBJECT_BY_ID = `#graphql 6 | query GetMetaobjectById($id: ID!) { 7 | metaobject(id: $id) { 8 | id 9 | handle 10 | type 11 | displayName 12 | fields { 13 | key 14 | value 15 | type 16 | reference { 17 | ... on MediaImage { 18 | image { 19 | url 20 | } 21 | } 22 | ... on Metaobject { 23 | id 24 | handle 25 | type 26 | } 27 | } 28 | } 29 | capabilities { 30 | publishable { 31 | status 32 | } 33 | } 34 | } 35 | } 36 | `; 37 | 38 | module.exports = FETCH_METAOBJECT_BY_ID; 39 | -------------------------------------------------------------------------------- /graphql/MetaobjectFetchDefinitionById.graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * GraphQL query for fetching a metaobject definition by ID 3 | */ 4 | 5 | const FETCH_METAOBJECT_DEFINITION_BY_ID = `#graphql 6 | query FetchMetaobjectDefinitionById($id: ID!) { 7 | metaobjectDefinition(id: $id) { 8 | id 9 | type 10 | name 11 | description 12 | fieldDefinitions { 13 | key 14 | name 15 | description 16 | required 17 | type { 18 | name 19 | } 20 | validations { 21 | name 22 | value 23 | } 24 | } 25 | capabilities { 26 | publishable { 27 | enabled 28 | } 29 | } 30 | access { 31 | admin 32 | storefront 33 | } 34 | } 35 | } 36 | `; 37 | 38 | module.exports = FETCH_METAOBJECT_DEFINITION_BY_ID; 39 | -------------------------------------------------------------------------------- /graphql/MetaobjectUpdate.graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * GraphQL mutation for updating a metaobject 3 | */ 4 | 5 | const UPDATE_METAOBJECT = `#graphql 6 | mutation updateMetaobject($id: ID!, $metaobject: MetaobjectUpdateInput!) 
{ 7 | metaobjectUpdate(id: $id, metaobject: $metaobject) { 8 | metaobject { 9 | id 10 | handle 11 | } 12 | userErrors { 13 | field 14 | message 15 | code 16 | } 17 | } 18 | } 19 | `; 20 | 21 | module.exports = UPDATE_METAOBJECT; 22 | -------------------------------------------------------------------------------- /graphql/MetaobjectUpdateDefinition.graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * GraphQL mutation for updating a metaobject definition 3 | */ 4 | 5 | const UPDATE_METAOBJECT_DEFINITION = `#graphql 6 | mutation updateMetaobjectDefinition($id: ID!, $definition: MetaobjectDefinitionUpdateInput!) { 7 | metaobjectDefinitionUpdate(id: $id, definition: $definition) { 8 | metaobjectDefinition { 9 | id 10 | type 11 | } 12 | userErrors { 13 | field 14 | message 15 | code 16 | } 17 | } 18 | } 19 | `; 20 | 21 | module.exports = UPDATE_METAOBJECT_DEFINITION; 22 | -------------------------------------------------------------------------------- /graphql/PageCreate.graphql.js: -------------------------------------------------------------------------------- 1 | const logger = require("../utils/Logger"); 2 | /** 3 | * GraphQL mutation to create a page in a Shopify store 4 | */ 5 | module.exports = `#graphql 6 | mutation CreatePage($page: PageCreateInput!) { 7 | pageCreate(page: $page) { 8 | page { 9 | id 10 | title 11 | handle 12 | templateSuffix 13 | isPublished 14 | } 15 | userErrors { 16 | field 17 | message 18 | } 19 | } 20 | } 21 | `; 22 | -------------------------------------------------------------------------------- /graphql/PageFetchAll.graphql.js: -------------------------------------------------------------------------------- 1 | const logger = require("../utils/Logger"); 2 | /** 3 | * GraphQL query to fetch pages from a Shopify store 4 | */ 5 | module.exports = `#graphql 6 | query GetPages($first: Int!) 
{ 7 | pages(first: $first) { 8 | edges { 9 | node { 10 | id 11 | title 12 | handle 13 | bodySummary 14 | body 15 | templateSuffix 16 | isPublished 17 | createdAt 18 | updatedAt 19 | publishedAt 20 | } 21 | } 22 | pageInfo { 23 | hasNextPage 24 | endCursor 25 | } 26 | } 27 | } 28 | `; 29 | -------------------------------------------------------------------------------- /graphql/PageUpdate.graphql.js: -------------------------------------------------------------------------------- 1 | const logger = require("../utils/Logger"); 2 | /** 3 | * GraphQL mutation to update a page in a Shopify store 4 | */ 5 | module.exports = `#graphql 6 | mutation UpdatePage($id: ID!, $page: PageUpdateInput!) { 7 | pageUpdate(id: $id, page: $page) { 8 | page { 9 | id 10 | title 11 | handle 12 | templateSuffix 13 | isPublished 14 | } 15 | userErrors { 16 | field 17 | message 18 | } 19 | } 20 | } 21 | `; 22 | -------------------------------------------------------------------------------- /graphql/ProductDelete.graphql.js: -------------------------------------------------------------------------------- 1 | module.exports = /* GraphQL */ ` 2 | mutation DeleteProduct($input: ProductDeleteInput!) 
{ 3 | productDelete(input: $input) { 4 | deletedProductId 5 | shop { 6 | id 7 | } 8 | userErrors { 9 | field 10 | message 11 | } 12 | } 13 | } 14 | `; 15 | -------------------------------------------------------------------------------- /graphql/ProductFetchAll.graphql.js: -------------------------------------------------------------------------------- 1 | const logger = require("../utils/Logger"); 2 | /** 3 | * GraphQL query to fetch products with their variants, images, metafields, and publications 4 | */ 5 | module.exports = `#graphql 6 | query GetProducts($first: Int!, $query: String, $after: String) { 7 | products(first: $first, query: $query, after: $after) { 8 | edges { 9 | node { 10 | id 11 | title 12 | handle 13 | description 14 | descriptionHtml 15 | vendor 16 | productType 17 | status 18 | tags 19 | options { 20 | name 21 | values 22 | } 23 | publications(first: 20) { 24 | edges { 25 | node { 26 | channel { 27 | id 28 | name 29 | handle 30 | } 31 | publishDate 32 | isPublished 33 | } 34 | } 35 | } 36 | images(first: 10) { 37 | edges { 38 | node { 39 | id 40 | src 41 | altText 42 | width 43 | height 44 | } 45 | } 46 | } 47 | variants(first: 100) { 48 | edges { 49 | node { 50 | id 51 | title 52 | sku 53 | price 54 | compareAtPrice 55 | inventoryQuantity 56 | inventoryPolicy 57 | inventoryItem { 58 | id 59 | tracked 60 | requiresShipping 61 | measurement { 62 | weight { 63 | value 64 | unit 65 | } 66 | } 67 | } 68 | taxable 69 | barcode 70 | selectedOptions { 71 | name 72 | value 73 | } 74 | image { 75 | id 76 | src 77 | altText 78 | width 79 | height 80 | } 81 | metafields(first: 50) { 82 | edges { 83 | node { 84 | id 85 | namespace 86 | key 87 | value 88 | type 89 | } 90 | } 91 | } 92 | } 93 | } 94 | } 95 | metafields(first: 100) { 96 | edges { 97 | node { 98 | id 99 | namespace 100 | key 101 | value 102 | type 103 | } 104 | } 105 | } 106 | } 107 | } 108 | pageInfo { 109 | hasNextPage 110 | endCursor 111 | } 112 | } 113 | } 114 | `; 115 | 
-------------------------------------------------------------------------------- /graphql/ProductFetchByHandle.graphql.js: -------------------------------------------------------------------------------- 1 | const logger = require("../utils/Logger"); 2 | /** 3 | * GraphQL query to fetch a product by its handle with all details 4 | */ 5 | module.exports = `#graphql 6 | query GetProductByHandle($handle: String!) { 7 | productByHandle(handle: $handle) { 8 | id 9 | title 10 | handle 11 | description 12 | descriptionHtml 13 | vendor 14 | productType 15 | status 16 | tags 17 | options { 18 | name 19 | values 20 | } 21 | publications(first: 20) { 22 | edges { 23 | node { 24 | channel { 25 | id 26 | name 27 | handle 28 | } 29 | publishDate 30 | isPublished 31 | } 32 | } 33 | } 34 | images(first: 10) { 35 | edges { 36 | node { 37 | id 38 | src 39 | altText 40 | width 41 | height 42 | } 43 | } 44 | } 45 | variants(first: 100) { 46 | edges { 47 | node { 48 | id 49 | title 50 | sku 51 | price 52 | compareAtPrice 53 | inventoryQuantity 54 | inventoryPolicy 55 | inventoryItem { 56 | id 57 | tracked 58 | requiresShipping 59 | measurement { 60 | weight { 61 | value 62 | unit 63 | } 64 | } 65 | } 66 | taxable 67 | barcode 68 | selectedOptions { 69 | name 70 | value 71 | } 72 | image { 73 | id 74 | src 75 | altText 76 | width 77 | height 78 | } 79 | metafields(first: 50) { 80 | edges { 81 | node { 82 | id 83 | namespace 84 | key 85 | value 86 | type 87 | } 88 | } 89 | } 90 | } 91 | } 92 | } 93 | metafields(first: 100) { 94 | edges { 95 | node { 96 | id 97 | namespace 98 | key 99 | value 100 | type 101 | } 102 | } 103 | } 104 | } 105 | } 106 | `; 107 | -------------------------------------------------------------------------------- /graphql/index.js: -------------------------------------------------------------------------------- 1 | const logger = require("../utils/Logger"); 2 | /** 3 | * GraphQL queries index file 4 | * 5 | * This file exports all GraphQL queries in one convenient 
place for easier importing. 6 | */ 7 | 8 | const CollectionDelete = require('./CollectionDelete.graphql'); 9 | const CollectionCreate = require('./CollectionCreate.graphql'); 10 | const CollectionUpdate = require('./CollectionUpdate.graphql'); 11 | const CollectionFetchById = require('./CollectionFetchById.graphql'); 12 | const CollectionFetchByHandle = require('./CollectionFetchByHandle.graphql'); 13 | const CollectionFetchAll = require('./CollectionFetchAll.graphql'); 14 | const ProductDelete = require('./ProductDelete.graphql'); 15 | const ProductFetchByHandle = require('./ProductFetchByHandle.graphql'); 16 | const ProductFetchAll = require('./ProductFetchAll.graphql'); 17 | const PageFetchAll = require('./PageFetchAll.graphql'); 18 | const PageCreate = require('./PageCreate.graphql'); 19 | const PageUpdate = require('./PageUpdate.graphql'); 20 | 21 | // Metafield operations 22 | const MetafieldDefinitionsFetch = require('./MetafieldDefinitionsFetch.graphql'); 23 | const MetafieldDefinitionCreate = require('./MetafieldDefinitionCreate.graphql'); 24 | const MetafieldDefinitionUpdate = require('./MetafieldDefinitionUpdate.graphql'); 25 | const MetafieldDefinitionDelete = require('./MetafieldDefinitionDelete.graphql'); 26 | const MetaobjectDefinitionTypeFetch = require('./MetaobjectDefinitionTypeFetch.graphql'); 27 | const MetaobjectDefinitionIdFetch = require('./MetaobjectDefinitionIdFetch.graphql'); 28 | 29 | // Metaobject operations 30 | const MetaobjectFetch = require('./MetaobjectFetch.graphql'); 31 | const MetaobjectFetchById = require('./MetaobjectFetchById.graphql'); 32 | const MetaobjectFetchAllDefinitions = require('./MetaobjectFetchAllDefinitions.graphql'); 33 | const MetaobjectFetchDefinitionById = require('./MetaobjectFetchDefinitionById.graphql'); 34 | const MetaobjectCreate = require('./MetaobjectCreate.graphql'); 35 | const MetaobjectUpdate = require('./MetaobjectUpdate.graphql'); 36 | const MetaobjectCreateDefinition = 
require('./MetaobjectCreateDefinition.graphql'); 37 | const MetaobjectUpdateDefinition = require('./MetaobjectUpdateDefinition.graphql'); 38 | 39 | module.exports = { 40 | CollectionCreate, 41 | CollectionDelete, 42 | CollectionFetchById, 43 | CollectionFetchByHandle, 44 | CollectionFetchAll, 45 | CollectionUpdate, 46 | ProductDelete, 47 | ProductFetchByHandle, 48 | ProductFetchAll, 49 | PageFetchAll, 50 | PageCreate, 51 | PageUpdate, 52 | // Metafield operations 53 | MetafieldDefinitionsFetch, 54 | MetafieldDefinitionCreate, 55 | MetafieldDefinitionUpdate, 56 | MetafieldDefinitionDelete, 57 | MetaobjectDefinitionTypeFetch, 58 | MetaobjectDefinitionIdFetch, 59 | // Metaobject operations 60 | MetaobjectFetch, 61 | MetaobjectFetchById, 62 | MetaobjectFetchAllDefinitions, 63 | MetaobjectFetchDefinitionById, 64 | MetaobjectCreate, 65 | MetaobjectUpdate, 66 | MetaobjectCreateDefinition, 67 | MetaobjectUpdateDefinition 68 | }; 69 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | const logger = require("./utils/Logger"); 2 | module.exports = { 3 | // Automatically clear mock calls and instances between tests 4 | clearMocks: true, 5 | 6 | // The directory where Jest should output its coverage files 7 | coverageDirectory: "coverage", 8 | 9 | // The test environment that will be used for testing 10 | testEnvironment: "node", 11 | 12 | // A list of paths to directories that Jest should use to search for files in 13 | roots: [ 14 | "/test/" 15 | ], 16 | 17 | // The glob patterns Jest uses to detect test files 18 | testMatch: [ 19 | "**/test/**/*.test.js" 20 | ], 21 | 22 | // Configure a setup file to run before each test 23 | setupFilesAfterEnv: [], 24 | 25 | // Default timeout (60 seconds for potentially slow API calls) 26 | testTimeout: 60000, 27 | 28 | // Verbose output 29 | verbose: true 30 | }; 31 | 
-------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "metasync-cli", 3 | "version": "1.0.0", 4 | "description": "Command-line tool to sync Shopify resources between stores", 5 | "main": "run.js", 6 | "bin": { 7 | "metasync": "./cli.js" 8 | }, 9 | "scripts": { 10 | "dev": "node cli.js", 11 | "codehawk": "codehawk utils", 12 | "test": "jest", 13 | "test:watch": "jest --watch" 14 | }, 15 | "author": "", 16 | "license": "MIT", 17 | "dependencies": { 18 | "chalk": "^4.1.2", 19 | "commander": "^11.1.0", 20 | "dotenv": "^16.3.1", 21 | "shopify-api-node": "^3.15.0", 22 | "strip-ansi": "^6.0.1" 23 | }, 24 | "devDependencies": { 25 | "codehawk-cli": "^10.1.0", 26 | "eslint": "^9.27.0", 27 | "execa": "^5.1.1", 28 | "jest": "^29.7.0" 29 | }, 30 | "engines": { 31 | "node": ">=16.0.0" 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /strategies/AllResourcesSyncStrategy.js: -------------------------------------------------------------------------------- 1 | /** 2 | * All Resources Sync Strategy 3 | * 4 | * This strategy syncs all supported resource types in one go: 5 | * - Products 6 | * - Metaobjects 7 | * - Pages 8 | * - Collections 9 | * - Customers 10 | * - Orders 11 | * - Variants 12 | */ 13 | ; 14 | const chalk = require('chalk'); 15 | const logger = require('../utils/Logger'); 16 | 17 | // Import all required strategies 18 | const ProductSyncStrategy = require('./ProductSyncStrategy'); 19 | const MetaobjectSyncStrategy = require('./MetaobjectSyncStrategy'); 20 | const PageSyncStrategy = require('./PageSyncStrategy'); 21 | const CollectionSyncStrategy = require('./CollectionSyncStrategy'); 22 | const CustomerMetafieldSyncStrategy = require('./CustomerMetafieldSyncStrategy'); 23 | const OrderMetafieldSyncStrategy = require('./OrderMetafieldSyncStrategy'); 24 | const 
VariantMetafieldSyncStrategy = require('./VariantMetafieldSyncStrategy'); 25 | 26 | class AllResourcesSyncStrategy { 27 | constructor(sourceClient, targetClient, options) { 28 | this.sourceClient = sourceClient; 29 | this.targetClient = targetClient; 30 | this.options = options || {}; 31 | this.debug = options.debug; 32 | 33 | // Initialize all individual strategies 34 | this.productStrategy = new ProductSyncStrategy(sourceClient, targetClient, options); 35 | this.metaobjectStrategy = new MetaobjectSyncStrategy(sourceClient, targetClient, options); 36 | this.pageStrategy = new PageSyncStrategy(sourceClient, targetClient, options); 37 | this.collectionStrategy = new CollectionSyncStrategy(sourceClient, targetClient, options); 38 | this.customerStrategy = new CustomerMetafieldSyncStrategy(sourceClient, targetClient, options); 39 | this.orderStrategy = new OrderMetafieldSyncStrategy(sourceClient, targetClient, options); 40 | this.variantStrategy = new VariantMetafieldSyncStrategy(sourceClient, targetClient, options); 41 | } 42 | 43 | async sync() { 44 | const startTime = Date.now(); 45 | logger.info(chalk.blue('📦 Starting All Resources Data Sync - This will sync all supported resource types data')); 46 | 47 | // Create result containers for each resource type 48 | const results = { 49 | definitions: { created: 0, updated: 0, skipped: 0, failed: 0 }, 50 | data: { created: 0, updated: 0, skipped: 0, failed: 0 }, 51 | metafields: { processed: 0, transformed: 0, blanked: 0, errors: 0, warnings: 0 } 52 | }; 53 | 54 | try { 55 | // 1. Sync Products 56 | logger.info(chalk.cyan('🔄 Step 1: Syncing Products Data')); 57 | const productResults = await this._syncProducts(); 58 | this._mergeResults(results, productResults); 59 | 60 | // 2. Sync Metaobjects 61 | logger.info(chalk.cyan('🔄 Step 2: Syncing Metaobjects Data')); 62 | const metaobjectResults = await this._syncMetaobjects(); 63 | this._mergeResults(results, metaobjectResults); 64 | 65 | // 3. 
Sync Pages 66 | logger.info(chalk.cyan('🔄 Step 3: Syncing Pages Data')); 67 | const pageResults = await this._syncPages(); 68 | this._mergeResults(results, pageResults); 69 | 70 | // 4. Sync Collections 71 | logger.info(chalk.cyan('🔄 Step 4: Syncing Collections Data')); 72 | const collectionResults = await this._syncCollections(); 73 | this._mergeResults(results, collectionResults); 74 | 75 | // 5. Sync Customers (metafields only) 76 | logger.info(chalk.cyan('🔄 Step 5: Syncing Customers Metafields')); 77 | const customerResults = await this._syncCustomers(); 78 | this._mergeResults(results, customerResults); 79 | 80 | // 6. Sync Orders (metafields only) 81 | logger.info(chalk.cyan('🔄 Step 6: Syncing Orders Metafields')); 82 | const orderResults = await this._syncOrders(); 83 | this._mergeResults(results, orderResults); 84 | 85 | // 7. Sync Variants (metafields only) 86 | logger.info(chalk.cyan('🔄 Step 7: Syncing Variants Metafields')); 87 | const variantResults = await this._syncVariants(); 88 | this._mergeResults(results, variantResults); 89 | 90 | const endTime = Date.now(); 91 | const durationSec = ((endTime - startTime) / 1000).toFixed(1); 92 | logger.info(chalk.green(`✅ All Resources Data Sync Completed in ${durationSec}s`)); 93 | 94 | return { 95 | definitionResults: results.definitions, 96 | dataResults: results.data, 97 | metafieldResults: results.metafields 98 | }; 99 | } catch (error) { 100 | logger.error(chalk.red('❌ All Resources Data Sync Failed:'), error.message); 101 | logger.debug(error.stack); 102 | 103 | return { 104 | definitionResults: results.definitions, 105 | dataResults: results.data, 106 | metafieldResults: results.metafields 107 | }; 108 | } 109 | } 110 | 111 | _mergeResults(targetResults, sourceResults) { 112 | // If source has definitionResults, merge them 113 | if (sourceResults.definitionResults) { 114 | Object.keys(sourceResults.definitionResults).forEach(key => { 115 | if (targetResults.definitions[key] !== undefined) { 116 | 
targetResults.definitions[key] += sourceResults.definitionResults[key] || 0; 117 | } 118 | }); 119 | } 120 | 121 | // If source has dataResults, merge them 122 | if (sourceResults.dataResults) { 123 | Object.keys(sourceResults.dataResults).forEach(key => { 124 | if (targetResults.data[key] !== undefined) { 125 | targetResults.data[key] += sourceResults.dataResults[key] || 0; 126 | } 127 | }); 128 | } 129 | 130 | // If source has metafieldResults, merge them 131 | if (sourceResults.metafieldResults) { 132 | Object.keys(sourceResults.metafieldResults).forEach(key => { 133 | if (targetResults.metafields[key] !== undefined) { 134 | targetResults.metafields[key] += sourceResults.metafieldResults[key] || 0; 135 | } 136 | }); 137 | } 138 | } 139 | 140 | async _syncProducts() { 141 | try { 142 | const result = await this.productStrategy.sync(); 143 | return result; 144 | } catch (error) { 145 | logger.error(chalk.red('❌ Product Sync Failed:'), error.message); 146 | return { 147 | dataResults: { created: 0, updated: 0, skipped: 0, failed: 0 }, 148 | metafieldResults: { processed: 0, transformed: 0, blanked: 0, errors: 0 } 149 | }; 150 | } 151 | } 152 | 153 | async _syncMetaobjects() { 154 | try { 155 | // Make sure it's in data mode and has a type 156 | const originalCommand = this.options.command; 157 | const originalType = this.options.type; 158 | 159 | this.options.command = "data"; 160 | this.options.type = "all"; 161 | 162 | // Also update the strategy's options directly 163 | this.metaobjectStrategy.options.type = "all"; 164 | 165 | const result = await this.metaobjectStrategy.sync(); 166 | 167 | // Restore original values 168 | this.options.command = originalCommand; 169 | this.options.type = originalType; 170 | 171 | return result; 172 | } catch (error) { 173 | logger.error(chalk.red('❌ Metaobject Sync Failed:'), error.message); 174 | return { 175 | dataResults: { created: 0, updated: 0, skipped: 0, failed: 0 }, 176 | metafieldResults: { processed: 0, transformed: 
0, blanked: 0, errors: 0 } 177 | }; 178 | } 179 | } 180 | 181 | async _syncPages() { 182 | try { 183 | const result = await this.pageStrategy.sync(); 184 | return result; 185 | } catch (error) { 186 | logger.error(chalk.red('❌ Page Sync Failed:'), error.message); 187 | return { 188 | dataResults: { created: 0, updated: 0, skipped: 0, failed: 0 }, 189 | metafieldResults: { processed: 0, transformed: 0, blanked: 0, errors: 0 } 190 | }; 191 | } 192 | } 193 | 194 | async _syncCollections() { 195 | try { 196 | const result = await this.collectionStrategy.sync(); 197 | return result; 198 | } catch (error) { 199 | logger.error(chalk.red('❌ Collection Sync Failed:'), error.message); 200 | return { 201 | dataResults: { created: 0, updated: 0, skipped: 0, failed: 0 }, 202 | metafieldResults: { processed: 0, transformed: 0, blanked: 0, errors: 0 } 203 | }; 204 | } 205 | } 206 | 207 | async _syncCustomers() { 208 | try { 209 | // Make sure we're in data mode for metafield sync 210 | const originalCommand = this.options.command; 211 | this.options.command = "data"; 212 | const result = await this.customerStrategy.sync(); 213 | this.options.command = originalCommand; 214 | return result; 215 | } catch (error) { 216 | logger.error(chalk.red('❌ Customer Metafield Sync Failed:'), error.message); 217 | return { 218 | definitionResults: { created: 0, updated: 0, skipped: 0, failed: 0 }, 219 | metafieldResults: { processed: 0, transformed: 0, blanked: 0, errors: 0 } 220 | }; 221 | } 222 | } 223 | 224 | async _syncOrders() { 225 | try { 226 | // Make sure we're in data mode for metafield sync 227 | const originalCommand = this.options.command; 228 | this.options.command = "data"; 229 | const result = await this.orderStrategy.sync(); 230 | this.options.command = originalCommand; 231 | return result; 232 | } catch (error) { 233 | logger.error(chalk.red('❌ Order Metafield Sync Failed:'), error.message); 234 | return { 235 | definitionResults: { created: 0, updated: 0, skipped: 0, failed: 
0 }, 236 | metafieldResults: { processed: 0, transformed: 0, blanked: 0, errors: 0 } 237 | }; 238 | } 239 | } 240 | 241 | async _syncVariants() { 242 | try { 243 | // Make sure we're in data mode for metafield sync 244 | const originalCommand = this.options.command; 245 | this.options.command = "data"; 246 | const result = await this.variantStrategy.sync(); 247 | this.options.command = originalCommand; 248 | return result; 249 | } catch (error) { 250 | logger.error(chalk.red('❌ Variant Metafield Sync Failed:'), error.message); 251 | return { 252 | definitionResults: { created: 0, updated: 0, skipped: 0, failed: 0 }, 253 | metafieldResults: { processed: 0, transformed: 0, blanked: 0, errors: 0 } 254 | }; 255 | } 256 | } 257 | } 258 | 259 | module.exports = AllResourcesSyncStrategy; 260 | -------------------------------------------------------------------------------- /strategies/CollectionMetafieldSyncStrategy.js: -------------------------------------------------------------------------------- 1 | const BaseMetafieldSyncStrategy = require('./BaseMetafieldSyncStrategy'); 2 | 3 | class CollectionMetafieldSyncStrategy extends BaseMetafieldSyncStrategy { 4 | constructor(sourceClient, targetClient, options) { 5 | super(sourceClient, targetClient, options, 'COLLECTION'); 6 | } 7 | } 8 | 9 | module.exports = CollectionMetafieldSyncStrategy; 10 | -------------------------------------------------------------------------------- /strategies/CompanyMetafieldSyncStrategy.js: -------------------------------------------------------------------------------- 1 | const logger = require("../utils/Logger"); 2 | ; 3 | const BaseMetafieldSyncStrategy = require("./BaseMetafieldSyncStrategy"); 4 | 5 | class CompanyMetafieldSyncStrategy extends BaseMetafieldSyncStrategy { 6 | constructor(sourceClient, targetClient, options) { 7 | // Call the base constructor with the specific ownerType 8 | super(sourceClient, targetClient, options, "COMPANY"); 9 | } 10 | 11 | // All common methods (fetch, 
create, update, sync, list) are inherited from BaseMetafieldSyncStrategy 12 | // Add any COMPANY-specific overrides or methods here if needed in the future. 13 | } 14 | 15 | module.exports = CompanyMetafieldSyncStrategy; 16 | -------------------------------------------------------------------------------- /strategies/CustomerMetafieldSyncStrategy.js: -------------------------------------------------------------------------------- 1 | const logger = require("../utils/Logger"); 2 | const BaseMetafieldSyncStrategy = require("./BaseMetafieldSyncStrategy"); 3 | 4 | class CustomerMetafieldSyncStrategy extends BaseMetafieldSyncStrategy { 5 | constructor(sourceClient, targetClient, options) { 6 | super(sourceClient, targetClient, options, "CUSTOMER"); 7 | } 8 | } 9 | 10 | module.exports = CustomerMetafieldSyncStrategy; 11 | -------------------------------------------------------------------------------- /strategies/EverythingSyncStrategy.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Everything Sync Strategy 3 | * 4 | * This strategy syncs everything in order: 5 | * 1. All metafield definitions (products, companies, orders, variants, customers, collections) 6 | * 2. Metaobject definitions 7 | * 3. Data for each resource type (products, metaobjects, pages, collections, etc.) 
8 | */ 9 | const chalk = require('chalk'); 10 | const logger = require('../utils/Logger'); 11 | 12 | // Import metafield definition strategies 13 | const ProductMetafieldSyncStrategy = require('./ProductMetafieldSyncStrategy'); 14 | const CompanyMetafieldSyncStrategy = require('./CompanyMetafieldSyncStrategy'); 15 | const OrderMetafieldSyncStrategy = require('./OrderMetafieldSyncStrategy'); 16 | const VariantMetafieldSyncStrategy = require('./VariantMetafieldSyncStrategy'); 17 | const CustomerMetafieldSyncStrategy = require('./CustomerMetafieldSyncStrategy'); 18 | const CollectionMetafieldSyncStrategy = require('./CollectionMetafieldSyncStrategy'); 19 | 20 | // Import metaobject definition strategy 21 | const MetaobjectSyncStrategy = require('./MetaobjectSyncStrategy'); 22 | 23 | // Import data sync strategies 24 | const ProductSyncStrategy = require('./ProductSyncStrategy'); 25 | const PageSyncStrategy = require('./PageSyncStrategy'); 26 | const CollectionSyncStrategy = require('./CollectionSyncStrategy'); 27 | const AllResourcesSyncStrategy = require('./AllResourcesSyncStrategy'); 28 | 29 | class EverythingSyncStrategy { 30 | constructor(sourceClient, targetClient, options) { 31 | this.sourceClient = sourceClient; 32 | this.targetClient = targetClient; 33 | this.options = { ...options }; 34 | this.debug = options.debug; 35 | 36 | // Set namespace to "all" if not specified 37 | if (!this.options.namespace) { 38 | this.options.namespace = "all"; 39 | } 40 | 41 | // Store original command 42 | this.originalCommand = this.options.command; 43 | 44 | // Initialize metafield definition strategies with copies of our options 45 | this.productMetafieldStrategy = new ProductMetafieldSyncStrategy(sourceClient, targetClient, { ...this.options }); 46 | this.companyMetafieldStrategy = new CompanyMetafieldSyncStrategy(sourceClient, targetClient, { ...this.options }); 47 | this.orderMetafieldStrategy = new OrderMetafieldSyncStrategy(sourceClient, targetClient, { ...this.options 
});
    // NOTE: this span begins mid-constructor. Every sub-strategy receives a
    // shallow copy of this.options ({ ...this.options }) so that the per-phase
    // mutations performed in sync() below do not leak between strategies.
    this.variantMetafieldStrategy = new VariantMetafieldSyncStrategy(sourceClient, targetClient, { ...this.options });
    this.customerMetafieldStrategy = new CustomerMetafieldSyncStrategy(sourceClient, targetClient, { ...this.options });
    this.collectionMetafieldStrategy = new CollectionMetafieldSyncStrategy(sourceClient, targetClient, { ...this.options });

    // Initialize metaobject definition strategy using a copy of our options
    this.metaobjectDefStrategy = new MetaobjectSyncStrategy(sourceClient, targetClient, { ...this.options });

    // Initialize data sync strategies with copies of our options
    this.productDataStrategy = new ProductSyncStrategy(sourceClient, targetClient, { ...this.options });
    this.pageDataStrategy = new PageSyncStrategy(sourceClient, targetClient, { ...this.options });
    this.collectionDataStrategy = new CollectionSyncStrategy(sourceClient, targetClient, { ...this.options });

    // We'll use AllResourcesSyncStrategy for syncing all data at once
    this.allResourcesDataStrategy = new AllResourcesSyncStrategy(sourceClient, targetClient, { ...this.options });
  }

  /**
   * Run the full three-phase sync:
   *   PHASE 1 — metafield definitions for each resource type,
   *   PHASE 2 — metaobject definitions,
   *   PHASE 3 — all resource data via AllResourcesSyncStrategy.
   * Phases communicate with sub-strategies by temporarily mutating
   * this.options (command/resource/type) and restoring afterwards.
   *
   * @returns {Promise<{definitionResults: Object, dataResults: Object, metafieldResults: Object}>}
   *   Aggregated counters; the same shape is returned on failure so callers
   *   always get partial totals.
   */
  async sync() {
    const startTime = Date.now();
    logger.info(chalk.blue('🔄 Starting Everything Sync - This will sync all definitions and data'));

    // Accumulators merged into by _mergeResults() after every sub-sync.
    const results = {
      definitions: { created: 0, updated: 0, skipped: 0, failed: 0 },
      data: { created: 0, updated: 0, skipped: 0, failed: 0 },
      metafields: { processed: 0, transformed: 0, blanked: 0, errors: 0, warnings: 0 }
    };

    try {
      // PHASE 1: Sync all metafield definitions
      logger.info(chalk.cyan('🔄 PHASE 1: Syncing all metafield definitions'));

      // Set command to definitions for this phase
      this.options.command = "definitions";

      // Each resource type is paired with the strategy constructed above.
      const metafieldResourceTypes = [
        { name: 'products', strategy: this.productMetafieldStrategy },
        { name: 'companies', strategy: this.companyMetafieldStrategy },
        { name: 'orders', strategy: this.orderMetafieldStrategy },
        { name: 'variants', strategy: this.variantMetafieldStrategy },
        { name: 'customers', strategy: this.customerMetafieldStrategy },
        { name: 'collections', strategy: this.collectionMetafieldStrategy }
      ];

      // Reset indentation level before each operation
      logger.resetIndent();

      // Process each metafield resource type
      for (const resourceType of metafieldResourceTypes) {
        // Log the resource type
        logger.startSection(`RESOURCE TYPE: ${resourceType.name.toUpperCase()}`);

        // Set resource type in options, remembering the caller's value so it
        // can be restored after this iteration.
        const originalResource = this.options.resource;
        this.options.resource = resourceType.name;

        // Sync definitions for this resource type
        const syncResults = await resourceType.strategy.sync();

        // Restore original resource type
        this.options.resource = originalResource;

        // Merge results
        this._mergeResults(results, syncResults);

        // Unindent after this resource type is done
        logger.endSection();
      }

      // PHASE 2: Sync metaobject definitions
      logger.info(chalk.cyan('🔄 PHASE 2: Syncing metaobject definitions'));

      // Set command to definitions and resource to metaobjects for this phase
      this.options.command = "definitions";
      this.options.resource = "metaobjects";

      // Important: Update the strategy's options directly to ensure type is
      // passed (the strategy holds its own copy of options — see constructor).
      this.metaobjectDefStrategy.options.type = "all";

      // Sync metaobject definitions
      logger.startSection(`RESOURCE TYPE: METAOBJECTS`);

      const metaobjectDefResults = await this.metaobjectDefStrategy.sync();
      this._mergeResults(results, metaobjectDefResults);

      logger.endSection();

      // PHASE 3: Sync all resource data
      logger.info(chalk.cyan('🔄 PHASE 3: Syncing all resource data'));

      // Set command to data for this phase
      this.options.command = "data";
      this.options.resource = "all";

      // Important: Make sure type parameter is kept for metaobjects
      // The allResourcesDataStrategy will use this to sync metaobjects
      this.options.type = "all";

      // Update the strategy's options directly (and its nested metaobject
      // strategy, which also carries an independent options copy).
      this.allResourcesDataStrategy.options.type = "all";
      this.allResourcesDataStrategy.metaobjectStrategy.options.type = "all";

      // Use the AllResourcesSyncStrategy to sync all data at once
      const allDataResults = await this.allResourcesDataStrategy.sync();
      this._mergeResults(results, allDataResults);

      // Restore original command and resource
      this.options.command = this.originalCommand;

      const endTime = Date.now();
      const durationSec = ((endTime - startTime) / 1000).toFixed(1);
      logger.info(chalk.green(`✅ Everything Sync Completed in ${durationSec}s`));

      return {
        definitionResults: results.definitions,
        dataResults: results.data,
        metafieldResults: results.metafields
      };
    } catch (error) {
      // NOTE(review): logger.error is called here with two arguments — confirm
      // utils/Logger.js accepts extra args; other call sites pass one string.
      logger.error(chalk.red('❌ Everything Sync Failed:'), error.message);
      if (this.debug) {
        logger.debug(error.stack);
      }

      // Restore original command
      // NOTE(review): only `command` is restored on the failure path;
      // `resource` and `type` keep their phase values — verify intentional.
      this.options.command = this.originalCommand;

      // Return whatever partial totals were accumulated before the failure.
      return {
        definitionResults: results.definitions,
        dataResults: results.data,
        metafieldResults: results.metafields
      };
    }
  }

  /**
   * Merge a sub-strategy's result object into the shared accumulators.
   * Only keys already present in the accumulator are summed; unknown keys
   * from the source are ignored. (Continues on the next chunk line.)
   *
   * @param {Object} targetResults - accumulator created in sync()
   * @param {Object|null} sourceResults - a sub-strategy's return value
   */
  _mergeResults(targetResults, sourceResults) {
    // Skip if source is null or undefined
    if (!sourceResults) return;

    // If source has definitionResults, merge them
    if (sourceResults.definitionResults) {
      Object.keys(sourceResults.definitionResults).forEach(key => {
        if (targetResults.definitions[key] !== undefined) {
          targetResults.definitions[key] +=
sourceResults.definitionResults[key] || 0;
        }
      });
    }

    // If source has dataResults, merge them
    if (sourceResults.dataResults) {
      Object.keys(sourceResults.dataResults).forEach(key => {
        if (targetResults.data[key] !== undefined) {
          targetResults.data[key] += sourceResults.dataResults[key] || 0;
        }
      });
    }

    // If source has metafieldResults, merge them
    if (sourceResults.metafieldResults) {
      Object.keys(sourceResults.metafieldResults).forEach(key => {
        if (targetResults.metafields[key] !== undefined) {
          targetResults.metafields[key] += sourceResults.metafieldResults[key] || 0;
        }
      });
    }
  }
}

module.exports = EverythingSyncStrategy;
--------------------------------------------------------------------------------
/strategies/MetaobjectSyncStrategy.js:
--------------------------------------------------------------------------------
const logger = require("../utils/Logger");
const MetaobjectDefinitionHandler = require("../utils/MetaobjectDefinitionHandler");
const MetaobjectDataHandler = require("../utils/MetaobjectDataHandler");

/**
 * Syncs metaobject definitions and/or metaobject data between two shops.
 * Behavior is driven by this.options:
 *   - options.type:    metaobject type to sync; "all" fetches every type;
 *                      missing type lists available definitions and exits.
 *   - options.command: "definitions" | "data" | anything else = both.
 */
class MetaobjectSyncStrategy {
  /**
   * @param {Object} sourceClient - Shopify client for the source shop
   * @param {Object} targetClient - Shopify client for the target shop
   * @param {Object} options - CLI options (type, key, command, resource, debug, …)
   */
  constructor(sourceClient, targetClient, options) {
    this.sourceClient = sourceClient;
    this.targetClient = targetClient;
    this.options = options;
    this.debug = options.debug;

    // Create handlers — both are bound to the TARGET client; source-side
    // handlers are constructed on demand inside sync().
    this.definitionHandler = new MetaobjectDefinitionHandler(targetClient, options);
    this.dataHandler = new MetaobjectDataHandler(targetClient, options);
  }

  /**
   * Run the sync. Returns { definitionResults, dataResults } counter objects,
   * or { definitionResults: null, dataResults: null } when no type was given
   * (in that case available definitions are listed instead).
   */
  async sync() {

    // Handle listing definitions if type is missing
    if (!this.options.type) {
      logger.info(`Type option is missing. Options: ${JSON.stringify({
        type: this.options.type,
        key: this.options.key,
        command: this.options.command,
        resource: this.options.resource
      }, null, 2)}`);

      await this.definitionHandler.listAvailableDefinitions(this.sourceClient);
      return { definitionResults: null, dataResults: null }; // Indicate no sync occurred
    }

    // Special case: "--type all" should fetch all definitions
    const shouldFetchAllTypes = this.options.type === "all";

    // Determine what to sync based on command/strategy type
    const isSyncingDefinitions = this.options.command === "definitions";
    const isSyncingData = this.options.command === "data";

    let definitionResults = { created: 0, updated: 0, skipped: 0, failed: 0 };
    let dataResults = { created: 0, updated: 0, skipped: 0, failed: 0 };
    let definitionTypes = [];

    // Sync definitions if requested by command
    if (isSyncingDefinitions) {
      // If syncing all types, pass null to fetch all definitions
      const fetchType = shouldFetchAllTypes ? null : this.options.type;
      const defSync = await this.definitionHandler.syncDefinitions(this.sourceClient, fetchType);
      definitionResults = defSync.results;
      definitionTypes = defSync.definitionTypes;
    } else if (isSyncingData) {
      // If only syncing data, use the provided type
      // For "all", we need to fetch all available types first
      if (shouldFetchAllTypes) {
        // Create temporary source handler to fetch all definitions
        const tempSourceHandler = new MetaobjectDefinitionHandler(this.sourceClient, this.options);
        const allDefinitions = await tempSourceHandler.fetchMetaobjectDefinitions();
        definitionTypes = allDefinitions.map((def) => def.type);
        logger.info(`Found ${definitionTypes.length} definition types to sync data for`);
      } else {
        definitionTypes = [this.options.type];
      }

      // Sync data
      if (definitionTypes.length > 0) {
        // Create a source definition handler to fetch required field info
        const sourceDefinitionHandler = new MetaobjectDefinitionHandler(this.sourceClient, this.options);
        dataResults = await this.dataHandler.syncData(this.sourceClient, this.targetClient, definitionTypes, sourceDefinitionHandler);
      }
    } else {
      // Default: sync both definitions and data
      // (This branch intentionally mirrors the two branches above.)
      // If syncing all types, pass null to fetch all definitions
      const fetchType = shouldFetchAllTypes ? null : this.options.type;
      const defSync = await this.definitionHandler.syncDefinitions(this.sourceClient, fetchType);
      definitionResults = defSync.results;
      definitionTypes = defSync.definitionTypes;

      if (definitionTypes.length > 0) {
        // Create a source definition handler to fetch required field info
        const sourceDefinitionHandler = new MetaobjectDefinitionHandler(this.sourceClient, this.options);
        dataResults = await this.dataHandler.syncData(this.sourceClient, this.targetClient, definitionTypes, sourceDefinitionHandler);
      }
    }

    return { definitionResults, dataResults };
  }
}

module.exports = MetaobjectSyncStrategy;
--------------------------------------------------------------------------------
/strategies/OrderMetafieldSyncStrategy.js:
--------------------------------------------------------------------------------
const logger = require("../utils/Logger");
const BaseMetafieldSyncStrategy = require("./BaseMetafieldSyncStrategy");

/**
 * Metafield-definition sync for orders. All behavior is inherited from
 * BaseMetafieldSyncStrategy; this subclass only fixes ownerType to "ORDER".
 */
class OrderMetafieldSyncStrategy extends BaseMetafieldSyncStrategy {
  constructor(sourceClient, targetClient, options) {
    super(sourceClient, targetClient, options, "ORDER");
  }
}

module.exports = OrderMetafieldSyncStrategy;
--------------------------------------------------------------------------------
/strategies/PageSyncStrategy.js:
--------------------------------------------------------------------------------
const logger = require("../utils/Logger");
;
const {
  PageFetchAll,
  PageCreate,
  PageUpdate
} = require('../graphql');

/**
 * Syncs Online Store pages (title/body/handle/template/published state)
 * from the source shop to the target shop, matching pages by handle.
 */
class PageSyncStrategy {
  constructor(sourceClient, targetClient, options) {
    this.sourceClient = sourceClient;
    this.targetClient = targetClient;
    this.options = options;
    this.debug = options.debug;
  }

  // --- Page Methods ---

  /**
   * Fetch up to 100 pages from the given shop client.
   * Returns [] (and logs) on error rather than throwing.
   */
  async fetchPages(client) {
    try {
      const response = await
client.graphql(PageFetchAll, { first: 100 }, 'GetPages');
      return response.pages.edges.map(edge => edge.node);
    } catch (error) {
      logger.error(`Error fetching pages: ${error.message}`);
      return [];
    }
  }

  /**
   * Create a page in the given shop. In dry-run mode (options.notADrill
   * falsy) nothing is sent; a placeholder object is returned instead.
   *
   * @param {Object} client - Shopify client for the destination shop
   * @param {Object} page - source page (title, body, handle, templateSuffix, …)
   * @returns {Object|null} the created page, a dry-run stub, or null on failure
   */
  async createPage(client, page) {
    const input = {
      title: page.title,
      body: page.body,
      handle: page.handle,
      templateSuffix: page.templateSuffix
    };

    // If page has publishedAt, mark it as published
    if (page.publishedAt) {
      input.isPublished = true;
    } else if (page.isPublished !== undefined) {
      // Use the isPublished flag if publishedAt is not available
      input.isPublished = page.isPublished;
    }

    if (this.options.notADrill) {
      try {
        const result = await client.graphql(PageCreate, { page: input }, 'CreatePage');
        if (result.pageCreate.userErrors.length > 0) {
          logger.error(`Failed to create page "${page.title}":`, result.pageCreate.userErrors);
          return null;
        }
        return result.pageCreate.page;
      } catch (error) {
        logger.error(`Error creating page "${page.title}": ${error.message}`);
        return null;
      }
    } else {
      logger.info(`[DRY RUN] Would create page "${page.title}"`);
      return { id: "dry-run-id", title: page.title, handle: page.handle };
    }
  }

  /**
   * Update an existing page in the given shop; mirrors createPage's
   * input-building and dry-run behavior.
   *
   * @param {Object} client - Shopify client for the destination shop
   * @param {Object} page - source page supplying the new field values
   * @param {Object} existingPage - the matching page in the destination shop
   * @returns {Object|null} the updated page, a dry-run stub, or null on failure
   */
  async updatePage(client, page, existingPage) {
    // Extract the ID from the GraphQL ID (format: gid://shopify/Page/123456789)
    const id = existingPage.id;

    const input = {
      title: page.title,
      body: page.body,
      handle: page.handle,
      templateSuffix: page.templateSuffix
    };

    // If page has publishedAt, mark it as published
    if (page.publishedAt) {
      input.isPublished = true;
    } else if (page.isPublished !== undefined) {
      // Use the isPublished flag if publishedAt is not available
      input.isPublished = page.isPublished;
    }

    if (this.options.notADrill) {
      try {
        const
result = await client.graphql(PageUpdate, { id, page: input }, 'UpdatePage');
        if (result.pageUpdate.userErrors.length > 0) {
          logger.error(`Failed to update page "${page.title}":`, result.pageUpdate.userErrors);
          return null;
        }
        return result.pageUpdate.page;
      } catch (error) {
        logger.error(`Error updating page "${page.title}": ${error.message}`);
        return null;
      }
    } else {
      logger.info(`[DRY RUN] Would update page "${page.title}"`);
      return { id, title: page.title, handle: page.handle };
    }
  }

  // --- Sync Orchestration Methods ---

  /**
   * Sync pages from source to target: pages whose handle already exists in
   * the target are updated; the rest are created. Processing stops once
   * options.limit pages have been handled.
   *
   * @returns {Promise<{definitionResults: Object, dataResults: null}>}
   *   created/updated/skipped/failed counters under definitionResults.
   */
  async sync() {
    logger.info(`Syncing pages...`);

    // Fetch pages from source and target shops
    const sourcePages = await this.fetchPages(this.sourceClient);
    logger.info(`Found ${sourcePages.length} page(s) in source shop`);

    const targetPages = await this.fetchPages(this.targetClient);
    logger.info(`Found ${targetPages.length} page(s) in target shop`);

    // Create map of target pages by handle for easy lookup
    const targetPageMap = targetPages.reduce((map, page) => {
      if (page.handle) {
        map[page.handle] = page;
      }
      return map;
    }, {});

    const results = { created: 0, updated: 0, skipped: 0, failed: 0 };
    let processedCount = 0;

    // Process each source page
    for (const page of sourcePages) {
      if (processedCount >= this.options.limit) {
        logger.info(`Reached processing limit (${this.options.limit}). Stopping page sync.`);
        break;
      }

      if (page.handle && targetPageMap[page.handle]) {
        // Update existing page
        logger.info(`Updating page: ${page.title}`);
        const updated = await this.updatePage(this.targetClient, page, targetPageMap[page.handle]);
        updated ? results.updated++ : results.failed++;
      } else {
        // Create new page
        logger.info(`Creating page: ${page.title}`);
        const created = await this.createPage(this.targetClient, page);
        created ? results.created++ : results.failed++;
      }

      processedCount++;
    }

    logger.success(`Finished syncing pages.`);
    return { definitionResults: results, dataResults: null };
  }
}

module.exports = PageSyncStrategy;
--------------------------------------------------------------------------------
/strategies/ProductMetafieldSyncStrategy.js:
--------------------------------------------------------------------------------
const logger = require("../utils/Logger");
const BaseMetafieldSyncStrategy = require("./BaseMetafieldSyncStrategy");

/**
 * Metafield-definition sync for products; ownerType fixed to "PRODUCT".
 */
class ProductMetafieldSyncStrategy extends BaseMetafieldSyncStrategy {
  constructor(sourceClient, targetClient, options) {
    // Call the base constructor with the specific ownerType
    super(sourceClient, targetClient, options, "PRODUCT");
  }

  // All common methods (fetch, create, update, sync, list) are inherited from BaseMetafieldSyncStrategy
  // Add any PRODUCT-specific overrides or methods here if needed in the future.
12 | } 13 | 14 | module.exports = ProductMetafieldSyncStrategy; 15 | -------------------------------------------------------------------------------- /strategies/VariantMetafieldSyncStrategy.js: -------------------------------------------------------------------------------- 1 | const logger = require("../utils/Logger"); 2 | const BaseMetafieldSyncStrategy = require("./BaseMetafieldSyncStrategy"); 3 | 4 | class VariantMetafieldSyncStrategy extends BaseMetafieldSyncStrategy { 5 | constructor(sourceClient, targetClient, options) { 6 | super(sourceClient, targetClient, options, "PRODUCTVARIANT"); 7 | } 8 | } 9 | 10 | module.exports = VariantMetafieldSyncStrategy; 11 | -------------------------------------------------------------------------------- /test/cli.test.js: -------------------------------------------------------------------------------- 1 | const execa = require('execa') 2 | const path = require('path') 3 | const fs = require('fs') 4 | 5 | const cliPath = path.join(__dirname, '../cli.js') 6 | 7 | // Helper to check if the shops config exists (for conditional testing) 8 | const shopsConfigExists = fs.existsSync(path.join(__dirname, '../.shops.json')) 9 | 10 | describe('metasync CLI tool', () => { 11 | // Set a longer timeout for all tests 12 | jest.setTimeout(30000) 13 | 14 | it('shows help text', async () => { 15 | const { stdout } = await execa('node', [cliPath, '--help']) 16 | expect(stdout).toContain('Metasync - A CLI tool for synchronizing Shopify resources') 17 | expect(stdout).toContain('metasync definitions metafields') 18 | expect(stdout).toContain('metasync definitions metaobjects') 19 | expect(stdout).toContain('metasync data') 20 | }) 21 | 22 | it('shows help text for definitions metafields command', async () => { 23 | const { stdout } = await execa('node', [cliPath, 'definitions', 'metafields', '--help']) 24 | expect(stdout).toContain('Sync metafield definitions') 25 | expect(stdout).toContain('--resource ') 26 | 
expect(stdout).toContain('--namespace ') 27 | }) 28 | 29 | // Only run these tests if .shops.json exists 30 | if (shopsConfigExists) { 31 | describe('Live API tests', () => { 32 | it('runs definitions metafields command for products', async () => { 33 | try { 34 | // Run the command 35 | await execa('node', [ 36 | cliPath, 37 | 'definitions', 38 | 'metafields', 39 | '--resource', 'products', 40 | '--source', 'demo', 41 | '--target', 'test', 42 | '--namespace', 'custom' 43 | ]) 44 | 45 | // If it gets here, the command executed without throwing an error 46 | expect(true).toBe(true) 47 | } catch (error) { 48 | // Re-throw the error to fail the test 49 | throw error 50 | } 51 | }) 52 | }) 53 | } else { 54 | it('skips live API tests when .shops.json does not exist', () => { 55 | console.log('Skipping live API tests: .shops.json not found') 56 | expect(true).toBe(true) 57 | }) 58 | } 59 | }) 60 | -------------------------------------------------------------------------------- /test/data-product.test.js: -------------------------------------------------------------------------------- 1 | const execa = require('execa') 2 | const path = require('path') 3 | const fs = require('fs') 4 | 5 | const cliPath = path.join(__dirname, '../cli.js') 6 | 7 | // Helper to check if the shops config exists (for conditional testing) 8 | const shopsConfigExists = fs.existsSync(path.join(__dirname, '../.shops.json')) 9 | 10 | // Skip all tests if .shops.json doesn't exist 11 | if (!shopsConfigExists) { 12 | describe.skip('Data Products Command Tests (skipped - missing .shops.json)', () => { 13 | it('dummy test', () => { 14 | expect(true).toBe(true) 15 | }) 16 | }) 17 | } else { 18 | describe('Data Products Command', () => { 19 | beforeAll(() => { 20 | // Set longer timeout for API calls 21 | jest.setTimeout(60000) 22 | }) 23 | 24 | it('executes data products command', async () => { 25 | try { 26 | // Run the command 27 | await execa('node', [ 28 | cliPath, 29 | 'data', 30 | 'products', 31 | 
'--source', 'demo', 32 | '--target', 'test', 33 | '--limit', '1' 34 | ]) 35 | 36 | // If it gets here, the command executed without throwing an error 37 | expect(true).toBe(true) 38 | } catch (error) { 39 | // Re-throw the error to fail the test 40 | throw error 41 | } 42 | }) 43 | }) 44 | } 45 | -------------------------------------------------------------------------------- /test/define-metafields.test.js: -------------------------------------------------------------------------------- 1 | const execa = require('execa') 2 | const path = require('path') 3 | const fs = require('fs') 4 | 5 | const cliPath = path.join(__dirname, '../cli.js') 6 | 7 | // Helper to check if the shops config exists (for conditional testing) 8 | const shopsConfigExists = fs.existsSync(path.join(__dirname, '../.shops.json')) 9 | 10 | // Skip all tests if .shops.json doesn't exist 11 | if (!shopsConfigExists) { 12 | describe.skip('Define Metafields Command Tests (skipped - missing .shops.json)', () => { 13 | it('dummy test', () => { 14 | expect(true).toBe(true) 15 | }) 16 | }) 17 | } else { 18 | describe('Define Metafields Command', () => { 19 | beforeAll(() => { 20 | // Set longer timeout for API calls 21 | jest.setTimeout(60000) 22 | }) 23 | 24 | it('executes the exact command from the user example', async () => { 25 | try { 26 | // Run the exact command from the user example 27 | await execa('node', [ 28 | cliPath, 29 | 'definitions', 30 | 'metafields', 31 | '--resource', 'products', 32 | '--namespace', 'custom', 33 | '--source', 'demo', 34 | '--target', 'test' 35 | ]) 36 | 37 | // If it gets here, the command executed without throwing an error 38 | expect(true).toBe(true) 39 | } catch (error) { 40 | // Re-throw the error to fail the test 41 | throw error 42 | } 43 | }) 44 | }) 45 | } 46 | -------------------------------------------------------------------------------- /utils/CollectionRuleSetHandler.js: -------------------------------------------------------------------------------- 1 | 
const logger = require('./logger'); 2 | 3 | /** 4 | * Utility class to handle collection rulesets 5 | */ 6 | class CollectionRuleSetHandler { 7 | constructor(targetMetafieldDefinitions) { 8 | this.targetMetafieldDefinitions = targetMetafieldDefinitions; 9 | } 10 | 11 | /** 12 | * Set the target metafield definitions 13 | * @param {Object} targetMetafieldDefinitions - Metafield definitions from target shop 14 | */ 15 | setTargetMetafieldDefinitions(targetMetafieldDefinitions) { 16 | this.targetMetafieldDefinitions = targetMetafieldDefinitions; 17 | } 18 | 19 | /** 20 | * Analyzes a collection's ruleset and logs detailed information 21 | * @param {Object} collection - The collection to analyze 22 | */ 23 | analyzeRuleSet(collection) { 24 | if (!collection.ruleSet || !collection.ruleSet.rules) { 25 | logger.info('No ruleset found for this collection'); 26 | return null; 27 | } 28 | 29 | // Log the whole ruleSet for debugging 30 | logger.startSection(`Collection ruleSet details:`); 31 | logger.info(`Rules count: ${collection.ruleSet.rules.length}`); 32 | logger.info(`Applied disjunctively: ${collection.ruleSet.appliedDisjunctively}`); 33 | 34 | // Log metafield conditions for debugging (both METAFIELD and PRODUCT_METAFIELD_DEFINITION) 35 | const metafieldConditions = collection.ruleSet.rules.filter(rule => 36 | rule.column === 'METAFIELD' || rule.column === 'PRODUCT_METAFIELD_DEFINITION' 37 | ); 38 | 39 | if (metafieldConditions.length > 0) { 40 | logger.startSection(`Collection has ${metafieldConditions.length} metafield conditions in its rule set`); 41 | 42 | // Check if conditionObject is present for any rules 43 | const hasConditionObject = metafieldConditions.some(rule => rule.conditionObject); 44 | if (!hasConditionObject) { 45 | logger.error(`None of the metafield rules have conditionObject. 
This may indicate a GraphQL query issue.`); 46 | throw new Error(`Metafield rules missing conditionObject in collection ${collection.title}`); 47 | } 48 | 49 | // Log each metafield condition for diagnosis purposes only 50 | metafieldConditions.forEach((rule, index) => { 51 | logger.startSection(`Rule ${index + 1}: ${rule.column}`); 52 | 53 | if (!rule.conditionObject) { 54 | logger.error(`Metafield rule ${index + 1} is missing conditionObject`); 55 | throw new Error(`Metafield rule missing conditionObject in collection ${collection.title}`); 56 | } 57 | 58 | if (!rule.conditionObject.metafieldDefinition) { 59 | logger.error(`Metafield rule ${index + 1} is missing metafieldDefinition`); 60 | throw new Error(`Metafield rule missing metafieldDefinition in collection ${collection.title}`); 61 | } 62 | 63 | const def = rule.conditionObject.metafieldDefinition; 64 | if (!def.ownerType) { 65 | logger.error(`Metafield condition ${index + 1} is missing ownerType - cannot process rule`); 66 | throw new Error(`Metafield condition is missing ownerType in rule set for collection ${collection.title}`); 67 | } 68 | 69 | logger.info(`Namespace: ${def.namespace}`); 70 | logger.info(`Key: ${def.key}`); 71 | logger.info(`Owner type: ${def.ownerType}`); 72 | logger.info(`Relation: ${rule.relation}`); 73 | logger.info(`Condition: ${rule.condition}`); 74 | 75 | // Find matching definitions by namespace and key (NOT by ID) 76 | if (!this.targetMetafieldDefinitions[def.ownerType]) { 77 | logger.error(`No metafield definitions found for owner type: ${def.ownerType}`); 78 | throw new Error(`No metafield definitions found for owner type: ${def.ownerType} in collection ${collection.title}`); 79 | } 80 | 81 | const matchingDef = this.targetMetafieldDefinitions[def.ownerType].find(targetDef => 82 | targetDef.namespace === def.namespace && targetDef.key === def.key 83 | ); 84 | 85 | if (matchingDef) { 86 | logger.info(`✓ Found matching definition in target shop: ${matchingDef.id}`); 87 | } else 
{ 88 | logger.error(`✗ No matching definition found for ${def.namespace}.${def.key} with ownerType=${def.ownerType}`); 89 | } 90 | 91 | logger.endSection(); 92 | }); 93 | 94 | logger.endSection(); 95 | } 96 | 97 | logger.endSection(); 98 | return metafieldConditions.length > 0; 99 | } 100 | 101 | /** 102 | * Prepares a collection's ruleset for input to Shopify API 103 | * @param {Object} collection - The collection containing the ruleset 104 | * @returns {Object} The prepared ruleset input 105 | */ 106 | prepareRuleSetInput(collection) { 107 | if (!collection.ruleSet || !collection.ruleSet.rules) { 108 | return null; 109 | } 110 | 111 | // Create a clean copy of the rules with necessary fields including conditionObjectId for metafield rules 112 | const cleanRules = collection.ruleSet.rules.map(rule => { 113 | const baseRule = { 114 | column: rule.column, 115 | condition: rule.condition, 116 | relation: rule.relation 117 | }; 118 | 119 | // Handle both METAFIELD and PRODUCT_METAFIELD_DEFINITION columns 120 | if (rule.column === 'METAFIELD' || rule.column === 'PRODUCT_METAFIELD_DEFINITION') { 121 | if (!rule.conditionObject || !rule.conditionObject.metafieldDefinition) { 122 | logger.error(`Metafield rule missing required conditionObject with metafieldDefinition`); 123 | throw new Error(`Metafield rule missing required data in collection ${collection.title}`); 124 | } 125 | 126 | const def = rule.conditionObject.metafieldDefinition; 127 | if (!def.ownerType) { 128 | logger.error(`Missing ownerType in metafield definition for rule with namespace=${def.namespace}, key=${def.key}`); 129 | throw new Error(`Missing ownerType in metafield definition for collection ${collection.title}`); 130 | } 131 | 132 | // Find matching definition by namespace and key (NOT by ID) in target shop 133 | if (!this.targetMetafieldDefinitions[def.ownerType]) { 134 | logger.error(`No metafield definitions found for owner type: ${def.ownerType}`); 135 | throw new Error(`No metafield definitions 
found for owner type: ${def.ownerType} in collection ${collection.title}`); 136 | } 137 | 138 | const matchingDef = this.targetMetafieldDefinitions[def.ownerType].find(targetDef => 139 | targetDef.namespace === def.namespace && targetDef.key === def.key 140 | ); 141 | 142 | if (matchingDef) { 143 | // Include the conditionObjectId which is required for metafield rules 144 | logger.info(`Adding conditionObjectId: ${matchingDef.id} for ${rule.column} rule ${def.namespace}.${def.key}`); 145 | baseRule.conditionObjectId = matchingDef.id; 146 | } else { 147 | logger.error(`Cannot create ${rule.column} rule for ${def.namespace}.${def.key}: No matching definition found in target shop`); 148 | } 149 | } 150 | 151 | return baseRule; 152 | }); 153 | 154 | // Add ruleSet to input with updated rules 155 | return { 156 | appliedDisjunctively: collection.ruleSet.appliedDisjunctively, 157 | rules: cleanRules 158 | }; 159 | } 160 | 161 | /** 162 | * Logs detailed information about metafield rules in a collection 163 | * @param {Object} collection - The collection containing the rules 164 | */ 165 | logMetafieldRules(collection) { 166 | if (!collection.ruleSet || !collection.ruleSet.rules) { 167 | return; 168 | } 169 | 170 | const metafieldRules = collection.ruleSet.rules.filter(rule => 171 | rule.column === 'METAFIELD' || rule.column === 'PRODUCT_METAFIELD_DEFINITION' 172 | ); 173 | 174 | if (metafieldRules.length > 0) { 175 | logger.startSection(`Smart collection uses ${metafieldRules.length} metafield conditions in its rules`); 176 | 177 | for (const rule of metafieldRules) { 178 | if (!rule.conditionObject || !rule.conditionObject.metafieldDefinition) { 179 | logger.error(`Metafield rule missing required conditionObject with metafieldDefinition`); 180 | throw new Error(`Metafield rule missing required data in collection ${collection.title}`); 181 | } 182 | 183 | const def = rule.conditionObject.metafieldDefinition; 184 | if (!def.ownerType) { 185 | logger.error(`Missing 
ownerType in metafield definition`); 186 | throw new Error(`Missing ownerType in metafield definition for collection ${collection.title}`); 187 | } 188 | 189 | logger.startSection(`Metafield rule: ${def.namespace}.${def.key}`); 190 | logger.info(`Owner type: ${def.ownerType}`); 191 | logger.info(`Relation: ${rule.relation}`); 192 | logger.info(`Condition: ${rule.condition}`); 193 | 194 | // These would use a special CollectionRuleMetafieldCondition type in Shopify GraphQL 195 | logger.info(`⚠ This collection uses metafield conditions which may require specific metafield definitions`); 196 | logger.info(` with the MetafieldCapabilitySmartCollectionCondition capability for owner type: ${def.ownerType}`); 197 | logger.endSection(); 198 | } 199 | 200 | logger.endSection(); 201 | } 202 | } 203 | 204 | /** 205 | * Logs error information for metafield rules in the collection 206 | * @param {Object} collection - The collection with error details 207 | */ 208 | logRuleErrors(collection) { 209 | if (!collection.ruleSet || !collection.ruleSet.rules) { 210 | return; 211 | } 212 | 213 | const metafieldRules = collection.ruleSet.rules.filter(rule => 214 | rule.column === 'METAFIELD' || rule.column === 'PRODUCT_METAFIELD_DEFINITION' 215 | ); 216 | 217 | if (metafieldRules.length > 0) { 218 | logger.startSection(`Smart collection uses metafield rules:`); 219 | 220 | metafieldRules.forEach((rule, index) => { 221 | if (rule.conditionObject && rule.conditionObject.metafieldDefinition) { 222 | const def = rule.conditionObject.metafieldDefinition; 223 | const ownerType = def.ownerType || 'UNKNOWN'; 224 | logger.error(`Rule ${index + 1}: ${def.namespace}.${def.key}, ownerType=${ownerType}`); 225 | logger.error(` Relation: ${rule.relation}, Condition: ${rule.condition}`); 226 | } else { 227 | logger.error(`Rule ${index + 1}: Missing metafieldDefinition`); 228 | } 229 | }); 230 | 231 | logger.error(`⚠ IMPORTANT: This appears to be a smart collection with metafield conditions.`); 232 | 
logger.error(`You need to create metafield definitions with the following properties:`); 233 | logger.error(`1. Namespace and key matching what's in the rules`); 234 | logger.error(`2. Owner type matching what's in the rules (usually PRODUCT)`); 235 | logger.error(`3. The MetafieldCapabilitySmartCollectionCondition capability`); 236 | logger.error(`You can do this in Shopify Admin: Settings > Custom data > Product properties`); 237 | 238 | logger.endSection(); 239 | } 240 | } 241 | } 242 | 243 | module.exports = CollectionRuleSetHandler; 244 | -------------------------------------------------------------------------------- /utils/ErrorHandler.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Error Handler Utilities 3 | * 4 | * Provides consistent error handling for Shopify GraphQL API responses. 5 | */ 6 | const logger = require('./logger'); 7 | 8 | class ErrorHandler { 9 | /** 10 | * Handle user errors from a Shopify GraphQL API mutation 11 | * @param {Array} userErrors - Array of user error objects from a GraphQL response 12 | * @param {Array} items - The corresponding data items that caused the errors 13 | * @param {Function} getItemDetails - Function to extract details from an item given its index 14 | * @param {string} batchInfo - Additional context about the current batch 15 | * @returns {number} - Number of errors handled 16 | */ 17 | static handleGraphQLUserErrors(userErrors, items, getItemDetails, batchInfo = '') { 18 | if (!userErrors || userErrors.length === 0) return 0; 19 | 20 | // Log the overall error message 21 | logger.startSection(`Failed to process ${batchInfo}:`); 22 | 23 | // Handle each user error 24 | userErrors.forEach(err => { 25 | try { 26 | // Check if we have field path information (common in Shopify API errors) 27 | if (err.field && err.field.length >= 2) { 28 | // Many Shopify errors use a path like ['metafields', '8', 'value'] 29 | // where the second element is the array index 30 | 
const itemIndex = parseInt(err.field[1]); 31 | 32 | if (!isNaN(itemIndex) && itemIndex < items.length) { 33 | // Extract item details using the provided function 34 | const details = getItemDetails(items[itemIndex], itemIndex, err.field); 35 | if (details) { 36 | // Log detailed error with item information 37 | logger.error(`${details.itemName}: ${err.message}`); 38 | 39 | // Log value preview if available 40 | if (details.valuePreview) { 41 | // Indent one more level for value preview 42 | logger.startSection(); 43 | logger.error(`Value: ${details.valuePreview}`); 44 | logger.endSection(); 45 | } 46 | return; 47 | } 48 | } 49 | } 50 | 51 | // Fallback for errors without proper field path or when details extraction fails 52 | logger.error(`Error: ${err.message}`); 53 | } catch (error) { 54 | // Ensure error handling doesn't break if something goes wrong 55 | logger.error(`Error: ${err.message}`); 56 | } 57 | }); 58 | 59 | // Reset indentation after error handling 60 | logger.endSection(); 61 | 62 | return userErrors.length; 63 | } 64 | } 65 | 66 | module.exports = ErrorHandler; 67 | -------------------------------------------------------------------------------- /utils/Logger.js: -------------------------------------------------------------------------------- 1 | /** 2 | * logger - Logging Utilities 3 | * 4 | * Provides consistent log formatting for Shopify operations with: 5 | * - Indentation 6 | * - Symbols for operations and statuses 7 | * - Color coding 8 | * - File logging with timestamps 9 | */ 10 | const chalk = require('chalk'); 11 | const fs = require('fs'); 12 | const path = require('path'); 13 | const stripAnsi = require('strip-ansi'); 14 | 15 | // Static indentation level 16 | let indentLevel = 0; 17 | 18 | // Log file stream 19 | let logFileStream = null; 20 | let logFilePath = null; 21 | 22 | /** 23 | * Initialize logging to file with timestamped filename 24 | * Creates the logs directory if it doesn't exist 25 | */ 26 | function initializeLogFile() { 
function initializeLogFile() {
  // Create logs directory if it doesn't exist
  const logsDir = path.join(process.cwd(), 'logs');
  if (!fs.existsSync(logsDir)) {
    fs.mkdirSync(logsDir, { recursive: true });
  }

  // Generate timestamp for filename (YYYY-MM-DD_HH-MM-SS)
  const now = new Date();
  const timestamp = now.toISOString()
    .replace(/T/, '_')
    .replace(/\..+/, '')
    .replace(/:/g, '-');

  // Create log file name
  logFilePath = path.join(logsDir, `sync_${timestamp}.log`);

  // Open write stream (append mode)
  logFileStream = fs.createWriteStream(logFilePath, { flags: 'a' });

  // Log initial information
  const startMessage = `=== Sync operation started at ${now.toLocaleString()} ===\n`;
  logFileStream.write(startMessage);

  return logFilePath;
}

/**
 * Close the log file stream
 */
function closeLogFile() {
  if (logFileStream) {
    const endMessage = `\n=== Sync operation completed at ${new Date().toLocaleString()} ===\n`;
    logFileStream.write(endMessage);
    logFileStream.end();
    logFileStream = null;
  }
}

/**
 * Log to both console and file
 * @param {string} message - Message to log
 */
function log(message) {
  // Log to console
  console.log(message);

  // Log to file (strip ANSI color codes)
  if (logFileStream) {
    logFileStream.write(stripAnsi(message) + '\n');
  }
}

/**
 * Increase indentation level
 * @param {number} levels - Number of levels to indent (default: 1)
 * @returns {number} The new indentation level
 */
function _indent(levels = 1) {
  indentLevel += levels;
  return indentLevel;
}

/**
 * Decrease indentation level (clamped at 0)
 * @param {number} levels - Number of levels to unindent (default: 1)
 * @returns {number} The new indentation level
 */
function _unindent(levels = 1) {
  indentLevel = Math.max(0, indentLevel - levels);
  return indentLevel;
}

/**
 * Log a debug message (currently an alias for log with no extra formatting)
 * @param {string} message - Message to log
 */
function debug(message) {
  log(message);
}

/**
 * Log a message and open a new indented section.
 * (Docstring fixed: this was previously a copy of _indent's doc.)
 * @param {string} message - Section heading to log before indenting
 * @returns {number} The new indentation level
 */
function startSection(message) {
  info(message);
  return _indent();
}

/**
 * Close the current section, optionally logging a final message first.
 * Emits a blank line when closing a top-level section.
 * @param {string} [message] - Optional message to log before unindenting
 */
function endSection(message) {
  if (message) {
    info(message);
  }

  // FIX: use strict equality (was `==`)
  if (indentLevel === 1) {
    newline();
  }

  _unindent();
}


/**
 * Reset indentation level to zero
 */
function resetIndent() {
  indentLevel = 0;
}

/**
 * Get the current indentation string (two spaces per level)
 * @returns {string} Indentation string
 */
function getIndent() {
  return '  '.repeat(indentLevel);
}

/**
 * Create a formatted log for a main product action
 * This function also automatically indents after logging to create visual hierarchy
 * @param {string} message - Log message
 * @param {string} title - Product title
 * @param {string} handle - Product handle
 * @param {string} type - Type of action (update, create, delete, etc.)
 */
function logProductAction(message, title, handle, type = 'update') {
  let color = chalk.cyan; // Default cyan for update

  if (type === 'create') {
    color = chalk.green; // Green
  } else if (type === 'delete' || type === 'force-recreate') {
    color = chalk.yellow; // Yellow/amber
  } else if (type === 'error') {
    color = chalk.red; // Red
  }

  const indent = getIndent();
  // Use a cleaner format that separates the title from the handle
  const line1 = `${indent}${color.bold(`◆ ${message}`)}`;
  const line2 = `${indent}  ${chalk.bold(title)} ${chalk.dim(`(${handle})`)}`;

  log(line1);
  log(line2);

  // Automatically indent to create visual hierarchy for operations on this product.
  // FIX: go through _indent() instead of mutating indentLevel directly, for consistency.
  _indent();
}

/**
 * End the product action section and unindent
 * This should be called after all operations for a product are completed
 */
function endProductAction() {
  // Unindent to return to the previous level
  _unindent();
}

/**
 * Format a success message with proper indentation and symbol
 * @param {string} message - Log message
 */
function success(message) {
  const indent = getIndent();
  // Use green checkmark ✓ with consistent formatting
  log(`${indent}${chalk.green('✓')} ${message}`);
}

/**
 * Format an error message with proper indentation and symbol
 * @param {string} message - Log message
 * @param {Object} data - Additional error data to log
 */
function error(message, data = null) {
  const indent = getIndent();
  if (data) {
    log(`${indent}${chalk.red('✖')} ${message}`);
    console.log(data); // Log data to console
    if (logFileStream) {
      logFileStream.write(JSON.stringify(data, null, 2) + '\n'); // Write formatted data to file
    }
  } else {
    log(`${indent}${chalk.red('✖')} ${message}`);
  }
}

/**
 * Format a warning message with proper indentation and symbol
 * @param {string} message - Log message
 */
function warn(message) {
  const indent = getIndent();
  log(`${indent}${chalk.yellow('⚠')} ${message}`);
}

/**
 * Format an info message with proper indentation and symbol
 * Symbol is automatically determined based on indentation level
 * @param {string} message - Log message
 */
function info(message) {
  const indent = getIndent();

  // Top level logs (indentLevel 0) have no symbol
  if (indentLevel === 0) {
    log(`${message}`);
    return;
  }

  // Determine symbol based on indentation level:
  // Level 1: • (bullet)
  // Level 2+: - (dash)
  let symbol = '-'; // Default dash

  if (indentLevel === 1) {
    symbol = '•'; // Main operation - bullet point
  } else {
    symbol = '-'; // Detail - dash
  }

  log(`${indent}${symbol} ${message}`);
}

/**
 * Format a subdued message with proper indentation (for less important info like cursor data)
 * @param {string} message - Log message
 */
function subdued(message) {
  const indent = getIndent();
  log(`${indent}${chalk.gray(message)}`);
}

/**
 * Format a dry run message with proper indentation
 * @param {string} message - Log message
 */
function dryRun(message) {
  const indent = getIndent();
  // Use dimmed text for dry run messages
  log(`${indent}${chalk.dim(' > [DRY RUN] ' + message)}`);
}

/**
 * Log a section header with proper formatting
 * @param {string} title - Section title
 */
function section(title) {
  log(`\n${chalk.bold.cyan(title)}`);
}
newline() { 274 | log(''); 275 | } 276 | 277 | /** 278 | * Get the current log file path 279 | * @returns {string|null} Log file path or null if logging to file is not initialized 280 | */ 281 | function getLogFilePath() { 282 | return logFilePath; 283 | } 284 | 285 | module.exports = { 286 | initializeLogFile, 287 | closeLogFile, 288 | getLogFilePath, 289 | startSection, 290 | endSection, 291 | resetIndent, 292 | getIndent, 293 | logProductAction, 294 | endProductAction, 295 | success, 296 | error, 297 | warn, 298 | info, 299 | subdued, 300 | dryRun, 301 | section, 302 | debug, 303 | newline 304 | }; 305 | -------------------------------------------------------------------------------- /utils/MetafieldFilterUtils.js: -------------------------------------------------------------------------------- 1 | const logger = require("./logger"); 2 | /** 3 | * Metafield Filter Utilities 4 | * 5 | * Provides utilities for filtering metafields based on namespace and key constraints. 6 | */ 7 | 8 | class MetafieldFilterUtils { 9 | /** 10 | * Filter metafields based on namespace and key options 11 | * @param {Array} metafields - Array of metafields to filter 12 | * @param {Object} options - Filtering options 13 | * @returns {Array} - Filtered metafields 14 | */ 15 | static filterMetafields(metafields, options) { 16 | if (!metafields || metafields.length === 0) { 17 | return []; 18 | } 19 | 20 | if (!options.namespace && !options.namespaces && !options.key) { 21 | return metafields; 22 | } 23 | 24 | // Special case: if namespace is 'all', don't filter by namespace 25 | if (options.namespace && options.namespace.toLowerCase() === 'all') { 26 | logger.info(`Using special namespace 'all' - including all namespaces`, 4); 27 | 28 | // Only filter by key if provided 29 | if (options.key) { 30 | logger.info(`Filtering metafields by key: ${options.key}`, 4); 31 | 32 | const filteredByKey = metafields.filter(metafield => { 33 | // Handle case where key includes namespace (namespace.key 
format) 34 | if (options.key.includes('.')) { 35 | const [keyNamespace, keyName] = options.key.split('.'); 36 | return metafield.namespace === keyNamespace && metafield.key === keyName; 37 | } else { 38 | // Key without namespace 39 | return metafield.key === options.key; 40 | } 41 | }); 42 | 43 | logger.info(`Filtered from ${metafields.length} to ${filteredByKey.length} metafields`, 4); 44 | return filteredByKey; 45 | } 46 | 47 | return metafields; // Return all metafields when namespace is 'all' and no key filter 48 | } 49 | 50 | let logMessage = ''; 51 | 52 | if (options.namespace) { 53 | logMessage += `namespace: ${options.namespace} `; 54 | } else if (options.namespaces) { 55 | logMessage += `namespaces: ${options.namespaces.join(', ')} `; 56 | } 57 | 58 | if (options.key) { 59 | logMessage += `key: ${options.key}`; 60 | } 61 | 62 | logger.info(`Filtering metafields by ${logMessage}`, 4); 63 | 64 | const filteredMetafields = metafields.filter(metafield => { 65 | // Filter by namespace if provided 66 | if (options.namespace && metafield.namespace !== options.namespace) { 67 | // Single namespace doesn't match 68 | return false; 69 | } 70 | 71 | // Filter by namespaces array if provided 72 | if (options.namespaces && Array.isArray(options.namespaces) && 73 | !options.namespaces.includes(metafield.namespace)) { 74 | // Metafield namespace is not in the provided namespaces array 75 | return false; 76 | } 77 | 78 | // Filter by key if provided 79 | if (options.key) { 80 | // Handle case where key includes namespace (namespace.key format) 81 | if (options.key.includes('.')) { 82 | const [keyNamespace, keyName] = options.key.split('.'); 83 | return metafield.namespace === keyNamespace && metafield.key === keyName; 84 | } else { 85 | // Key without namespace 86 | return metafield.key === options.key; 87 | } 88 | } 89 | 90 | return true; 91 | }); 92 | 93 | logger.info(`Filtered from ${metafields.length} to ${filteredMetafields.length} metafields`, 4); 94 | return 
filteredMetafields; 95 | } 96 | } 97 | 98 | module.exports = MetafieldFilterUtils; 99 | -------------------------------------------------------------------------------- /utils/MetafieldHandler.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Metafield Handler 3 | * 4 | * Handles batching and synchronization of metafields for various Shopify resources. 5 | * Supports batching to respect Shopify's 25-metafield-per-call limit. 6 | */ 7 | const logger = require('./logger'); 8 | const ErrorHandler = require('./ErrorHandler'); 9 | 10 | class MetafieldHandler { 11 | constructor(client, options = {}) { 12 | this.client = client; 13 | this.options = options; 14 | this.debug = options.debug; 15 | this.batchSize = 25; // Shopify's limit per API call 16 | } 17 | 18 | /** 19 | * Sync metafields for a product or other resource 20 | * @param {string} ownerId - The ID of the resource to which metafields belong 21 | * @param {Array} metafields - Array of metafield objects 22 | * @param {string} logPrefix - Prefix for log messages 23 | * @returns {boolean} - Success status 24 | */ 25 | async syncMetafields(ownerId, metafields, logPrefix = '') { 26 | if (!metafields || metafields.length === 0) return true; 27 | 28 | // Log how many metafields we're syncing - use main log type for bullet point 29 | logger.startSection(`Syncing ${metafields.length} metafields for ID: ${ownerId}`); 30 | 31 | // Split metafields into batches of 25 (Shopify limit per metafieldsSet mutation) 32 | const metafieldBatches = []; 33 | const BATCH_SIZE = 25; 34 | 35 | // Create batches of metafields 36 | for (let i = 0; i < metafields.length; i += BATCH_SIZE) { 37 | metafieldBatches.push(metafields.slice(i, i + BATCH_SIZE)); 38 | } 39 | 40 | logger.info(`Processing ${metafieldBatches.length} batches of metafields (max ${BATCH_SIZE} per batch)`); 41 | 42 | let successCount = 0; 43 | let failedCount = 0; 44 | 45 | // Process each batch 46 | for (let batchIndex = 0; 
batchIndex < metafieldBatches.length; batchIndex++) { 47 | const metafieldBatch = metafieldBatches[batchIndex]; 48 | logger.startSection(`Processing batch ${batchIndex + 1}/${metafieldBatches.length} (${metafieldBatch.length} metafields)`); 49 | 50 | // Prepare metafields inputs for this batch 51 | const metafieldsInput = metafieldBatch.map(metafield => ({ 52 | ownerId: ownerId, 53 | namespace: metafield.namespace, 54 | key: metafield.key, 55 | value: metafield.value, 56 | type: metafield.type 57 | })); 58 | 59 | const mutation = `#graphql 60 | mutation metafieldsSet($metafields: [MetafieldsSetInput!]!) { 61 | metafieldsSet(metafields: $metafields) { 62 | metafields { 63 | id 64 | namespace 65 | key 66 | } 67 | userErrors { 68 | field 69 | message 70 | } 71 | } 72 | } 73 | `; 74 | 75 | if (this.options.notADrill) { 76 | try { 77 | const result = await this.client.graphql(mutation, { metafields: metafieldsInput }, 'MetafieldsSet'); 78 | 79 | if (result.metafieldsSet.userErrors.length > 0) { 80 | // Use the generic error handler with a custom function to extract metafield details 81 | const getMetafieldDetails = (metafield) => { 82 | // Get a preview of the value (truncate if too long) 83 | let valuePreview = String(metafield.value); 84 | if (valuePreview.length > 50) { 85 | valuePreview = valuePreview.substring(0, 47) + '...'; 86 | } 87 | 88 | return { 89 | itemName: `Metafield ${metafield.namespace}.${metafield.key} (${metafield.type})`, 90 | valuePreview: valuePreview 91 | }; 92 | }; 93 | 94 | // Handle the errors with our generic handler (update to use new signature without indentation level) 95 | ErrorHandler.handleGraphQLUserErrors( 96 | result.metafieldsSet.userErrors, 97 | metafieldBatch, 98 | getMetafieldDetails, 99 | `batch ${batchIndex + 1}/${metafieldBatches.length}` 100 | ); 101 | 102 | failedCount += metafieldBatch.length; 103 | } else { 104 | const metafieldCount = result.metafieldsSet.metafields.length; 105 | logger.success(`Successfully set 
${metafieldCount} metafields in batch ${batchIndex + 1}`); 106 | successCount += metafieldCount; 107 | 108 | // Log individual metafields if debug is enabled 109 | if (this.debug) { 110 | logger.startSection(); 111 | result.metafieldsSet.metafields.forEach(metafield => { 112 | logger.debug(`Set metafield ${metafield.namespace}.${metafield.key}`); 113 | }); 114 | logger.endSection(); 115 | } 116 | } 117 | } catch (error) { 118 | logger.error(`Error setting metafields in batch ${batchIndex + 1}: ${error.message}`); 119 | failedCount += metafieldBatch.length; 120 | } 121 | } else { 122 | logger.info(`[DRY RUN] Would set ${metafieldBatch.length} metafields in batch ${batchIndex + 1}`); 123 | 124 | // Log individual metafields if debug is enabled 125 | if (this.debug) { 126 | logger.startSection(); 127 | metafieldBatch.forEach(metafield => { 128 | logger.debug(`[DRY RUN] Would set metafield ${metafield.namespace}.${metafield.key}`); 129 | }); 130 | logger.endSection(); 131 | } 132 | } 133 | 134 | // End batch section 135 | logger.endSection(); 136 | } 137 | 138 | // Return success status 139 | if (this.options.notADrill) { 140 | logger.info(`Metafields sync complete: ${successCount} successful, ${failedCount} failed`); 141 | // Unindent after all metafield processing 142 | logger.endSection(); 143 | return failedCount === 0; 144 | } else { 145 | // Unindent after all metafield processing 146 | logger.endSection(); 147 | return true; 148 | } 149 | } 150 | } 151 | 152 | module.exports = MetafieldHandler; 153 | -------------------------------------------------------------------------------- /utils/ProductBaseHandler.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Product Base Handler 3 | * 4 | * Handles core product operations in Shopify, including: 5 | * - Product creation 6 | * - Product updating 7 | * - Product deletion 8 | * - Fetching products by ID or handle 9 | */ 10 | const logger = require('./logger'); 11 | 12 | 
class ProductBaseHandler { 13 | constructor(client, options = {}) { 14 | this.client = client; 15 | this.options = options; 16 | this.debug = options.debug; 17 | } 18 | 19 | /** 20 | * Create a new product 21 | * @param {Object} productInput - Product creation input 22 | * @param {string} logPrefix - Prefix for logs 23 | * @returns {Promise} - Created product or null if failed 24 | */ 25 | async createProduct(productInput, logPrefix = '') { 26 | const mutation = `#graphql 27 | mutation createProduct($input: ProductInput!) { 28 | productCreate(input: $input) { 29 | product { 30 | id 31 | title 32 | handle 33 | } 34 | userErrors { 35 | field 36 | message 37 | } 38 | } 39 | } 40 | `; 41 | 42 | if (this.options.notADrill) { 43 | try { 44 | logger.info(`Creating base product "${productInput.title}"`, 'main'); 45 | const result = await this.client.graphql(mutation, { input: productInput }, 'CreateProduct'); 46 | 47 | if (result.productCreate.userErrors.length > 0) { 48 | logger.error(`Failed to create product "${productInput.title}"`, result.productCreate.userErrors); 49 | return null; 50 | } 51 | 52 | const newProduct = result.productCreate.product; 53 | logger.success(`Base product created successfully`); 54 | return newProduct; 55 | } catch (error) { 56 | logger.error(`Error creating product "${productInput.title}": ${error.message}`); 57 | return null; 58 | } 59 | } else { 60 | logger.info(`[DRY RUN] Would create product "${productInput.title}"`, 'main'); 61 | return { id: "dry-run-id", title: productInput.title, handle: productInput.handle }; 62 | } 63 | } 64 | 65 | /** 66 | * Update an existing product 67 | * @param {string} productId - ID of the product to update 68 | * @param {Object} productInput - Product update input 69 | * @param {string} logPrefix - Prefix for logs 70 | * @returns {Promise} - Updated product or null if failed 71 | */ 72 | async updateProduct(productId, productInput, logPrefix = '') { 73 | const updateInput = { 74 | ...productInput, 75 | id: 
productId 76 | }; 77 | 78 | const mutation = `#graphql 79 | mutation ProductUpdate($productUpdateInput: ProductUpdateInput!) { 80 | productUpdate(product: $productUpdateInput) { 81 | product { 82 | id 83 | title 84 | handle 85 | } 86 | userErrors { 87 | field 88 | message 89 | } 90 | } 91 | } 92 | `; 93 | 94 | if (this.options.notADrill) { 95 | try { 96 | logger.startSection(`Updating base product data`, 2, 'main'); 97 | const result = await this.client.graphql( 98 | mutation, 99 | { productUpdateInput: updateInput }, 100 | 'ProductUpdate' 101 | ); 102 | 103 | if (result.productUpdate.userErrors.length > 0) { 104 | logger.error(`Failed to update product "${productInput.title}"`, result.productUpdate.userErrors); 105 | logger.endSection(); 106 | return null; 107 | } 108 | 109 | const updatedProduct = result.productUpdate.product; 110 | logger.success(`Base product data updated successfully`); 111 | logger.endSection(); 112 | return updatedProduct; 113 | } catch (error) { 114 | logger.error(`Error updating product: ${error.message}`); 115 | logger.endSection(); 116 | return null; 117 | } 118 | } else { 119 | logger.info(`[DRY RUN] Would update product "${productInput.title}"`, 'main'); 120 | logger.endSection(); 121 | return { id: productId, title: productInput.title, handle: productInput.handle }; 122 | } 123 | } 124 | 125 | /** 126 | * Delete a product 127 | * @param {string} productId - ID of the product to delete 128 | * @param {string} logPrefix - Prefix for logs 129 | * @returns {Promise} - Success status 130 | */ 131 | async deleteProduct(productId, logPrefix = '') { 132 | if (!productId) { 133 | logger.error(`Cannot delete product: No product ID provided`); 134 | return false; 135 | } 136 | 137 | const mutation = `#graphql 138 | mutation productDelete($input: ProductDeleteInput!) 
{ 139 | productDelete(input: $input) { 140 | deletedProductId 141 | userErrors { 142 | field 143 | message 144 | } 145 | } 146 | } 147 | `; 148 | 149 | if (this.options.notADrill) { 150 | try { 151 | logger.info(`Deleting product with ID: ${productId}`, 'main'); 152 | const result = await this.client.graphql(mutation, { 153 | input: { 154 | id: productId 155 | } 156 | }, 'ProductDelete'); 157 | 158 | if (result.productDelete.userErrors.length > 0) { 159 | logger.error(`Failed to delete product`, result.productDelete.userErrors); 160 | return false; 161 | } 162 | 163 | logger.success(`Product deleted successfully`); 164 | return true; 165 | } catch (error) { 166 | logger.error(`Error deleting product: ${error.message}`); 167 | return false; 168 | } 169 | } else { 170 | logger.info(`[DRY RUN] Would delete product with ID: ${productId}`, 'main'); 171 | return true; 172 | } 173 | } 174 | 175 | /** 176 | * Get a product by handle 177 | * @param {string} handle - Product handle 178 | * @returns {Promise} - Product or null if not found 179 | */ 180 | async getProductByHandle(handle) { 181 | if (!handle) return null; 182 | 183 | const query = `#graphql 184 | query GetProductByHandle($handle: String!) 
{ 185 | productByHandle(handle: $handle) { 186 | id 187 | title 188 | handle 189 | status 190 | } 191 | } 192 | `; 193 | 194 | try { 195 | const response = await this.client.graphql(query, { handle }, 'GetProductByHandle'); 196 | return response.productByHandle; 197 | } catch (error) { 198 | logger.error(`Error fetching product by handle: ${error.message}`); 199 | return null; 200 | } 201 | } 202 | } 203 | 204 | module.exports = ProductBaseHandler; 205 | -------------------------------------------------------------------------------- /utils/ProductBatchProcessor.js: -------------------------------------------------------------------------------- 1 | const logger = require("./logger"); 2 | /** 3 | * Product Batch Processor 4 | * 5 | * Handles batch processing of products during sync operations. 6 | * Provides a pagination-aware product fetcher and batch processing utilities. 7 | */ 8 | const chalk = require('chalk'); 9 | 10 | class ProductBatchProcessor { 11 | constructor(sourceClient, options = {}) { 12 | this.sourceClient = sourceClient; 13 | this.options = options; 14 | this.debug = !!options.debug; 15 | this.batchSize = options.batchSize || 25; 16 | } 17 | 18 | /** 19 | * Fetches products in batches with pagination support 20 | * @param {Object} client - Shopify client 21 | * @param {Number} limit - Maximum number of products to fetch 22 | * @param {Object} options - Additional options (handle, etc.) 23 | * @returns {Object} - An async batch fetcher object 24 | */ 25 | async fetchProducts(client, limit = 50, options = {}) { 26 | // If fetching by handle, use the getProductByHandle method 27 | if (options.handle) { 28 | const product = await this.getProductByHandle(client, options.handle); 29 | return product ? 
[product] : []; 30 | } 31 | 32 | // For pagination, we'll use the configured batch size 33 | let hasNextPage = true; 34 | // Initialize cursor from options if provided 35 | let cursor = this.options.startCursor || null; 36 | let fetchedCount = 0; 37 | 38 | if (cursor) { 39 | logger.info(`Starting pagination from cursor: ${chalk.blue(cursor)}`); 40 | } 41 | 42 | // Return an async generator function 43 | return { 44 | // Method to fetch the next batch 45 | fetchNextBatch: async () => { 46 | if (!hasNextPage || fetchedCount >= limit) { 47 | return { products: [], done: true }; 48 | } 49 | 50 | try { 51 | // Calculate how many products to fetch in this batch 52 | const fetchCount = Math.min(this.batchSize, limit - fetchedCount); 53 | 54 | // Filter out archived products using the query parameter 55 | const variables = { 56 | first: fetchCount, 57 | query: "status:ACTIVE", // Only fetch active products 58 | after: cursor 59 | }; 60 | 61 | const response = await client.graphql(require('../graphql').ProductFetchAll, variables, 'GetProducts'); 62 | 63 | // Process products 64 | const batchProducts = response.products.edges.map(edge => { 65 | const product = edge.node; 66 | 67 | // Process images 68 | product.images = product.images.edges.map(imgEdge => imgEdge.node); 69 | 70 | // Process variants and their metafields 71 | product.variants = product.variants.edges.map(varEdge => { 72 | const variant = varEdge.node; 73 | 74 | // Process variant metafields 75 | if (variant.metafields && variant.metafields.edges) { 76 | variant.metafields = variant.metafields.edges.map(metaEdge => metaEdge.node); 77 | } else { 78 | variant.metafields = []; 79 | } 80 | 81 | return variant; 82 | }); 83 | 84 | // Process metafields 85 | product.metafields = product.metafields.edges.map(metaEdge => metaEdge.node); 86 | 87 | // Process publications 88 | if (product.publications && product.publications.edges) { 89 | product.publications = product.publications.edges.map(pubEdge => pubEdge.node); 90 
| } else { 91 | product.publications = []; 92 | } 93 | 94 | return product; 95 | }); 96 | 97 | // Update pagination info for next iteration 98 | hasNextPage = response.products.pageInfo.hasNextPage; 99 | cursor = response.products.pageInfo.endCursor; 100 | fetchedCount += batchProducts.length; 101 | 102 | // Log the current cursor in gray for reference 103 | if (this.debug) { 104 | logger.debug(`Current pagination cursor: ${cursor}`); 105 | } 106 | 107 | return { 108 | products: batchProducts, 109 | done: !hasNextPage || fetchedCount > limit, 110 | fetchedCount, 111 | totalCount: fetchedCount, 112 | cursor: cursor 113 | }; 114 | } catch (error) { 115 | logger.error(`Error fetching products: ${error.message}`); 116 | return { products: [], done: true, error: error.message }; 117 | } 118 | } 119 | }; 120 | } 121 | 122 | /** 123 | * Fetch a product by its handle 124 | */ 125 | async getProductByHandle(client, handle) { 126 | try { 127 | const response = await client.graphql(require('../graphql').ProductFetchByHandle, { handle }, 'GetProductByHandle'); 128 | 129 | if (!response.productByHandle) { 130 | return null; 131 | } 132 | 133 | const product = response.productByHandle; 134 | 135 | // Process images 136 | product.images = product.images.edges.map(imgEdge => imgEdge.node); 137 | 138 | // Process variants and their metafields 139 | product.variants = product.variants.edges.map(varEdge => { 140 | const variant = varEdge.node; 141 | 142 | // Process variant metafields 143 | if (variant.metafields && variant.metafields.edges) { 144 | variant.metafields = variant.metafields.edges.map(metaEdge => metaEdge.node); 145 | } else { 146 | variant.metafields = []; 147 | } 148 | 149 | return variant; 150 | }); 151 | 152 | // Process metafields 153 | product.metafields = product.metafields.edges.map(metaEdge => metaEdge.node); 154 | 155 | // Process publications 156 | if (product.publications && product.publications.edges) { 157 | product.publications = 
product.publications.edges.map(pubEdge => pubEdge.node); 158 | } else { 159 | product.publications = []; 160 | } 161 | 162 | return product; 163 | } catch (error) { 164 | logger.error(`Error fetching product by handle: ${error.message}`); 165 | return null; 166 | } 167 | } 168 | 169 | /** 170 | * Gets a collection by its handle 171 | */ 172 | async getCollectionByHandle(client, handle) { 173 | try { 174 | const response = await client.graphql(require('../graphql').CollectionFetchByHandle, { handle }, 'GetCollectionByHandle'); 175 | return response.collectionByHandle; 176 | } catch (error) { 177 | logger.error(`Error fetching collection by handle: ${error.message}`, 4); 178 | return null; 179 | } 180 | } 181 | 182 | /** 183 | * Gets a collection by its ID 184 | * @param {Object} client - Shopify client 185 | * @param {String} id - Collection ID 186 | * @returns {Promise} Collection object with handle 187 | */ 188 | async getCollectionById(client, id) { 189 | try { 190 | const response = await client.graphql(require('../graphql').CollectionFetchById, { id }, 'GetCollectionById'); 191 | return response.collection; 192 | } catch (error) { 193 | logger.error(`Could not find collection for ID: ${id}`, 4); 194 | return null; 195 | } 196 | } 197 | } 198 | 199 | module.exports = ProductBatchProcessor; 200 | -------------------------------------------------------------------------------- /utils/ProductImageHandler.js: -------------------------------------------------------------------------------- 1 | const logger = require("./logger"); 2 | /** 3 | * Product Image Handler 4 | * 5 | * Handles image operations for Shopify products, including: 6 | * - Uploading images to products 7 | * - Associating images with variants 8 | * - Managing media attachments 9 | */ 10 | const ShopifyIDUtils = require('./ShopifyIDUtils'); 11 | 12 | class ProductImageHandler { 13 | constructor(client, options = {}) { 14 | this.client = client; 15 | this.options = options; 16 | this.debug = 
options.debug; 17 | } 18 | 19 | /** 20 | * Synchronize product images 21 | * @param {string} productId - The product ID 22 | * @param {Array} sourceImages - Array of image objects 23 | * @param {string} logPrefix - Prefix for logs 24 | * @returns {Promise} - Success status 25 | */ 26 | async syncProductImages(productId, sourceImages, logPrefix = '') { 27 | if (!sourceImages || sourceImages.length === 0) return true; 28 | 29 | // Step 1: Get existing images to avoid duplicates 30 | const existingImagesQuery = `#graphql 31 | query getProductMedia($productId: ID!) { 32 | product(id: $productId) { 33 | media(first: 50) { 34 | edges { 35 | node { 36 | id 37 | mediaContentType 38 | ... on MediaImage { 39 | image { 40 | originalSrc 41 | } 42 | } 43 | alt 44 | } 45 | } 46 | } 47 | } 48 | } 49 | `; 50 | 51 | let existingImages = []; 52 | try { 53 | const response = await this.client.graphql(existingImagesQuery, { productId }, 'GetProductMedia'); 54 | existingImages = response.product.media.edges.map(edge => edge.node); 55 | logger.info(`Found ${existingImages.length} existing images on product`); 56 | } catch (error) { 57 | logger.error(`Error fetching existing product images: ${error.message}`); 58 | return false; 59 | } 60 | 61 | // Create a map of existing images by source URL for easy comparison 62 | const existingImageMap = {}; 63 | existingImages.forEach(img => { 64 | if (img.mediaContentType === 'IMAGE' && img.image && img.image.originalSrc) { 65 | // Use just the filename for comparison to handle URL differences 66 | const filename = img.image.originalSrc.split('/').pop().split('?')[0]; 67 | existingImageMap[filename] = img; 68 | } 69 | }); 70 | 71 | // Filter out images that already exist 72 | const newImagesToUpload = sourceImages.filter(img => { 73 | const sourceSrc = img.src; 74 | const sourceFilename = sourceSrc.split('/').pop().split('?')[0]; 75 | return !existingImageMap[sourceFilename]; 76 | }); 77 | 78 | if (newImagesToUpload.length === 0) { 79 | 
logger.info(`All images already exist on product, no need to upload`, 3); 80 | return true; 81 | } 82 | 83 | // Create media inputs from the new images 84 | const mediaInputs = newImagesToUpload.map(image => ({ 85 | originalSource: image.src, 86 | alt: image.altText || '', 87 | mediaContentType: 'IMAGE' 88 | })); 89 | 90 | const createMediaMutation = `#graphql 91 | mutation productCreateMedia($productId: ID!, $media: [CreateMediaInput!]!) { 92 | productCreateMedia(productId: $productId, media: $media) { 93 | media { 94 | ... on MediaImage { 95 | id 96 | image { 97 | id 98 | url 99 | } 100 | } 101 | } 102 | userErrors { 103 | field 104 | message 105 | } 106 | } 107 | } 108 | `; 109 | 110 | if (this.options.notADrill) { 111 | try { 112 | logger.info(`Uploading ${mediaInputs.length} new images for product`, 3); 113 | const result = await this.client.graphql(createMediaMutation, { 114 | productId, 115 | media: mediaInputs 116 | }, 'ProductCreateMedia'); 117 | 118 | if (result.productCreateMedia.userErrors.length > 0) { 119 | logger.error(`Failed to upload product images:`, 3, result.productCreateMedia.userErrors); 120 | return false; 121 | } else { 122 | logger.success(`Successfully uploaded ${result.productCreateMedia.media.length} images`, 3); 123 | return true; 124 | } 125 | } catch (error) { 126 | logger.error(`Error uploading product images: ${error.message}`, 3); 127 | return false; 128 | } 129 | } else { 130 | logger.info(`[DRY RUN] Would upload ${mediaInputs.length} new images for product`, 3); 131 | for (const input of mediaInputs) { 132 | logger.info(`[DRY RUN] Image: ${input.originalSource} (${input.alt || 'No alt text'})`, 4); 133 | } 134 | return true; 135 | } 136 | } 137 | 138 | /** 139 | * Update variant image association 140 | * @param {string} variantId - Variant ID 141 | * @param {string} imageId - Image ID 142 | * @param {string} productId - Product ID 143 | * @param {string} logPrefix - Prefix for logs 144 | * @returns {Promise} - Success status 145 
| */ 146 | async updateVariantImage(variantId, imageId, productId, logPrefix = '') { 147 | // Validate IDs 148 | if (!ShopifyIDUtils.isValidID(variantId) || !ShopifyIDUtils.isValidID(imageId)) { 149 | logger.error(`Invalid parameters: variantId or imageId is missing or invalid`, 2); 150 | return false; 151 | } 152 | 153 | // Extract productId from variantId if not provided 154 | if (!productId) { 155 | productId = await ShopifyIDUtils.getProductIdFromVariantId(variantId, this.client); 156 | if (!productId) { 157 | logger.error(`Could not determine product ID for variant`, 2); 158 | return false; 159 | } 160 | } 161 | 162 | // Validate we have a product ID 163 | if (!ShopifyIDUtils.isValidID(productId)) { 164 | logger.error(`Invalid product ID`, 2); 165 | return false; 166 | } 167 | 168 | // Convert ProductImage ID to MediaImage ID if needed 169 | if (imageId.includes('ProductImage/')) { 170 | const mediaImageId = await ShopifyIDUtils.convertProductImageToMediaImage(imageId, productId, this.client); 171 | if (mediaImageId) { 172 | imageId = mediaImageId; 173 | } else { 174 | logger.error(`Could not find a MediaImage corresponding to ProductImage`, 2); 175 | return false; 176 | } 177 | } 178 | 179 | // Confirm we now have a MediaImage ID 180 | if (!imageId.includes('MediaImage/')) { 181 | logger.error(`Unable to use image: ID is not a MediaImage ID`, 2); 182 | return false; 183 | } 184 | 185 | // Check if the variant already has this media attached 186 | try { 187 | const checkVariantMediaQuery = `#graphql 188 | query checkVariantMedia($variantId: ID!) 
{ 189 | productVariant(id: $variantId) { 190 | media(first: 10) { 191 | nodes { 192 | id 193 | } 194 | } 195 | } 196 | } 197 | `; 198 | 199 | const response = await this.client.graphql(checkVariantMediaQuery, { variantId }, 'checkVariantMedia'); 200 | 201 | if (response.productVariant?.media?.nodes) { 202 | const existingMediaIds = response.productVariant.media.nodes.map(node => node.id); 203 | 204 | // If the variant already has this media attached, we can skip the operation 205 | if (existingMediaIds.includes(imageId)) { 206 | logger.info(`Variant already has the image attached, skipping`, 2); 207 | return true; 208 | } 209 | 210 | // If the variant has different media attached, we need to detach it first 211 | if (existingMediaIds.length > 0) { 212 | const detachMediaMutation = `#graphql 213 | mutation productVariantDetachMedia($variantId: ID!, $mediaIds: [ID!]!) { 214 | productVariantDetachMedia( 215 | variantId: $variantId, 216 | mediaIds: $mediaIds 217 | ) { 218 | productVariant { 219 | id 220 | } 221 | userErrors { 222 | field 223 | message 224 | } 225 | } 226 | } 227 | `; 228 | 229 | // Detach existing media 230 | try { 231 | const detachResult = await this.client.graphql(detachMediaMutation, { 232 | variantId, 233 | mediaIds: existingMediaIds 234 | }, 'productVariantDetachMedia'); 235 | 236 | if (detachResult.productVariantDetachMedia.userErrors.length > 0) { 237 | logger.warn(`Failed to detach existing media, but will continue with attach operation`, 2); 238 | } 239 | } catch (error) { 240 | logger.warn(`Error detaching media: ${error.message}, but will continue with attach operation`, 2); 241 | } 242 | } 243 | } 244 | } catch (error) { 245 | logger.warn(`Could not check existing variant media: ${error.message}, will continue anyway`, 2); 246 | } 247 | 248 | // The mutation to append media to variant 249 | const variantAppendMediaMutation = `#graphql 250 | mutation productVariantAppendMedia($productId: ID!, $variantMedia: 
[ProductVariantAppendMediaInput!]!) { 251 | productVariantAppendMedia( 252 | productId: $productId, 253 | variantMedia: $variantMedia 254 | ) { 255 | productVariants { 256 | id 257 | } 258 | userErrors { 259 | field 260 | message 261 | } 262 | } 263 | } 264 | `; 265 | 266 | if (this.options.notADrill) { 267 | try { 268 | logger.info(`Updating variant image`, 2); 269 | 270 | // Append the new media 271 | const result = await this.client.graphql(variantAppendMediaMutation, { 272 | productId: productId, 273 | variantMedia: [ 274 | { 275 | variantId: variantId, 276 | mediaIds: [imageId] 277 | } 278 | ] 279 | }, 'productVariantAppendMedia'); 280 | 281 | if (result.productVariantAppendMedia.userErrors.length > 0) { 282 | logger.error(`Failed to update variant image:`, 2, result.productVariantAppendMedia.userErrors); 283 | return false; 284 | } else { 285 | logger.success(`Successfully updated variant image`, 2); 286 | return true; 287 | } 288 | } catch (error) { 289 | logger.error(`Error updating variant image: ${error.message}`, 2); 290 | return false; 291 | } 292 | } else { 293 | logger.info(`[DRY RUN] Would update variant image`, 2); 294 | return true; 295 | } 296 | } 297 | } 298 | 299 | module.exports = ProductImageHandler; 300 | -------------------------------------------------------------------------------- /utils/ProductMetafieldProcessor.js: -------------------------------------------------------------------------------- 1 | const logger = require("./logger"); 2 | /** 3 | * Product Metafield Processor 4 | * 5 | * Handles the processing of product metafields during sync operations, 6 | * including filtering, transformation, and syncing of metafields. 
class ProductMetafieldProcessor {
  /**
   * Processes product metafields during sync: filters them by the
   * namespace/key options, transforms reference metafields via the
   * reference handler, and pushes the result to the metafield handler.
   *
   * @param {Object} metafieldHandler - Handler that writes metafields to the target
   * @param {Object} referenceHandler - Handler that rewrites reference metafields
   * @param {Object} options - Sync options (debug flag, namespace/key filters, ...)
   */
  constructor(metafieldHandler, referenceHandler, options = {}) {
    this.metafieldHandler = metafieldHandler;
    this.referenceHandler = referenceHandler;
    this.options = options;
    this.debug = !!options.debug;
  }

  /**
   * Process and transform metafields for a product
   * @param {String} productId - The product ID
   * @param {Array} metafields - Raw metafields to process
   * @returns {Promise<Object>} - Stats about the metafield processing
   */
  async processProductMetafields(productId, metafields) {
    if (!metafields || metafields.length === 0) {
      return { processed: 0, transformed: 0, blanked: 0, errors: 0, warnings: 0 };
    }

    // Apply namespace/key filtering, then rewrite reference metafields.
    const filteredMetafields = MetafieldFilterUtils.filterMetafields(metafields, this.options);
    const { transformedMetafields, stats } = await this.referenceHandler.transformReferences(filteredMetafields);

    logger.info(`Processing metafields: ${filteredMetafields.length} filtered, ${stats.transformed} transformed, ${stats.blanked} blanked due to errors, ${stats.warnings} warnings`);

    // In debug mode, print one line per transformed metafield (indented).
    if (this.debug) {
      logger.startSection();

      for (const metafield of transformedMetafields) {
        const label = `Metafield ${metafield.namespace}.${metafield.key} (${metafield.type})`;

        if (metafield._blanked) {
          logger.info(`${label}: [BLANKED]`);
        } else if (metafield._unsupportedType) {
          logger.info(`${label}: [UNSUPPORTED TYPE]`);
        } else {
          const valuePreview = typeof metafield.value === 'string'
            ? `${metafield.value.substring(0, 30)}${metafield.value.length > 30 ? '...' : ''}`
            : String(metafield.value);
          logger.info(`${label}: ${valuePreview}`);
        }
      }

      logger.endSection();
    }

    await this.metafieldHandler.syncMetafields(productId, transformedMetafields);
    return stats;
  }
}
class ProductOperationHandler {
  /**
   * Coordinates creation and updating of products during sync, delegating
   * variants, images, metafields and publications to dedicated handlers.
   *
   * @param {Object} targetClient - Shopify client for the target store
   * @param {Object} productHandler - Creates/updates the base product
   * @param {Object} variantHandler - Bulk-creates/updates variants
   * @param {Object} imageHandler - Syncs product images
   * @param {Object} metafieldProcessor - Filters/transforms/syncs metafields
   * @param {Object} publicationHandler - Syncs publication status
   * @param {Object} options - Sync options ({ notADrill, ... })
   */
  constructor(
    targetClient,
    productHandler,
    variantHandler,
    imageHandler,
    metafieldProcessor,
    publicationHandler,
    options = {}
  ) {
    this.targetClient = targetClient;
    this.productHandler = productHandler;
    this.variantHandler = variantHandler;
    this.imageHandler = imageHandler;
    this.metafieldProcessor = metafieldProcessor;
    this.publicationHandler = publicationHandler;
    this.options = options;
    this.notADrill = !!options.notADrill;
  }

  /**
   * Build a fresh per-product result record with zeroed counters.
   * @param {Object} counters - Counter overrides, e.g. { created: 1 }
   * @returns {Object} Result record including empty metafield stats
   */
  buildSyncResults(counters) {
    return {
      created: 0,
      updated: 0,
      skipped: 0,
      failed: 0,
      deleted: 0,
      ...counters,
      metafields: {
        processed: 0,
        transformed: 0,
        blanked: 0,
        errors: 0,
        warnings: 0,
        unsupportedTypes: []
      }
    };
  }

  /**
   * Copy metafield processing stats into a result record.
   * @param {Object} results - Result record from buildSyncResults
   * @param {Object} stats - Stats returned by the metafield processor
   */
  applyMetafieldStats(results, stats) {
    results.metafields.processed = stats.processed;
    results.metafields.transformed = stats.transformed;
    results.metafields.blanked = stats.blanked;
    results.metafields.errors = stats.errors;
    results.metafields.warnings = stats.warnings;
    results.metafields.unsupportedTypes = stats.unsupportedTypes || [];
  }

  /**
   * Log the channels a dry run would publish to, if any.
   * @param {Object} product - Source product data
   */
  logDryRunPublications(product) {
    if (product.publications && product.publications.length > 0) {
      const publishedChannels = product.publications
        .filter(pub => pub.isPublished)
        .map(pub => pub.channel.handle);
      logger.info(`[DRY RUN] Would publish to ${publishedChannels.length} channels: ${publishedChannels.join(', ')}`);
    }
  }

  /**
   * Create a new product in the target shop
   * @param {Object} product - Source product data to create
   * @returns {Promise<Object|null>} - Created product result, or null on failure
   */
  async createProduct(product) {
    // ProductInput shape; variants are intentionally excluded here and
    // created afterwards via productVariantsBulkCreate for better control.
    const productInput = {
      title: product.title,
      descriptionHtml: product.descriptionHtml,
      vendor: product.vendor,
      productType: product.productType,
      handle: product.handle,
      status: product.status || 'ACTIVE',
      tags: product.tags,
      productOptions: product.options.map(opt => ({
        name: opt.name,
        values: Array.isArray(opt.values) ? opt.values.map(value => ({ name: value })) : []
      }))
    };

    if (!this.notADrill) {
      logger.info(`[DRY RUN] Would create product "${product.title}"`, 'main');

      // Indent dry run details
      logger.startSection();
      logger.info(`[DRY RUN] Would create ${product.variants ? product.variants.length : 0} variant(s)`);
      logger.info(`[DRY RUN] Would sync ${product.images ? product.images.length : 0} image(s) and ${product.metafields ? product.metafields.length : 0} metafield(s)`);
      this.logDryRunPublications(product);
      logger.endSection();

      return {
        id: "dry-run-id",
        title: product.title,
        handle: product.handle,
        results: this.buildSyncResults({ created: 1 })
      };
    }

    try {
      // Step 1: create the base product.
      const newProduct = await this.productHandler.createProduct(productInput);
      if (!newProduct) return null;

      const results = this.buildSyncResults({ created: 1 });

      // Step 2: create variants via productVariantsBulkCreate.
      if (newProduct.id && product.variants && product.variants.length > 0) {
        await this.variantHandler.updateProductVariants(newProduct.id, product.variants);
      }

      // Step 3: upload images if any.
      if (newProduct.id && product.images && product.images.length > 0) {
        logger.startSection(`Processing ${product.images.length} images for new product`, 2, 'main');
        await this.imageHandler.syncProductImages(newProduct.id, product.images);
        logger.endSection();
      }

      // Step 4: process and create metafields.
      const metafieldStats = await this.metafieldProcessor.processProductMetafields(newProduct.id, product.metafields);
      this.applyMetafieldStats(results, metafieldStats);

      // Step 5: sync publication status if any.
      if (newProduct.id && product.publications && product.publications.length > 0) {
        await this.publicationHandler.syncProductPublications(newProduct.id, product.publications);
      }

      return {
        id: newProduct.id,
        title: newProduct.title,
        handle: newProduct.handle,
        results
      };
    } catch (error) {
      logger.error(`Error creating product "${product.title}": ${error.message}`);
      return null;
    }
  }

  /**
   * Update an existing product in the target shop
   * @param {Object} product - Source product data to update with
   * @param {Object} existingProduct - Existing product data in target shop
   * @returns {Promise<Object|null>} - Updated product result, or null on failure
   */
  async updateProduct(product, existingProduct) {
    // ProductUpdateInput shape; options and variants need special handling
    // and are deliberately excluded from the base update.
    const productUpdateInput = {
      title: product.title,
      descriptionHtml: product.descriptionHtml,
      vendor: product.vendor,
      productType: product.productType,
      status: product.status || 'ACTIVE',
      tags: product.tags
    };

    if (!this.notADrill) {
      logger.info(`[DRY RUN] Would update product "${product.title}"`, 'main');

      // Indent dry run details
      logger.startSection();
      logger.info(`[DRY RUN] Would update ${product.variants ? product.variants.length : 0} variant(s)`);
      logger.info(`[DRY RUN] Would sync ${product.images ? product.images.length : 0} image(s) and ${product.metafields ? product.metafields.length : 0} metafield(s)`);
      this.logDryRunPublications(product);
      logger.endSection();

      return {
        id: existingProduct.id,
        title: product.title,
        handle: product.handle,
        results: this.buildSyncResults({ updated: 1 })
      };
    }

    try {
      // Step 1: update the base product.
      const updatedProduct = await this.productHandler.updateProduct(existingProduct.id, productUpdateInput);
      if (!updatedProduct) return null;

      const results = this.buildSyncResults({ updated: 1 });

      // Step 2: update variants via productVariantsBulkUpdate.
      if (updatedProduct.id && product.variants && product.variants.length > 0) {
        await this.variantHandler.updateProductVariants(updatedProduct.id, product.variants);
      } else {
        logger.info(`No variants to update for "${product.title}"`);
      }

      // Step 3: sync images.
      if (updatedProduct.id && product.images && product.images.length > 0) {
        logger.startSection(`Processing ${product.images.length} images for existing product`);
        await this.imageHandler.syncProductImages(updatedProduct.id, product.images);
        logger.endSection();
      }

      // Step 4: process and update metafields.
      const metafieldStats = await this.metafieldProcessor.processProductMetafields(updatedProduct.id, product.metafields);
      this.applyMetafieldStats(results, metafieldStats);

      // Step 5: sync publication status.
      if (updatedProduct.id && product.publications && product.publications.length > 0) {
        await this.publicationHandler.syncProductPublications(updatedProduct.id, product.publications);
      }

      return {
        id: updatedProduct.id,
        title: updatedProduct.title,
        handle: updatedProduct.handle,
        results
      };
    } catch (error) {
      logger.error(`Error updating product "${product.title}": ${error.message}`);
      return null;
    }
  }
}
class ProductPublicationHandler {
  /**
   * Handles product publication operations: discovering the target store's
   * channels/publications, checking current publish status, and publishing
   * products to matching channels.
   *
   * @param {Object} client - Shopify client for the target store
   * @param {Object} options - Sync options ({ notADrill, debug, ... })
   */
  constructor(client, options = {}) {
    this.client = client;
    this.options = options;
    this.debug = options.debug;
  }

  /**
   * Synchronize product publications
   * @param {string} productId - The product ID
   * @param {Array} sourcePublications - Array of publication objects from source product
   * @param {string} logPrefix - Prefix for logs
   * @returns {Promise<boolean>} - Success status
   */
  async syncProductPublications(productId, sourcePublications, logPrefix = '') {
    if (!sourcePublications || sourcePublications.length === 0) {
      logger.info(`No publication channels to sync`);
      return true;
    }

    // A publication is only usable when it names its channel by handle.
    const hasValidHandle = pub =>
      pub && pub.channel && typeof pub.channel.handle === 'string' && pub.channel.handle.length > 0;

    const publicationsToProcess = sourcePublications.filter(hasValidHandle);

    // Report skipped publications only in debug mode.
    if (publicationsToProcess.length < sourcePublications.length && this.debug) {
      logger.debug(`${logPrefix}- Filtered out ${sourcePublications.length - publicationsToProcess.length} invalid publications without valid channel handles`);

      const invalidPublications = sourcePublications.filter(pub => !hasValidHandle(pub));

      logger.debug(`${logPrefix}- Invalid publication details:`,
        invalidPublications.map(pub => ({
          isPublished: pub.isPublished || false,
          hasChannel: !!pub.channel,
          channelId: pub.channel?.id || 'missing',
          channelHandle: pub.channel?.handle || 'missing',
          publishDate: pub.publishDate || 'unknown'
        }))
      );
    }

    if (publicationsToProcess.length === 0) {
      logger.info(`No valid publication channels to sync after filtering`);
      return true;
    }

    if (this.debug) {
      logger.debug(`${logPrefix}- Valid publications to process:`,
        publicationsToProcess.map(pub => ({
          channelHandle: pub.channel.handle,
          channelName: pub.channel.name || 'unknown',
          isPublished: pub.isPublished
        }))
      );
    }

    logger.startSection(`Syncing product publication to ${publicationsToProcess.length} channels`);

    // Discover the channels and publications available in the target store.
    const getPublicationsQuery = `#graphql
      query GetPublicationsAndChannels {
        publications(first: 25) {
          edges {
            node {
              id
              name
              app {
                id
              }
            }
          }
        }
        channels(first: 25) {
          edges {
            node {
              id
              name
              handle
            }
          }
        }
      }
    `;

    let targetChannels = [];
    let targetPublications = [];
    try {
      const response = await this.client.graphql(getPublicationsQuery, {}, 'GetPublicationsAndChannels');
      targetChannels = response.channels.edges.map(edge => edge.node);
      targetPublications = response.publications.edges.map(edge => edge.node);

      if (this.debug) {
        logger.debug(`${logPrefix} - Found ${targetChannels.length} available channels in target store`);
        logger.debug(`${logPrefix} - Found ${targetPublications.length} publications in target store`);
        if (targetChannels.length > 0) {
          logger.debug(`${logPrefix} - Available channels: ${targetChannels.map(c => c.handle).join(', ')}`);
        }
      }
    } catch (error) {
      logger.error(`Error fetching target store publications: ${error.message}`);
      logger.endSection();
      return false;
    }

    // Look up where the product is already published (best effort).
    const getProductPublicationsQuery = `#graphql
      query GetProductPublications($productId: ID!) {
        product(id: $productId) {
          publications(first: 25) {
            edges {
              node {
                channel {
                  id
                  handle
                }
                isPublished
              }
            }
          }
        }
      }
    `;

    let currentPublications = [];
    try {
      const response = await this.client.graphql(getProductPublicationsQuery, { productId }, 'GetProductPublications');
      if (response.product && response.product.publications) {
        currentPublications = response.product.publications.edges.map(edge => edge.node);
      }

      if (this.debug) {
        logger.debug(`${logPrefix} - Product is currently published to ${currentPublications.length} channels`);
        if (currentPublications.length > 0) {
          logger.debug(`${logPrefix} - Currently published to: ${currentPublications.filter(p => p.isPublished).map(p => p.channel.handle).join(', ')}`);
        }
      }
    } catch (error) {
      // Non-fatal: we can still attempt to publish without this info.
      logger.warn(`Unable to fetch current publications: ${error.message}`);
    }

    // Match source publications to target channels by handle.
    const publicationsToCreate = [];
    const skippedChannels = [];
    const addedPublicationIds = new Set(); // avoid duplicate publication IDs

    for (const sourcePublication of publicationsToProcess) {
      // Only publications that are actually published get synced.
      if (!sourcePublication.isPublished) continue;

      const sourceChannelHandle = sourcePublication.channel.handle;
      const targetChannel = targetChannels.find(channel => channel.handle === sourceChannelHandle);

      if (!targetChannel) {
        skippedChannels.push(sourceChannelHandle);
        continue;
      }

      // Default to the first publication; most stores only have one.
      const targetPublication = targetPublications.length > 0 ? targetPublications[0] : null;
      if (!targetPublication) {
        logger.warn(`Found channel ${sourceChannelHandle} but no associated publication in target store`);
        skippedChannels.push(sourceChannelHandle);
        continue;
      }

      const alreadyPublished = currentPublications.some(pub =>
        pub.channel.handle === sourceChannelHandle && pub.isPublished
      );

      if (!alreadyPublished && !addedPublicationIds.has(targetPublication.id)) {
        publicationsToCreate.push({
          publicationId: targetPublication.id,
          channelHandle: sourceChannelHandle
        });
        addedPublicationIds.add(targetPublication.id);
      } else if (this.debug) {
        if (alreadyPublished) {
          logger.debug(`${logPrefix} - Product already published to ${sourceChannelHandle}`);
        } else {
          logger.debug(`${logPrefix} - Skipping duplicate publication ID for channel ${sourceChannelHandle}`);
        }
      }
    }

    if (skippedChannels.length > 0) {
      if (this.debug) {
        logger.debug(`${logPrefix} - Skipping ${skippedChannels.length} channels that don't exist in target store: ${skippedChannels.join(', ')}`);
      } else if (skippedChannels.length > 1 || skippedChannels[0] !== 'online_store') {
        // Only warn about non-standard channels being skipped.
        logger.warn(`Skipping ${skippedChannels.length} channels that don't exist in target store: ${skippedChannels.join(', ')}`);
      }
    }

    if (publicationsToCreate.length === 0) {
      logger.info(`No new publication channels to add`);
      logger.endSection();
      return true;
    }

    const publishMutation = `#graphql
      mutation publishablePublish($id: ID!, $input: [PublicationInput!]!) {
        publishablePublish(id: $id, input: $input) {
          userErrors {
            field
            message
          }
        }
      }
    `;

    if (!this.options.notADrill) {
      logger.info(`[DRY RUN] Would publish product to ${publicationsToCreate.length} channels: ${publicationsToCreate.map(p => p.channelHandle).join(', ')}`);
      logger.endSection();
      return true;
    }

    try {
      logger.info(`Publishing product to ${publicationsToCreate.length} channels`);

      const input = publicationsToCreate.map(pub => ({
        publicationId: pub.publicationId,
        publishDate: new Date().toISOString() // publish as of now
      }));

      const result = await this.client.graphql(publishMutation, {
        id: productId,
        input
      }, 'PublishablePublish');

      if (result.publishablePublish.userErrors.length > 0) {
        logger.error(`Failed to publish product:`, result.publishablePublish.userErrors);
        logger.endSection();
        return false;
      }

      logger.success(`Successfully published product to ${publicationsToCreate.length} channels`);
      logger.endSection();
      return true;
    } catch (error) {
      logger.error(`Error publishing product: ${error.message}`);
      logger.endSection();
      return false;
    }
  }
}
/**
 * Get shop configuration from .shops.json file by shop name
 *
 * Reads .shops.json from the current working directory and returns the
 * entry whose `name` matches. When the entry has no `protected` property
 * it defaults to true (protected).
 *
 * @param {string} shopName - Shop name to lookup
 * @returns {Object|null} Object with domain and accessToken, or null if not found
 */
function getShopConfig(shopName) {
  if (!shopName) return null;

  try {
    const shopsFile = path.resolve(process.cwd(), '.shops.json');
    if (!fs.existsSync(shopsFile)) return null;

    const shopsConfig = JSON.parse(fs.readFileSync(shopsFile, 'utf8'));

    // Guard against a malformed file (e.g. a top-level object) instead of
    // letting the .find call throw.
    if (!Array.isArray(shopsConfig)) {
      logger.error(`Expected .shops.json to contain an array of shop entries`);
      return null;
    }

    const shopConfig = shopsConfig.find(s => s.name === shopName) || null;

    // If protected property is not present, default to true (protected)
    if (shopConfig && shopConfig.protected === undefined) {
      shopConfig.protected = true;
    }

    return shopConfig;
  } catch (error) {
    // Template-literal message per the project's logging convention.
    logger.error(`Error reading .shops.json: ${error.message}`);
    return null;
  }
}
10 | */ 11 | constructor(client, debug = false) { 12 | if (!client) { 13 | throw new Error("Shopify client instance is required."); 14 | } 15 | this.client = client; 16 | this.debug = debug; 17 | this.operationCounter = 0; // To help correlate logs 18 | this.shopName = this.client.options.shopName; 19 | 20 | // Set up rate limit tracking 21 | this.rateLimits = { 22 | restRemaining: null, 23 | restTotal: null, 24 | graphqlRemaining: null, 25 | graphqlTotal: null, 26 | lastUpdated: null 27 | }; 28 | 29 | // Forward event listeners if debug is needed for them elsewhere 30 | if (this.debug) { 31 | // Use logger for these events 32 | this.client.on('callLimits', limits => { 33 | this.rateLimits.restRemaining = limits.remaining; 34 | this.rateLimits.restTotal = limits.current; 35 | this.rateLimits.lastUpdated = new Date(); 36 | logger.info(`REST Call limits for [${this.shopName}]: ${JSON.stringify(limits)}`); 37 | }); 38 | 39 | this.client.on('callGraphqlLimits', limits => { 40 | this.rateLimits.graphqlRemaining = limits.remaining; 41 | this.rateLimits.graphqlTotal = limits.current; 42 | this.rateLimits.lastUpdated = new Date(); 43 | logger.info(`GraphQL limits for [${this.shopName}]: ${JSON.stringify(limits)}`); 44 | }); 45 | } 46 | } 47 | 48 | /** 49 | * Executes a GraphQL query or mutation with debug logging and enhanced error handling. 50 | * @param {string} queryOrMutation - The GraphQL query or mutation string. 51 | * @param {object} [variables] - Optional variables for the query or mutation. 52 | * @param {string} [operationName] - Optional name for the operation for logging context. 53 | * @returns {Promise} - The result from the Shopify API. 
54 | * @throws {Error} - Enhanced error with operation context 55 | */ 56 | async graphql(queryOrMutation, variables = undefined, operationName = 'GraphQL Operation') { 57 | this.operationCounter++; 58 | const operationId = `${this.shopName}-${operationName}-${this.operationCounter}`; 59 | const logPrefix = `GraphQL[${this.shopName} ${operationName}]`; 60 | 61 | try { 62 | // Debug logging for the request 63 | if (this.debug) { 64 | logger.info(`${logPrefix}: Starting operation: ${chalk.bold(operationId)}`); 65 | logger.debug(queryOrMutation); 66 | logger.debug(JSON.stringify(variables, null, 2)); 67 | } 68 | 69 | // Execute the GraphQL request 70 | const result = await this.client.graphql(queryOrMutation, variables); 71 | 72 | // Debug logging for the response 73 | if (this.debug) { 74 | logger.info(`${logPrefix}: Operation ${chalk.bold(operationId)} completed successfully`); 75 | logger.debug(`Response: ${JSON.stringify(result, null, 2)}`); 76 | } 77 | 78 | // Check for GraphQL errors in the response 79 | if (result.errors) { 80 | const errorMessages = result.errors.map(e => e.message).join(', '); 81 | const errorDetails = { 82 | operationName, 83 | operationId, 84 | errors: result.errors, 85 | variables 86 | }; 87 | 88 | logger.error(`${logPrefix}: GraphQL errors in operation ${operationId}: ${errorMessages}`); 89 | if (this.debug) { 90 | logger.error(`Error details: ${JSON.stringify(errorDetails, null, 2)}`); 91 | } 92 | 93 | // We still return the result so caller can handle these errors 94 | // as some operations might continue with partial data 95 | return result; 96 | } 97 | 98 | // Return the successful result 99 | return result; 100 | 101 | } catch (error) { 102 | // Handle network errors and other exceptions 103 | // Format a detailed error with context about the operation 104 | const enhancedError = new Error(`Shopify GraphQL error in operation ${operationId} (${operationName}): ${error.message}`); 105 | enhancedError.originalError = error; 106 | 
enhancedError.operationName = operationName; 107 | enhancedError.operationId = operationId; 108 | enhancedError.shopName = this.shopName; 109 | 110 | // Only include variables in debug mode to avoid leaking sensitive data in production logs 111 | if (this.debug) { 112 | enhancedError.variables = variables; 113 | enhancedError.query = queryOrMutation; 114 | } 115 | 116 | // Check if this is a rate limit error 117 | if (error.message && error.message.includes('Throttled')) { 118 | logger.warn(`${logPrefix}: Rate limit exceeded for shop ${this.shopName}. Consider implementing backoff strategy.`); 119 | enhancedError.isRateLimit = true; 120 | } 121 | 122 | // Log the error 123 | logger.error(`${logPrefix}: GraphQL operation ${chalk.bold(operationId)} failed: ${error.message}`); 124 | 125 | if (this.debug) { 126 | logger.debug(`Error stack: ${error.stack}`); 127 | 128 | // Log rate limit information if available 129 | if (this.rateLimits.lastUpdated) { 130 | logger.info(`Current rate limits: graphql: ${this.rateLimits.graphqlRemaining}/${this.rateLimits.graphqlTotal}, ` + 131 | `rest: ${this.rateLimits.restRemaining}/${this.rateLimits.restTotal}, ` + 132 | `lastUpdated: ${this.rateLimits.lastUpdated}`); 133 | } 134 | } 135 | 136 | throw enhancedError; 137 | } 138 | } 139 | 140 | /** 141 | * Makes a REST API call with consistent error handling 142 | * @param {string} resource - The Shopify resource (e.g., 'products', 'orders') 143 | * @param {string} method - The HTTP method (get, post, put, delete) 144 | * @param {object} [params] - Optional params for the request 145 | * @returns {Promise} - The API response 146 | */ 147 | async rest(resource, method, params = undefined) { 148 | if (!this.client[resource] || typeof this.client[resource][method] !== 'function') { 149 | throw new Error(`Invalid resource or method: ${resource}.${method}`); 150 | } 151 | 152 | const operationId = `${this.shopName}-REST-${resource}-${method}-${++this.operationCounter}`; 153 | const 
logPrefix = `REST[${this.shopName} ${resource}.${method}]`; 154 | 155 | try { 156 | // Debug logging for the request 157 | if (this.debug) { 158 | logger.info(`${logPrefix}: Starting REST operation: ${chalk.bold(operationId)}`); 159 | if (params) { 160 | logger.debug(`Params: ${JSON.stringify(params, null, 2)}`); 161 | } 162 | } 163 | 164 | // Execute the REST request 165 | const result = await this.client[resource][method](params); 166 | 167 | // Debug logging for the response 168 | if (this.debug) { 169 | logger.info(`${logPrefix}: REST operation ${chalk.bold(operationId)} completed successfully`); 170 | logger.debug(`Response: ${JSON.stringify(result, null, 2)}`); 171 | } 172 | 173 | return result; 174 | 175 | } catch (error) { 176 | // Create enhanced error with context 177 | const enhancedError = new Error(`Shopify REST error in operation ${operationId} (${resource}.${method}): ${error.message}`); 178 | enhancedError.originalError = error; 179 | enhancedError.resource = resource; 180 | enhancedError.method = method; 181 | enhancedError.operationId = operationId; 182 | enhancedError.shopName = this.shopName; 183 | 184 | if (this.debug) { 185 | enhancedError.params = params; 186 | } 187 | 188 | // Check for rate limiting 189 | if (error.statusCode === 429) { 190 | logger.warn(`${logPrefix}: REST rate limit exceeded for shop ${this.shopName}. 
Consider implementing backoff strategy.`); 191 | enhancedError.isRateLimit = true; 192 | } 193 | 194 | // Log error with appropriate context 195 | logger.error(`${logPrefix}: REST operation ${chalk.bold(operationId)} failed: ${error.message}`); 196 | 197 | if (this.debug) { 198 | logger.debug(`Error stack: ${error.stack}`); 199 | 200 | // Log HTTP details if available 201 | if (error.statusCode) { 202 | logger.error(`HTTP Status: ${error.statusCode}`); 203 | } 204 | 205 | // Log rate limit information if available 206 | if (this.rateLimits.lastUpdated) { 207 | logger.info(`Current rate limits: graphql: ${this.rateLimits.graphqlRemaining}/${this.rateLimits.graphqlTotal}, ` + 208 | `rest: ${this.rateLimits.restRemaining}/${this.rateLimits.restTotal}, ` + 209 | `lastUpdated: ${this.rateLimits.lastUpdated}`); 210 | } 211 | } 212 | 213 | throw enhancedError; 214 | } 215 | } 216 | 217 | /** 218 | * Get information about current rate limits 219 | * @returns {object} Current rate limit information 220 | */ 221 | getRateLimits() { 222 | return { 223 | ...this.rateLimits, 224 | shop: this.shopName, 225 | isNearingLimit: this.isNearingRateLimit() 226 | }; 227 | } 228 | 229 | /** 230 | * Check if we're approaching rate limits 231 | * @param {number} threshold - Percentage threshold to consider "nearing limit" (default: 80%) 232 | * @returns {boolean} Whether we're nearing the rate limit 233 | */ 234 | isNearingRateLimit(threshold = 80) { 235 | if (!this.rateLimits.lastUpdated) return false; 236 | 237 | // Check if we're below the threshold percentage of our total limit 238 | const graphqlPercentUsed = this.rateLimits.graphqlRemaining ? 239 | 100 - (this.rateLimits.graphqlRemaining / this.rateLimits.graphqlTotal * 100) : 0; 240 | 241 | const restPercentUsed = this.rateLimits.restRemaining ? 
242 | 100 - (this.rateLimits.restRemaining / this.rateLimits.restTotal * 100) : 0; 243 | 244 | return graphqlPercentUsed > threshold || restPercentUsed > threshold; 245 | } 246 | } 247 | 248 | module.exports = ShopifyClient; 249 | -------------------------------------------------------------------------------- /utils/ShopifyIDUtils.js: -------------------------------------------------------------------------------- 1 | const logger = require("./logger"); 2 | /** 3 | * Shopify ID Utilities 4 | * 5 | * Handles various operations related to Shopify IDs, including: 6 | * - Validation 7 | * - Type checking 8 | * - URL normalization 9 | * - ID extraction and conversion 10 | */ 11 | 12 | class ShopifyIDUtils { 13 | /** 14 | * Check if a string is a valid Shopify ID (starts with gid://) 15 | * @param {string} id - The ID to validate 16 | * @returns {boolean} - Whether the ID is valid 17 | */ 18 | static isValidID(id) { 19 | return typeof id === 'string' && id.startsWith('gid://'); 20 | } 21 | 22 | /** 23 | * Extract the ID type from a Shopify ID 24 | * @param {string} id - The ID to analyze 25 | * @returns {string|null} - The ID type or null if invalid 26 | */ 27 | static getIDType(id) { 28 | if (!this.isValidID(id)) return null; 29 | 30 | // Format is gid://shopify/Type/12345 31 | const parts = id.split('/'); 32 | if (parts.length >= 4) { 33 | return parts[3]; 34 | } 35 | 36 | return null; 37 | } 38 | 39 | /** 40 | * Check if ID is of a specific type 41 | * @param {string} id - The ID to check 42 | * @param {string} type - The type to check for 43 | * @returns {boolean} - Whether the ID is of the specified type 44 | */ 45 | static isIDType(id, type) { 46 | return this.getIDType(id) === type; 47 | } 48 | 49 | /** 50 | * Normalize a URL for comparison 51 | * @param {string} url - URL to normalize 52 | * @returns {string} - Normalized URL 53 | */ 54 | static normalizeUrl(url) { 55 | if (!url) return ''; 56 | // Remove protocol, query params, and normalize to lowercase 57 | 
return url.replace(/^https?:\/\//, '') 58 | .split('?')[0] 59 | .toLowerCase(); 60 | } 61 | 62 | /** 63 | * Extract product ID from a variant ID 64 | * @param {string} variantId - Variant ID 65 | * @param {object} client - Shopify client 66 | * @returns {Promise} - Product ID or null if not found 67 | */ 68 | static async getProductIdFromVariantId(variantId, client) { 69 | if (!this.isValidID(variantId)) return null; 70 | 71 | try { 72 | const getVariantQuery = `#graphql 73 | query getVariantProduct($variantId: ID!) { 74 | productVariant(id: $variantId) { 75 | product { 76 | id 77 | } 78 | } 79 | } 80 | `; 81 | 82 | const response = await client.graphql(getVariantQuery, { variantId }, 'getVariantProduct'); 83 | return response.productVariant.product.id; 84 | } catch (error) { 85 | console.error(`Could not determine product ID for variant: ${error.message}`); 86 | return null; 87 | } 88 | } 89 | 90 | /** 91 | * Convert a ProductImage ID to a MediaImage ID 92 | * @param {string} imageId - The ProductImage ID 93 | * @param {string} productId - The product ID 94 | * @param {object} client - Shopify client 95 | * @returns {Promise} - MediaImage ID or null if not found 96 | */ 97 | static async convertProductImageToMediaImage(imageId, productId, client) { 98 | if (!this.isValidID(imageId) || !this.isValidID(productId)) return null; 99 | 100 | try { 101 | const getProductMediaQuery = `#graphql 102 | query getProductMedia($productId: ID!) { 103 | product(id: $productId) { 104 | media(first: 50) { 105 | edges { 106 | node { 107 | id 108 | ... 
on MediaImage { 109 | image { 110 | id 111 | originalSrc 112 | } 113 | } 114 | } 115 | } 116 | } 117 | } 118 | } 119 | `; 120 | 121 | const response = await client.graphql(getProductMediaQuery, { productId }, 'getProductMedia'); 122 | const mediaItems = response.product.media.edges.map(edge => edge.node); 123 | 124 | // Look for a media item whose image.id matches our ProductImage ID 125 | let foundMediaId = null; 126 | for (const media of mediaItems) { 127 | if (media.image && media.image.id === imageId) { 128 | foundMediaId = media.id; 129 | break; 130 | } 131 | } 132 | 133 | if (foundMediaId) { 134 | return foundMediaId; 135 | } 136 | 137 | // If we can't find a direct match, get the product's images and try to match by URL 138 | const getProductImagesQuery = `#graphql 139 | query getProductImages($productId: ID!) { 140 | product(id: $productId) { 141 | images(first: 50) { 142 | edges { 143 | node { 144 | id 145 | src 146 | } 147 | } 148 | } 149 | } 150 | } 151 | `; 152 | 153 | const imagesResponse = await client.graphql(getProductImagesQuery, { productId }, 'getProductImages'); 154 | const images = imagesResponse.product.images.edges.map(edge => edge.node); 155 | 156 | // Find the image with the matching ID 157 | const matchingImage = images.find(img => img.id === imageId); 158 | 159 | if (matchingImage) { 160 | // Now look for a media item with a matching image URL 161 | for (const media of mediaItems) { 162 | if (media.image && 163 | media.image.originalSrc && 164 | matchingImage.src && 165 | this.normalizeUrl(media.image.originalSrc) === this.normalizeUrl(matchingImage.src)) { 166 | foundMediaId = media.id; 167 | break; 168 | } 169 | } 170 | 171 | if (foundMediaId) { 172 | return foundMediaId; 173 | } 174 | } 175 | 176 | return null; 177 | } catch (error) { 178 | console.error(`Error converting ProductImage to MediaImage: ${error.message}`); 179 | return null; 180 | } 181 | } 182 | } 183 | 184 | module.exports = ShopifyIDUtils; 185 | 
-------------------------------------------------------------------------------- /utils/StrategyLoader.js: -------------------------------------------------------------------------------- 1 | const logger = require("./logger"); 2 | // Import strategy classes 3 | const MetaobjectSyncStrategy = require('../strategies/MetaobjectSyncStrategy'); 4 | const ProductMetafieldSyncStrategy = require('../strategies/ProductMetafieldSyncStrategy'); 5 | const CompanyMetafieldSyncStrategy = require('../strategies/CompanyMetafieldSyncStrategy'); 6 | const OrderMetafieldSyncStrategy = require('../strategies/OrderMetafieldSyncStrategy'); 7 | const VariantMetafieldSyncStrategy = require('../strategies/VariantMetafieldSyncStrategy'); 8 | const CustomerMetafieldSyncStrategy = require('../strategies/CustomerMetafieldSyncStrategy'); 9 | const CollectionMetafieldSyncStrategy = require('../strategies/CollectionMetafieldSyncStrategy'); 10 | const PageSyncStrategy = require('../strategies/PageSyncStrategy'); 11 | const ProductSyncStrategy = require('../strategies/ProductSyncStrategy'); 12 | const CollectionSyncStrategy = require('../strategies/CollectionSyncStrategy'); 13 | const AllResourcesSyncStrategy = require('../strategies/AllResourcesSyncStrategy'); 14 | const EverythingSyncStrategy = require('../strategies/EverythingSyncStrategy'); 15 | 16 | // Definition strategies mapping 17 | const definitionStrategies = { 18 | products: ProductMetafieldSyncStrategy, 19 | companies: CompanyMetafieldSyncStrategy, 20 | orders: OrderMetafieldSyncStrategy, 21 | variants: VariantMetafieldSyncStrategy, 22 | customers: CustomerMetafieldSyncStrategy, 23 | collections: CollectionMetafieldSyncStrategy, 24 | metaobjects: MetaobjectSyncStrategy, 25 | 26 | }; 27 | 28 | // Data strategies mapping 29 | const dataStrategies = { 30 | products: ProductSyncStrategy, 31 | pages: PageSyncStrategy, 32 | collections: CollectionSyncStrategy, 33 | metaobjects: MetaobjectSyncStrategy, 34 | all: AllResourcesSyncStrategy 35 | 
// Add other data strategies as they're implemented 36 | }; 37 | 38 | /** 39 | * Get the appropriate definition strategy for a resource 40 | * @param {string} resource - The resource type 41 | * @returns {Object|null} - The strategy class or null if not found 42 | */ 43 | function getDefinitionStrategyForResource(resource) { 44 | return definitionStrategies[resource] || null; 45 | } 46 | 47 | /** 48 | * Get the appropriate data strategy for a resource 49 | * @param {string} resource - The resource type 50 | * @returns {Object|null} - The strategy class or null if not found 51 | */ 52 | function getDataStrategyForResource(resource) { 53 | return dataStrategies[resource] || null; 54 | } 55 | 56 | /** 57 | * Get the strategy for the "everything" command 58 | * @returns {Object} - The EverythingSyncStrategy class 59 | */ 60 | function getEverythingStrategy() { 61 | return EverythingSyncStrategy; 62 | } 63 | 64 | module.exports = { 65 | getDefinitionStrategyForResource, 66 | getDataStrategyForResource, 67 | getEverythingStrategy 68 | }; 69 | -------------------------------------------------------------------------------- /utils/SyncResultTracker.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Sync Result Tracker 3 | * 4 | * Handles tracking and aggregating results from sync operations. 5 | * Provides methods to track success/failure counts and generate summary reports. 
6 | */ 7 | const logger = require('./logger'); 8 | 9 | class SyncResultTracker { 10 | constructor() { 11 | this.results = { 12 | created: 0, 13 | updated: 0, 14 | skipped: 0, 15 | failed: 0, 16 | deleted: 0, 17 | metafields: { 18 | processed: 0, 19 | transformed: 0, 20 | blanked: 0, 21 | errors: 0, 22 | warnings: 0, 23 | unsupportedTypes: [] 24 | } 25 | }; 26 | } 27 | 28 | /** 29 | * Track a successful product creation 30 | * @param {Object} createResult - The result object from product creation 31 | */ 32 | trackCreation(createResult) { 33 | this.results.created++; 34 | this.mergeMetafieldStats(createResult); 35 | return this; 36 | } 37 | 38 | /** 39 | * Track a successful product update 40 | * @param {Object} updateResult - The result object from product update 41 | */ 42 | trackUpdate(updateResult) { 43 | this.results.updated++; 44 | this.mergeMetafieldStats(updateResult); 45 | return this; 46 | } 47 | 48 | /** 49 | * Track a successful product deletion 50 | */ 51 | trackDeletion() { 52 | this.results.deleted++; 53 | return this; 54 | } 55 | 56 | /** 57 | * Track a failed operation 58 | */ 59 | trackFailure() { 60 | this.results.failed++; 61 | return this; 62 | } 63 | 64 | /** 65 | * Track a skipped operation 66 | */ 67 | trackSkipped() { 68 | this.results.skipped++; 69 | return this; 70 | } 71 | 72 | /** 73 | * Merge metafield stats from an operation result 74 | * @param {Object} result - The operation result containing metafield stats 75 | */ 76 | mergeMetafieldStats(result) { 77 | if (!result || !result.results || !result.results.metafields) { 78 | return; 79 | } 80 | 81 | const stats = result.results.metafields; 82 | 83 | this.results.metafields.processed += stats.processed || 0; 84 | this.results.metafields.transformed += stats.transformed || 0; 85 | this.results.metafields.blanked += stats.blanked || 0; 86 | this.results.metafields.errors += stats.errors || 0; 87 | this.results.metafields.warnings += stats.warnings || 0; 88 | 89 | // Merge unsupported 
types arrays, avoiding duplicates 90 | if (stats.unsupportedTypes && stats.unsupportedTypes.length > 0) { 91 | stats.unsupportedTypes.forEach(type => { 92 | if (!this.results.metafields.unsupportedTypes.includes(type)) { 93 | this.results.metafields.unsupportedTypes.push(type); 94 | } 95 | }); 96 | } 97 | } 98 | 99 | /** 100 | * Get current results 101 | * @returns {Object} - Current result counts 102 | */ 103 | getResults() { 104 | return this.results; 105 | } 106 | 107 | /** 108 | * Log a summary of results 109 | */ 110 | logSummary() { 111 | console.log(''); // Add a newline before summary 112 | logger.success( 113 | `Finished syncing products. Results: ${this.results.created} created, ${this.results.updated} updated, ` + 114 | `${this.results.deleted} force deleted, ${this.results.failed} failed`, 115 | 0 116 | ); 117 | 118 | // Add metafield stats to the summary if any were processed 119 | if (this.results.metafields.processed > 0) { 120 | logger.info( 121 | `Metafield stats: ${this.results.metafields.processed} processed, ` + 122 | `${this.results.metafields.transformed} transformed, ` + 123 | `${this.results.metafields.blanked} blanked due to errors, ` + 124 | `${this.results.metafields.warnings} warnings`, 125 | 0 126 | ); 127 | } 128 | } 129 | 130 | /** 131 | * Get a formatted result object suitable for returning from strategy 132 | * @returns {Object} - Formatted result object 133 | */ 134 | formatForStrategyResult() { 135 | return { 136 | // For products, we should report them as data results, not definition results 137 | definitionResults: null, 138 | dataResults: this.results, 139 | metafieldResults: { 140 | processed: this.results.metafields.processed, 141 | transformed: this.results.metafields.transformed, 142 | blanked: this.results.metafields.blanked, 143 | errors: this.results.metafields.errors, 144 | warnings: this.results.metafields.warnings, 145 | unsupportedTypes: this.results.metafields.unsupportedTypes 146 | } 147 | }; 148 | } 149 | } 150 | 
module.exports = SyncResultTracker;
--------------------------------------------------------------------------------
/utils/Validators.js:
const logger = require("./logger");
/**
 * This module provides validation utilities for the MetaSync application.
 * The isProductionShop function has been removed as it's been replaced by
 * the "protected" flag in shop configuration.
 */

module.exports = {
  // Add new validator functions here as needed
};
--------------------------------------------------------------------------------
/utils/collection/CollectionFetchHandler.js:
const logger = require('../logger');
const {
  CollectionFetchAll,
  CollectionFetchByHandle
} = require('../../graphql');

// Fetches collections from the source/target shops (all, by handle, or by type)
// and indexes target collections by normalized handle for matching.
class CollectionFetchHandler {
  constructor(sourceClient, targetClient, options) {
    this.sourceClient = sourceClient;
    this.targetClient = targetClient;
    this.options = options;
  }

  // --- Collection Fetch Methods ---

  // Page through all collections (100 per request), following cursors until the
  // last page, optionally truncating the accumulated list to `limit`.
  async fetchCollections(client, limit = null) {
    let collections = [];
    let hasNextPage = true;
    let cursor = null;
    // NOTE(review): totalFetched is accumulated but never read — candidate for removal.
    let totalFetched = 0;

    logger.info(`Fetching collections, please wait...`);

    while (hasNextPage) {
      const response = await client.graphql(
        CollectionFetchAll,
        { first: 100, after: cursor },
        'GetCollections'
      );

      const edges = response.collections.edges;
      collections = collections.concat(edges.map(edge => edge.node));
      totalFetched += edges.length;

      hasNextPage = response.collections.pageInfo.hasNextPage;
      cursor = response.collections.pageInfo.endCursor;

      // Break if we've reached the provided limit
      if (limit && collections.length >= limit) {
        collections = collections.slice(0, limit);
        break;
      }
    }

    return collections;
  }

  // Classify a collection as 'smart' or 'custom' by probing the search API for
  // it among smart collections.
  // NOTE(review): the search string embeds a literal `query:` prefix inside the
  // value passed as the `query` variable — confirm this is the intended Shopify
  // search syntax and not a double-wrapped query.
  async getCollectionType(client, collectionId) {
    const smartQuery = `query: "id:${collectionId} AND collection_type:smart"`;
    const smartResponse = await client.graphql(
      CollectionFetchAll,
      { first: 1, query: smartQuery },
      'GetSmartCollection'
    );

    return smartResponse.collections.edges.length > 0 ? 'smart' : 'custom';
  }

  // Look up a single collection by handle (handle is normalized to trimmed lowercase).
  async getCollectionByHandle(client, handle) {
    const normalizedHandle = handle.trim().toLowerCase();
    const response = await client.graphql(
      CollectionFetchByHandle,
      { handle: normalizedHandle },
      'GetCollectionByHandle'
    );
    return response.collectionByHandle;
  }

  // Resolve the set of source collections to sync, honoring (in priority order)
  // options.handle, options.type ('manual'/'smart'), and options.limit (default 250).
  async fetchSourceCollections() {
    // Check if a specific handle is provided
    if (this.options.handle) {
      const handle = this.options.handle.trim().toLowerCase();
      logger.info(`Fetching collection with handle "${handle}" from source shop`);

      const collection = await this.getCollectionByHandle(this.sourceClient, handle);
      if (collection) {
        logger.info(`Found collection: ${collection.title}`);
        return [collection];
      } else {
        logger.warn(`No collection found with handle "${handle}" in source shop`);
        return [];
      }
    }

    const limit = this.options.limit || 250;

    // If type option is provided, filter by collection type
    if (this.options.type) {
      const type = this.options.type.toLowerCase();
      if (type === 'manual' || type === 'custom') {
        // "manual" in our CLI maps to "custom" in Shopify's API
        return this.fetchCollectionsByType('custom', limit);
      } else if (type === 'smart') {
        return this.fetchCollectionsByType('smart', limit);
      } else {
        // Unknown type: warn and fall through to the unfiltered fetch below
        logger.warn(`Invalid collection type "${this.options.type}". Valid types are 'manual' or 'smart'.`);
      }
    }

    const collections = await this.fetchCollections(this.sourceClient, limit);
    logger.info(`Found ${collections.length} collection(s) in source shop`);
    return collections;
  }

  // Fetch up to `limit` source collections of one type ('custom' or 'smart') in
  // a single request, using the collection_type search filter.
  async fetchCollectionsByType(collectionType, limit) {
    const typeQuery = `collection_type:${collectionType}`;
    const response = await this.sourceClient.graphql(
      CollectionFetchAll,
      { first: limit, query: typeQuery },
      `Get${collectionType.charAt(0).toUpperCase() + collectionType.slice(1)}Collections`
    );

    const collections = response.collections.edges.map(edge => edge.node);
    logger.info(`Filtered to ${collections.length} ${collectionType} collection(s)`);
    return collections;
  }

  // Index target collections by normalized (trimmed, lowercase) handle.
  // Collections without a handle are omitted.
  buildTargetCollectionMap(targetCollections) {
    const targetCollectionMap = {};

    for (const collection of targetCollections) {
      if (collection.handle) {
        const normalizedHandle = collection.handle.trim().toLowerCase();
        targetCollectionMap[normalizedHandle] = collection;
      }
    }

    return targetCollectionMap;
  }
}

module.exports = CollectionFetchHandler;
--------------------------------------------------------------------------------
/utils/collection/CollectionMetafieldHandler.js:
const logger = require('../logger');
const FetchMetafieldDefinitionsQuery = require('../../graphql/MetafieldDefinitionsFetch.graphql.js');

// Validates collection metafield references against the target shop and maps
// source metafields to target metafield definition IDs.
class CollectionMetafieldHandler {
  constructor(sourceClient, targetClient, options) {
    this.sourceClient = sourceClient;
    this.targetClient = targetClient;
    this.options = options;
    this.targetMetafieldDefinitions = {};
  }

  // Fetch the target shop's metafield definitions for one owner type; returns
  // [] on error (logged) so callers can proceed without definitions.
  async fetchTargetMetafieldDefinitions(ownerType = "COLLECTION") {
    try {
      const response = await
this.targetClient.graphql(
        FetchMetafieldDefinitionsQuery,
        { ownerType },
        'FetchMetafieldDefinitions'
      );

      const definitions = response.metafieldDefinitions.nodes;
      logger.info(`Found ${definitions.length} metafield definitions for ${ownerType} in target shop`);

      return definitions;
    } catch (error) {
      logger.error(`Error fetching metafield definitions: ${error.message}`);
      return [];
    }
  }

  // Pull and cache definitions for every owner type we may encounter, keyed by
  // owner type; the cache is also returned and exposed via getTargetMetafieldDefinitions().
  async fetchAllTargetMetafieldDefinitions() {
    const metafieldDefinitions = {};

    // Common owner types that could be used in collections or metafield rules
    const allOwnerTypes = [
      "PRODUCT",
      "COLLECTION",
      "CUSTOMER",
      "ORDER",
      "PRODUCTVARIANT",
      "COMPANY",
      "COMPANY_LOCATION",
      "SHOP"
    ];

    logger.startSection('Pulling metafield definitions from target shop');

    for (const ownerType of allOwnerTypes) {
      metafieldDefinitions[ownerType] = await this.fetchTargetMetafieldDefinitions(ownerType);
    }

    logger.endSection();

    this.targetMetafieldDefinitions = metafieldDefinitions;
    return metafieldDefinitions;
  }

  // Hard-fail check: returns { valid: false, error } when any metafield value
  // references a metaobject that does not exist in the target store (the source
  // store is queried only to enrich the error message).
  async validateMetaobjectReferences(collection) {
    if (!collection.metafields || !collection.metafields.edges || collection.metafields.edges.length === 0) {
      return { valid: true };
    }

    for (const edge of collection.metafields.edges) {
      const node = edge.node;
      // Check if the metafield value is a metaobject reference
      if (node.value && node.value.includes('gid://shopify/Metaobject/')) {
        const metaobjectId = node.value;

        try {
          // Query the target store to check if this metaobject exists
          const response = await this.targetClient.graphql(`
            query CheckMetaobject($id: ID!) {
              node(id: $id) {
                id
                ... on Metaobject {
                  handle
                  type
                }
              }
            }
          `, { id: metaobjectId });

          if (!response.node) {
            // Get the metaobject details from source store for better error message
            const sourceResponse = await this.sourceClient.graphql(`
              query GetMetaobjectDetails($id: ID!) {
                node(id: $id) {
                  id
                  ... on Metaobject {
                    handle
                    type
                  }
                }
              }
            `, { id: metaobjectId });

            const metaobjectInfo = sourceResponse.node
              ? `type: ${sourceResponse.node.type}, handle: ${sourceResponse.node.handle}`
              : `ID: ${metaobjectId}`;

            return {
              valid: false,
              error: `Collection "${collection.title}" references a metaobject (${metaobjectInfo}) that doesn't exist in the target store. Sync the metaobjects first.`
            };
          }
        } catch (error) {
          logger.error(`Error validating metaobject reference: ${error.message}`);
          return {
            valid: false,
            error: `Failed to validate metaobject reference ${metaobjectId}: ${error.message}`
          };
        }
      }
    }

    return { valid: true };
  }

  // Soft-fail check: metafields referencing collections missing from the target
  // store are dropped (with a warning) rather than aborting; returns the
  // filtered metafields plus the namespace.key list of removed references.
  async validateCollectionReferences(collection) {
    if (!collection.metafields || !collection.metafields.edges || collection.metafields.edges.length === 0) {
      return { valid: true, validMetafields: collection.metafields };
    }

    const validMetafieldEdges = [];
    const invalidCollectionRefs = [];

    for (const edge of collection.metafields.edges) {
      const node = edge.node;
      // Check if the metafield value is a collection reference
      if (node.value && node.value.includes('gid://shopify/Collection/')) {
        const collectionId = node.value;

        try {
          // Query the target store to check if this collection exists
          const response = await this.targetClient.graphql(`
            query CheckCollection($id: ID!) {
              node(id: $id) {
                id
                ... on Collection {
                  title
                }
              }
            }
          `, { id: collectionId });

          if (!response.node) {
            // Get the collection details from source store for better warning message
            let collectionInfo = collectionId;
            try {
              const sourceResponse = await this.sourceClient.graphql(`
                query GetCollectionDetails($id: ID!) {
                  node(id: $id) {
                    id
                    ... on Collection {
                      title
                    }
                  }
                }
              `, { id: collectionId });

              if (sourceResponse.node) {
                collectionInfo = `"${sourceResponse.node.title}" (${collectionId})`;
              }
            } catch (err) {
              // If we can't get source details, just use the ID
            }

            // Log a warning and don't include this metafield
            logger.warn(`Collection "${collection.title}" references collection ${collectionInfo} that doesn't exist in the target store. Removing this metafield reference.`);
            invalidCollectionRefs.push(`${node.namespace}.${node.key}`);
            continue;
          }
        } catch (error) {
          logger.warn(`Error validating collection reference: ${error.message}. Removing this metafield reference.`);
          invalidCollectionRefs.push(`${node.namespace}.${node.key}`);
          continue;
        }
      }

      // Add valid metafield to the list
      validMetafieldEdges.push(edge);
    }

    // Create a new metafields object with only valid references
    const validMetafields = {
      ...collection.metafields,
      edges: validMetafieldEdges
    };

    return {
      valid: true,
      validMetafields,
      invalidCollectionRefs
    };
  }

  // Build a "namespace.key" -> target definition ID map for the collection's
  // metafields. Throws when a metaobject reference is missing in the target
  // store (see validateMetaobjectReferences); returns null when there is
  // nothing to map.
  async lookupMetafieldDefinitionIds(collection) {
    if (!collection.metafields || !collection.metafields.edges || collection.metafields.edges.length === 0) {
      return null;
    }

    // Validate any metaobject references first
    const metaobjectValidationResult = await this.validateMetaobjectReferences(collection);
    if (!metaobjectValidationResult.valid) {
      throw new Error(metaobjectValidationResult.error);
    }

    // Validate and filter out invalid collection references
    const collectionValidationResult = await this.validateCollectionReferences(collection);
    const validatedCollection = {
      ...collection,
      metafields: collectionValidationResult.validMetafields
    };

    // If all metafields were filtered out, return null
    if (validatedCollection.metafields.edges.length === 0) {
      return null;
    }

    // Always use COLLECTION owner type
    const ownerType = "COLLECTION";
    const targetDefinitions = await this.fetchTargetMetafieldDefinitions(ownerType);

    if (targetDefinitions.length === 0) {
      logger.warn(`No metafield definitions found for collections in target shop`);
      return null;
    }

    const metafieldLookup = {};

    for (const edge of validatedCollection.metafields.edges) {
      const node = edge.node;
      const key = `${node.namespace}.${node.key}`;

      const matchingDefinition = targetDefinitions.find(def =>
        def.namespace === node.namespace && def.key === node.key
      );

      if (matchingDefinition) {
        metafieldLookup[key] = matchingDefinition.id;
        logger.info(`Found definition ID for ${key}: ${matchingDefinition.id}`);
      } else {
        logger.info(`No definition found for ${key} in target shop`);
      }
    }

    return metafieldLookup;
  }

  // This method will be called directly before preparing metafields for API.
  // Drops every collection-reference metafield from a flat metafield array
  // (unlike validateCollectionReferences, no target-store lookup is performed).
  filterCollectionMetafields(metafields) {
    if (!metafields || !Array.isArray(metafields) || metafields.length === 0) {
      return [];
    }

    const filteredMetafields = [];

    for (const metafield of metafields) {
      // Filter out collection references
      if (metafield.value && typeof metafield.value === 'string' &&
        metafield.value.includes('gid://shopify/Collection/')) {
        logger.warn(`Filtering out collection reference in metafield ${metafield.namespace}.${metafield.key}: ${metafield.value}`);
        continue;
      }

      // Add valid metafield to the filtered list
      filteredMetafields.push(metafield);
    }

    if (filteredMetafields.length < metafields.length) {
      logger.info(`Removed ${metafields.length - filteredMetafields.length} collection references from metafields`);
    }

    return filteredMetafields;
  }

  // Accessor for the per-owner-type cache built by fetchAllTargetMetafieldDefinitions().
  getTargetMetafieldDefinitions() {
    return this.targetMetafieldDefinitions;
  }
}

module.exports = CollectionMetafieldHandler;
--------------------------------------------------------------------------------
/utils/collection/CollectionPublicationHandler.js:
const logger = require('../logger');

class CollectionPublicationHandler {
  constructor(sourceClient, targetClient, options) {
    this.sourceClient = sourceClient;
this.targetClient = targetClient; 7 | this.options = options; 8 | this.targetChannels = null; 9 | this.targetPublications = null; 10 | } 11 | 12 | async fetchTargetPublicationData() { 13 | logger.startSection('Fetching target store publication data'); 14 | 15 | const getPublicationsQuery = `#graphql 16 | query GetPublicationsAndChannels { 17 | publications(first: 25) { 18 | edges { 19 | node { 20 | id 21 | name 22 | app { 23 | id 24 | } 25 | } 26 | } 27 | } 28 | channels(first: 25) { 29 | edges { 30 | node { 31 | id 32 | name 33 | handle 34 | } 35 | } 36 | } 37 | } 38 | `; 39 | 40 | try { 41 | const response = await this.targetClient.graphql(getPublicationsQuery, {}, 'GetPublicationsAndChannels'); 42 | this.targetChannels = response.channels.edges.map(edge => edge.node); 43 | this.targetPublications = response.publications.edges.map(edge => edge.node); 44 | 45 | logger.info(`Found ${this.targetChannels.length} channels and ${this.targetPublications.length} publications in target store`); 46 | 47 | if (this.options.debug && this.targetChannels.length > 0) { 48 | logger.debug(`Available channels: ${this.targetChannels.map(c => c.handle).join(', ')}`); 49 | } 50 | } catch (error) { 51 | logger.error(`Error fetching publication data: ${error.message}`); 52 | this.targetChannels = []; 53 | this.targetPublications = []; 54 | } 55 | 56 | logger.endSection(); 57 | } 58 | 59 | async syncCollectionPublications(collectionId, sourcePublications) { 60 | if (!sourcePublications || !this.targetChannels || !this.targetPublications) { 61 | return true; // Skip if no publication data available 62 | } 63 | 64 | // Extract publication data from collection 65 | let publicationsArray = []; 66 | if (sourcePublications.edges && Array.isArray(sourcePublications.edges)) { 67 | publicationsArray = sourcePublications.edges.map(edge => edge.node); 68 | } 69 | 70 | if (publicationsArray.length === 0) { 71 | logger.info(`No publication channels to sync for this collection`); 72 | return true; 73 
| } 74 | 75 | // Filter out invalid publications (those without a valid channel.handle) 76 | const validPublications = publicationsArray.filter(pub => 77 | pub && pub.channel && typeof pub.channel.handle === 'string' && pub.channel.handle.length > 0 78 | ); 79 | 80 | if (validPublications.length === 0) { 81 | logger.info(`No valid publication channels found after filtering`); 82 | return true; 83 | } 84 | 85 | logger.startSection(`Syncing collection publication to ${validPublications.length} channels`); 86 | 87 | // Get current publications for this collection 88 | const getCollectionPublicationsQuery = `#graphql 89 | query GetCollectionPublications($collectionId: ID!) { 90 | collection(id: $collectionId) { 91 | publications(first: 25) { 92 | edges { 93 | node { 94 | channel { 95 | id 96 | handle 97 | } 98 | isPublished 99 | } 100 | } 101 | } 102 | } 103 | } 104 | `; 105 | 106 | let currentPublications = []; 107 | try { 108 | const response = await this.targetClient.graphql( 109 | getCollectionPublicationsQuery, 110 | { collectionId }, 111 | 'GetCollectionPublications' 112 | ); 113 | 114 | if (response.collection && response.collection.publications) { 115 | currentPublications = response.collection.publications.edges.map(edge => edge.node); 116 | } 117 | 118 | if (this.options.debug) { 119 | logger.debug(`Collection is currently published to ${currentPublications.length} channels`); 120 | } 121 | } catch (error) { 122 | logger.warn(`Unable to fetch current publications: ${error.message}`); 123 | // Continue anyway since we can still try to publish 124 | } 125 | 126 | // Match source publications to target channels by handle 127 | const publicationsToCreate = []; 128 | const skippedChannels = []; 129 | const addedPublicationIds = new Set(); 130 | 131 | // For each source publication 132 | for (const sourcePublication of validPublications) { 133 | // Only process publications that are actually published 134 | if (!sourcePublication.isPublished) continue; 135 | 136 | 
const sourceChannelHandle = sourcePublication.channel.handle; 137 | 138 | // Find matching target channel 139 | const targetChannel = this.targetChannels.find(channel => channel.handle === sourceChannelHandle); 140 | if (targetChannel) { 141 | // Find the publication associated with this channel - use first publication as default 142 | const targetPublication = this.targetPublications.length > 0 ? this.targetPublications[0] : null; 143 | 144 | if (targetPublication) { 145 | // Check if collection is already published to this channel 146 | const alreadyPublished = currentPublications.some(pub => 147 | pub.channel.handle === sourceChannelHandle && pub.isPublished 148 | ); 149 | 150 | // Check if we've already added this publication ID 151 | if (!alreadyPublished && !addedPublicationIds.has(targetPublication.id)) { 152 | publicationsToCreate.push({ 153 | publicationId: targetPublication.id, 154 | channelHandle: sourceChannelHandle 155 | }); 156 | // Mark this publication ID as added to avoid duplicates 157 | addedPublicationIds.add(targetPublication.id); 158 | } else if (this.options.debug) { 159 | if (alreadyPublished) { 160 | logger.debug(`Collection already published to ${sourceChannelHandle}`); 161 | } else { 162 | logger.debug(`Skipping duplicate publication ID for channel ${sourceChannelHandle}`); 163 | } 164 | } 165 | } else { 166 | logger.warn(`Found channel ${sourceChannelHandle} but no associated publication in target store`); 167 | skippedChannels.push(sourceChannelHandle); 168 | } 169 | } else { 170 | skippedChannels.push(sourceChannelHandle); 171 | } 172 | } 173 | 174 | // Log skipped channels 175 | if (skippedChannels.length > 0) { 176 | logger.warn(`Skipping ${skippedChannels.length} channels that don't exist in target store: ${skippedChannels.join(', ')}`); 177 | } 178 | 179 | // If no publications to create, we're done 180 | if (publicationsToCreate.length === 0) { 181 | logger.info(`No new publication channels to add`); 182 | logger.endSection(); 183 
| return true; 184 | } 185 | 186 | // Publish to target channels 187 | const publishMutation = `#graphql 188 | mutation publishablePublish($id: ID!, $input: [PublicationInput!]!) { 189 | publishablePublish(id: $id, input: $input) { 190 | userErrors { 191 | field 192 | message 193 | } 194 | } 195 | } 196 | `; 197 | 198 | if (this.options.notADrill) { 199 | try { 200 | logger.info(`Publishing collection to ${publicationsToCreate.length} channels`); 201 | 202 | const input = publicationsToCreate.map(pub => ({ 203 | publicationId: pub.publicationId, 204 | publishDate: new Date().toISOString() 205 | })); 206 | 207 | const result = await this.targetClient.graphql(publishMutation, { 208 | id: collectionId, 209 | input 210 | }, 'PublishablePublish'); 211 | 212 | if (result.publishablePublish.userErrors.length > 0) { 213 | logger.error(`Failed to publish collection:`, result.publishablePublish.userErrors); 214 | logger.endSection(); 215 | return false; 216 | } 217 | 218 | logger.success(`Successfully published collection to ${publicationsToCreate.length} channels`); 219 | logger.endSection(); 220 | return true; 221 | } catch (error) { 222 | logger.error(`Error publishing collection: ${error.message}`); 223 | logger.endSection(); 224 | return false; 225 | } 226 | } else { 227 | logger.info(`[DRY RUN] Would publish collection to ${publicationsToCreate.length} channels: ${publicationsToCreate.map(p => p.channelHandle).join(', ')}`); 228 | logger.endSection(); 229 | return true; 230 | } 231 | } 232 | } 233 | 234 | module.exports = CollectionPublicationHandler; 235 | --------------------------------------------------------------------------------