├── setup.cfg
├── tap_stripe
│   └── schemas
│       ├── charges.json
│       ├── plans.json
│       ├── payout_transactions.json
│       ├── events.json
│       ├── shared
│       │   ├── tax_ids.json
│       │   ├── discount.json
│       │   └── plan.json
│       ├── coupons.json
│       ├── balance_transactions.json
│       ├── transfers.json
│       ├── products.json
│       ├── disputes.json
│       ├── invoice_items.json
│       ├── payouts.json
│       └── subscription_items.json
├── spikes
│   ├── prior-art
│   │   ├── tap_stripe
│   │   │   ├── schemas
│   │   │   │   ├── plans.json
│   │   │   │   ├── subscriptions.json
│   │   │   │   ├── shared
│   │   │   │   │   ├── discount.json
│   │   │   │   │   ├── subscription_item.json
│   │   │   │   │   ├── coupon.json
│   │   │   │   │   ├── source.json
│   │   │   │   │   ├── subscription.json
│   │   │   │   │   └── plan.json
│   │   │   │   ├── customers.json
│   │   │   │   ├── charges.json
│   │   │   │   └── invoices.json
│   │   │   └── __init__.py
│   │   ├── setup.cfg
│   │   ├── README.md
│   │   ├── publish.sh
│   │   ├── sample_config.json
│   │   ├── MANIFEST.in
│   │   ├── setup.py
│   │   └── .gitignore
│   ├── handle-backoff
│   │   ├── backoff_curl
│   │   └── handle-backoff.py
│   ├── set_api_version
│   │   └── set_api_version.py
│   ├── timeout
│   │   └── timeout.py
│   └── sdk
│       ├── missing_invoice.py
│       └── sdk.py
├── MANIFEST.in
├── sample_config.json
├── .github
│   └── pull_request_template.md
├── notes.org
├── setup.py
├── tests
│   ├── unittests
│   │   ├── test_log_request_id.py
│   │   ├── test_rate_limit_error.py
│   │   ├── test_recursive_to_dict.py
│   │   ├── test_payout_events_object.py
│   │   ├── test_date_window_size.py
│   │   ├── test_logger_for_events.py
│   │   ├── test_auth_in_discovery.py
│   │   ├── test_deleted_invoice_line_item.py
│   │   ├── test_lookback_evaluation.py
│   │   ├── test_request_timeout.py
│   │   ├── test_get_and_write_bookmark.py
│   │   └── test_sync_event_updates.py
│   ├── utils_invoices.py
│   ├── test_parent_child_independent.py
│   ├── test_configurable_lookback_window.py
│   ├── test_automatic_fields.py
│   ├── test_full_replication.py
│   ├── readme.md
│   ├── test_automatic_payout_transactions.py
│   ├── test_create_object.py
│   ├── test_pagination.py
│   ├── test_start_date.py
│   └── test_discovery.py
├── .gitignore
├── README.md
├── .circleci
│   └── config.yml
├── todo.org
└── CHANGELOG.md
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | description-file = README.md
--------------------------------------------------------------------------------
/tap_stripe/schemas/charges.json:
--------------------------------------------------------------------------------
1 | {"$ref": "shared/charge.json#/"}
2 |
--------------------------------------------------------------------------------
/tap_stripe/schemas/plans.json:
--------------------------------------------------------------------------------
1 | {"$ref": "shared/plan.json#/"}
2 |
--------------------------------------------------------------------------------
/spikes/prior-art/tap_stripe/schemas/plans.json:
--------------------------------------------------------------------------------
1 | { "$ref": "plan.json" }
--------------------------------------------------------------------------------
/spikes/prior-art/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | description-file = README.md
--------------------------------------------------------------------------------
/spikes/prior-art/tap_stripe/schemas/subscriptions.json:
--------------------------------------------------------------------------------
1 | { "$ref": "subscription.json" }
--------------------------------------------------------------------------------
/spikes/prior-art/README.md:
--------------------------------------------------------------------------------
1 | # tap-stripe
2 |
3 | This is a [Singer](https://singer.io) tap for Stripe.
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 | include tap_stripe/schemas/*.json
3 | include tap_stripe/schemas/shared/*.json
4 |
--------------------------------------------------------------------------------
/spikes/prior-art/publish.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | rm -Rf dist
4 | ./setup.py sdist bdist_wheel
5 | twine upload dist/*
--------------------------------------------------------------------------------
/spikes/prior-art/sample_config.json:
--------------------------------------------------------------------------------
1 | {
2 |   "access_token": "api_key",
3 |   "start_date": "2017-01-01T00:00:00Z"
4 | }
5 |
--------------------------------------------------------------------------------
/sample_config.json:
--------------------------------------------------------------------------------
1 | {
2 |   "client_secret": "",
3 |   "account_id": ""
4 | }
5 |
--------------------------------------------------------------------------------
/spikes/prior-art/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 | include tap_stripe/schemas/*.json
3 | include tap_stripe/schemas/shared/*.json
4 |
--------------------------------------------------------------------------------
/spikes/handle-backoff/backoff_curl:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Also doesn't immediately trigger a 429…
4 |
5 | while curl -H 'Stitch-Account: '"$1" \
6 |            -s https://api.stripe.com/v1/charges \
7 |            -u "${2}:" >/dev/null
8 | do
9 |     date -u
10 | done
11 |
--------------------------------------------------------------------------------
/tap_stripe/schemas/payout_transactions.json:
--------------------------------------------------------------------------------
1 | {
2 |   "properties": {
3 |     "payout_id": {
4 |       "type": [
5 |         "null",
6 |         "string"
7 |       ]
8 |     },
9 |     "id": {
10 |       "type": [
11 |         "null",
12 |         "string"
13 |       ]
14 |     }
15 |   },
16 |   "type": [
17 |     "null",
18 |     "object"
19 |   ]
20 | }
21 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | # Description of change
2 | (write a short description or paste a link to JIRA)
3 |
4 | # Manual QA steps
5 | -
6 |
7 | # Risks
8 | -
9 |
10 | # Rollback steps
11 | - revert this branch
12 |
13 | #### AI generated code
14 | https://internal.qlik.dev/general/ways-of-working/code-reviews/#guidelines-for-ai-generated-code
15 | - [ ] this PR has been written with the help of GitHub Copilot or another generative AI tool
16 |
--------------------------------------------------------------------------------
/notes.org:
--------------------------------------------------------------------------------
1 | * SDK
2 |
3 | The SDK appears to be useful.
4 |
5 | We'll need to change what we're requesting from the OAuth dance to the
6 | =account_id=, assuming that's available.
7 |
8 | See the [[file:spikes/sdk/][SDK Spike]].
9 |
10 | We are assuming that the =account_id= is not a secret and so should be
11 | logged/kept in the clear.
12 |
13 | Notably the SDK has a pagination iterator. We'll just need to figure out
14 | the bookmarking strategy for it. Default to writing a bookmark for every
15 | record?
16 |
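A minimal sketch of the bookmark-per-record option raised in the notes, assuming the `stripe` and `singer-python` libraries; the function name and state handling here are illustrative, not tap internals:

```python
import singer
import stripe

def sync_charges(config, state):
    """Hypothetical sync loop that writes a bookmark after every record."""
    stripe.api_key = config["client_secret"]
    # auto_paging_iter() follows Stripe's cursor-based pagination transparently.
    for charge in stripe.Charge.list(limit=100).auto_paging_iter():
        record = charge.to_dict_recursive()
        singer.write_record("charges", record)
        # Bookmark-per-record means a state flush on every iteration; a coarser
        # strategy would emit state less often.
        state = singer.write_bookmark(state, "charges", "created", record["created"])
        singer.write_state(state)
    return state
```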
--------------------------------------------------------------------------------
/spikes/set_api_version/set_api_version.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import stripe
4 | import sys
5 |
6 | account_id = sys.argv[1]
7 | stripe.api_key = sys.argv[2]
8 |
9 | account = stripe.Account.retrieve(account_id)
10 |
11 | import ipdb; ipdb.set_trace()
12 | 1+1
13 |
14 | # ipdb> stripe.Charge.list(stripe_account=account_id).last_response.headers['Stripe-Version']
15 | # '2014-11-20'
16 |
17 | # ipdb> stripe.api_version = '2018-09-24'
18 | # ipdb> stripe.Charge.list(stripe_account=account_id).last_response.headers['Stripe-Version']
19 | # '2018-09-24'
20 |
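The ipdb transcript in the comments condenses to a plain script; a sketch, assuming the same stripe-python behavior the spike observed:

```python
import sys
import stripe

account_id = sys.argv[1]
stripe.api_key = sys.argv[2]

# Un-pinned, a connected account may report an old default version such as
# '2014-11-20'; setting stripe.api_version pins all subsequent requests.
stripe.api_version = '2018-09-24'
charges = stripe.Charge.list(stripe_account=account_id)
# The response headers echo the version actually used.
print(charges.last_response.headers['Stripe-Version'])  # expect '2018-09-24'
```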
--------------------------------------------------------------------------------
/spikes/timeout/timeout.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import stripe
4 | import sys
5 |
6 | stripe.api_key = sys.argv[1]
7 | # Can't use this until my [PR]() is merged
8 | # stripe.max_network_retries = 5
9 | client = stripe.http_client.RequestsClient(timeout=0.1)
10 | # Use this until my [PR]() is merged
11 | client._max_network_retries = lambda: 5
12 | stripe.default_http_client = client
13 | stripe.log = 'info'
14 |
15 | while True:
16 |     try:
17 |         charges = stripe.Charge.list()
18 |     except stripe.error.APIConnectionError as e:
19 |         print(e.user_message)
20 |         break
21 |
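Once the retries setting referenced in the comments is public, the monkey-patched `_max_network_retries` goes away and the spike reduces to module-level configuration; a sketch, assuming that API and using a placeholder key:

```python
import stripe

stripe.api_key = 'sk_test_xxx'  # placeholder key
stripe.max_network_retries = 5  # the public setting the PR above adds
stripe.default_http_client = stripe.http_client.RequestsClient(timeout=0.1)

try:
    stripe.Charge.list()
except stripe.error.APIConnectionError as e:
    # A 100 ms timeout usually exhausts the retries and lands here.
    print(e.user_message)
```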
"type": [ 56 | "null", 57 | "string" 58 | ] 59 | }, 60 | "updated": { 61 | "type": [ 62 | "null", 63 | "string" 64 | ], 65 | "format": "date-time" 66 | } 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /tests/unittests/test_log_request_id.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from unittest import mock 3 | from tap_stripe import new_request 4 | 5 | class MockRequest(): 6 | '''Mock Request object''' 7 | def __init__(self, response): 8 | self.last_response = response 9 | 10 | def request_raw(self, method, url, params=None, supplied_headers=None, is_streaming=False): 11 | return {}, {}, {}, {} 12 | 13 | def interpret_response(self, rbody, rcode, rheaders): 14 | return get_request_id() 15 | 16 | class MockResponse(): 17 | '''Mock response object which contains the request_id''' 18 | def __init__(self, request_id): 19 | self.request_id = request_id 20 | 21 | 22 | def get_request_id(): 23 | '''Return the MockRequest object which contains request_id''' 24 | response = MockResponse('dummy_request_id') 25 | return response 26 | 27 | class TestDebugLogger(unittest.TestCase): 28 | @mock.patch('tap_stripe.LOGGER.debug') 29 | def test_debug_logger(self, mock_debug): 30 | '''Test that the debug is called with proper request id.''' 31 | mock_request = MockRequest('url') 32 | new_request(mock_request, 'GET', 'dummy_url') 33 | mock_debug.assert_called_with('request id : %s', 'dummy_request_id') 34 | -------------------------------------------------------------------------------- /tests/unittests/test_rate_limit_error.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from unittest import mock 3 | import stripe 4 | from tap_stripe import new_request 5 | 6 | class MockRequest(): 7 | '''Mock Request object''' 8 | def __init__(self, response): 9 | self.last_response = response 10 | 11 | def request_raw(self, method, url, params=None, supplied_headers=None, is_streaming=False): 12 | return {}, {}, {}, {} 13 | 14 | def interpret_response(self, rbody, rcode, rheaders): 15 | raise stripe.error.RateLimitError("Rate Limit Error", 429, {}, {}, {}) 16 | 17 | class TestRateLimitError(unittest.TestCase): 18 | """ 19 | Test that the tap retries each request 7 times on rate limit error. 20 | """ 21 | 22 | @mock.patch("time.sleep") 23 | def test_retry_count_of_429_error(self, mock_sleep): 24 | """ 25 | Test that the tap retries each request 7 times on 429 error. 26 | - Verify that `time.sleep` was called 6 times. (1 count less than no of retry count) 27 | """ 28 | mock_request = MockRequest('url') 29 | with self.assertRaises(stripe.error.RateLimitError) as e: 30 | new_request(mock_request, 'GET', 'dummy_url') 31 | 32 | # Verify that `time.sleep` was called 6 times. 
--------------------------------------------------------------------------------
/spikes/prior-art/tap_stripe/schemas/shared/subscription_item.json:
--------------------------------------------------------------------------------
1 | {
2 |   "type": [
3 |     "null",
4 |     "object"
5 |   ],
6 |   "properties": {
7 |     "id": {
8 |       "type": [
9 |         "null",
10 |         "string"
11 |       ]
12 |     },
13 |     "object": {
14 |       "type": [
15 |         "null",
16 |         "string"
17 |       ]
18 |     },
19 |     "created": {
20 |       "type": [
21 |         "null",
22 |         "string"
23 |       ],
24 |       "format": "date-time"
25 |     },
26 |     "metadata": {
27 |       "type": [
28 |         "null",
29 |         "object"
30 |       ],
31 |       "additionalProperties": true,
32 |       "properties": {}
33 |     },
34 |     "plan": {
35 |       "$ref": "plan.json"
36 |     },
37 |     "quantity": {
38 |       "type": [
39 |         "null",
40 |         "integer"
41 |       ]
42 |     },
43 |     "subscription": {
44 |       "type": [
45 |         "null",
46 |         "string"
47 |       ]
48 |     }
49 |   }
50 | }
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from setuptools import setup
3 |
4 | setup(
5 |     name="tap-stripe",
6 |     version="3.2.0",
7 |     description="Singer.io tap for extracting data",
8 |     author="Stitch",
9 |     url="http://singer.io",
10 |     classifiers=["Programming Language :: Python :: 3 :: Only"],
11 |     py_modules=["tap_stripe"],
12 |     install_requires=[
13 |         "singer-python==6.0.1",
14 |         "stripe==5.5.0",
15 |     ],
16 |     extras_require={
17 |         'test': [
18 |             'pylint==3.0.3',
19 |             'nose2',
20 |             'coverage'
21 |         ],
22 |         'dev': [
23 |             'ipdb',
24 |             'pylint',
25 |             'astroid==2.5.1',
26 |             'nose2',
27 |         ]
28 |     },
29 |     entry_points="""
30 |     [console_scripts]
31 |     tap-stripe=tap_stripe:main
32 |     """,
33 |     packages=["tap_stripe"],
34 |     package_data = {
35 |         "schemas": ["tap_stripe/schemas/*.json"]
36 |     },
37 |     include_package_data=True,
38 | )
39 |
--------------------------------------------------------------------------------
/tap_stripe/schemas/events.json:
--------------------------------------------------------------------------------
1 | {
2 |   "type": "object",
3 |   "properties": {
4 |     "created": {
5 |       "type": [
6 |         "null",
7 |         "string"
8 |       ],
9 |       "format": "date-time"
10 |     },
11 |     "data": {
12 |       "type": [
13 |         "null",
14 |         "object"
15 |       ],
16 |       "properties": {}
17 |     },
18 |     "id": {
19 |       "type": [
20 |         "null",
21 |         "string"
22 |       ]
23 |     },
24 |     "api_version": {
25 |       "type": [
26 |         "null",
27 |         "string"
28 |       ]
29 |     },
30 |     "object": {
31 |       "type": [
32 |         "null",
33 |         "string"
34 |       ]
35 |     },
36 |     "livemode": {
37 |       "type": [
38 |         "null",
39 |         "boolean"
40 |       ]
41 |     },
42 |     "pending_webhooks": {
43 |       "type": [
44 |         "null",
45 |         "integer"
46 |       ]
47 |     },
48 |     "request": {
49 |       "type": [
50 |         "null",
51 |         "string"
52 |       ]
53 |     },
54 |     "type": {
55 |       "type": [
56 |         "null",
57 |         "string"
58 |       ]
59 |     },
60 |     "updated": {
61 |       "type": [
62 |         "null",
63 |         "string"
64 |       ],
65 |       "format": "date-time"
66 |     }
67 |   }
68 | }
69 |
--------------------------------------------------------------------------------
/tests/unittests/test_log_request_id.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from unittest import mock
3 | from tap_stripe import new_request
4 |
5 | class MockRequest():
6 |     '''Mock Request object'''
7 |     def __init__(self, response):
8 |         self.last_response = response
9 |
10 |     def request_raw(self, method, url, params=None, supplied_headers=None, is_streaming=False):
11 |         return {}, {}, {}, {}
12 |
13 |     def interpret_response(self, rbody, rcode, rheaders):
14 |         return get_request_id()
15 |
16 | class MockResponse():
17 |     '''Mock response object which contains the request_id'''
18 |     def __init__(self, request_id):
19 |         self.request_id = request_id
20 |
21 |
22 | def get_request_id():
23 |     '''Return the MockResponse object which contains the request_id'''
24 |     response = MockResponse('dummy_request_id')
25 |     return response
26 |
27 | class TestDebugLogger(unittest.TestCase):
28 |     @mock.patch('tap_stripe.LOGGER.debug')
29 |     def test_debug_logger(self, mock_debug):
30 |         '''Test that the debug logger is called with the proper request id.'''
31 |         mock_request = MockRequest('url')
32 |         new_request(mock_request, 'GET', 'dummy_url')
33 |         mock_debug.assert_called_with('request id : %s', 'dummy_request_id')
--------------------------------------------------------------------------------
/tests/unittests/test_rate_limit_error.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from unittest import mock
3 | import stripe
4 | from tap_stripe import new_request
5 |
6 | class MockRequest():
7 |     '''Mock Request object'''
8 |     def __init__(self, response):
9 |         self.last_response = response
10 |
11 |     def request_raw(self, method, url, params=None, supplied_headers=None, is_streaming=False):
12 |         return {}, {}, {}, {}
13 |
14 |     def interpret_response(self, rbody, rcode, rheaders):
15 |         raise stripe.error.RateLimitError("Rate Limit Error", 429, {}, {}, {})
16 |
17 | class TestRateLimitError(unittest.TestCase):
18 |     """
19 |     Test that the tap retries each request 7 times on a rate limit error.
20 |     """
21 |
22 |     @mock.patch("time.sleep")
23 |     def test_retry_count_of_429_error(self, mock_sleep):
24 |         """
25 |         Test that the tap retries each request 7 times on a 429 error.
26 |         - Verify that `time.sleep` was called 6 times (one less than the retry count).
27 |         """
28 |         mock_request = MockRequest('url')
29 |         with self.assertRaises(stripe.error.RateLimitError) as e:
30 |             new_request(mock_request, 'GET', 'dummy_url')
31 |
32 |         # Verify that `time.sleep` was called 6 times.
33 |         self.assertEqual(mock_sleep.call_count, 6)
--------------------------------------------------------------------------------
/tests/utils_invoices.py:
--------------------------------------------------------------------------------
1 | import random
2 | import stripe
3 |
4 | from base import BaseTapTest
5 |
6 | stripe.api_key = BaseTapTest.get_credentials()["client_secret"]
7 |
8 | ##########################################################################
9 | # The create-invoice and create-invoice-items methods are defined in this separate file
10 | # so that records are created with line details, which are populated automatically
11 | # in the API response when generated with an older API version.
12 | ##########################################################################
13 |
14 |
15 | def create_invoice_items(customer_id, metadata_value, now_value):
16 |     item = stripe.InvoiceItem.create(
17 |         amount=random.randint(1, 10000),
18 |         currency="usd",
19 |         customer=customer_id,
20 |         description="Comfortable cotton t-shirt {}".format(now_value),
21 |         metadata=metadata_value,
22 |         discountable=True,
23 |         subscription_item=None,
24 |         tax_rates=[],  # TODO enter the child attributes
25 |         stripe_version='2020-08-27'
26 |     )
27 |     return item
28 |
29 |
30 | def create_invoices(customer_id, customer_default_source, metadata_value, now_value):
31 |     invoices_response = stripe.Invoice.create(
32 |         customer=customer_id,
33 |         auto_advance=False,
34 |         collection_method='charge_automatically',
35 |         description="Comfortable cotton t-shirt {}".format(now_value),
36 |         metadata=metadata_value,
37 |         footer='footer',
38 |         statement_descriptor='desc',
39 |         default_source=customer_default_source,
40 |         default_tax_rates=[],
41 |         stripe_version='2020-08-27'
42 |     )
43 |     return invoices_response
44 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 |
27 | # PyInstaller
28 | # Usually these files are written by a python script from a template
29 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
30 | *.manifest
31 | *.spec
32 |
33 | # Installer logs
34 | pip-log.txt
35 | pip-delete-this-directory.txt
36 |
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *,cover
46 | .hypothesis/
47 |
48 | # Translations
49 | *.mo
50 | *.pot
51 |
52 | # Django stuff:
53 | *.log
54 | local_settings.py
55 |
56 | # Flask stuff:
57 | instance/
58 | .webassets-cache
59 |
60 | # Scrapy stuff:
61 | .scrapy
62 |
63 | # Sphinx documentation
64 | docs/_build/
65 |
66 | # PyBuilder
67 | target/
68 |
69 | # IPython Notebook
70 | .ipynb_checkpoints
71 |
72 | # pyenv
73 | .python-version
74 |
75 | # celery beat schedule file
76 | celerybeat-schedule
77 |
78 | # dotenv
79 | .env
80 |
81 | # virtualenv
82 | venv/
83 | ENV/
84 |
85 | # Spyder project settings
86 | .spyderproject
87 |
88 | # Rope project settings
89 | .ropeproject
90 |
91 | # Mac
92 | ._*
93 | .DS_Store
94 |
95 | # Custom stuff
96 | env.sh
97 | config.json
98 | .autoenv.zsh
99 |
100 | rsa-key
101 | tags
102 | singer-check-tap-data
103 | state.json
104 |
--------------------------------------------------------------------------------
/spikes/prior-art/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 |
27 | # PyInstaller
28 | # Usually these files are written by a python script from a template
29 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
30 | *.manifest
31 | *.spec
32 |
33 | # Installer logs
34 | pip-log.txt
35 | pip-delete-this-directory.txt
36 |
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *,cover
46 | .hypothesis/
47 |
48 | # Translations
49 | *.mo
50 | *.pot
51 |
52 | # Django stuff:
53 | *.log
54 | local_settings.py
55 |
56 | # Flask stuff:
57 | instance/
58 | .webassets-cache
59 |
60 | # Scrapy stuff:
61 | .scrapy
62 |
63 | # Sphinx documentation
64 | docs/_build/
65 |
66 | # PyBuilder
67 | target/
68 |
69 | # IPython Notebook
70 | .ipynb_checkpoints
71 |
72 | # pyenv
73 | .python-version
74 |
75 | # celery beat schedule file
76 | celerybeat-schedule
77 |
78 | # dotenv
79 | .env
80 |
81 | # virtualenv
82 | venv/
83 | ENV/
84 |
85 | # Spyder project settings
86 | .spyderproject
87 |
88 | # Rope project settings
89 | .ropeproject
90 |
91 | # Mac
92 | ._*
93 | .DS_Store
94 |
95 | # Custom stuff
96 | env.sh
97 | config.json
98 | .autoenv.zsh
99 |
100 | rsa-key
101 | tags
102 | singer-check-tap-data
103 | state.json
104 | .idea/
--------------------------------------------------------------------------------
/tests/unittests/test_recursive_to_dict.py:
--------------------------------------------------------------------------------
1 | import stripe
2 | import unittest
3 | from tap_stripe import recursive_to_dict
4 |
5 | class TestRecursiveToDict(unittest.TestCase):
6 |
7 |     def test_recursion(self):
8 |
9 |         # Set up cards for customer object
10 |         cards_list = [stripe.Card('card_{}'.format(i)) for i in range(123, 126)]
11 |         for card in cards_list:
12 |             self.assertTrue(isinstance(card, stripe.stripe_object.StripeObject))
13 |
14 |
15 |         # Set up card for source object
16 |         source_card = stripe.Card('card_314')
17 |         source_object = stripe.Source('source_1')
18 |         source_object['card'] = source_card
19 |
20 |         # Set up card to use in a dictionary in customer
21 |         old_card = stripe.Card('card_001')
22 |
23 |
24 |         customer_object = stripe.Customer('cus_12345')
25 |         self.assertTrue(isinstance(customer_object, stripe.stripe_object.StripeObject))
26 |         customer_object['cards'] = cards_list
27 |         self.assertTrue(isinstance(customer_object, stripe.stripe_object.StripeObject))
28 |         customer_object['sources'] = source_object
29 |         customer_object['metadata'] = {
30 |             'city': 'Stripe City',
31 |             'old_card': old_card
32 |         }
33 |
34 |
35 |         expected_object = {
36 |             'id': 'cus_12345',
37 |             'cards': [
38 |                 {'id': 'card_123'},
39 |                 {'id': 'card_124'},
40 |                 {'id': 'card_125'}
41 |             ],
42 |             'sources': {
43 |                 'id': 'source_1',
44 |                 'card': {'id': 'card_314'}
45 |             },
46 |             'metadata': {
47 |                 'city': 'Stripe City',
48 |                 'old_card': {'id': 'card_001'}
49 |             }
50 |         }
51 |
52 |         self.assertEqual(recursive_to_dict(customer_object), expected_object)
53 |
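The expected object in this test fully pins down the helper's contract; a minimal sketch of a function satisfying it, as an illustration rather than the tap's actual implementation:

```python
import stripe

def recursive_to_dict(some_obj):
    """Convert nested StripeObjects (and lists/dicts of them) to plain dicts."""
    if isinstance(some_obj, stripe.stripe_object.StripeObject):
        # StripeObject subclasses dict, so unwrap it and recurse into the fields.
        return recursive_to_dict(dict(some_obj))
    if isinstance(some_obj, list):
        return [recursive_to_dict(item) for item in some_obj]
    if isinstance(some_obj, dict):
        return {key: recursive_to_dict(value) for key, value in some_obj.items()}
    return some_obj
```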
"boolean" 42 | ] 43 | }, 44 | "type": { 45 | "type": [ 46 | "null", 47 | "string" 48 | ] 49 | }, 50 | "value": { 51 | "type": [ 52 | "null", 53 | "string" 54 | ] 55 | }, 56 | "verification": { 57 | "type": [ 58 | "null", 59 | "object" 60 | ], 61 | "properties": { 62 | "status": { 63 | "type": [ 64 | "null", 65 | "string" 66 | ] 67 | }, 68 | "verified_address": { 69 | "type": [ 70 | "null", 71 | "string" 72 | ] 73 | }, 74 | "verified_name": { 75 | "type": [ 76 | "null", 77 | "string" 78 | ] 79 | } 80 | } 81 | } 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /spikes/sdk/missing_invoice.py: -------------------------------------------------------------------------------- 1 | import stripe 2 | import singer 3 | import logging 4 | import sys 5 | 6 | LOGGER = singer.get_logger() 7 | 8 | if len(sys.argv) != 5: 9 | LOGGER.error('Expected 4 args, got %d', (len(sys.argv)-1)) 10 | LOGGER.error('Example: python missing_invoice.py ') 11 | exit() 12 | 13 | stripe_account_id = sys.argv[1] 14 | stripe_client_secret = sys.argv[2] 15 | invoice_id = sys.argv[3] 16 | invoice_date = sys.argv[4] 17 | 18 | def configure_stripe_client(): 19 | stripe.api_key = stripe_client_secret 20 | stripe.api_version = '2018-09-24' 21 | stripe.max_network_retries = 15 22 | client = stripe.http_client.RequestsClient(timeout=15) 23 | stripe.default_http_client = client 24 | logging.getLogger('stripe').setLevel(logging.INFO) 25 | account = stripe.Account.retrieve(stripe_account_id) 26 | msg = "Successfully connected to Stripe Account with display name" \ 27 | + " `%s`" 28 | LOGGER.info(msg, account.display_name) 29 | 30 | configure_stripe_client() 31 | 32 | print('\n \n') 33 | LOGGER.info('========== Trying to get Invoice by ID ===========') 34 | try: 35 | invoice_by_id = stripe.Invoice.retrieve(invoice_id) 36 | except stripe.error.InvalidRequestError as ex: 37 | LOGGER.error("Failed to load invoice with id: %s", invoice_id) 38 | 39 | 40 | print('\n \n') 41 | LOGGER.info(' ========== Trying to get Invoice by date range ===========') 42 | 43 | # try to get invoice by created by querying date range +/- 1 ms of target 44 | invoice_greater_than = str(int(invoice_date) - 1) 45 | invoice_less_than = str(int(invoice_date) + 1) 46 | 47 | params = { 48 | "date[gte]": invoice_greater_than, 49 | "date[lte]": invoice_less_than 50 | } 51 | for invoice_obj in stripe.Invoice.list(stripe_account=stripe_account_id, **params).auto_paging_iter(): 52 | if invoice_obj.id == invoice_id: 53 | LOGGER.info('Found Invoice Object using Date!') 54 | LOGGER.info('--------------------------------') 55 | LOGGER.info('ID: %s',invoice_obj.id) 56 | LOGGER.info('Date: %s', invoice_obj.date) 57 | -------------------------------------------------------------------------------- /spikes/prior-art/tap_stripe/schemas/shared/coupon.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": [ 3 | "null", 4 | "object" 5 | ], 6 | "properties": { 7 | "id": { 8 | "type": [ 9 | "null", 10 | "string" 11 | ] 12 | }, 13 | "object": { 14 | "type": [ 15 | "null", 16 | "string" 17 | ] 18 | }, 19 | "amount_off": { 20 | "type": [ 21 | "null", 22 | "integer" 23 | ] 24 | }, 25 | "created": { 26 | "type": [ 27 | "null", 28 | "string" 29 | ], 30 | "format": "date-time" 31 | }, 32 | "currency": { 33 | "type": [ 34 | "null", 35 | "string" 36 | ] 37 | }, 38 | "duration": { 39 | "type": [ 40 | "null", 41 | "string" 42 | ] 43 | }, 44 | "duration_in_months": { 45 | "type": [ 46 | "null", 47 | 
"integer" 48 | ] 49 | }, 50 | "livemode": { 51 | "type": [ 52 | "null", 53 | "boolean" 54 | ] 55 | }, 56 | "max_redemptions": { 57 | "type": [ 58 | "null", 59 | "integer" 60 | ] 61 | }, 62 | "metadata": { 63 | "type": [ 64 | "null", 65 | "object" 66 | ], 67 | "additionalProperties": true, 68 | "properties": {} 69 | }, 70 | "name": { 71 | "type": [ 72 | "null", 73 | "string" 74 | ] 75 | }, 76 | "percent_off": { 77 | "type": [ 78 | "null", 79 | "number" 80 | ] 81 | }, 82 | "percent_off_precise": { 83 | "type": [ 84 | "null", 85 | "number" 86 | ] 87 | }, 88 | "redeem_by": { 89 | "type": [ 90 | "null", 91 | "string" 92 | ], 93 | "format": "date-time" 94 | }, 95 | "times_redeemed": { 96 | "type": [ 97 | "null", 98 | "integer" 99 | ] 100 | }, 101 | "valid": { 102 | "type": [ 103 | "null", 104 | "boolean" 105 | ] 106 | } 107 | } 108 | } -------------------------------------------------------------------------------- /spikes/prior-art/tap_stripe/schemas/customers.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "object", 3 | "properties": { 4 | "id": { 5 | "type": [ 6 | "null", 7 | "string" 8 | ] 9 | }, 10 | "object": { 11 | "type": [ 12 | "null", 13 | "string" 14 | ] 15 | }, 16 | "account_balance": { 17 | "type": [ 18 | "null", 19 | "integer" 20 | ] 21 | }, 22 | "created": { 23 | "type": [ 24 | "null", 25 | "string" 26 | ], 27 | "format": "date-time" 28 | }, 29 | "currency": { 30 | "type": [ 31 | "null", 32 | "string" 33 | ] 34 | }, 35 | "default_source": { 36 | "type": [ 37 | "null", 38 | "string" 39 | ] 40 | }, 41 | "delinquent": { 42 | "type": [ 43 | "null", 44 | "boolean" 45 | ] 46 | }, 47 | "description": { 48 | "type": [ 49 | "null", 50 | "string" 51 | ] 52 | }, 53 | "discount": { "$ref": "discount.json" }, 54 | "email": { 55 | "type": [ 56 | "null", 57 | "string" 58 | ] 59 | }, 60 | "invoice_prefix": { 61 | "type": [ 62 | "null", 63 | "string" 64 | ] 65 | }, 66 | "livemode": { 67 | "type": [ 68 | "null", 69 | "boolean" 70 | ] 71 | }, 72 | "metadata": { 73 | "type": [ 74 | "null", 75 | "object" 76 | ], 77 | "additionalProperties": true, 78 | "properties": {} 79 | }, 80 | "shipping": { 81 | "type": [ 82 | "null", 83 | "object" 84 | ], 85 | "additionalProperties": true, 86 | "properties": {} 87 | }, 88 | "sources": { 89 | "type": [ 90 | "null", 91 | "array" 92 | ], 93 | "items": { "$ref": "source.json" } 94 | }, 95 | "subscriptions": { 96 | "type": [ 97 | "null", 98 | "array" 99 | ], 100 | "items": { "$ref": "subscription.json" } 101 | } 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /tests/unittests/test_payout_events_object.py: -------------------------------------------------------------------------------- 1 | from tap_stripe import STREAM_TO_TYPE_FILTER, should_sync_event 2 | import unittest 3 | 4 | # create mock classes for returning desired dummy data 5 | class Object: 6 | def __init__(self, data) -> None: 7 | self.data = data 8 | def to_dict_recursive(self): 9 | return self.data 10 | 11 | class Data: 12 | def __init__(self, data): 13 | self.object = Object(data) 14 | 15 | # mock Payouts class 16 | class MockPayout: 17 | def __init__(self, data): 18 | self.data = Data(data) 19 | self.created = "2022-01-01" 20 | 21 | class TestPayoutEventObject(unittest.TestCase): 22 | """ 23 | Test cases to verify the Tap syncs payout events with 'object' type 'transfer' and 'payout' 24 | """ 25 | 26 | def test_payout_stream_transfer_object(self): 27 | """ 28 | Test cases to verify the Tap 
--------------------------------------------------------------------------------
/tests/test_parent_child_independent.py:
--------------------------------------------------------------------------------
1 | from tap_tester import runner, connections
2 | from datetime import datetime as dt
3 | from datetime import timedelta
4 | from base import BaseTapTest
5 |
6 | class ParentChildIndependentTest(BaseTapTest):
7 |
8 |     @staticmethod
9 |     def name():
10 |         return "tt_stripe_parent_child_test"
11 |
12 |     def test_child_streams(self):
13 |         """
14 |         Test case to verify that the tap works correctly if only first-level child streams are selected
15 |         """
16 |         four_days_ago = dt.strftime(dt.today() - timedelta(days=4), self.START_DATE_FORMAT)
17 |         # select child streams only and run the test
18 |         child_streams = {"invoice_line_items", "subscription_items"}
19 |         self.run_test(child_streams)
20 |         # Separated the payout_transactions stream as there is a lag from the Stripe side to reflect
21 |         # the automatic payout transactions data, hence we want to change the start_date for that stream
22 |         child_streams = {"payout_transactions"}
23 |         start_date = four_days_ago
24 |         self.run_test(child_streams, start_date, False)
25 |
26 |     def run_test(self, streams, start_date=None, default_start_date=True):
27 |         """
28 |         Testing that the tap works correctly if only child streams are selected
29 |         - Verify that if only child streams are selected, then only child streams are replicated.
30 |         """
31 |
32 |         if not default_start_date:
33 |             self.start_date = start_date
34 |         # instantiate connection
35 |         conn_id = connections.ensure_connection(self, original_properties=default_start_date)
36 |         self.conn_id = conn_id
37 |
38 |         # run check mode
39 |         found_catalogs = self.run_and_verify_check_mode(conn_id)
40 |
41 |         # table and field selection
42 |         self.perform_and_verify_table_and_field_selection(conn_id, found_catalogs, streams_to_select=streams)
43 |
44 |         # run initial sync
45 |         record_count_by_stream = self.run_and_verify_sync(conn_id)
46 |         synced_records = runner.get_records_from_target_output()
47 |
48 |         # Verify no unexpected streams were replicated
49 |         synced_stream_names = set(synced_records.keys())
50 |         self.assertSetEqual(streams, synced_stream_names)
51 |
30 | """ 31 | 32 | if not default_start_date: 33 | self.start_date = start_date 34 | # instantiate connection 35 | conn_id = connections.ensure_connection(self, original_properties=default_start_date) 36 | self.conn_id = conn_id 37 | 38 | # run check mode 39 | found_catalogs = self.run_and_verify_check_mode(conn_id) 40 | 41 | # table and field selection 42 | self.perform_and_verify_table_and_field_selection(conn_id, found_catalogs, streams_to_select=streams) 43 | 44 | # run initial sync 45 | record_count_by_stream = self.run_and_verify_sync(conn_id) 46 | synced_records = runner.get_records_from_target_output() 47 | 48 | # Verify no unexpected streams were replicated 49 | synced_stream_names = set(synced_records.keys()) 50 | self.assertSetEqual(streams, synced_stream_names) 51 | -------------------------------------------------------------------------------- /tests/unittests/test_date_window_size.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from parameterized import parameterized 3 | from tap_stripe import Context, get_date_window_size, DEFAULT_DATE_WINDOW_SIZE 4 | 5 | 6 | class TestGetWindowSize(unittest.TestCase): 7 | """ 8 | Test `get_date_window_size` method of the client. 9 | """ 10 | 11 | @parameterized.expand([ 12 | ["integer_value", 10, 10.0], 13 | ["float_value", 100.5, 100.5], 14 | ["string_integer", "10", 10.0], 15 | ["string_float", "100.5", 100.5], 16 | ]) 17 | def test_window_size_values(self, name, date_window_size, expected_value): 18 | """ 19 | Test that for the valid value of window size, 20 | No exception is raised and the expected value is set. 21 | """ 22 | Context.config = {"date_window_size": date_window_size} 23 | 24 | # Verify window size value is expected 25 | self.assertEqual(get_date_window_size("date_window_size", DEFAULT_DATE_WINDOW_SIZE), expected_value) 26 | 27 | @parameterized.expand([ 28 | ["integer_zero", 0], 29 | ["float_zero", 0.0], 30 | ["negative_value", -10], 31 | ["string_zero", "0"], 32 | ["string_float_zero", "0.0"], 33 | ["string_negative_value", "-100"], 34 | ["string_alphabate", "abc"], 35 | ]) 36 | def test_invalid_value(self, name, date_window_size): 37 | """ 38 | Test that for invalid value exception is raised. 39 | """ 40 | Context.config = {"date_window_size": date_window_size} 41 | with self.assertRaises(Exception) as e: 42 | get_date_window_size("date_window_size", DEFAULT_DATE_WINDOW_SIZE) 43 | 44 | # Verify that the exception message is expected. 45 | self.assertEqual( 46 | str(e.exception), 47 | "The entered window size '{}' is invalid, it should be a valid non-zero integer.".format(date_window_size)) 48 | 49 | def test_non_value(self): 50 | """ 51 | Test if no window size is not passed in the config, then set it to the default value. 52 | """ 53 | Context.config = {} 54 | 55 | # Verify that the default window size value is set. 
--------------------------------------------------------------------------------
/tap_stripe/schemas/coupons.json:
--------------------------------------------------------------------------------
1 | {
2 |   "type": [
3 |     "null",
4 |     "object"
5 |   ],
6 |   "properties": {
7 |     "metadata": {
8 |       "type": [
9 |         "null",
10 |         "object"
11 |       ],
12 |       "properties": {}
13 |     },
14 |     "updated_by_event_type": {
15 |       "type": [
16 |         "null",
17 |         "string"
18 |       ]
19 |     },
20 |     "times_redeemed": {
21 |       "type": [
22 |         "null",
23 |         "integer"
24 |       ]
25 |     },
26 |     "percent_off_precise": {
27 |       "type": [
28 |         "null",
29 |         "number"
30 |       ]
31 |     },
32 |     "livemode": {
33 |       "type": [
34 |         "null",
35 |         "boolean"
36 |       ]
37 |     },
38 |     "object": {
39 |       "type": [
40 |         "null",
41 |         "string"
42 |       ]
43 |     },
44 |     "redeem_by": {
45 |       "type": [
46 |         "null",
47 |         "string"
48 |       ],
49 |       "format": "date-time"
50 |     },
51 |     "duration": {
52 |       "type": [
53 |         "null",
54 |         "string"
55 |       ]
56 |     },
57 |     "id": {
58 |       "type": [
59 |         "null",
60 |         "string"
61 |       ]
62 |     },
63 |     "valid": {
64 |       "type": [
65 |         "null",
66 |         "boolean"
67 |       ]
68 |     },
69 |     "currency": {
70 |       "type": [
71 |         "null",
72 |         "string"
73 |       ]
74 |     },
75 |     "duration_in_months": {
76 |       "type": [
77 |         "null",
78 |         "integer"
79 |       ]
80 |     },
81 |     "name": {
82 |       "type": [
83 |         "null",
84 |         "string"
85 |       ]
86 |     },
87 |     "max_redemptions": {
88 |       "type": [
89 |         "null",
90 |         "integer"
91 |       ]
92 |     },
93 |     "amount_off": {
94 |       "type": [
95 |         "null",
96 |         "integer"
97 |       ]
98 |     },
99 |     "created": {
100 |       "type": [
101 |         "null",
102 |         "string"
103 |       ],
104 |       "format": "date-time"
105 |     },
106 |     "percent_off": {
107 |       "type": [
108 |         "null",
109 |         "number"
110 |       ]
111 |     },
112 |     "updated": {
113 |       "type": [
114 |         "null",
115 |         "string"
116 |       ],
117 |       "format": "date-time"
118 |     }
119 |   }
120 | }
121 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # tap-stripe
2 |
3 | This is a [Singer](https://singer.io) tap that produces JSON-formatted data
4 | following the [Singer
5 | spec](https://github.com/singer-io/getting-started/blob/master/SPEC.md).
6 |
7 | ## Installation
8 |
9 | See the getting-started guide:
10 |
11 | https://github.com/singer-io/getting-started
12 |
13 | ## Usage
14 |
15 | This section dives into basic usage of `tap-stripe` by walking through extracting
16 | data from the API.
17 |
18 | ### Create the configuration file
19 |
20 | Create a config file containing the Stripe credentials, e.g.:
21 |
22 | ```json
23 | {
24 |   "client_secret": "sk_live_xxxxxxxxxxxxxxxxxxxxxxxx",
25 |   "account_id": "acct_xxxxxxxxxxxxxxxx",
26 |   "start_date": "2017-01-01T00:00:00Z",
27 |   "request_timeout": 300,
28 |   "lookback_window": 600,
29 |   "event_date_window_size": 7,
30 |   "date_window_size": 30
31 | }
32 | ```
33 |
34 | ### Discovery mode
35 |
36 | The tap can be invoked in discovery mode to find the available Stripe entities.
37 |
38 | ```bash
39 | $ tap-stripe --config config.json --discover
40 |
41 | ```
42 |
43 | A discovered catalog is output, with a JSON-schema description of each table. A
44 | source table directly corresponds to a Singer stream.
45 |
46 | ### Field selection
47 |
48 | In sync mode, `tap-stripe` consumes the catalog and looks for streams that have been
49 | marked as _selected_ in their associated metadata entries.
50 |
51 | Redirect output from the tap's discovery mode to a file so that it can be
52 | modified:
53 |
54 | ```bash
55 | $ tap-stripe --config config.json --discover > catalog.json
56 | ```
57 |
58 | Then edit `catalog.json` to make selections. The stream's metadata entry (associated
59 | with `"breadcrumb": []`) gets a top-level `selected` flag, as does its columns' metadata
60 | entries.
61 |
62 | ```diff
63 | [
64 |   {
65 |     "breadcrumb": [],
66 |     "metadata": {
67 |       "valid-replication-keys": [
68 |         "created"
69 |       ],
70 |       "table-key-properties": [
71 |         "id"
72 |       ],
73 |       "forced-replication-method": "INCREMENTAL",
74 | +     "selected": "true"
75 |     }
76 |   },
77 | ]
78 | ```
79 |
80 | ### Sync mode
81 |
82 | With a `catalog.json` that describes field and table selections, the tap can be invoked in sync mode:
83 |
84 | ```bash
85 | $ tap-stripe --config config.json --catalog catalog.json
86 | ```
87 |
88 | Messages are written to standard output following the Singer specification. The
89 | resultant stream of JSON data can be consumed by a Singer target.
90 |
91 | ---
92 |
93 | Copyright © 2018 Stitch
94 |
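The README stops at a first catalog-driven sync. For incremental runs, the standard Singer pattern is to feed the last emitted STATE message back into the next invocation; assuming the stock `--state` flag from singer-python's argument parsing (which this tap uses), that looks like:

```bash
$ tap-stripe --config config.json --catalog catalog.json --state state.json
```

Here `state.json` holds the last STATE message emitted by the previous run, so the tap resumes from its saved bookmarks instead of the configured `start_date`.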
--------------------------------------------------------------------------------
/tests/unittests/test_logger_for_events.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from unittest import mock
3 | from datetime import datetime
4 | from tap_stripe import Context, sync_stream
5 |
6 | class MockClass():
7 |     '''The mock class for the Balance Transactions/events object.'''
8 |     lines = "lines"
9 |     def __init__(self):
10 |         return None
11 |
12 |     @classmethod
13 |     def to_dict_recursive(cls):
14 |         '''The mocked to_dict_recursive method of the Balance Transactions/Events class.'''
15 |         return "Test Data"
16 |
17 | BOOKMARK_TIME = 1645046000  # epoch bookmark time
18 | BOOKMARK_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
19 |
20 | @mock.patch("tap_stripe.LOGGER.warning")
21 | @mock.patch("singer.write_record")
22 | @mock.patch('singer.utils.now', return_value = datetime.strptime("2022-05-01T08:30:50Z", BOOKMARK_FORMAT))
23 | @mock.patch("tap_stripe.reduce_foreign_keys", return_value = {"created": 16452804585})
24 | @mock.patch("tap_stripe.paginate", return_value = [MockClass()])
25 | @mock.patch("tap_stripe.Context.get_catalog_entry")
26 | @mock.patch("tap_stripe.singer.metadata.to_map")
27 | @mock.patch("tap_stripe.singer.metadata.get", return_value = ["created"])
28 | @mock.patch("tap_stripe.epoch_to_dt")
29 | @mock.patch("tap_stripe.dt_to_epoch", side_effect = [1645056000, 1645056000, 1647647700, 1645056000])  # epoch timestamps
30 | @mock.patch("tap_stripe.sync_sub_stream")
31 | @mock.patch("tap_stripe.singer.get_bookmark", side_effect = [BOOKMARK_TIME, BOOKMARK_TIME])
32 | class TestLoggerWarningForEvents(unittest.TestCase):
33 |
34 |     def test_date_window_logger(self, mock_get_bookmark_for_stream, mock_sync_substream, mock_dt_to_epoch, mock_epoch_to_dt, mock_get, mock_metadata_map,
35 |                                 mock_get_catalog_entry, mock_paginate, mock_reduce_foreign_keys,
36 |                                 mock_utils_now, mock_write_record, mock_logger):
37 |         """
38 |         Test that the tap prints the expected warning message when a bookmark older than 30 days is passed in the state.
39 |         """
40 |         config = {"client_secret": "test_secret", "account_id": "test_account", "start_date": "2022-02-17T00:00:00", "lookback_window": "0"}
41 |         Context.config = config
42 |         Context.new_counts['events'] = 1
43 |         sync_stream("events")
44 |
45 |         expected_logger_warning = [
46 |             mock.call("Provided start_date or current bookmark for newly created event records is older than 30 days."),
47 |             mock.call("The Stripe Event API returns data for the last 30 days only. So, syncing event data from 30 days only.")
48 |         ]
49 |         # Verify the warning messages for a bookmark older than 30 days.
50 |         self.assertEqual(mock_logger.mock_calls, expected_logger_warning)
51 |
39 | """ 40 | config = {"client_secret": "test_secret", "account_id": "test_account", "start_date": "2022-02-17T00:00:00", "lookback_window": "0"} 41 | Context.config = config 42 | Context.new_counts['events'] = 1 43 | sync_stream("events") 44 | 45 | expected_logger_warning = [ 46 | mock.call("Provided start_date or current bookmark for newly created event records is older than 30 days."), 47 | mock.call("The Stripe Event API returns data for the last 30 days only. So, syncing event data from 30 days only.") 48 | ] 49 | # Verify warning message for bookmark of less than last 30 days. 50 | self.assertEqual(mock_logger.mock_calls, expected_logger_warning) 51 | -------------------------------------------------------------------------------- /tests/unittests/test_auth_in_discovery.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | from tap_stripe import stripe, Context 3 | import tap_stripe 4 | import unittest 5 | import requests 6 | import json 7 | 8 | # Mock args 9 | class Args(): 10 | def __init__(self): 11 | self.discover = True 12 | self.catalog = False 13 | self.config = {"client_secret": "test_secret", "account_id": "test_account", "start_date": "test_start_date"} 14 | self.state = False 15 | 16 | # Mock response 17 | def get_mock_http_response(status_code, content={}): 18 | contents = json.dumps(content) 19 | response = requests.Response() 20 | response.status_code = status_code 21 | response.headers = {} 22 | response._content = contents.encode() 23 | return response 24 | 25 | @mock.patch('tap_stripe.utils.parse_args') 26 | class TestBasicAuthInDiscoverMode(unittest.TestCase): 27 | @mock.patch('tap_stripe.discover') 28 | @mock.patch('stripe.http_client.requests.Session.request') 29 | def test_basic_auth_no_access_401(self, mock_request, mocked_discover, mocked_args): 30 | ''' 31 | Verify exception is raised for no access(401) error code for authentication through sdk 32 | and discover is called zero times for setup Context. 33 | ''' 34 | mock_request.return_value = get_mock_http_response(401, {'error': {'message': 'Invalid API Key provided: test_secret', 'type': 'invalid_request_error'}}) 35 | mocked_args.return_value = Args() 36 | try: 37 | tap_stripe.main() 38 | except stripe.error.AuthenticationError as e: 39 | expected_error_message = 'Invalid API Key provided: test_secret' 40 | # Verifying the message formed for the custom exception 41 | self.assertEqual(str(e), expected_error_message) 42 | # Verify that the discover is not called when incorrect credentials are passed 43 | self.assertEqual(mocked_discover.call_count, 0) 44 | 45 | @mock.patch('tap_stripe.discover', return_value = {}) 46 | @mock.patch('stripe.http_client.requests.Session.request') 47 | def test_basic_auth_access_200(self, mock_retrieve, mocked_discover, mocked_args): 48 | ''' 49 | Verify discover mode is called if credentials are valid by setting up the client and calling the sdk function 50 | and discover function is called once for setup Context and discover mode. 
--------------------------------------------------------------------------------
/spikes/prior-art/tap_stripe/schemas/shared/source.json:
--------------------------------------------------------------------------------
1 | {
2 |   "type": [
3 |     "null",
4 |     "object"
5 |   ],
6 |   "additionalProperties": true,
7 |   "properties": {
8 |     "id": {
9 |       "type": [
10 |         "null",
11 |         "string"
12 |       ]
13 |     },
14 |     "object": {
15 |       "type": [
16 |         "null",
17 |         "string"
18 |       ]
19 |     },
20 |     "address_city": {
21 |       "type": [
22 |         "null",
23 |         "string"
24 |       ]
25 |     },
26 |     "address_country": {
27 |       "type": [
28 |         "null",
29 |         "string"
30 |       ]
31 |     },
32 |     "address_line1": {
33 |       "type": [
34 |         "null",
35 |         "string"
36 |       ]
37 |     },
38 |     "address_line1_check": {
39 |       "type": [
40 |         "null",
41 |         "string"
42 |       ]
43 |     },
44 |     "address_line2": {
45 |       "type": [
46 |         "null",
47 |         "string"
48 |       ]
49 |     },
50 |     "address_state": {
51 |       "type": [
52 |         "null",
53 |         "string"
54 |       ]
55 |     },
56 |     "address_zip": {
57 |       "type": [
58 |         "null",
59 |         "string"
60 |       ]
61 |     },
62 |     "address_zip_check": {
63 |       "type": [
64 |         "null",
65 |         "string"
66 |       ]
67 |     },
68 |     "brand": {
69 |       "type": [
70 |         "null",
71 |         "string"
72 |       ]
73 |     },
74 |     "country": {
75 |       "type": [
76 |         "null",
77 |         "string"
78 |       ]
79 |     },
80 |     "customer": {
81 |       "type": [
82 |         "null",
83 |         "string"
84 |       ]
85 |     },
86 |     "cvc_check": {
87 |       "type": [
88 |         "null",
89 |         "string"
90 |       ]
91 |     },
92 |     "dynamic_last4": {
93 |       "type": [
94 |         "null",
95 |         "string"
96 |       ]
97 |     },
98 |     "exp_month": {
99 |       "type": [
100 |         "null",
101 |         "integer"
102 |       ]
103 |     },
104 |     "exp_year": {
105 |       "type": [
106 |         "null",
107 |         "integer"
108 |       ]
109 |     },
110 |     "fingerprint": {
111 |       "type": [
112 |         "null",
113 |         "string"
114 |       ]
115 |     },
116 |     "funding": {
117 |       "type": [
118 |         "null",
119 |         "string"
120 |       ]
121 |     },
122 |     "last4": {
123 |       "type": [
124 |         "null",
125 |         "string"
126 |       ]
127 |     },
128 |     "metadata": {
129 |       "type": [
130 |         "null",
131 |         "object"
132 |       ],
133 |       "additionalProperties": true,
134 |       "properties": {}
135 |     },
136 |     "name": {
137 |       "type": [
138 |         "null",
139 |         "string"
140 |       ]
141 |     },
142 |     "tokenization_method": {
143 |       "type": [
144 |         "null",
145 |         "string"
146 |       ]
147 |     }
148 |   }
149 | }
--------------------------------------------------------------------------------
/tap_stripe/schemas/balance_transactions.json:
--------------------------------------------------------------------------------
1 | {
2 |   "properties": {
3 |     "fee": {
4 |       "type": [
5 |         "null",
6 |         "integer"
7 |       ]
8 |     },
9 |     "currency": {
10 |       "type": [
11 |         "null",
12 |         "string"
13 |       ]
14 |     },
15 |     "source": {
16 |       "type": [
17 |         "null",
18 |         "string"
19 |       ]
20 |     },
21 |     "fee_details": {
22 |       "type": [
23 |         "null",
24 |         "array"
25 |       ],
26 |       "items": {
27 |         "properties": {
28 |           "application": {
29 |             "type": [
30 |               "null",
31 |               "string"
32 |             ]
33 |           },
34 |           "type": {
35 |             "type": [
36 |               "null",
37 |               "string"
38 |             ]
39 |           },
40 |           "description": {
41 |             "type": [
42 |               "null",
43 |               "string"
44 |             ]
45 |           },
46 |           "amount": {
47 |             "type": [
48 |               "null",
49 |               "integer"
50 |             ]
51 |           },
52 |           "currency": {
53 |             "type": [
54 |               "null",
55 |               "string"
56 |             ]
57 |           }
58 |         },
59 |         "type": [
60 |           "null",
61 |           "object"
62 |         ]
63 |       }
64 |     },
65 |     "available_on": {
66 |       "type": [
67 |         "null",
68 |         "integer"
69 |       ]
70 |     },
71 |     "status": {
72 |       "type": [
73 |         "null",
74 |         "string"
75 |       ]
76 |     },
77 |     "description": {
78 |       "type": [
79 |         "null",
80 |         "string"
81 |       ]
82 |     },
83 |     "net": {
84 |       "type": [
85 |         "null",
86 |         "integer"
87 |       ]
88 |     },
89 |     "exchange_rate": {
90 |       "type": [
91 |         "null",
92 |         "number"
93 |       ]
94 |     },
95 |     "type": {
96 |       "type": [
97 |         "null",
98 |         "string"
99 |       ]
100 |     },
101 |     "sourced_transfers": {
102 |       "items": {},
103 |       "type": [
104 |         "null",
105 |         "array"
106 |       ]
107 |     },
108 |     "id": {
109 |       "type": [
110 |         "null",
111 |         "string"
112 |       ]
113 |     },
114 |     "object": {
115 |       "type": [
116 |         "null",
117 |         "string"
118 |       ]
119 |     },
120 |     "created": {
121 |       "type": [
122 |         "null",
123 |         "string"
124 |       ],
125 |       "format": "date-time"
126 |     },
127 |     "amount": {
128 |       "type": [
129 |         "null",
130 |         "integer"
131 |       ]
132 |     },
133 |     "updated": {
134 |       "type": [
135 |         "null",
136 |         "string"
137 |       ],
138 |       "format": "date-time"
139 |     }
140 |   },
141 |   "type": [
142 |     "null",
143 |     "object"
144 |   ]
145 | }
146 |
"object" 62 | ] 63 | } 64 | }, 65 | "available_on": { 66 | "type": [ 67 | "null", 68 | "integer" 69 | ] 70 | }, 71 | "status": { 72 | "type": [ 73 | "null", 74 | "string" 75 | ] 76 | }, 77 | "description": { 78 | "type": [ 79 | "null", 80 | "string" 81 | ] 82 | }, 83 | "net": { 84 | "type": [ 85 | "null", 86 | "integer" 87 | ] 88 | }, 89 | "exchange_rate": { 90 | "type": [ 91 | "null", 92 | "number" 93 | ] 94 | }, 95 | "type": { 96 | "type": [ 97 | "null", 98 | "string" 99 | ] 100 | }, 101 | "sourced_transfers": { 102 | "items": {}, 103 | "type": [ 104 | "null", 105 | "array" 106 | ] 107 | }, 108 | "id": { 109 | "type": [ 110 | "null", 111 | "string" 112 | ] 113 | }, 114 | "object": { 115 | "type": [ 116 | "null", 117 | "string" 118 | ] 119 | }, 120 | "created": { 121 | "type": [ 122 | "null", 123 | "string" 124 | ], 125 | "format": "date-time" 126 | }, 127 | "amount": { 128 | "type": [ 129 | "null", 130 | "integer" 131 | ] 132 | }, 133 | "updated": { 134 | "type": [ 135 | "null", 136 | "string" 137 | ], 138 | "format": "date-time" 139 | } 140 | }, 141 | "type": [ 142 | "null", 143 | "object" 144 | ] 145 | } 146 | -------------------------------------------------------------------------------- /tests/test_configurable_lookback_window.py: -------------------------------------------------------------------------------- 1 | """Test tap configurable properties. Specifically the lookback_window""" 2 | import re 3 | import os 4 | from datetime import datetime as dt 5 | from datetime import timedelta 6 | 7 | from tap_tester import menagerie, connections, runner, LOGGER 8 | 9 | from base import BaseTapTest 10 | 11 | 12 | class ConversionWindowBaseTest(BaseTapTest): 13 | """ 14 | Test tap's sync mode can execute with valid lookback_window values set. 15 | Validate setting the lookback_window configurable property. 16 | Test Cases: 17 | Verify connection can be created, and tap can discover and sync with a lookback window 18 | when passed in config else takes default value. 19 | """ 20 | lookback_window = '600' # default value 21 | 22 | @staticmethod 23 | def name(): 24 | return "tt_stripe_lookback_window_600" 25 | 26 | def get_properties(self): 27 | """Configurable properties, with a switch to override the 'start_date' property""" 28 | return_value = { 29 | 'start_date':dt.strftime(dt.utcnow(), self.START_DATE_FORMAT), 30 | 'lookback_window': self.lookback_window, 31 | 'account_id': os.getenv('TAP_STRIPE_ACCOUNT_ID'), 32 | 'client_secret': os.getenv('TAP_STRIPE_CLIENT_SECRET') 33 | } 34 | return return_value 35 | 36 | def run_test(self): 37 | """ 38 | Testing that basic sync functions without Critical Errors when 39 | a valid lookback_window is set. 40 | """ 41 | LOGGER.info("Configurable Properties Test (lookback_window)") 42 | 43 | conn_id = connections.ensure_connection(self) 44 | self.conn_id = conn_id 45 | 46 | streams_to_test = {'balance_transactions'} 47 | 48 | # Run a discovery job 49 | found_catalogs = self.run_and_verify_check_mode(conn_id) 50 | 51 | # Perform table and field selection... 52 | core_catalogs = [catalog for catalog in found_catalogs 53 | if catalog['stream_name'] in streams_to_test] 54 | 55 | # select all fields for core streams and... 
--------------------------------------------------------------------------------
/spikes/prior-art/tap_stripe/schemas/shared/subscription.json:
--------------------------------------------------------------------------------
1 | {
2 |   "type": [
3 |     "null",
4 |     "object"
5 |   ],
6 |   "properties": {
7 |     "id": {
8 |       "type": [
9 |         "null",
10 |         "string"
11 |       ]
12 |     },
13 |     "object": {
14 |       "type": [
15 |         "null",
16 |         "string"
17 |       ]
18 |     },
19 |     "application_fee_percent": {
20 |       "type": [
21 |         "null",
22 |         "number"
23 |       ]
24 |     },
25 |     "billing": {
26 |       "type": [
27 |         "null",
28 |         "string"
29 |       ]
30 |     },
31 |     "billing_cycle_anchor": {
32 |       "type": [
33 |         "null",
34 |         "string"
35 |       ],
36 |       "format": "date-time"
37 |     },
38 |     "cancel_at_period_end": {
39 |       "type": [
40 |         "null",
41 |         "boolean"
42 |       ]
43 |     },
44 |     "canceled_at": {
45 |       "type": [
46 |         "null",
47 |         "string"
48 |       ],
49 |       "format": "date-time"
50 |     },
51 |     "created": {
52 |       "type": [
53 |         "null",
54 |         "string"
55 |       ],
56 |       "format": "date-time"
57 |     },
58 |     "current_period_end": {
59 |       "type": [
60 |         "null",
61 |         "string"
62 |       ],
63 |       "format": "date-time"
64 |     },
65 |     "current_period_start": {
66 |       "type": [
67 |         "null",
68 |         "string"
69 |       ],
70 |       "format": "date-time"
71 |     },
72 |     "customer": {
73 |       "type": [
74 |         "null",
75 |         "string"
76 |       ]
77 |     },
78 |     "days_until_due": {
79 |       "type": [
80 |         "null",
81 |         "integer"
82 |       ]
83 |     },
84 |     "discount": { "$ref": "discount.json" },
85 |     "ended_at": {
86 |       "type": [
87 |         "null",
88 |         "string"
89 |       ],
90 |       "format": "date-time"
91 |     },
92 |     "items": {
93 |       "type": [
94 |         "null",
95 |         "array"
96 |       ],
97 |       "items": {
98 |         "$ref": "subscription_item.json"
99 |       }
100 |     },
101 |     "livemode": {
102 |       "type": [
103 |         "null",
104 |         "boolean"
105 |       ]
106 |     },
107 |     "metadata": {
108 |       "type": [
109 |         "null",
110 |         "object"
111 |       ],
112 |       "additionalProperties": true,
113 |       "properties": {}
114 |     },
115 |     "plan": {
116 |       "$ref": "plan.json"
117 |     },
118 |     "quantity": {
119 |       "type": [
120 |         "null",
121 |         "integer"
122 |       ]
123 |     },
124 |     "start": {
125 |       "type": [
126 |         "null",
127 |         "string"
128 |       ],
129 |       "format": "date-time"
130 |     },
131 |     "status": {
132 |       "type": [
133 |         "null",
134 |         "string"
135 |       ]
136 |     },
137 |     "tax_percent": {
138 |       "type": [
139 |         "null",
140 |         "number"
141 |       ]
142 |     },
143 |     "trial_end": {
144 |       "type": [
145 |         "null",
146 |         "string"
147 |       ],
148 |       "format": "date-time"
149 |     },
150 |     "trial_start": {
151 |       "type": [
152 |         "null",
153 |         "string"
154 |       ],
155 |       "format": "date-time"
156 |     }
157 |   }
158 | }
--------------------------------------------------------------------------------
/spikes/prior-art/tap_stripe/schemas/shared/plan.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": [ 3 | "null", 4 | "object" 5 | ], 6 | "properties": { 7 | "id": { 8 | "type": [ 9 | "null", 10 | "string" 11 | ] 12 | }, 13 | "name": { 14 | "type": [ 15 | "null", 16 | "string" 17 | ] 18 | }, 19 | "statement_descriptor": { 20 | "type": [ 21 | "null", 22 | "string" 23 | ] 24 | }, 25 | "object": { 26 | "type": [ 27 | "null", 28 | "string" 29 | ] 30 | }, 31 | "active": { 32 | "type": [ 33 | "null", 34 | "boolean" 35 | ] 36 | }, 37 | "aggregate_usage": { 38 | "type": [ 39 | "null", 40 | "string" 41 | ] 42 | }, 43 | "amount": { 44 | "type": [ 45 | "null", 46 | "integer" 47 | ] 48 | }, 49 | "billing_scheme": { 50 | "type": [ 51 | "null", 52 | "string" 53 | ] 54 | }, 55 | "created": { 56 | "type": [ 57 | "null", 58 | "string" 59 | ], 60 | "format": "date-time" 61 | }, 62 | "currency": { 63 | "type": [ 64 | "null", 65 | "string" 66 | ] 67 | }, 68 | "interval": { 69 | "type": [ 70 | "null", 71 | "string" 72 | ] 73 | }, 74 | "interval_count": { 75 | "type": [ 76 | "null", 77 | "integer" 78 | ] 79 | }, 80 | "livemode": { 81 | "type": [ 82 | "null", 83 | "boolean" 84 | ] 85 | }, 86 | "metadata": { 87 | "type": [ 88 | "null", 89 | "object" 90 | ], 91 | "additionalProperties": true, 92 | "properties": {} 93 | }, 94 | "nickname": { 95 | "type": [ 96 | "null", 97 | "string" 98 | ] 99 | }, 100 | "product": { 101 | "type": [ 102 | "null", 103 | "string" 104 | ] 105 | }, 106 | "tiers": { 107 | "type": [ 108 | "null", 109 | "object" 110 | ], 111 | "properties": { 112 | "amount": { 113 | "type": [ 114 | "null", 115 | "integer" 116 | ] 117 | }, 118 | "up_to": { 119 | "type": [ 120 | "null", 121 | "integer" 122 | ] 123 | } 124 | } 125 | }, 126 | "tiers_mode": { 127 | "type": [ 128 | "null", 129 | "string" 130 | ] 131 | }, 132 | "transform_usage": { 133 | "type": [ 134 | "null", 135 | "object" 136 | ], 137 | "properties": { 138 | "divide_by": { 139 | "type": [ 140 | "null", 141 | "integer" 142 | ] 143 | }, 144 | "round": { 145 | "type": [ 146 | "null", 147 | "string" 148 | ] 149 | } 150 | } 151 | }, 152 | "trial_period_days": { 153 | "type": [ 154 | "null", 155 | "integer" 156 | ] 157 | }, 158 | "usage_type": { 159 | "type": [ 160 | "null", 161 | "string" 162 | ] 163 | } 164 | } 165 | } -------------------------------------------------------------------------------- /tap_stripe/schemas/transfers.json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": { 3 | "metadata": { 4 | "type": [ 5 | "null", 6 | "object" 7 | ], 8 | "properties": {} 9 | }, 10 | "updated_by_event_type": { 11 | "type": [ 12 | "null", 13 | "string" 14 | ] 15 | }, 16 | "reversals": { 17 | "type": [ 18 | "null", 19 | "array" 20 | ], 21 | "items": { 22 | "type": [ 23 | "null", 24 | "object" 25 | ], 26 | "properties": {} 27 | } 28 | }, 29 | "id": { 30 | "type": [ 31 | "null", 32 | "string" 33 | ] 34 | }, 35 | "statement_description": { 36 | "type": [ 37 | "null", 38 | "string" 39 | ] 40 | }, 41 | "amount": { 42 | "type": [ 43 | "null", 44 | "integer" 45 | ] 46 | }, 47 | "balance_transaction": { 48 | "type": [ 49 | "null", 50 | "string" 51 | ] 52 | }, 53 | "reversed": { 54 | "type": [ 55 | "null", 56 | "boolean" 57 | ] 58 | }, 59 | "created": { 60 | "type": [ 61 | "null", 62 | "string" 63 | ], 64 | "format": "date-time" 65 | }, 66 | "amount_reversed": { 67 | "type": [ 68 | "null", 69 | "integer" 70 | ] 71 | }, 72 | "source_type": { 73 | "type": 
[ 74 | "null", 75 | "string" 76 | ] 77 | }, 78 | "source_transaction": { 79 | "type": [ 80 | "null", 81 | "string" 82 | ] 83 | }, 84 | "date": { 85 | "type": [ 86 | "null", 87 | "string" 88 | ], 89 | "format": "date-time" 90 | }, 91 | "livemode": { 92 | "type": [ 93 | "null", 94 | "boolean" 95 | ] 96 | }, 97 | "statement_descriptor": { 98 | "type": [ 99 | "null", 100 | "string" 101 | ] 102 | }, 103 | "failure_balance_transaction": { 104 | "type": [ 105 | "null", 106 | "string" 107 | ] 108 | }, 109 | "recipient": { 110 | "type": [ 111 | "null", 112 | "string" 113 | ] 114 | }, 115 | "destination": { 116 | "type": [ 117 | "null", 118 | "string" 119 | ] 120 | }, 121 | "automatic": { 122 | "type": [ 123 | "null", 124 | "boolean" 125 | ] 126 | }, 127 | "object": { 128 | "type": [ 129 | "null", 130 | "string" 131 | ] 132 | }, 133 | "currency": { 134 | "type": [ 135 | "null", 136 | "string" 137 | ] 138 | }, 139 | "transfer_group": { 140 | "type": [ 141 | "null", 142 | "string" 143 | ] 144 | }, 145 | "arrival_date": { 146 | "type": [ 147 | "null", 148 | "string" 149 | ], 150 | "format": "date-time" 151 | }, 152 | "description": { 153 | "type": [ 154 | "null", 155 | "string" 156 | ] 157 | }, 158 | "updated": { 159 | "type": [ 160 | "null", 161 | "string" 162 | ], 163 | "format": "date-time" 164 | } 165 | }, 166 | "type": [ 167 | "null", 168 | "object" 169 | ] 170 | } 171 | -------------------------------------------------------------------------------- /tests/unittests/test_deleted_invoice_line_item.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | from tap_stripe import new_list 3 | import unittest 4 | from stripe.error import InvalidRequestError 5 | from stripe.api_resources.list_object import ListObject 6 | 7 | # raise 'no such invoice item' error 8 | def raise_no_such_invoice_error(*args, **kwargs): 9 | raise InvalidRequestError('Request req_test123: No such invoice item: \'ii_test123\'', {}) 10 | 11 | # raise 'not found' error 12 | def raise_not_found_error(*args, **kwargs): 13 | raise InvalidRequestError('Not Found', {}) 14 | 15 | # raise other error 16 | def raise_other_error(*args, **kwargs): 17 | raise Exception('Not Found for URL: https://api.stripe.com/v1/test') 18 | 19 | @mock.patch('tap_stripe.ListObject._request') 20 | class DeletedInvoiceLineItem(unittest.TestCase): 21 | """ 22 | Test cases for verifying we log 'warning' in case of deleted invoice item call 23 | """ 24 | 25 | @mock.patch('tap_stripe.LOGGER.warning') 26 | def test_deleted_invoice_line_item_API_call(self, mocked_warn, mocked_request): 27 | """ 28 | Test case for verifying we skip deleted invoice line item API call and log 'warning' 29 | """ 30 | 31 | # mock request and raise 'InvalidRequestError' containing 'No such invoice item' in error message 32 | mocked_request.side_effect = raise_no_such_invoice_error 33 | 34 | # create 'ListObject' object 35 | list_obj = ListObject() 36 | # set dummy url 37 | list_obj['url'] = 'https://api.stripe.com/' 38 | 39 | # function call 40 | resp = new_list(list_obj) 41 | 42 | # verify the 'LOGGER.warning' was called with expected message 43 | mocked_warn.assert_called_with('%s. 
Currently, skipping this invoice line item call.', 'Request req_test123: No such invoice item: \'ii_test123\'') 44 | 45 | def test_not_found_InvalidRequestError(self, mocked_request): 46 | """ 47 | Test case for verifying we raise 'InvalidRequestError' not containing 'No such invoice item' in the error message 48 | """ 49 | 50 | # mock request and raise 'InvalidRequestError' containing any error message but not 'No such invoice item' 51 | mocked_request.side_effect = raise_not_found_error 52 | 53 | # create 'ListObject' object 54 | list_obj = ListObject() 55 | # set dummy url 56 | list_obj['url'] = 'https://api.stripe.com/' 57 | 58 | # verify we raise an error when calling the 'new_list' function 59 | with self.assertRaises(InvalidRequestError) as e: 60 | resp = new_list(list_obj) 61 | 62 | # verify error message 63 | self.assertEqual(str(e.exception), 'Not Found') 64 | 65 | def test_other_than_InvalidRequestError_error(self, mocked_request): 66 | """ 67 | Test case for verifying we raise any error not containing 'No such invoice item' in the error message 68 | """ 69 | 70 | # mock request and raise 'Exception' 71 | mocked_request.side_effect = raise_other_error 72 | 73 | # create 'ListObject' object 74 | list_obj = ListObject() 75 | # set dummy url 76 | list_obj['url'] = 'https://api.stripe.com/' 77 | 78 | # verify we raise an error when calling the 'new_list' function 79 | with self.assertRaises(Exception) as e: 80 | resp = new_list(list_obj) 81 | 82 | # verify error message 83 | self.assertEqual(str(e.exception), 'Not Found for URL: https://api.stripe.com/v1/test') 84 | -------------------------------------------------------------------------------- /tests/unittests/test_lookback_evaluation.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from unittest import mock 3 | from tap_stripe import IMMUTABLE_STREAM_LOOKBACK, evaluate_start_time_based_on_lookback, Context, utils 4 | 5 | class TestLookbackEvaluation(unittest.TestCase): 6 | 7 | def test_lookback_evaluation_for_no_bookmark_events(self): 8 | '''Verify the sync starts from start date when no bookmark is passed for the events stream''' 9 | stream_name = "events" 10 | replication_key = "created" 11 | config = { "client_secret": "test_secret", "account_id": "test_account", "start_date": "2022-03-30T00:00:00"} 12 | Context.config = config 13 | Context.state = {} 14 | start_window = evaluate_start_time_based_on_lookback(1648599000, IMMUTABLE_STREAM_LOOKBACK) 15 | # Verify that the start_window is start_date 16 | self.assertEqual(start_window, utils.strptime_to_utc(Context.config['start_date']).timestamp()) 17 | 18 | def test_lookback_evaluation_for_no_bookmark_balance_transactions(self): 19 | '''Verify the sync starts from start date when no bookmark is passed for the balance transactions stream''' 20 | stream_name = "balance_transactions" 21 | replication_key = "created" 22 | config = { "client_secret": "test_secret", "account_id": "test_account", "start_date": "2022-03-30T00:00:00"} 23 | Context.config = config 24 | Context.state = {} 25 | start_window = evaluate_start_time_based_on_lookback(1648599000, IMMUTABLE_STREAM_LOOKBACK) 26 | # Verify that the start_window is start_date 27 | self.assertEqual(start_window, utils.strptime_to_utc(Context.config['start_date']).timestamp()) 28 | 29 | 30 | @mock.patch("tap_stripe.dt_to_epoch") 31 | def test_lookback_evaluation_when_bookmark_present_events(self, mock_now): 32 | '''Verify the sync starts from bookmark - lookback when bookmark is passed for
the events stream''' 33 | stream_name = "events" 34 | replication_key = "created" 35 | now_time = 1648739354 36 | state = {'bookmarks': {'events': {'created': 1648739554}}} # 2022-03-31T08:42:34 37 | config = { "client_secret": "test_secret", "account_id": "test_account", "start_date": "2022-03-30T00:00:00"} 38 | Context.config = config 39 | Context.state = state 40 | start_window = evaluate_start_time_based_on_lookback(1648739554, IMMUTABLE_STREAM_LOOKBACK) 41 | # Verify that the start_window is bookmark - lookback 42 | self.assertEqual(start_window, Context.state['bookmarks'][stream_name][replication_key] - IMMUTABLE_STREAM_LOOKBACK) 43 | 44 | @mock.patch("tap_stripe.dt_to_epoch") 45 | def test_lookback_evaluation_when_bookmark_present_balance_transactions(self, mock_now): 46 | '''Verify the sync starts from bookmark - lookback when bookmark is passed for the balance transactions stream''' 47 | stream_name = "balance_transactions" 48 | replication_key = "created" 49 | state = {'bookmarks': {'balance_transactions': {'created': 1648739554}}} # 2022-03-31T08:42:34 50 | config = { "client_secret": "test_secret", "account_id": "test_account", "start_date": "2022-03-30T00:00:00"} 51 | Context.config = config 52 | Context.state = state 53 | start_window = evaluate_start_time_based_on_lookback(1648739554, IMMUTABLE_STREAM_LOOKBACK) 54 | # Verify that the start_window is bookmark - lookback 55 | self.assertEqual(start_window, Context.state['bookmarks'][stream_name][replication_key] - IMMUTABLE_STREAM_LOOKBACK) 56 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2.1 2 | 3 | orbs: 4 | slack: circleci/slack@3.4.2 5 | 6 | executors: 7 | tap_tester: 8 | 9 | jobs: 10 | build: 11 | docker: 12 | - image: 218546966473.dkr.ecr.us-east-1.amazonaws.com/circle-ci:stitch-tap-tester-uv 13 | steps: 14 | - checkout 15 | - run: 16 | name: 'Setup virtual env' 17 | command: | 18 | uv venv --python 3.12 /usr/local/share/virtualenvs/tap-stripe 19 | source /usr/local/share/virtualenvs/tap-stripe/bin/activate 20 | uv pip install -U pip setuptools 21 | uv pip install .[test] 22 | - run: 23 | name: 'pylint' 24 | command: | 25 | source /usr/local/share/virtualenvs/tap-stripe/bin/activate 26 | echo "Will ignore the following errors $PYLINT_DISABLE_LIST" 27 | pylint tap_stripe -d "$PYLINT_DISABLE_LIST,line-too-long,too-many-lines,missing-module-docstring,missing-class-docstring,missing-function-docstring,too-many-branches,broad-exception-raised,consider-using-f-string,no-else-return,too-many-arguments" 28 | - run: 29 | name: 'JSON Validator' 30 | command: | 31 | source /usr/local/share/virtualenvs/tap-tester/bin/activate 32 | stitch-validate-json tap_stripe/schemas/*.json 33 | stitch-validate-json tap_stripe/schemas/shared/*.json 40 | - run: 41 | name: 'Unit Tests' 42 | command: | 43 | source /usr/local/share/virtualenvs/tap-stripe/bin/activate 44 | uv pip install pytest coverage parameterized 45 | coverage run -m pytest tests/unittests 46 | coverage html 47 | - store_test_results: 48 | path: test_output/report.xml 49 | - store_artifacts: 50 | path: htmlcov 51 | - run: 52 | # TODO Instead of using always steps to make reading the output 53 | #
easier, emit an xUnit report and let Circle tell you what 54 | # failed. 55 | name: 'Integration Testing' 56 | no_output_timeout: 45m 57 | command: | 58 | source /usr/local/share/virtualenvs/tap-tester/bin/activate 59 | uv pip install --upgrade awscli 60 | aws s3 cp s3://com-stitchdata-dev-deployment-assets/environments/tap-tester/tap_tester_sandbox /usr/local/share/virtualenvs/dev_env.sh 61 | source /usr/local/share/virtualenvs/dev_env.sh 62 | uv pip install 'stripe==5.5.0' 63 | mkdir /tmp/${CIRCLE_PROJECT_REPONAME} 64 | export STITCH_CONFIG_DIR=/tmp/${CIRCLE_PROJECT_REPONAME} 65 | run-test --tap=${CIRCLE_PROJECT_REPONAME} tests 66 | - slack/notify-on-failure: 67 | only_for_branches: master 68 | - store_artifacts: 69 | path: /tmp/tap-stripe 70 | 71 | workflows: 72 | commit: 73 | jobs: 74 | - build: 75 | context: 76 | - circleci-user 77 | - tier-1-tap-user 78 | build_daily: 79 | triggers: 80 | - schedule: 81 | cron: "0 1 * * *" 82 | filters: 83 | branches: 84 | only: 85 | - master 86 | jobs: 87 | - build: 88 | context: 89 | - circleci-user 90 | - tier-1-tap-user 91 | -------------------------------------------------------------------------------- /tap_stripe/schemas/products.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": [ 3 | "null", 4 | "object" 5 | ], 6 | "properties": { 7 | "id": { 8 | "type": [ 9 | "null", 10 | "string" 11 | ] 12 | }, 13 | "object": { 14 | "type": [ 15 | "null", 16 | "string" 17 | ] 18 | }, 19 | "updated_by_event_type": { 20 | "type": [ 21 | "null", 22 | "string" 23 | ] 24 | }, 25 | "active": { 26 | "type": [ 27 | "null", 28 | "boolean" 29 | ] 30 | }, 31 | "attributes": { 32 | "type": [ 33 | "null", 34 | "array" 35 | ], 36 | "items": { 37 | "type": [ 38 | "null", 39 | "string" 40 | ] 41 | } 42 | }, 43 | "caption": { 44 | "type": [ 45 | "null", 46 | "string" 47 | ] 48 | }, 49 | "created": { 50 | "type": [ 51 | "null", 52 | "string" 53 | ], 54 | "format": "date-time" 55 | }, 56 | "deactivate_on": { 57 | "type": [ 58 | "null", 59 | "array" 60 | ], 61 | "items": { 62 | "type": [ 63 | "null", 64 | "string" 65 | ] 66 | } 67 | }, 68 | "description": { 69 | "type": [ 70 | "null", 71 | "string" 72 | ] 73 | }, 74 | "images": { 75 | "type": [ 76 | "null", 77 | "array" 78 | ], 79 | "items": { 80 | "type": [ 81 | "null", 82 | "string" 83 | ] 84 | } 85 | }, 86 | "livemode": { 87 | "type": [ 88 | "null", 89 | "boolean" 90 | ] 91 | }, 92 | "metadata": { 93 | "type": [ 94 | "null", 95 | "object" 96 | ], 97 | "properties": {} 98 | }, 99 | "name": { 100 | "type": [ 101 | "null", 102 | "string" 103 | ] 104 | }, 105 | "package_dimensions": { 106 | "type": [ 107 | "null", 108 | "object" 109 | ], 110 | "properties": { 111 | "width": { 112 | "type": [ 113 | "null", 114 | "number" 115 | ] 116 | }, 117 | "length": { 118 | "type": [ 119 | "null", 120 | "number" 121 | ] 122 | }, 123 | "weight": { 124 | "type": [ 125 | "null", 126 | "number" 127 | ] 128 | }, 129 | "height": { 130 | "type": [ 131 | "null", 132 | "number" 133 | ] 134 | } 135 | } 136 | }, 137 | "shippable": { 138 | "type": [ 139 | "null", 140 | "boolean" 141 | ] 142 | }, 143 | "statement_descriptor": { 144 | "type": [ 145 | "null", 146 | "string" 147 | ] 148 | }, 149 | "type": { 150 | "type": [ 151 | "null", 152 | "string" 153 | ] 154 | }, 155 | "unit_label": { 156 | "type": [ 157 | "null", 158 | "string" 159 | ] 160 | }, 161 | "updated": { 162 | "type": [ 163 | "null", 164 | "string" 165 | ], 166 | "format": "date-time" 167 | }, 168 | "url": { 169 | "type": [ 170 | "null", 
171 | "string" 172 | ] 173 | }, 174 | "tax_code": { 175 | "type": [ 176 | "null", 177 | "string" 178 | ] 179 | } 180 | } 181 | } 182 | -------------------------------------------------------------------------------- /tests/test_automatic_fields.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test that with no fields selected for a stream automatic fields are still replicated 3 | """ 4 | 5 | from tap_tester import runner, menagerie, connections 6 | 7 | from base import BaseTapTest 8 | from utils import create_object 9 | 10 | 11 | class MinimumSelectionTest(BaseTapTest): 12 | """Test that with no fields selected for a stream automatic fields are still replicated""" 13 | 14 | @staticmethod 15 | def name(): 16 | return "tt_stripe_auto_fields" 17 | 18 | def test_run(self): 19 | """ 20 | Verify that for each stream you can get multiple pages of data 21 | when no fields are selected and only the automatic fields are replicated. 22 | 23 | PREREQUISITE 24 | For EACH stream add enough data that you surpass the limit of a single 25 | fetch of data. For instance if you have a limit of 250 records ensure 26 | that 251 (or more) records have been posted for that stream. 27 | """ 28 | conn_id = connections.ensure_connection(self) 29 | self.conn_id = conn_id 30 | streams_to_create = { 31 | # "balance_transactions", # should be created implicity with a create in the payouts or charges streams 32 | "charges", 33 | "coupons", 34 | "customers", 35 | "invoice_items", 36 | "invoice_line_items", # this is created implicity by invoices, it just creates another invoice 37 | "invoices", # this will create an invoice_item 38 | "payouts", 39 | "plans", 40 | "payment_intents", 41 | "products", 42 | "subscription_items", 43 | "subscriptions", # this will create a new plan and payment method 44 | "transfers", 45 | } 46 | untested_streams = { 47 | "payout_transactions", 48 | "disputes" 49 | } 50 | new_objects = { 51 | stream: create_object(stream) 52 | for stream in streams_to_create.difference() 53 | } 54 | 55 | 56 | # Select all streams and no fields within streams 57 | # IF THERE ARE NO AUTOMATIC FIELDS FOR A STREAM 58 | # WE WILL NEED TO UPDATE THE BELOW TO SELECT ONE 59 | found_catalogs = self.run_and_verify_check_mode(conn_id) 60 | self.select_all_streams_and_fields(conn_id, found_catalogs, select_all_fields=False) 61 | 62 | # Run a sync job using orchestrator 63 | record_count_by_stream = self.run_and_verify_sync(conn_id) 64 | 65 | actual_fields_by_stream = runner.examine_target_output_for_fields() 66 | 67 | for stream in self.expected_streams().difference(untested_streams): 68 | with self.subTest(stream=stream): 69 | 70 | # verify that you get some records for each stream 71 | # SKIP THIS ASSERTION FOR STREAMS WHERE YOU CANNOT GET 72 | # MORE THAN 1 PAGE OF DATA IN THE TEST ACCOUNT 73 | self.assertGreater( 74 | record_count_by_stream.get(stream, -1), 0, 75 | msg="The number of records is not over the stream max limit") 76 | 77 | # verify that only the automatic fields are sent to the target 78 | actual = actual_fields_by_stream.get(stream) or set() 79 | expected = self.expected_automatic_fields().get(stream, set()) 80 | self.assertEqual( 81 | actual, expected, 82 | msg=("The fields sent to the target are not the automatic fields. 
Expected: {}, Actual: {}" 83 | .format(actual, expected)) 84 | ) 85 | -------------------------------------------------------------------------------- /spikes/sdk/sdk.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import sys 4 | 5 | stripe_secret_key = sys.argv[1] 6 | 7 | import stripe 8 | 9 | stripe.api_key = stripe_secret_key 10 | 11 | account = stripe.Account.retrieve() 12 | 13 | import ipdb; ipdb.set_trace() 14 | 1+1 15 | 16 | # Retrieve a couple sample objects 17 | 18 | [x for x in stripe.Charge.list().auto_paging_iter()] 19 | 20 | # expanding 'data.customer'. Probably not directly useful as sourcerer 21 | # doesn't do this (I don't think) 22 | # ipdb> [x.customer.keys() for x in stripe.Charge.list(expand=['data.customer']).auto_paging_iter() if x.customer] 23 | # [dict_keys(['invoice_prefix', 'default_source', 'sources', 'subscriptions', 'default_card', 'object', 'id', 'livemode', 'created', 'metadata', 'shipping', 'email', 'description', 'tax_info', 'currency', 'tax_info_verification', 'cards', 'delinquent', 'account_balance', 'discount']), dict_keys(['invoice_prefix', 'default_source', 'sources', 'subscriptions', 'default_card', 'object', 'id', 'livemode', 'created', 'metadata', 'shipping', 'email', 'description', 'tax_info', 'currency', 'tax_info_verification', 'cards', 'delinquent', 'account_balance', 'discount']), dict_keys(['invoice_prefix', 'default_source', 'sources', 'subscriptions', 'default_card', 'object', 'id', 'livemode', 'created', 'metadata', 'shipping', 'email', 'description', 'tax_info', 'currency', 'tax_info_verification', 'cards', 'delinquent', 'account_balance', 'discount']), dict_keys(['invoice_prefix', 'default_source', 'sources', 'subscriptions', 'default_card', 'object', 'id', 'livemode', 'created', 'metadata', 'shipping', 'email', 'description', 'tax_info', 'currency', 'tax_info_verification', 'cards', 'delinquent', 'account_balance', 'discount'])] 24 | # ipdb> [x.customer for x in stripe.Charge.list().auto_paging_iter() if x.customer] 25 | # ['cus_6ZXlps8Nz326Cf', 'cus_6ZXlps8Nz326Cf', 'cus_6ZXlps8Nz326Cf', 'cus_6ZXlps8Nz326Cf'] 26 | 27 | # This call was made with stripe.api_key != the account's api_key 28 | # ipdb> stripe.api_key 29 | # 30 | # ipdb> stripe.Account.retrieve().id 31 | # 'acct_14zvmQDcBSxinnbL' # The Stitch Dev Account 32 | # ipdb> stripe.Account.retrieve("acct_15VQRNKi8yTvIJwI").id # account 1742 conn 27092 33 | # 'acct_15VQRNKi8yTvIJwI' 34 | # This one is the test account 35 | # ipdb> len(stripe.Charge.list()) 36 | # 7 37 | # This one is the 1742/27092 account 38 | # ipdb> len(stripe.Charge.list(stripe_account="acct_15VQRNKi8yTvIJwI")) 39 | # 2 40 | 41 | # We can use gte filtering like so 42 | # ipdb> sorted([x.created for x in stripe.Event.list(stripe_account="acct_15VQRNKi8yTvIJwI").data]) 43 | # [1424111008, 1424111009, 1424123398, 1424123398, 1424583077] 44 | # ipdb> sorted([x.created for x in stripe.Event.list(stripe_account="acct_15VQRNKi8yTvIJwI", created={"gte": 1424123398}).data]) 45 | # [1424123398, 1424123398, 1424583077] 46 | 47 | # Here's how we do object id pagination 48 | 49 | # ipdb> [c.stripe_id for c in stripe.Charge.list()] 50 | # ['ch_1DGvHKDcBSxinnbL8cqeaClE', 'ch_1CncCSDcBSxinnbLGAg0FgCS', 'ch_1AfIBEDcBSxinnbLXEPmc6Yc', 'ch_16OI9wDcBSxinnbLb7d17FuT', 'ch_16MOjADcBSxinnbLMmmwvRi0', 'ch_16Jo1HDcBSxinnbLDT7Mk8hO', 'ch_16JnhSDcBSxinnbLK5KrZm2F', 'ch_16AOSQDcBSxinnbLaqa4dHqZ'] 51 | # ipdb> [c.stripe_id for c in 
stripe.Charge.list(starting_after="ch_16MOjADcBSxinnbLMmmwvRi0")] 52 | # ['ch_16Jo1HDcBSxinnbLDT7Mk8hO', 'ch_16JnhSDcBSxinnbLK5KrZm2F', 'ch_16AOSQDcBSxinnbLaqa4dHqZ'] 53 | 54 | # Checking how many accounts we can talk to 55 | # ipdb> stripe.Account.retrieve().id 56 | # 'acct_14zvmQDcBSxinnbL' # The Stitch Dev Account 57 | # ipdb> stripe.api_key 58 | # 59 | # ipdb> stripe.api_key = 60 | # ipdb> stripe.Account.retrieve().id 61 | # 'acct_14zvmQDcBSxinnbL' # The Stitch Dev Account again 62 | -------------------------------------------------------------------------------- /tap_stripe/schemas/shared/discount.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": [ 3 | "null", 4 | "object" 5 | ], 6 | "properties": { 7 | "end": { 8 | "type": [ 9 | "null", 10 | "string" 11 | ], 12 | "format": "date-time" 13 | }, 14 | "coupon": { 15 | "type": [ 16 | "null", 17 | "object" 18 | ], 19 | "properties": { 20 | "metadata": { 21 | "type": [ 22 | "null", 23 | "object" 24 | ], 25 | "properties": {} 26 | }, 27 | "valid": { 28 | "type": [ 29 | "null", 30 | "boolean" 31 | ] 32 | }, 33 | "livemode": { 34 | "type": [ 35 | "null", 36 | "boolean" 37 | ] 38 | }, 39 | "amount_off": { 40 | "type": [ 41 | "null", 42 | "integer" 43 | ] 44 | }, 45 | "redeem_by": { 46 | "type": [ 47 | "null", 48 | "string" 49 | ], 50 | "format": "date-time" 51 | }, 52 | "duration_in_months": { 53 | "type": [ 54 | "null", 55 | "integer" 56 | ] 57 | }, 58 | "percent_off_precise": { 59 | "type": [ 60 | "null", 61 | "number" 62 | ] 63 | }, 64 | "max_redemptions": { 65 | "type": [ 66 | "null", 67 | "integer" 68 | ] 69 | }, 70 | "currency": { 71 | "type": [ 72 | "null", 73 | "string" 74 | ] 75 | }, 76 | "name": { 77 | "type": [ 78 | "null", 79 | "string" 80 | ] 81 | }, 82 | "times_redeemed": { 83 | "type": [ 84 | "null", 85 | "integer" 86 | ] 87 | }, 88 | "id": { 89 | "type": [ 90 | "null", 91 | "string" 92 | ] 93 | }, 94 | "duration": { 95 | "type": [ 96 | "null", 97 | "string" 98 | ] 99 | }, 100 | "object": { 101 | "type": [ 102 | "null", 103 | "string" 104 | ] 105 | }, 106 | "percent_off": { 107 | "type": [ 108 | "null", 109 | "number" 110 | ] 111 | }, 112 | "created": { 113 | "type": [ 114 | "null", 115 | "string" 116 | ], 117 | "format": "date-time" 118 | } 119 | } 120 | }, 121 | "customer": { 122 | "type": [ 123 | "null", 124 | "string" 125 | ] 126 | }, 127 | "start": { 128 | "type": [ 129 | "null", 130 | "string" 131 | ], 132 | "format": "date-time" 133 | }, 134 | "object": { 135 | "type": [ 136 | "null", 137 | "string" 138 | ] 139 | }, 140 | "subscription": { 141 | "type": [ 142 | "null", 143 | "string" 144 | ] 145 | }, 146 | "checkout_session": { 147 | "type": [ 148 | "null", 149 | "string" 150 | ] 151 | }, 152 | "id": { 153 | "type": [ 154 | "null", 155 | "string" 156 | ] 157 | }, 158 | "invoice": { 159 | "type": [ 160 | "null", 161 | "string" 162 | ] 163 | }, 164 | "invoice_item": { 165 | "type": [ 166 | "null", 167 | "string" 168 | ] 169 | }, 170 | "promotion_code": { 171 | "type": [ 172 | "null", 173 | "string" 174 | ] 175 | } 176 | } 177 | } 178 | -------------------------------------------------------------------------------- /tap_stripe/schemas/shared/plan.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": [ 3 | "null", 4 | "object", 5 | "string" 6 | ], 7 | "properties": { 8 | "nickname": { 9 | "type": [ 10 | "null", 11 | "string" 12 | ] 13 | }, 14 | "updated_by_event_type": { 15 | "type": [ 16 | "null", 17 | "string" 18 | ] 19 
| }, 20 | "amount_decimal": { 21 | "type": [ 22 | "null", 23 | "string" 24 | ], 25 | "format": "singer.decimal" 26 | }, 27 | "tiers": { 28 | "type": [ 29 | "null", 30 | "array" 31 | ], 32 | "items": { 33 | "type": [ 34 | "null", 35 | "string", 36 | "object" 37 | ], 38 | "properties": { 39 | "flat_amount": { 40 | "type": [ 41 | "null", 42 | "integer" 43 | ] 44 | }, 45 | "unit_amount": { 46 | "type": [ 47 | "null", 48 | "integer" 49 | ] 50 | }, 51 | "up_to": { 52 | "type": [ 53 | "null", 54 | "integer" 55 | ] 56 | } 57 | } 58 | } 59 | }, 60 | "object": { 61 | "type": [ 62 | "null", 63 | "string" 64 | ] 65 | }, 66 | "aggregate_usage": { 67 | "type": [ 68 | "null", 69 | "string" 70 | ] 71 | }, 72 | "created": { 73 | "type": [ 74 | "null", 75 | "string" 76 | ], 77 | "format": "date-time" 78 | }, 79 | "statement_description": { 80 | "type": [ 81 | "null", 82 | "string" 83 | ] 84 | }, 85 | "product": { 86 | "type": [ 87 | "null", 88 | "string" 89 | ] 90 | }, 91 | "statement_descriptor": { 92 | "type": [ 93 | "null", 94 | "string" 95 | ] 96 | }, 97 | "interval_count": { 98 | "type": [ 99 | "null", 100 | "integer" 101 | ] 102 | }, 103 | "transform_usage": { 104 | "type": [ 105 | "null", 106 | "object" 107 | ] 108 | }, 109 | "name": { 110 | "type": [ 111 | "null", 112 | "string" 113 | ] 114 | }, 115 | "amount": { 116 | "type": [ 117 | "null", 118 | "integer" 119 | ] 120 | }, 121 | "interval": { 122 | "type": [ 123 | "null", 124 | "string" 125 | ] 126 | }, 127 | "id": { 128 | "type": [ 129 | "null", 130 | "string" 131 | ] 132 | }, 133 | "trial_period_days": { 134 | "type": [ 135 | "null", 136 | "integer" 137 | ] 138 | }, 139 | "usage_type": { 140 | "type": [ 141 | "null", 142 | "string" 143 | ] 144 | }, 145 | "active": { 146 | "type": [ 147 | "null", 148 | "boolean" 149 | ] 150 | }, 151 | "tiers_mode": { 152 | "type": [ 153 | "null", 154 | "string" 155 | ] 156 | }, 157 | "billing_scheme": { 158 | "type": [ 159 | "null", 160 | "string" 161 | ] 162 | }, 163 | "livemode": { 164 | "type": [ 165 | "null", 166 | "boolean" 167 | ] 168 | }, 169 | "currency": { 170 | "type": [ 171 | "null", 172 | "string" 173 | ] 174 | }, 175 | "metadata": { 176 | "type": [ 177 | "null", 178 | "object" 179 | ], 180 | "properties": {} 181 | }, 182 | "updated": { 183 | "type": [ 184 | "null", 185 | "string" 186 | ], 187 | "format": "date-time" 188 | } 189 | } 190 | } 191 | -------------------------------------------------------------------------------- /tests/unittests/test_request_timeout.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from unittest import mock 3 | from tap_stripe import Context, configure_stripe_client 4 | 5 | class TestRequestTimeoutValue(unittest.TestCase): 6 | ''' 7 | Test that request timeout parameter works properly in various cases 8 | ''' 9 | @mock.patch('stripe.http_client.RequestsClient') 10 | @mock.patch('tap_stripe.apply_request_timer_to_client') 11 | @mock.patch('stripe.Account.retrieve') 12 | def test_config_provided_request_timeout(self, mock_retrieve, mock_req_timer, mock_client): 13 | """ 14 | Unit tests to ensure that request timeout is set based on config value 15 | """ 16 | config = { "client_secret": "test_secret", "account_id": "test_account", "start_date": "test_start_date", "request_timeout": 100} 17 | Context.config = config 18 | configure_stripe_client() 19 | # Verify that the client is called with config provided request timeout 20 | mock_client.assert_called_with(timeout=100.0) 21 | 22 | 
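# NOTE (illustrative sketch, not the tap's shipped implementation): the
# behavior these tests pin down is consistent with coercing the configured
# value to a float and falling back to a 300-second default before the
# Stripe client is built, along the lines of:
#
#     request_timeout = Context.config.get('request_timeout')
#     timeout = float(request_timeout) if request_timeout else 300.0
#     client = stripe.http_client.RequestsClient(timeout=timeout)
#
# Only `Context.config`, the `request_timeout` key, and `RequestsClient`
# appear in the tests themselves; the helper variables are assumptions.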
@mock.patch('stripe.http_client.RequestsClient') 23 | @mock.patch('tap_stripe.apply_request_timer_to_client') 24 | @mock.patch('stripe.Account.retrieve') 25 | def test_default_value_request_timeout(self, mock_retrieve, mock_req_timer, mock_client): 26 | """ 27 | Unit tests to ensure that the request timeout is set to the default value 28 | """ 29 | config = {"client_secret": "test_secret", "account_id": "test_account", "start_date": "test_start_date"} 30 | Context.config = config 31 | configure_stripe_client() 32 | # Verify that the client is called with default request timeout 33 | mock_client.assert_called_with(timeout=300.0) 34 | 35 | @mock.patch('stripe.http_client.RequestsClient') 36 | @mock.patch('tap_stripe.apply_request_timer_to_client') 37 | @mock.patch('stripe.Account.retrieve') 38 | def test_config_provided_empty_request_timeout(self, mock_retrieve, mock_req_timer, mock_client): 39 | """ 40 | Unit tests to ensure that the request timeout is set to the default value if an empty value is given in config 41 | """ 42 | config = {"client_secret": "test_secret", "account_id": "test_account", "request_timeout": ""} 43 | Context.config = config 44 | configure_stripe_client() 45 | # Verify that the client is called with default request timeout 46 | mock_client.assert_called_with(timeout=300.0) 47 | 48 | @mock.patch('stripe.http_client.RequestsClient') 49 | @mock.patch('tap_stripe.apply_request_timer_to_client') 50 | @mock.patch('stripe.Account.retrieve') 51 | def test_config_provided_string_request_timeout(self, mock_retrieve, mock_req_timer, mock_client): 52 | """ 53 | Unit tests to ensure that the request timeout is set based on the config string value 54 | """ 55 | config = {"client_secret": "test_secret", "account_id": "test_account", "request_timeout": "100"} 56 | Context.config = config 57 | configure_stripe_client() 58 | # Verify that the client is called with config provided request timeout 59 | mock_client.assert_called_with(timeout=100.0) 60 | 61 | @mock.patch('stripe.http_client.RequestsClient') 62 | @mock.patch('tap_stripe.apply_request_timer_to_client') 63 | @mock.patch('stripe.Account.retrieve') 64 | def test_config_provided_float_request_timeout(self, mock_retrieve, mock_req_timer, mock_client): 65 | """ 66 | Unit tests to ensure that the request timeout is set based on the config float value 67 | """ 68 | config = {"client_secret": "test_secret", "account_id": "test_account", "request_timeout": 100.8} 69 | Context.config = config 70 | configure_stripe_client() 71 | # Verify that the client is called with config provided float request timeout 72 | mock_client.assert_called_with(timeout=100.8) -------------------------------------------------------------------------------- /tests/test_full_replication.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test tap gets all records for streams with full replication 3 | """ 4 | import json 5 | 6 | from tap_tester import menagerie, runner, connections 7 | 8 | from base import BaseTapTest 9 | 10 | 11 | class FullReplicationTest(BaseTapTest): 12 | """Test tap gets all records for streams with full replication""" 13 | 14 | @staticmethod 15 | def name(): 16 | return "tt_stripe_full_table" 17 | 18 | def test_run(self): 19 | """ 20 | Verify that a bookmark doesn't exist for the stream 21 | Verify that the second sync includes the same number or more records than the first sync 22 | Verify that all records in the first sync are included in the second sync 23 | Verify that the sync only sent records to the target for
selected streams (catalogs) 24 | 25 | PREREQUISITE 26 | For EACH stream that is fully replicated there are multiple rows of data with 27 | different values for the replication key 28 | """ 29 | conn_id = connections.ensure_connection(self) 30 | self.conn_id = conn_id 31 | 32 | # Select all streams and no fields within streams 33 | found_catalogs = self.run_and_verify_check_mode(conn_id) 34 | full_streams = {key for key, value in self.expected_replication_method().items() 35 | if value == self.FULL} 36 | our_catalogs = [catalog for catalog in found_catalogs if 37 | catalog.get('tap_stream_id') in full_streams] 38 | self.select_all_streams_and_fields(conn_id, our_catalogs, select_all_fields=True) 39 | 40 | # Run a sync job using orchestrator 41 | first_sync_record_count = self.run_and_verify_sync(conn_id) 42 | 43 | # verify that the sync only sent records to the target for selected streams (catalogs) 44 | self.assertEqual(set(first_sync_record_count.keys()), full_streams) 45 | 46 | first_sync_state = menagerie.get_state(conn_id) 47 | 48 | # Get the set of records from a first sync 49 | first_sync_records = runner.get_records_from_target_output() 50 | 51 | # Run a second sync job using orchestrator 52 | second_sync_record_count = self.run_and_verify_sync(conn_id) 53 | 54 | # Get the set of records from a second sync 55 | second_sync_records = runner.get_records_from_target_output() 56 | 57 | # THIS MAKES AN ASSUMPTION THAT CHILD STREAMS DO NOT NEED TESTING. 58 | # ADJUST IF NECESSARY 59 | for stream in full_streams.difference(self.child_streams()): 60 | with self.subTest(stream=stream): 61 | 62 | # verify there are no bookmark values from state 63 | state_value = first_sync_state.get("bookmarks", {}).get(stream) 64 | self.assertIsNone(state_value) 65 | 66 | # verify that there is more than 1 record of data - setup necessary 67 | self.assertGreater(first_sync_record_count.get(stream, 0), 1, 68 | msg="Data isn't set up to be able to test full sync") 69 | 70 | # verify that you get the same or more data the 2nd time around 71 | self.assertGreaterEqual( 72 | second_sync_record_count.get(stream, 0), 73 | first_sync_record_count.get(stream, 0), 74 | msg="second sync didn't have more records, full sync not verified") 75 | 76 | # verify all data from 1st sync included in 2nd sync 77 | first_data = [record["data"] for record 78 | in first_sync_records.get(stream, {}).get("messages", {"data": {}})] 79 | second_data = [record["data"] for record 80 | in second_sync_records.get(stream, {}).get("messages", {"data": {}})] 81 | 82 | same_records = 0 83 | for first_record in first_data: 84 | first_value = json.dumps(first_record, sort_keys=True) 85 | 86 | for compare_record in second_data: 87 | compare_value = json.dumps(compare_record, sort_keys=True) 88 | 89 | if first_value == compare_value: 90 | second_data.remove(compare_record) 91 | same_records += 1 92 | break 93 | 94 | self.assertEqual(len(first_data), same_records, 95 | msg="Not all data from the first sync was in the second sync") 96 | 97 | 98 | -------------------------------------------------------------------------------- /tests/unittests/test_get_and_write_bookmark.py: -------------------------------------------------------------------------------- 1 | import tap_stripe 2 | import unittest 3 | from unittest import mock 4 | 5 | class TestGetBookmarks(unittest.TestCase): 6 | 7 | @mock.patch("tap_stripe.singer.get_bookmark") 8 | def test_get_bookmark_for_invoices(self, mocked_get_bookmark): 9 | ''' 10 | Verify that invoices use `date` field to get
bookmark and not a replication key `created` for invoices 11 | ''' 12 | # Call get_bookmark_for_stream for invoices with `created` replication key 13 | tap_stripe.get_bookmark_for_stream("invoices", "created") 14 | 15 | # Verify that get_bookmark is called with 'date' field 16 | args, kwargs = mocked_get_bookmark.call_args 17 | self.assertEqual(args[1], "invoices") 18 | self.assertEqual(args[2], "date") 19 | 20 | @mock.patch("tap_stripe.singer.get_bookmark") 21 | def test_get_bookmark_for_invoice_line_items(self, mocked_get_bookmark): 22 | ''' 23 | Verify that invoice_line_items use `date` field to get bookmark and not a replication key `created` for invoice_line_items 24 | ''' 25 | # Call get_bookmark_for_stream for invoice_line_items with `created` replication key 26 | tap_stripe.get_bookmark_for_stream("invoice_line_items", "created") 27 | 28 | # Verify that get_bookmark is called with 'date' field 29 | args, kwargs = mocked_get_bookmark.call_args 30 | self.assertEqual(args[1], "invoice_line_items") 31 | self.assertEqual(args[2], "date") 32 | 33 | @mock.patch("tap_stripe.singer.get_bookmark") 34 | def test_get_bookmark_for_normal_streams(self, mocked_get_bookmark): 35 | ''' 36 | Verify that streams other than invoice and invoice_line_items use the passed replication key to get bookmark 37 | ''' 38 | # Call get_bookmark_for_stream for other test stream with `test_replication_key` replication key 39 | tap_stripe.get_bookmark_for_stream("test", "test_replication_key") 40 | 41 | # Verify that get_bookmark is called with 'test_replication_key' field which is passed to get_bookmark_for_stream() 42 | args, kwargs = mocked_get_bookmark.call_args 43 | self.assertEqual(args[1], "test") 44 | self.assertEqual(args[2], "test_replication_key") 45 | 46 | 47 | class TestWriteBookmarks(unittest.TestCase): 48 | 49 | @mock.patch("tap_stripe.singer.write_bookmark") 50 | def test_write_bookmark_for_invoices(self, mocked_write_bookmark): 51 | ''' 52 | Verify that invoices use `date` field to write bookmark and not a replication key `created` for invoices 53 | ''' 54 | # Call write_bookmark_for_stream for invoices with `created` replication key 55 | tap_stripe.write_bookmark_for_stream("invoices", "created", "bookmark_value") 56 | 57 | # Verify that write_bookmark is called with 'date' field 58 | args, kwargs = mocked_write_bookmark.call_args 59 | self.assertEqual(args[1], "invoices") 60 | self.assertEqual(args[2], "date") 61 | 62 | @mock.patch("tap_stripe.singer.write_bookmark") 63 | def test_write_bookmark_for_invoice_line_items(self, mocked_write_bookmark): 64 | ''' 65 | Verify that invoice_line_items use `date` field to write bookmark and not a replication key `created` for invoice_line_items 66 | ''' 67 | # Call write_bookmark_for_stream for invoice_line_items with `created` replication key 68 | tap_stripe.write_bookmark_for_stream("invoice_line_items", "created", "bookmark_value") 69 | 70 | # Verify that write_bookmark is called with 'date' field 71 | args, kwargs = mocked_write_bookmark.call_args 72 | self.assertEqual(args[1], "invoice_line_items") 73 | self.assertEqual(args[2], "date") 74 | 75 | @mock.patch("tap_stripe.singer.write_bookmark") 76 | def test_write_bookmark_for_normal_streams(self, mocked_write_bookmark): 77 | ''' 78 | Verify that streams other than invoice and invoice_line_items use the passed replication key to write bookmark 79 | ''' 80 | # Call write_bookmark_for_stream for other test stream with `test_replication_key` replication key 81 | tap_stripe.write_bookmark_for_stream("test",
"test_replication_key", "bookmark_value") 82 | 83 | # Verify that write_bookmark is called with 'test_replication_key' field which passed in write_bookmark_for_stream() 84 | args, kwargs = mocked_write_bookmark.call_args 85 | self.assertEqual(args[1], "test") 86 | self.assertEqual(args[2], "test_replication_key") 87 | -------------------------------------------------------------------------------- /tests/readme.md: -------------------------------------------------------------------------------- 1 | ## Instructions to make a tap-tester test 2 | 3 | ### General 4 | The tap-tester template is the basic set of tests for SaaS type taps. These tests inlude discovery, 5 | bookmarks, start_date usage, replication methods, pagination, and stream field selection 6 | 7 | These tests do not cover specific difficulties of a stream and should be added to when you run into 8 | a situation that is not typical. For instance, if there is logic for stream selection or field 9 | selection where if you pick stream A you cannot pick stream B. It also does not currently test 10 | parent child relationships, streams that you can select the replication method, etc. 11 | 12 | These tests are the starting point for tap-tester and not a comprehesive test. Each tap should be 13 | reviewed to determine if supplemental testing should be completed 14 | 15 | ### How to use this template 16 | 17 | In general all you will need to do is fill out specifics for the properties and credentials 18 | for the tap and create appropriate test data in the dev account used for the testing. 19 | 20 | If there are situations which a test is not appropriate for a stream you can update the catalogs 21 | for that test. An example from the bookmarks test is below 22 | 23 | found_catalogs = menagerie.get_catalogs(conn_id) 24 | incremental_streams = {key for key, value in self.expected_replication_method().items() 25 | if value == self.INCREMENTAL} 26 | 27 | # IF THERE ARE STREAMS THAT SHOULD NOT BE TESTED 28 | # REPLACE THE EMPTY SET BELOW WITH THOSE STREAMS 29 | untested_streams = self.child_streams().union(set()) 30 | our_catalogs = [catalog for catalog in found_catalogs if 31 | catalog.get('tap_stream_id') in incremental_streams.difference( 32 | untested_streams)] 33 | self.select_all_streams_and_fields(conn_id, our_catalogs, select_all_fields=False) 34 | 35 | make sure that if the test does not have an untested streams section that you use it everywhere 36 | catalogs are selected and in the subTests so you are not testing streams that are not selected. 37 | 38 | ####base.py 39 | 40 | Fill out the the following methods to customize the test 41 | 42 | The name of the tap 43 | 44 | def tap_name(self): 45 | return "tap-" 46 | 47 | The extension of the URL for the tap 48 | 49 | def get_type(self): 50 | """Return the expected url route ending""" 51 | return "platform." 52 | 53 | The configuration properties required for the tap 54 | 55 | def get_properties(self, original: bool = True): 56 | """Configuration properties required for the tap.""" 57 | return_value = { 58 | 'start_date': '2017-07-01 00:00:00', 59 | 'shop': 'stitchdatawearhouse' 60 | } 61 | 62 | The credentials required if any. 
These should be in the 63 | environments repo 64 | 65 | def get_credentials(self): 66 | return { 67 | 'api_key': os.getenv('TAP__API_KEY'), 68 | 'password': os.getenv('TAP__PASSWORD') 69 | } 70 | 71 | def setUp(self): 72 | """Verify that you have set the prerequisites to run the tap (creds, etc.)""" 73 | missing_envs = [x for x in [os.getenv('TAP_SHOPIFY_API_KEY')] if x is None] 74 | if missing_envs: 75 | raise Exception("set environment variables") 76 | 77 | The expected streams and associated metadata to test for. You can either explicitly put in the 78 | metadata for each stream, or can set default metadata for streams and update exceptions. Examples 79 | of both methods are below. 80 | 81 | def expected_metadata(self): 82 | """The expected streams and metadata about the streams""" 83 | 84 | default = { 85 | self.REPLICATION_KEYS: {"updated_at"}, 86 | self.PRIMARY_KEYS: {"id"}, 87 | self.REPLICATION_METHOD: self.INCREMENTAL, 88 | self.API_LIMIT: 250} 89 | 90 | meta = default.copy() 91 | meta.update({self.FOREIGN_KEYS: {"owner_id", "owner_resource"}}) 92 | 93 | return { 94 | "orders": default, 95 | "metafields": meta, 96 | "transactions": { 97 | self.REPLICATION_KEYS: {"created_at"}, 98 | self.PRIMARY_KEYS: {"id"}, 99 | self.FOREIGN_KEYS: {"order_id"}, 100 | self.REPLICATION_METHOD: self.INCREMENTAL, 101 | self.API_LIMIT: 250} 102 | } -------------------------------------------------------------------------------- /todo.org: -------------------------------------------------------------------------------- 1 | * Goal Numero Uno is Sourcerer Parity (direct and unadorned) 2 | 3 | - /No/ tap-tester test 4 | - /No/ features that sourcerer doesn't have (lock down the version via 5 | the api-version header) 6 | 7 | We think this should be possible with the current oauth-flow. We will 8 | fail fast if that turns out not to be the case and fall back to 9 | authenticating via their token. 10 | - !!! We could really use an alpha tester but it's not necessary. We only 11 | need to deliver something we think will probably work rather than 12 | something we're pretty sure will work! 13 | - !!! We should /not/ do field selection. It makes the sync logic much 14 | harder as you have a combinatoric situation of what among Events + 15 | Objects is selected and what records to emit because of it! 16 | - !!! Sourcerer stripe does /not/ implement 429 support! 17 | - !!! Sourcerer does /not/ update all objects. Only =InvoiceItems=, 18 | =Invoices=, =Coupons=, =Plans=, =Customers=, =Transfers=, and 19 | =Charges=. 20 | 21 | Is it possible that retrieving an object by ID merges all updates to 22 | it into its state but retrieving the same object from the Object's 23 | stream has it as it was created? 24 | - @brian Do we need docs according to the contract? 25 | ** DONE Can the VM perform the oauth dance? :v0: 26 | ** DONE Add the form to the ui :v0: 27 | 28 | Start with the assumption that we'll be collecting a 29 | ** DONE Add oauth callback support for platform.stripe to =connections-service= :v0: 30 | 31 | This is when we decide whether we can get the account-id or not. 32 | ** DONE Create the skeleton tap project based on =singer-tap-template= :v0: 33 | ** DONE Get a skeleton pylint build running in circle :v0: 34 | ** DONE Run a check job :v0: 35 | 36 | #+BEGIN_SRC bash 37 | # in orca 38 | ./run_it check 39 | #+END_SRC 40 | 41 | We need at this point to be using the =account_id= property /or/ the 42 | =api_key= property. 
43 | 44 | This is probably good enough to do with an 45 | =stripe.Account.retrieve(account_id)= call. 46 | 47 | Log the name, etc. of the account we retrieved. 48 | 49 | Need to be sure to use the stable version of the API that we are 50 | currently using. Allowing customer specification of the API version 51 | needs to come later because it introduces too much uncertainty. 52 | ** DONE Implement the schema for Charges :v0: 53 | 54 | Always selected. We will do field selection later. 55 | ** DONE Do a sync for Charges :v0: 56 | 57 | Notably, we do /not/ get updates for Charges at this time. 58 | ** DONE Handle rate limiting :v0: 59 | 60 | This should be in line with the spike for [[file:spikes/timeout/timeout.py][=timeout=]]. 61 | 62 | This is not a strict requirement because sourcerer doesn't handle it. 63 | /But/ we think it's super easy. 64 | ** DONE Add Charges data typing 65 | ** DONE Add bookmarking for Charges :v0: 66 | 67 | This is object cursor id pagination. See [[https://github.com/singer-io/tap-stripe/blob/cf3eaf16e162762978a8e23d77a99948f248520f/spikes/sdk/sdk.py#L47-L52][sdk.py]]. 68 | ** TODO Add =Events= stream :v0: 69 | 70 | Because we are not doing field selection, this part will be easy. 71 | ** TODO Add =Charges= updates 72 | ** TODO Add =Customers= syncing 73 | ** TODO Add =Customers= updating 74 | ** TODO Add =Plans= syncing 75 | ** TODO Add =Plans= updating 76 | ** TODO Add =Invoices= syncing 77 | ** TODO Add =Invoices= updating 78 | ** TODO Add =InvoiceItems= syncing 79 | ** TODO Add =InvoiceItems= updating 80 | ** TODO Add =Transfers= syncing 81 | ** TODO Add =Transfers= updating 82 | ** TODO Add =Coupons= syncing 83 | ** TODO Add =Coupons= updating 84 | ** The following get no updates according to sourcerer 85 | ** TODO Add =Subscriptions= syncing 86 | ** TODO Add =BalanceHistory= syncing 87 | ** TODO Add =TransferTransactions= syncing 88 | ** TODO Support =start_date= config property 89 | * v1 90 | ** TODO Test field selection :v1: 91 | 92 | @brian: Do we have any reason to believe that field selection could be 93 | complicated for this tap? Are there field exclusions, etc.? 94 | ** TODO Implement discovery for Charges :v1: 95 | 96 | The schema may be tricky here. There are many deeply nested objects 97 | (See 98 | https://github.com/stitchdata/sourcerer/blob/f828820666363f15ab47bcff3a883f9fb14791f3/sourcerer/src/clojure/sourcerer/apis/datatype/stripe.clj#L113) 99 | but /all/ of the data typing concerns appear to be around datetime 100 | parsing. We'll need to put that in by hand unfortunately. 101 | ** TODO Add bookmark logic around =Events= / =Object= selection combinatorics. 102 | 103 | How does sourcerer handle resetting your bookmark?
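A sketch of one plausible shape for this (illustrative only -- it mirrors the
behavior pinned down in [[file:tests/unittests/test_lookback_evaluation.py][test_lookback_evaluation.py]], not necessarily the
shipped code): fall back to =start_date= when no bookmark exists, otherwise
back the window up by the lookback so late-arriving records are re-synced.

#+BEGIN_SRC python
from singer import utils

# Hypothetical helper -- the name, arguments, and structure are assumptions.
def start_window(state, config, stream, lookback_seconds):
    bookmark = state.get('bookmarks', {}).get(stream, {}).get('created')
    if bookmark is None:
        # No bookmark yet: start from the configured start_date.
        return utils.strptime_to_utc(config['start_date']).timestamp()
    # Bookmark present: subtract the lookback to catch late-arriving records.
    return bookmark - lookback_seconds
#+END_SRC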
104 | -------------------------------------------------------------------------------- /tap_stripe/schemas/disputes.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": [ 3 | "null", 4 | "object" 5 | ], 6 | "properties":{ 7 | "id": { 8 | "type": [ 9 | "string" 10 | ] 11 | }, 12 | "updated_by_event_type": { 13 | "type": [ 14 | "null", 15 | "string" 16 | ] 17 | }, 18 | "object": { 19 | "type": [ 20 | "null", 21 | "string" 22 | ] 23 | }, 24 | "amount": { 25 | "type": [ 26 | "null", 27 | "integer" 28 | ] 29 | }, 30 | "balance_transactions": { 31 | "type": [ 32 | "null", 33 | "array" 34 | ], 35 | "items": { 36 | "type": [ 37 | "null", 38 | "object" 39 | ], 40 | "properties": { 41 | "id": { 42 | "type": ["string"] 43 | } 44 | } 45 | } 46 | }, 47 | "charge": { 48 | "type": [ 49 | "null", 50 | "string" 51 | ] 52 | }, 53 | "created": { 54 | "type": [ 55 | "null", 56 | "string" 57 | ], 58 | "format": "date-time" 59 | }, 60 | "currency": { 61 | "type": [ 62 | "null", 63 | "string" 64 | ] 65 | }, 66 | "evidence": { 67 | "type": [ 68 | "null", 69 | "string", 70 | "object" 71 | ], 72 | "properties": { 73 | "refund_policy": { 74 | "type": ["null", "string"] 75 | }, 76 | "shipping_address": { 77 | "type": ["null", "string"] 78 | }, 79 | "duplicate_charge_explanation": { 80 | "type": ["null", "string"] 81 | }, 82 | "shipping_tracking_number": { 83 | "type": ["null", "string"] 84 | }, 85 | "customer_signature": { 86 | "type": ["null", "string"] 87 | }, 88 | "uncategorized_text": { 89 | "type": ["null", "string"] 90 | }, 91 | "cancellation_policy_disclosure": { 92 | "type": ["null", "string"] 93 | }, 94 | "refund_policy_disclosure": { 95 | "type": ["null", "string"] 96 | }, 97 | "receipt": { 98 | "type": ["null", "string"] 99 | }, 100 | "customer_name": { 101 | "type": ["null", "string"] 102 | }, 103 | "refund_refusal_explanation": { 104 | "type": ["null", "string"] 105 | }, 106 | "cancellation_rebuttal": { 107 | "type": ["null", "string"] 108 | }, 109 | "product_description": { 110 | "type": ["null", "string"] 111 | }, 112 | "shipping_date": { 113 | "type": ["null", "string"] 114 | }, 115 | "customer_email_address": { 116 | "type": ["null", "string"] 117 | }, 118 | "duplicate_charge_id": { 119 | "type": ["null", "string"] 120 | }, 121 | "shipping_documentation": { 122 | "type": ["null", "string"] 123 | }, 124 | "access_activity_log": { 125 | "type": ["null", "string"] 126 | }, 127 | "customer_purchase_ip": { 128 | "type": ["null", "string"] 129 | }, 130 | "service_date": { 131 | "type": ["null", "string"] 132 | }, 133 | "shipping_carrier": { 134 | "type": ["null", "string"] 135 | }, 136 | "service_documentation": { 137 | "type": ["null", "string"] 138 | }, 139 | "duplicate_charge_documentation": { 140 | "type": ["null", "string"] 141 | }, 142 | "cancellation_policy": { 143 | "type": ["null", "string"] 144 | }, 145 | "customer_communication": { 146 | "type": ["null", "string"] 147 | }, 148 | "uncategorized_file": { 149 | "type": ["null", "string"] 150 | }, 151 | "billing_address": { 152 | "type": ["null", "string"] 153 | } 154 | } 155 | }, 156 | "evidence_details": { 157 | "type": [ 158 | "null", 159 | "object" 160 | ], 161 | "properties": { 162 | "due_by": { 163 | "type": ["null", "string"], 164 | "format": "date-time" 165 | }, 166 | "has_evidence": { 167 | "type": ["null", "boolean"] 168 | }, 169 | "past_due": { 170 | "type": ["null", "boolean"] 171 | }, 172 | "submission_count": { 173 | "type": ["null", "integer"] 174 | } 175 | } 176 | }, 177 | 
"is_charge_refundable": { 178 | "type": ["null", "boolean"] 179 | }, 180 | "livemode": { 181 | "type": ["null", "boolean"] 182 | }, 183 | "metadata": { 184 | "type": [ 185 | "null", 186 | "object" 187 | ], 188 | "properties": {} 189 | }, 190 | "reason": { 191 | "type": [ 192 | "null", 193 | "string" 194 | ] 195 | }, 196 | "status": { 197 | "type": [ 198 | "null", 199 | "string" 200 | ] 201 | }, 202 | "updated": { 203 | "type": [ 204 | "null", 205 | "string" 206 | ], 207 | "format": "date-time" 208 | } 209 | } 210 | } 211 | -------------------------------------------------------------------------------- /tap_stripe/schemas/invoice_items.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": [ 3 | "null", 4 | "object" 5 | ], 6 | "properties": { 7 | "amount": { 8 | "type": [ 9 | "null", 10 | "integer" 11 | ] 12 | }, 13 | "updated_by_event_type": { 14 | "type": [ 15 | "null", 16 | "string" 17 | ] 18 | }, 19 | "metadata": { 20 | "type": [ 21 | "null", 22 | "object" 23 | ], 24 | "properties": {} 25 | }, 26 | "plan": { 27 | "$ref": "shared/plan.json#/" 28 | }, 29 | "tax_rates": { 30 | "type": [ 31 | "null", 32 | "array" 33 | ], 34 | "items": { 35 | "type": [ 36 | "null", 37 | "object" 38 | ], 39 | "properties": { 40 | "id": { 41 | "type": [ 42 | "null", 43 | "string" 44 | ] 45 | }, 46 | "object": { 47 | "type": [ 48 | "null", 49 | "string" 50 | ] 51 | }, 52 | "active": { 53 | "type": [ 54 | "null", 55 | "boolean" 56 | ] 57 | }, 58 | "country": { 59 | "type": [ 60 | "null", 61 | "string" 62 | ] 63 | }, 64 | "created": { 65 | "type": [ 66 | "null", 67 | "string" 68 | ], 69 | "format": "date-time" 70 | }, 71 | "description": { 72 | "type": [ 73 | "null", 74 | "string" 75 | ] 76 | }, 77 | "display_name": { 78 | "type": [ 79 | "null", 80 | "string" 81 | ] 82 | }, 83 | "inclusive": { 84 | "type": [ 85 | "null", 86 | "boolean" 87 | ] 88 | }, 89 | "jurisdiction": { 90 | "type": [ 91 | "null", 92 | "string" 93 | ] 94 | }, 95 | "livemode": { 96 | "type": [ 97 | "null", 98 | "boolean" 99 | ] 100 | }, 101 | "percentage": { 102 | "type": [ 103 | "null", 104 | "string" 105 | ], 106 | "format": "singer.decimal" 107 | }, 108 | "state": { 109 | "type": [ 110 | "null", 111 | "string" 112 | ] 113 | } 114 | } 115 | } 116 | }, 117 | "invoice": { 118 | "type": [ 119 | "null", 120 | "string" 121 | ] 122 | }, 123 | "unit_amount_decimal": { 124 | "type": [ 125 | "null", 126 | "string" 127 | ], 128 | "format": "singer.decimal" 129 | }, 130 | "period": { 131 | "type": [ 132 | "null", 133 | "object" 134 | ], 135 | "properties": { 136 | "end": { 137 | "type": [ 138 | "null", 139 | "string" 140 | ], 141 | "format": "date-time" 142 | }, 143 | "start": { 144 | "type": [ 145 | "null", 146 | "string" 147 | ], 148 | "format": "date-time" 149 | } 150 | } 151 | }, 152 | "quantity": { 153 | "type": [ 154 | "null", 155 | "integer" 156 | ] 157 | }, 158 | "description": { 159 | "type": [ 160 | "null", 161 | "string" 162 | ] 163 | }, 164 | "date": { 165 | "type": [ 166 | "null", 167 | "string" 168 | ], 169 | "format": "date-time" 170 | }, 171 | "object": { 172 | "type": [ 173 | "null", 174 | "string" 175 | ] 176 | }, 177 | "subscription": { 178 | "type": [ 179 | "null", 180 | "string" 181 | ] 182 | }, 183 | "id": { 184 | "type": [ 185 | "null", 186 | "string" 187 | ] 188 | }, 189 | "livemode": { 190 | "type": [ 191 | "null", 192 | "boolean" 193 | ] 194 | }, 195 | "discounts": { 196 | "type": [ 197 | "null", 198 | "array" 199 | ], 200 | "items": { 201 | "type": [ 202 | "null", 203 | 
"string" 204 | ] 205 | } 206 | }, 207 | "discountable": { 208 | "type": [ 209 | "null", 210 | "boolean" 211 | ] 212 | }, 213 | "unit_amount": { 214 | "type": [ 215 | "null", 216 | "integer" 217 | ] 218 | }, 219 | "currency": { 220 | "type": [ 221 | "null", 222 | "string" 223 | ] 224 | }, 225 | "customer": { 226 | "type": [ 227 | "null", 228 | "string" 229 | ] 230 | }, 231 | "proration": { 232 | "type": [ 233 | "null", 234 | "boolean" 235 | ] 236 | }, 237 | "subscription_item": { 238 | "type": [ 239 | "null", 240 | "string" 241 | ] 242 | }, 243 | "updated": { 244 | "type": [ 245 | "null", 246 | "string" 247 | ], 248 | "format": "date-time" 249 | } 250 | } 251 | } 252 | -------------------------------------------------------------------------------- /tests/test_automatic_payout_transactions.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime as dt 2 | from datetime import timedelta, time 3 | from utils import stripe_obj_to_dict, client, midnight 4 | 5 | from base import BaseTapTest 6 | from tap_tester import runner, connections 7 | 8 | 9 | def get_payouts(): 10 | """ 11 | Return all the payouts (with pagination), to determine the automatic and non-automatic payouts 12 | """ 13 | # list of all data to return 14 | four_days_ago = int(dt.combine(dt.today()-timedelta(days=4), time.min).timestamp()) 15 | data = [] 16 | # Api call of 1st page starting from 4 days ago as there is lag from the Stripe side to reflect 17 | # the automatic payout transactions data 18 | stripe_obj = client["payouts"].list(limit=100, created={"gte": four_days_ago}) 19 | dict_obj = stripe_obj_to_dict(stripe_obj) 20 | 21 | try: 22 | # add data 23 | data += dict_obj['data'] 24 | except KeyError: 25 | raise Exception("No records for 'Payouts' were replicated, please run 'test_all_fields' " 26 | "before re-running.") 27 | 28 | # loop over rest of the pages and collect data 29 | while dict_obj.get("has_more"): 30 | stripe_obj = client["payouts"].list(limit=100, created={"gte": four_days_ago}, 31 | starting_after=dict_obj.get('data')[-1].get('id')) 32 | dict_obj = stripe_obj_to_dict(stripe_obj) 33 | data += dict_obj['data'] 34 | 35 | # send data 36 | return data 37 | 38 | class AutomaticPayoutTransactionTest(BaseTapTest): 39 | """ 40 | Test case to verify that we only collect payout_transactions for payouts containing 41 | "automatic" field as "True" 42 | Prerequisite: 43 | Run 'test_all_fields' before running this test case. 
44 | """ 45 | 46 | @staticmethod 47 | def name(): 48 | return "tt_stripe_automatic_payout_transactions" 49 | 50 | @classmethod 51 | def setUpClass(cls): 52 | """ 53 | Create list of automatic and non-automatic payouts used for assertion 54 | """ 55 | # get all the payouts 56 | payouts = get_payouts() 57 | 58 | # create lists of payout ids containing automatic field as "True" and "False" 59 | cls.payouts_with_automatic_true = [] 60 | cls.payouts_with_automatic_false = [] 61 | for record in payouts: 62 | if record.get("automatic"): 63 | cls.payouts_with_automatic_true.append(record.get("id")) 64 | else: 65 | cls.payouts_with_automatic_false.append(record.get("id")) 66 | 67 | def test_run(self): 68 | # Decreased the start_date for payout_transactions stream as there is a lag from the Stripe 69 | # side to reflect the automatic payout transactions data 70 | self.start_date = dt.strftime(dt.today() - timedelta(days=4), self.START_DATE_FORMAT) 71 | conn_id = connections.ensure_connection(self, original_properties=False) 72 | 73 | expected_streams = {"payouts", "payout_transactions"} 74 | 75 | # Select payouts and payout_transactions streams 76 | found_catalogs = self.run_and_verify_check_mode(conn_id) 77 | our_catalogs = [catalog for catalog in found_catalogs if 78 | catalog.get("tap_stream_id") in expected_streams] 79 | 80 | # field selection 81 | self.select_all_streams_and_fields(conn_id, our_catalogs) 82 | 83 | # Run a sync job using orchestrator 84 | first_sync_record_count = self.run_and_verify_sync(conn_id) 85 | 86 | # Get the set of records from a first sync 87 | first_sync_records = runner.get_records_from_target_output() 88 | 89 | # set stream as "payout_transactions" 90 | stream = "payout_transactions" 91 | with self.subTest(stream=stream): 92 | # verify that there is more than 1 record 93 | self.assertGreater(first_sync_record_count.get(stream, -1), 0, 94 | msg="Data isn't set up to be able to test full sync") 95 | 96 | # get records 97 | records = [message.get("data") for message 98 | in first_sync_records.get(stream).get("messages") 99 | if message["action"] == "upsert"] 100 | 101 | # collect payout ids for all the payout transaction records 102 | payout_transaction_payout_ids = set() 103 | for record in records: 104 | payout_transaction_payout_ids.add(record.get("payout_id")) 105 | 106 | # verify that data exists for payouts with "automatic" field as "True" and "False" 107 | self.assertGreater(len(self.payouts_with_automatic_true), 0) 108 | self.assertGreater(len(self.payouts_with_automatic_false), 0) 109 | 110 | # loop over all the payout ids from the payout transactions to verify 111 | # that we collected "payout transactions" of automatic payouts only 112 | for id in payout_transaction_payout_ids: 113 | # verify payout transaction record collected for payout containing "automatic": True 114 | self.assertTrue(id in self.payouts_with_automatic_true) 115 | # verify payout transaction rec NOT collected for payout with "automatic": False 116 | self.assertTrue(id not in self.payouts_with_automatic_false) 117 | -------------------------------------------------------------------------------- /tap_stripe/schemas/payouts.json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": { 3 | "metadata": { 4 | "type": [ 5 | "null", 6 | "object" 7 | ], 8 | "properties": {} 9 | }, 10 | "failure_code": { 11 | "type": [ 12 | "null", 13 | "string" 14 | ] 15 | }, 16 | "updated_by_event_type": { 17 | "type": [ 18 | "null", 19 | "string" 20 | ] 21 | }, 
22 | "id": { 23 | "type": [ 24 | "null", 25 | "string" 26 | ] 27 | }, 28 | "original_payout": { 29 | "type": [ 30 | "null", 31 | "string" 32 | ] 33 | }, 34 | "reversed_by": { 35 | "type": [ 36 | "null", 37 | "string" 38 | ] 39 | }, 40 | "statement_description": { 41 | "type": [ 42 | "null", 43 | "string" 44 | ] 45 | }, 46 | "amount": { 47 | "type": [ 48 | "null", 49 | "integer" 50 | ] 51 | }, 52 | "balance_transaction": { 53 | "type": [ 54 | "null", 55 | "string" 56 | ] 57 | }, 58 | "created": { 59 | "type": [ 60 | "null", 61 | "string" 62 | ], 63 | "format": "date-time" 64 | }, 65 | "amount_reversed": { 66 | "type": [ 67 | "null", 68 | "integer" 69 | ] 70 | }, 71 | "source_type": { 72 | "type": [ 73 | "null", 74 | "string" 75 | ] 76 | }, 77 | "bank_account": { 78 | "properties": { 79 | "metadata": { 80 | "type": [ 81 | "null", 82 | "object" 83 | ], 84 | "properties": {} 85 | }, 86 | "routing_number": { 87 | "type": [ 88 | "null", 89 | "string" 90 | ] 91 | }, 92 | "account_holder_type": { 93 | "type": [ 94 | "null", 95 | "string" 96 | ] 97 | }, 98 | "name": { 99 | "type": [ 100 | "null", 101 | "string" 102 | ] 103 | }, 104 | "id": { 105 | "type": [ 106 | "null", 107 | "string" 108 | ] 109 | }, 110 | "bank_name": { 111 | "type": [ 112 | "null", 113 | "string" 114 | ] 115 | }, 116 | "last4": { 117 | "type": [ 118 | "null", 119 | "string" 120 | ] 121 | }, 122 | "fingerprint": { 123 | "type": [ 124 | "null", 125 | "string" 126 | ] 127 | }, 128 | "account_holder_name": { 129 | "type": [ 130 | "null", 131 | "string" 132 | ] 133 | }, 134 | "object": { 135 | "type": [ 136 | "null", 137 | "string" 138 | ] 139 | }, 140 | "status": { 141 | "type": [ 142 | "null", 143 | "string" 144 | ] 145 | }, 146 | "currency": { 147 | "type": [ 148 | "null", 149 | "string" 150 | ] 151 | }, 152 | "country": { 153 | "type": [ 154 | "null", 155 | "string" 156 | ] 157 | } 158 | }, 159 | "type": [ 160 | "null", 161 | "object" 162 | ] 163 | }, 164 | "date": { 165 | "type": [ 166 | "null", 167 | "string" 168 | ], 169 | "format": "date-time" 170 | }, 171 | "method": { 172 | "type": [ 173 | "null", 174 | "string" 175 | ] 176 | }, 177 | "livemode": { 178 | "type": [ 179 | "null", 180 | "boolean" 181 | ] 182 | }, 183 | "statement_descriptor": { 184 | "type": [ 185 | "null", 186 | "string" 187 | ] 188 | }, 189 | "failure_message": { 190 | "type": [ 191 | "null", 192 | "string" 193 | ] 194 | }, 195 | "failure_balance_transaction": { 196 | "type": [ 197 | "null", 198 | "string" 199 | ] 200 | }, 201 | "recipient": { 202 | "type": [ 203 | "null", 204 | "string" 205 | ] 206 | }, 207 | "destination": { 208 | "type": [ 209 | "null", 210 | "string" 211 | ] 212 | }, 213 | "automatic": { 214 | "type": [ 215 | "null", 216 | "boolean" 217 | ] 218 | }, 219 | "object": { 220 | "type": [ 221 | "null", 222 | "string" 223 | ] 224 | }, 225 | "status": { 226 | "type": [ 227 | "null", 228 | "string" 229 | ] 230 | }, 231 | "currency": { 232 | "type": [ 233 | "null", 234 | "string" 235 | ] 236 | }, 237 | "transfer_group": { 238 | "type": [ 239 | "null", 240 | "string" 241 | ] 242 | }, 243 | "type": { 244 | "type": [ 245 | "null", 246 | "string" 247 | ] 248 | }, 249 | "arrival_date": { 250 | "type": [ 251 | "null", 252 | "string" 253 | ], 254 | "format": "date-time" 255 | }, 256 | "description": { 257 | "type": [ 258 | "null", 259 | "string" 260 | ] 261 | }, 262 | "source_transaction": { 263 | "type": [ 264 | "null", 265 | "string" 266 | ] 267 | }, 268 | "updated": { 269 | "type": [ 270 | "null", 271 | "string" 272 | ], 273 | "format": "date-time" 
274 | } 275 | }, 276 | "type": [ 277 | "null", 278 | "object" 279 | ] 280 | } 281 | -------------------------------------------------------------------------------- /tests/unittests/test_sync_event_updates.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from parameterized import parameterized 3 | from unittest import mock 4 | import datetime 5 | from tap_stripe import Context, sync_event_updates, write_bookmark_for_event_updates 6 | 7 | MOCK_DATE_TIME = datetime.datetime.strptime("2021-01-01T08:30:50Z", "%Y-%m-%dT%H:%M:%SZ") 8 | MOCK_CURRENT_TIME = datetime.datetime.strptime("2022-04-01T08:30:50Z", "%Y-%m-%dT%H:%M:%SZ") 9 | 10 | 11 | class TestSyncEventUpdates(unittest.TestCase): 12 | """ 13 | Verify the bookmark logic and logger messages of sync_event_updates. 14 | """ 15 | @mock.patch('stripe.Event.list') 16 | @mock.patch('singer.utils.now', side_effect = [MOCK_DATE_TIME, MOCK_DATE_TIME, MOCK_DATE_TIME]) 17 | @mock.patch('tap_stripe.write_bookmark_for_event_updates') 18 | def test_sync_event_updates_bookmark_in_last_7_days(self, mock_write_bookmark, mock_stripe_event, mock_utils_now): 19 | """ 20 | Test that sync_event_updates writes the maximum bookmark value in the state when that value falls within the last 21 | events_date_window_size (7 days by default). 22 | """ 23 | config = {"client_secret": "test_secret", "account_id": "test_account", "start_date": "2022-02-17T00:00:00"} 24 | Context.config = config 25 | Context.state = {'bookmarks': {'charges_events': {'created': 1698739554}}} 26 | 27 | mock_stripe_event.return_value = "" 28 | sync_event_updates('charges', False) 29 | 30 | # Verify that the tap writes the bookmark/start_date value in the state. 31 | mock_write_bookmark.assert_called_with(False, 'charges', None, 1645056000) 32 | 33 | @mock.patch('stripe.Event.list') 34 | @mock.patch('singer.utils.now', return_value = datetime.datetime.strptime("2023-05-10T08:30:50Z", "%Y-%m-%dT%H:%M:%SZ")) 35 | def test_sync_event_updates_bookmark_before_last_30_days(self, mock_utils_now, mock_stripe_event): 36 | """ 37 | Test that sync_event_updates raises an exception if the bookmark value is older than 30 days 38 | """ 39 | config = {"client_secret": "test_secret", "account_id": "test_account", "start_date": "2022-02-17T00:00:00"} 40 | Context.config = config 41 | Context.state = {'bookmarks': {'charges_events': {'updates_created': 1675251000}}} 42 | mock_stripe_event.return_value = "" 43 | with self.assertRaises(Exception) as e: 44 | sync_event_updates('charges', False) 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | @mock.patch('stripe.Event.list') 60 | @mock.patch('singer.utils.now', return_value = datetime.datetime.strptime("2023-05-10T08:30:50Z", "%Y-%m-%dT%H:%M:%SZ")) 61 | @mock.patch('tap_stripe.Context.is_selected', return_value= True) 62 | def
test_sync_event_updates_bookmark_before_last_30_days_for_two_streams(self, mock_is_selected, mock_utils_now, mock_stripe_event): 63 | """ 64 | Test that sync_event_updates raises an exception if the bookmark value is older than 30 days, tested across two streams 65 | """ 66 | config = {"client_secret": "test_secret", "account_id": "test_account", "start_date": "2022-02-17T00:00:00"} 67 | Context.config = config 68 | Context.state = {"bookmarks": {"subscriptions_events": {"updates_created": 1675251000}, "subscription_items_events": {"updates_created": 1675251000}}} 69 | mock_stripe_event.return_value = "" 70 | with self.assertRaises(Exception) as e: 71 | sync_event_updates("subscriptions", False) 72 | 73 | @mock.patch('stripe.Event.list') 74 | @mock.patch('singer.utils.now', return_value = datetime.datetime.strptime("2023-05-15T08:30:50Z", "%Y-%m-%dT%H:%M:%SZ")) 75 | @mock.patch('tap_stripe.reset_bookmark_for_event_updates') 76 | def test_sync_event_updates_bookmark_call_count(self, mock_reset_func, mock_utils_now, mock_stripe_event): 77 | """ 78 | Test that sync_event_updates resets the state if the bookmark value is older than 30 days 79 | """ 80 | config = {"client_secret": "test_secret", "account_id": "test_account", "start_date": "2022-02-17T00:00:00"} 81 | Context.config = config 82 | Context.state = {"bookmarks": {"charges_events": {"updates_created": 1675251000}}} 83 | mock_stripe_event.return_value = "" 84 | with self.assertRaises(Exception) as e: 85 | sync_event_updates("charges", False) 86 | self.assertEqual(mock_reset_func.call_count, 1) 87 | 88 | @mock.patch("singer.write_state") 89 | def test_write_bookmark_event_updates_for_non_sub_streams(self, mock_state): 90 | """ 91 | Test that the tap writes the expected bookmark for non-sub streams. 92 | """ 93 | Context.state = {'bookmarks': {}} 94 | write_bookmark_for_event_updates(False, 'charges', None, 1648177250) 95 | 96 | # Verify expected bookmark value 97 | mock_state.assert_called_with({'bookmarks': {'charges_events': {'updates_created': 1648177250}}}) 98 | 99 | @mock.patch('tap_stripe.Context', return_value = Context) 100 | @mock.patch("singer.write_state") 101 | def test_write_bookmark_event_updates_for_sub_streams(self, mock_state, mock_context): 102 | """ 103 | Test that the tap writes the expected bookmark for sub streams.
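        Assumed illustration of the expected shape (the assertion below only checks that
        singer.write_state is called with Context.state): for
        write_bookmark_for_event_updates(True, 'invoices', 'invoice_line_items', 1648177250),
        the state is expected to end up roughly as
        {'bookmarks': {'invoice_line_items_events': {'updates_created': 1648177250}}}.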
104 | """ 105 | Context.state = {'bookmarks': {}} 106 | write_bookmark_for_event_updates(True, 'invoices', 'invoice_line_items', 1648177250) 107 | 108 | # Verify expected bookmark value 109 | mock_state.assert_called_with(mock_context.state) 110 | -------------------------------------------------------------------------------- /spikes/prior-art/tap_stripe/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import os 3 | import json 4 | import singer 5 | from singer import utils 6 | from singer import metrics 7 | from singer import bookmarks 8 | from singer import metadata 9 | from singer import (transform, 10 | Transformer) 11 | import stripe 12 | import json 13 | 14 | REQUIRED_CONFIG_KEYS = ["start_date", "access_token"] 15 | LOGGER = singer.get_logger() 16 | STREAM_ENDPOINTS = { 17 | 'customers': stripe.Customer, 18 | 'charges': stripe.Charge, 19 | 'invoices': stripe.Invoice, 20 | 'subscriptions': stripe.Subscription, 21 | 'plans': stripe.Plan 22 | } 23 | 24 | def get_abs_path(path): 25 | return os.path.join(os.path.dirname(os.path.realpath(__file__)), path) 26 | 27 | # Load schemas from schemas folder 28 | def load_schemas(): 29 | schemas = {} 30 | 31 | schemas_path = get_abs_path('schemas') 32 | files = [f for f in os.listdir(schemas_path) if os.path.isfile(os.path.join(schemas_path, f))] 33 | 34 | for filename in files: 35 | path = get_abs_path('schemas') + '/' + filename 36 | file_raw = filename.replace('.json', '') 37 | with open(path) as file: 38 | schemas[file_raw] = json.load(file) 39 | 40 | return schemas 41 | 42 | 43 | def load_shared_schema_refs(): 44 | shared_schemas_path = get_abs_path('schemas/shared') 45 | 46 | shared_file_names = [f for f in os.listdir(shared_schemas_path) 47 | if os.path.isfile(os.path.join(shared_schemas_path, f))] 48 | 49 | shared_schema_refs = {} 50 | for shared_file in shared_file_names: 51 | with open(os.path.join(shared_schemas_path, shared_file)) as data_file: 52 | shared_schema_refs[shared_file] = json.load(data_file) 53 | 54 | return shared_schema_refs 55 | 56 | 57 | def generate_metadata(schema): 58 | mdata = metadata.new() 59 | 60 | mdata = metadata.write(mdata, (), 'table-key-properties', ['id']) 61 | for field_name, props in schema['properties'].items(): 62 | mdata = metadata.write(mdata, ('properties', field_name), 'inclusion', 'automatic') 63 | 64 | return metadata.to_list(mdata) 65 | 66 | 67 | def discover(): 68 | raw_schemas = load_schemas() 69 | streams = [] 70 | 71 | refs = load_shared_schema_refs() 72 | 73 | for schema_name, schema in raw_schemas.items(): 74 | catalog_entry = { 75 | 'stream': schema_name, 76 | 'tap_stream_id': schema_name, 77 | 'schema': singer.resolve_schema_references(schema, refs=refs), 78 | 'metadata' : generate_metadata(schema), 79 | 'key_properties': ['id'] 80 | } 81 | streams.append(catalog_entry) 82 | 83 | return {'streams': streams} 84 | 85 | 86 | def get_selected_streams(catalog): 87 | ''' 88 | Gets selected streams. 
Checks schema's 'selected' first (legacy) 89 | and then checks metadata (current), looking for an empty breadcrumb 90 | and mdata with a 'selected' entry 91 | ''' 92 | selected_streams = [] 93 | for stream in catalog.streams: 94 | stream_metadata = stream.metadata 95 | if stream.is_selected(): 96 | selected_streams.append(stream.tap_stream_id) 97 | else: 98 | for entry in stream_metadata: 99 | # stream metadata will have empty breadcrumb 100 | if not entry['breadcrumb'] and entry['metadata'].get('selected',None): 101 | selected_streams.append(stream.tap_stream_id) 102 | 103 | return selected_streams 104 | 105 | 106 | def replace_data_array(obj): 107 | for key, value in obj.items(): 108 | if isinstance(value, dict) and value.get('object') == 'list': 109 | result = [] 110 | for nested in value.auto_paging_iter(): 111 | replace_data_array(nested) 112 | result.append(nested) 113 | obj[key] = result 114 | 115 | 116 | def sync_stream(stream, schema, **params): 117 | singer.write_schema(stream, schema.to_dict(), ['id']) 118 | endpoint = STREAM_ENDPOINTS[stream] 119 | starting_after = None 120 | has_more = True 121 | while has_more: 122 | LOGGER.info('Loading data for stream {} after {}'.format(stream, starting_after)) 123 | result = endpoint.list( 124 | starting_after=starting_after, 125 | limit=100, 126 | **params 127 | ) 128 | has_more = result['has_more'] 129 | if has_more: 130 | starting_after = result.data[-1].id 131 | for obj in result.data: 132 | replace_data_array(obj) 133 | 134 | with Transformer(singer.UNIX_SECONDS_INTEGER_DATETIME_PARSING) as transformer: 135 | obj = transformer.transform(obj, schema.to_dict()) 136 | singer.write_record(stream, obj) 137 | 138 | 139 | 140 | 141 | def sync(config, state, catalog): 142 | 143 | selected_stream_ids = get_selected_streams(catalog) 144 | 145 | # Loop over streams in catalog 146 | for stream in catalog.streams: 147 | stream_id = stream.tap_stream_id 148 | stream_schema = stream.schema 149 | if stream_id in selected_stream_ids: 150 | LOGGER.info('Syncing stream: ' + stream_id) 151 | if stream_id == 'subscriptions': 152 | sync_stream(stream_id, stream_schema, status='all') 153 | else: 154 | sync_stream(stream_id, stream_schema) 155 | 156 | return 157 | 158 | @utils.handle_top_exception(LOGGER) 159 | def main(): 160 | 161 | # Parse command line arguments 162 | args = utils.parse_args(REQUIRED_CONFIG_KEYS) 163 | 164 | stripe.api_key = args.config['access_token'] 165 | stripe.default_http_client = stripe.http_client.RequestsClient() 166 | 167 | # If discover flag was passed, run discovery mode and dump output to stdout 168 | if args.discover: 169 | catalog = discover() 170 | print(json.dumps(catalog, indent=2)) 171 | # Otherwise run in sync mode 172 | else: 173 | 174 | # 'properties' is the legacy name of the catalog 175 | if args.properties: 176 | catalog = args.properties 177 | # 'catalog' is the current name 178 | elif args.catalog: 179 | catalog = args.catalog 180 | else: 181 | catalog = discover() 182 | 183 | sync(args.config, args.state, catalog) 184 | 185 | if __name__ == "__main__": 186 | main() 187 | -------------------------------------------------------------------------------- /spikes/prior-art/tap_stripe/schemas/charges.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "object", 3 | "properties": { 4 | "id": { 5 | "type": [ 6 | "null", 7 | "string" 8 | ] 9 | }, 10 | "object": { 11 | "type": [ 12 | "null", 13 | "string" 14 | ] 15 | }, 16 | "amount": { 17 | "type": [ 18 | "null", 19 
| "integer" 20 | ] 21 | }, 22 | "amount_refunded": { 23 | "type": [ 24 | "null", 25 | "integer" 26 | ] 27 | }, 28 | "application": { 29 | "type": [ 30 | "null", 31 | "string" 32 | ] 33 | }, 34 | "application_fee": { 35 | "type": [ 36 | "null", 37 | "string" 38 | ] 39 | }, 40 | "balance_transaction": { 41 | "type": [ 42 | "null", 43 | "string" 44 | ] 45 | }, 46 | "captured": { 47 | "type": [ 48 | "null", 49 | "boolean" 50 | ] 51 | }, 52 | "created": { 53 | "type": [ 54 | "null", 55 | "string" 56 | ], 57 | "format": "date-time" 58 | }, 59 | "currency": { 60 | "type": [ 61 | "null", 62 | "string" 63 | ] 64 | }, 65 | "customer": { 66 | "type": [ 67 | "null", 68 | "string" 69 | ] 70 | }, 71 | "description": { 72 | "type": [ 73 | "null", 74 | "string" 75 | ] 76 | }, 77 | "destination": { 78 | "type": [ 79 | "null", 80 | "string" 81 | ] 82 | }, 83 | "dispute": { 84 | "type": [ 85 | "null", 86 | "string" 87 | ] 88 | }, 89 | "failure_code": { 90 | "type": [ 91 | "null", 92 | "string" 93 | ] 94 | }, 95 | "failure_message": { 96 | "type": [ 97 | "null", 98 | "string" 99 | ] 100 | }, 101 | "fraud_details": { 102 | "type": [ 103 | "null", 104 | "object" 105 | ], 106 | "additionalProperties": true, 107 | "properties": {} 108 | }, 109 | "invoice": { 110 | "type": [ 111 | "null", 112 | "string" 113 | ] 114 | }, 115 | "livemode": { 116 | "type": [ 117 | "null", 118 | "boolean" 119 | ] 120 | }, 121 | "metadata": { 122 | "type": [ 123 | "null", 124 | "object" 125 | ], 126 | "additionalProperties": true, 127 | "properties": {} 128 | }, 129 | "on_behalf_of": { 130 | "type": [ 131 | "null", 132 | "string" 133 | ] 134 | }, 135 | "order": { 136 | "type": [ 137 | "null", 138 | "string" 139 | ] 140 | }, 141 | "outcome": { 142 | "type": [ 143 | "null", 144 | "object" 145 | ], 146 | "properties": { 147 | "network_status": { 148 | "type": [ 149 | "null", 150 | "string" 151 | ] 152 | }, 153 | "reason": { 154 | "type": [ 155 | "null", 156 | "string" 157 | ] 158 | }, 159 | "risk_level": { 160 | "type": [ 161 | "null", 162 | "string" 163 | ] 164 | }, 165 | "seller_message": { 166 | "type": [ 167 | "null", 168 | "string" 169 | ] 170 | }, 171 | "type": { 172 | "type": [ 173 | "null", 174 | "string" 175 | ] 176 | } 177 | } 178 | }, 179 | "paid": { 180 | "type": [ 181 | "null", 182 | "boolean" 183 | ] 184 | }, 185 | "receipt_email": { 186 | "type": [ 187 | "null", 188 | "string" 189 | ] 190 | }, 191 | "receipt_number": { 192 | "type": [ 193 | "null", 194 | "string" 195 | ] 196 | }, 197 | "refunded": { 198 | "type": [ 199 | "null", 200 | "boolean" 201 | ] 202 | }, 203 | "refunds": { 204 | "type": [ 205 | "null", 206 | "array" 207 | ], 208 | "items": { 209 | "type": [ 210 | "null", 211 | "object" 212 | ], 213 | "properties": { 214 | "id": { 215 | "type": [ 216 | "null", 217 | "string" 218 | ] 219 | }, 220 | "object": { 221 | "type": [ 222 | "null", 223 | "string" 224 | ] 225 | }, 226 | "amount": { 227 | "type": [ 228 | "null", 229 | "integer" 230 | ] 231 | }, 232 | "balance_transaction": { 233 | "type": [ 234 | "null", 235 | "string" 236 | ] 237 | }, 238 | "charge": { 239 | "type": [ 240 | "null", 241 | "string" 242 | ] 243 | }, 244 | "created": { 245 | "type": [ 246 | "null", 247 | "string" 248 | ], 249 | "format": "date-time" 250 | }, 251 | "currency": { 252 | "type": [ 253 | "null", 254 | "string" 255 | ] 256 | }, 257 | "metadata": { 258 | "type": [ 259 | "null", 260 | "object" 261 | ], 262 | "additionalProperties": true, 263 | "properties": {} 264 | }, 265 | "reason": { 266 | "type": [ 267 | "null", 268 | "string" 269 | ] 
270 | }, 271 | "receipt_number": { 272 | "type": [ 273 | "null", 274 | "string" 275 | ] 276 | }, 277 | "status": { 278 | "type": [ 279 | "null", 280 | "string" 281 | ] 282 | } 283 | } 284 | } 285 | }, 286 | "review": { 287 | "type": [ 288 | "null", 289 | "string" 290 | ] 291 | }, 292 | "shipping": { 293 | "type": [ 294 | "null", 295 | "object" 296 | ], 297 | "additionalProperties": true, 298 | "properties": {} 299 | }, 300 | "source": { "$ref": "source.json" }, 301 | "source_transfer": { 302 | "type": [ 303 | "null", 304 | "string" 305 | ] 306 | }, 307 | "statement_descriptor": { 308 | "type": [ 309 | "null", 310 | "string" 311 | ] 312 | }, 313 | "status": { 314 | "type": [ 315 | "null", 316 | "string" 317 | ] 318 | }, 319 | "transfer_group": { 320 | "type": [ 321 | "null", 322 | "string" 323 | ] 324 | } 325 | } 326 | } 327 | -------------------------------------------------------------------------------- /tests/test_create_object.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test tap gets all creates for streams (as long as we can create an object) 3 | """ 4 | 5 | from tap_tester import menagerie, runner, connections, LOGGER 6 | from base import BaseTapTest 7 | from utils import create_object, delete_object 8 | 9 | 10 | class CreateObjectTest(BaseTapTest): 11 | """Test tap gets all creates for streams (as long as we can create an object)""" 12 | 13 | @staticmethod 14 | def name(): 15 | return "tt_stripe_create_objects" 16 | 17 | def test_run(self): 18 | """ 19 | Verify that the sync only sent records to the target for selected streams 20 | Create a new object for each stream 21 | Verify that the second sync includes at least one create for each stream 22 | Verify that the created record was picked up on the second sync 23 | """ 24 | conn_id = connections.ensure_connection(self) 25 | self.conn_id = conn_id 26 | 27 | streams_to_create = { 28 | "balance_transactions", # should be created implicity with create in payouts or charges 29 | "charges", 30 | "coupons", 31 | "customers", 32 | "invoice_items", 33 | # invoice_line_items are created implicity by invoices, this creates another invoice 34 | # TODO update test to remove invoice_line_items from here 35 | "invoice_line_items", 36 | "invoices", # this will create an invoice_item 37 | "payouts", 38 | "plans", 39 | "payment_intents", 40 | "products", 41 | "subscription_items", 42 | "subscriptions", # this will create a new plan and payment method 43 | } 44 | 45 | missing_streams_to_create = { 46 | "disputes", # create by simulating a dispute transaction with a specific card number 47 | # no way to create directly, see: https://stripe.com/docs/testing#disputes 48 | "payout_transactions", # BUG_9703 | https://jira.talendforge.org/browse/TDL-9703 49 | # depends on payouts and transactions 50 | "transfers", 51 | # needs an account that we can transfer to, not sure 52 | # how to set up a test account we can use to create a transfer 53 | } 54 | 55 | our_catalogs = self.run_and_verify_check_mode(conn_id) 56 | 57 | self.select_all_streams_and_fields( 58 | conn_id, our_catalogs, select_all_fields=True 59 | ) 60 | 61 | # Run a sync job using orchestrator 62 | first_sync_record_count = self.run_and_verify_sync(conn_id) 63 | 64 | # verify that the sync sent records to the target for selected streams (catalogs) 65 | self.assertTrue(streams_to_create.issubset(set(first_sync_record_count.keys()))) 66 | 67 | # Get the set of records from a first sync 68 | first_sync_records = 
runner.get_records_from_target_output() 69 | 70 | first_sync_created, _ = self.split_records_into_created_and_updated( 71 | first_sync_records 72 | ) 73 | 74 | new_objects = { 75 | stream: create_object(stream) 76 | for stream in streams_to_create.difference({"balance_transactions"}) 77 | } 78 | 79 | # Run a second sync job using orchestrator 80 | second_sync_record_count = self.run_and_verify_sync(conn_id) 81 | 82 | # Get the set of records from a second sync 83 | second_sync_records = runner.get_records_from_target_output() 84 | 85 | second_sync_created, _ = self.split_records_into_created_and_updated( 86 | second_sync_records 87 | ) 88 | 89 | # # THIS MAKES AN ASSUMPTION THAT CHILD STREAMS DO NOT NEED TESTING. 90 | # # ADJUST IF NECESSARY 91 | for stream in streams_to_create.difference(self.child_streams()): 92 | with self.subTest(stream=stream): 93 | 94 | second_sync_created_objects = second_sync_created.get(stream, {}).get( 95 | "messages", [] 96 | ) 97 | 98 | # verify that you get at least one new record on the second sync 99 | self.assertGreaterEqual( 100 | len(second_sync_created_objects), 101 | 1, 102 | msg="second sync didn't have created objects", 103 | ) 104 | 105 | if stream == "balance_transactions": 106 | sources = [record.get("data", {}).get("source") 107 | for record in second_sync_created_objects] 108 | 109 | self.assertTrue(new_objects['payouts']['id'] in sources) 110 | self.assertTrue(new_objects['charges']['id'] in sources) 111 | 112 | continue 113 | 114 | # TODO START DEBUG 115 | # remove debug after BUG https://jira.talendforge.org/browse/TDL-21614 is resolved 116 | if stream == 'invoices': 117 | null_date_invoices = [] 118 | masking_invoices = [] 119 | for rec in second_sync_records[stream]['messages']: 120 | # detect old failures by comparing record dates using both replication keys 121 | # it is believed that the created invoices never have 'date' and should 122 | # always fail verification due to the old split logic 123 | if not rec['data'].get('date'): 124 | if rec['data'].get('created') == rec['data'].get('updated'): 125 | null_date_invoices += [rec['data']['id']] 126 | # date key was found for records in the else clause. It is believed that 127 | # these are all updated records. 
Check to see if failure would be masked 128 | # by the split logic 129 | else: 130 | if rec['data'].get('date') == rec['data'].get('updated'): 131 | masking_invoices += [rec['data']['id']] 132 | LOGGER.info(f"null_date_invoices: {null_date_invoices}, " 133 | f"masking_invoices: {masking_invoices}, " 134 | f"new_id: {new_objects[stream]['id']}") 135 | self.assertTrue(new_objects[stream]['id'] in null_date_invoices) 136 | if new_objects[stream]['id'] not in masking_invoices: 137 | LOGGER.warn(f"### Previous error scenario detected (un-masked failure) ###") 138 | # TODO END DEBUG 139 | 140 | # verify the new object is in the list of created objects 141 | # from the second sync 142 | self.assertTrue( 143 | any( 144 | new_objects[stream]["id"] == record.get("data", {}).get("id") 145 | for record in second_sync_created_objects 146 | ) 147 | ) 148 | 149 | if stream in streams_to_create: 150 | delete_object(stream, new_objects[stream]["id"]) 151 | -------------------------------------------------------------------------------- /spikes/prior-art/tap_stripe/schemas/invoices.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "object", 3 | "properties": { 4 | "id": { 5 | "type": [ 6 | "null", 7 | "string" 8 | ] 9 | }, 10 | "object": { 11 | "type": [ 12 | "null", 13 | "string" 14 | ] 15 | }, 16 | "amount_due": { 17 | "type": [ 18 | "null", 19 | "integer" 20 | ] 21 | }, 22 | "amount_paid": { 23 | "type": [ 24 | "null", 25 | "integer" 26 | ] 27 | }, 28 | "amount_remaining": { 29 | "type": [ 30 | "null", 31 | "integer" 32 | ] 33 | }, 34 | "application_fee": { 35 | "type": [ 36 | "null", 37 | "integer" 38 | ] 39 | }, 40 | "attempt_count": { 41 | "type": [ 42 | "null", 43 | "integer" 44 | ] 45 | }, 46 | "attempted": { 47 | "type": [ 48 | "null", 49 | "boolean" 50 | ] 51 | }, 52 | "auto_advance": { 53 | "type": [ 54 | "null", 55 | "boolean" 56 | ] 57 | }, 58 | "billing": { 59 | "type": [ 60 | "null", 61 | "string" 62 | ] 63 | }, 64 | "billing_reason": { 65 | "type": [ 66 | "null", 67 | "string" 68 | ] 69 | }, 70 | "charge": { 71 | "type": [ 72 | "null", 73 | "string" 74 | ] 75 | }, 76 | "closed": { 77 | "type": [ 78 | "null", 79 | "boolean" 80 | ] 81 | }, 82 | "currency": { 83 | "type": [ 84 | "null", 85 | "string" 86 | ] 87 | }, 88 | "customer": { 89 | "type": [ 90 | "null", 91 | "string" 92 | ] 93 | }, 94 | "date": { 95 | "type": [ 96 | "null", 97 | "string" 98 | ], 99 | "format": "date-time" 100 | }, 101 | "description": { 102 | "type": [ 103 | "null", 104 | "string" 105 | ] 106 | }, 107 | "discount": { 108 | "$ref": "discount.json" 109 | }, 110 | "due_date": { 111 | "type": [ 112 | "null", 113 | "string" 114 | ], 115 | "format": "date-time" 116 | }, 117 | "ending_balance": { 118 | "type": [ 119 | "null", 120 | "integer" 121 | ] 122 | }, 123 | "forgiven": { 124 | "type": [ 125 | "null", 126 | "boolean" 127 | ] 128 | }, 129 | "hosted_invoice_url": { 130 | "type": [ 131 | "null", 132 | "string" 133 | ] 134 | }, 135 | "invoice_pdf": { 136 | "type": [ 137 | "null", 138 | "string" 139 | ] 140 | }, 141 | "lines": { 142 | "type": [ 143 | "null", 144 | "array" 145 | ], 146 | "items": { 147 | "type": "object", 148 | "properties": { 149 | "id": { 150 | "type": [ 151 | "null", 152 | "string" 153 | ] 154 | }, 155 | "object": { 156 | "type": [ 157 | "null", 158 | "string" 159 | ] 160 | }, 161 | "invoice_item": { 162 | "type": [ 163 | "null", 164 | "string" 165 | ] 166 | }, 167 | "amount": { 168 | "type": [ 169 | "null", 170 | "integer" 171 | ] 172 | }, 173 | 
"currency": { 174 | "type": [ 175 | "null", 176 | "string" 177 | ] 178 | }, 179 | "description": { 180 | "type": [ 181 | "null", 182 | "string" 183 | ] 184 | }, 185 | "discountable": { 186 | "type": [ 187 | "null", 188 | "boolean" 189 | ] 190 | }, 191 | "livemode": { 192 | "type": [ 193 | "null", 194 | "boolean" 195 | ] 196 | }, 197 | "metadata": { 198 | "type": [ 199 | "null", 200 | "object" 201 | ], 202 | "additionalProperties": true, 203 | "properties": {} 204 | }, 205 | "period": { 206 | "type": [ 207 | "null", 208 | "object" 209 | ], 210 | "properties": { 211 | "end": { 212 | "type": [ 213 | "null", 214 | "string" 215 | ], 216 | "format": "date-time" 217 | }, 218 | "start": { 219 | "type": [ 220 | "null", 221 | "string" 222 | ], 223 | "format": "date-time" 224 | } 225 | } 226 | }, 227 | "plan": { 228 | "$ref": "plan.json" 229 | }, 230 | "proration": { 231 | "type": [ 232 | "null", 233 | "boolean" 234 | ] 235 | }, 236 | "quantity": { 237 | "type": [ 238 | "null", 239 | "integer" 240 | ] 241 | }, 242 | "subscription": { 243 | "type": [ 244 | "null", 245 | "string" 246 | ] 247 | }, 248 | "subscription_item": { 249 | "type": [ 250 | "null", 251 | "string" 252 | ] 253 | }, 254 | "type": { 255 | "type": [ 256 | "null", 257 | "string" 258 | ] 259 | } 260 | } 261 | } 262 | }, 263 | "livemode": { 264 | "type": [ 265 | "null", 266 | "boolean" 267 | ] 268 | }, 269 | "metadata": { 270 | "type": [ 271 | "null", 272 | "object" 273 | ], 274 | "additionalProperties": true, 275 | "properties": {} 276 | }, 277 | "next_payment_attempt": { 278 | "type": [ 279 | "null", 280 | "string" 281 | ], 282 | "format": "date-time" 283 | }, 284 | "number": { 285 | "type": [ 286 | "null", 287 | "string" 288 | ] 289 | }, 290 | "paid": { 291 | "type": [ 292 | "null", 293 | "boolean" 294 | ] 295 | }, 296 | "period_end": { 297 | "type": [ 298 | "null", 299 | "string" 300 | ], 301 | "format": "date-time" 302 | }, 303 | "period_start": { 304 | "type": [ 305 | "null", 306 | "string" 307 | ], 308 | "format": "date-time" 309 | }, 310 | "receipt_number": { 311 | "type": [ 312 | "null", 313 | "string" 314 | ] 315 | }, 316 | "starting_balance": { 317 | "type": [ 318 | "null", 319 | "integer" 320 | ] 321 | }, 322 | "statement_descriptor": { 323 | "type": [ 324 | "null", 325 | "string" 326 | ] 327 | }, 328 | "subscription": { 329 | "type": [ 330 | "null", 331 | "string" 332 | ] 333 | }, 334 | "subtotal": { 335 | "type": [ 336 | "null", 337 | "integer" 338 | ] 339 | }, 340 | "tax": { 341 | "type": [ 342 | "null", 343 | "integer" 344 | ] 345 | }, 346 | "tax_percent": { 347 | "type": [ 348 | "null", 349 | "number" 350 | ] 351 | }, 352 | "total": { 353 | "type": [ 354 | "null", 355 | "integer" 356 | ] 357 | }, 358 | "webhooks_delivered_at": { 359 | "type": [ 360 | "null", 361 | "string" 362 | ], 363 | "format": "date-time" 364 | } 365 | } 366 | } 367 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## 3.2.0 4 | * Makes tap compatible with singer-python 6.0.0 [#195](https://github.com/singer-io/tap-stripe/pull/195) 5 | 6 | ## 3.1.0 7 | * Upgrades to run on python 3.11.7 [#193](https://github.com/singer-io/tap-stripe/pull/193) 8 | 9 | ## 3.0.0 10 | * Upgraded SDK and API version [#181](https://github.com/singer-io/tap-stripe/pull/181) 11 | * Schema changes 12 | 13 | ## 2.0.5 14 | * Resets the respective parent bookmark to start_date when event update bookmark date is older than 30 days 
[#178](https://github.com/singer-io/tap-stripe/pull/178) 15 | 16 | ## 2.0.4 17 | * Updates `payment_intents` stream to accommodate lists for `payment_method_options.card.available_plans` field [#172](https://github.com/singer-io/tap-stripe/pull/172) 18 | 19 | ## 2.0.3 20 | * Reverts singer-python from 5.13.0 to 5.5.1 [#165](https://github.com/singer-io/tap-stripe/pull/165) 21 | 22 | ## 2.0.2 23 | * Upgrade singer-python to 5.13.0 [#161](https://github.com/singer-io/tap-stripe/pull/161) 24 | 25 | ## 2.0.1 26 | * Reduce API calls to stripe [#150](https://github.com/singer-io/tap-stripe/pull/150) 27 | * Added support for the config parameter event_date_date_window. 28 | * The default event_date_date_window is 7 days; the maximum allowed value is 30 days. 29 | * Request event_updates for the last 30 days only. If the start_date or the last saved bookmark value is more than 30 days old, start the sync from 30 days ago. 30 | * Write the maximum of the replication key value and sync_start_time - event_date_window as the bookmark for event_updates. 31 | * Retry 429 errors 7 times with an exponential backoff factor of 2. 32 | 33 | ## 2.0.0 34 | * Upgraded SDK and API version [#105](https://github.com/singer-io/tap-stripe/pull/105) 35 | * Added event_type in all the schemas [#123](https://github.com/singer-io/tap-stripe/pull/123) 36 | * Added a new Payment Intent stream [#127](https://github.com/singer-io/tap-stripe/pull/127) 37 | * Reverted the `tiers` field datatype conversion [#117](https://github.com/singer-io/tap-stripe/pull/117) 38 | * Updated the object type for payouts [#133](https://github.com/singer-io/tap-stripe/pull/133) 39 | * Changed the lookback window logic [#138](https://github.com/singer-io/tap-stripe/pull/138) 40 | * Made parent and child streams independent of each other [#141](https://github.com/singer-io/tap-stripe/pull/141) 41 | * Logged the request_id for each request [#135](https://github.com/singer-io/tap-stripe/pull/135) 42 | * Check for credentials in discovery mode [#128](https://github.com/singer-io/tap-stripe/pull/128) 43 | * Added request timeout and retry [#126](https://github.com/singer-io/tap-stripe/pull/126) 44 | * Added integration tests [#129](https://github.com/singer-io/tap-stripe/pull/129) [#131](https://github.com/singer-io/tap-stripe/pull/131) 45 | * Updated existing integration tests 46 | 47 | ## 1.5.2 48 | * Event date window reduced from one week to one day [#120](https://github.com/singer-io/tap-stripe/pull/120) 49 | 50 | ## 1.5.1 51 | * Subscriptions stream will now request subscriptions of all statuses [#113](https://github.com/singer-io/tap-stripe/pull/113) 52 | 53 | ## 1.5.0 54 | * Add schema for card_present charges [#101](https://github.com/singer-io/tap-stripe/pull/101) 55 | 56 | ## 1.4.9 57 | * Allow partial days in the `date_window_size` config value [#100](https://github.com/singer-io/tap-stripe/pull/100) 58 | 59 | ## 1.4.8 60 | * Reverts 1.4.7 [#82](https://github.com/singer-io/tap-stripe/pull/82) 61 | 62 | ## 1.4.7 63 | * Updates singer-python from 5.5.1 to 5.12.1 [#81](https://github.com/singer-io/tap-stripe/pull/81) 64 | 65 | ## 1.4.6 66 | * Removed fields that caused transform errors 67 | 68 | ## 1.4.5 69 | * Added various schemas [#77](https://github.com/singer-io/tap-stripe/pull/77) 70 | 71 | ## 1.4.4 72 | * Use the default timeout in the stripe client [#61](https://github.com/singer-io/tap-stripe/pull/61) 73 | 74 | ## 1.4.3 75 | * Revert 1.4.2 changes from #59 [#60](https://github.com/singer-io/tap-stripe/pull/60) 76 | * Remove invalid and unused schema pieces
[#60](https://github.com/singer-io/tap-stripe/pull/60) 77 | 78 | ## 1.4.2 79 | * Revert 1.4.1 [#59](https://github.com/singer-io/tap-stripe/pull/59) 80 | 81 | ## 1.4.1 82 | * Add functionality that recursively converts `StripeObject`s to dictionaries. [#53](https://github.com/singer-io/tap-stripe/pull/53) 83 | 84 | ## 1.4.0 85 | * Added the `payment_method_details` field to the charges stream. [#49](https://github.com/singer-io/tap-stripe/pull/49) 86 | 87 | ## 1.3.7 88 | * Add a 5-minute lookback window on `events` and `balance_transactions` requests to catch an edge case. [#45](https://github.com/singer-io/tap-stripe/pull/45) 89 | 90 | ## 1.3.6 91 | * Use inclusive comparison for comparing bookmarks in the initial sync phase. [#44](https://github.com/singer-io/tap-stripe/pull/44) 92 | 93 | ## 1.3.5 94 | * Add "string" as a valid type for the `plan` subschema, to support historical data [#42](https://github.com/singer-io/tap-stripe/pull/42) 95 | * `Deleted` events will not cause the tap to request their sub-objects to prevent 404 errors [#41](https://github.com/singer-io/tap-stripe/pull/41) 96 | 97 | ## 1.3.4 98 | * Filter out invoice line items with null ids in the Events stream because we don't know what to use as the primary key in those cases [#40](https://github.com/singer-io/tap-stripe/pull/40) 99 | * Add products stream [#32](https://github.com/singer-io/tap-stripe/pull/32) 100 | 101 | ## 1.3.3 102 | * Mitigate potential for infinite loop by increasing `limit` on second request for sub-objects (e.g., `invoice_line_items`) [#39](https://github.com/singer-io/tap-stripe/pull/39) 103 | 104 | ## 1.3.0 105 | * Add `disputes` stream [#29](https://github.com/singer-io/tap-stripe/pull/29) 106 | 107 | ## 1.2.8 108 | * Add subscription and subscription_item to line_item PK [#28](https://github.com/singer-io/tap-stripe/pull/28) 109 | 110 | ## 1.2.0 111 | * Add `payout_transactions` stream and add date windowing to fix bookmarking [#23](https://github.com/singer-io/tap-stripe/pull/23) 112 | 113 | ## 1.1.2 114 | * Add optional `whitelist_map` config param which allows users to define a nested field whitelist [#22](https://github.com/singer-io/tap-stripe/pull/22) 115 | 116 | ## 1.1.1 117 | * On event updates, handle when `invoice_line_items` comes back as a dictionary instead of a list.
118 | * On event updates, skip the record when a sub-stream object doesn't have an "id" (e.g., older event update structures) 119 | 120 | ## 1.1.0 121 | * Invoice Line Items now use a composite PK [#19](https://github.com/singer-io/tap-stripe/pull/19) 122 | 123 | ## 1.0.2 124 | * Fixes `tiers` subschema to include its object properties (when it is an object) [#16](https://github.com/singer-io/tap-stripe/pull/16) 125 | 126 | ## 1.0.1 127 | * Fixes an issue where invoice events might have a different schema [#15](https://github.com/singer-io/tap-stripe/pull/15) 128 | 129 | ## 1.0.0 130 | * Adds proper support for Events to ensure only the most recent event is emitted [#13](https://github.com/singer-io/tap-stripe/pull/13) 131 | * Fixes JSON Schema refs to be correct [#14](https://github.com/singer-io/tap-stripe/pull/14) 132 | 133 | ## 0.2.4 134 | * Adds standard Singer metrics [#11](https://github.com/singer-io/tap-stripe/pull/11) 135 | 136 | ## 0.2.3 137 | * Unwraps data wrappers only if they are of type `list` 138 | * Adds `type` to the remaining `sources` schemas 139 | 140 | ## 0.2.2 141 | * Makes property accessors safer by using `.get()` with a default value 142 | * Adds `type` to items in `customers.cards` 143 | 144 | ## 0.2.1 145 | * Fixes sub-stream requests to not use a separate call to retrieve `subscription_items` and `invoice_line_items` for a parent. 146 | 147 | ## 0.2.0 148 | * Add date-window chunking to event updates stream [#9](https://github.com/singer-io/tap-stripe/pull/9) 149 | 150 | ## 0.1.1 151 | * Fix schema for subscriptions `details` to be a nullable object. 152 | 153 | ## 0.1.0 154 | * Initial release 155 | -------------------------------------------------------------------------------- /tests/test_pagination.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test tap pagination of streams 3 | """ 4 | import time 5 | from tap_tester import menagerie, runner, connections, LOGGER 6 | from base import BaseTapTest 7 | from utils import create_object, update_object, \ 8 | delete_object, list_all_object, get_catalogs, get_schema 9 | 10 | 11 | class PaginationTest(BaseTapTest): 12 | """ Test the tap pagination to get multiple pages of data """ 13 | 14 | @staticmethod 15 | def name(): 16 | return "tt_stripe_pagination" 17 | 18 | def test_run(self): 19 | """ 20 | Verify that for each stream you can get multiple pages of data 21 | and that when all fields are selected more than the automatic fields are replicated. 22 | 23 | PREREQUISITE 24 | For EACH stream add enough data that you surpass the limit of a single 25 | fetch of data. For instance if you have a limit of 250 records ensure 26 | that 251 (or more) records have been posted for that stream. 27 | """ 28 | conn_id = connections.ensure_connection(self) 29 | self.conn_id = conn_id 30 | 31 | incremental_streams = {key for key, value in self.expected_replication_method().items() 32 | if value == self.INCREMENTAL} 33 | # We cannot determine if a child stream is 34 | # returning a page of data because of duplication between normal parent records 35 | # and event updates of the parents. Hence the ticket: https://jira.talendforge.org/browse/TDL-10005 36 | # is a blocker, and the child streams are skipped in this test.
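# For context, a minimal sketch of the Stripe-style cursor pagination this test
# exercises (assumed client wrapper and stream name; mirrors get_payouts() in
# tests/test_automatic_payout_transactions.py):
#
#     page = stripe_obj_to_dict(client["charges"].list(limit=100))
#     while page.get("has_more"):
#         page = stripe_obj_to_dict(client["charges"].list(
#             limit=100, starting_after=page["data"][-1]["id"]))
#
# Each page returns at most `limit` records, so the test below seeds more than
# API_LIMIT records per stream to force at least two such pages.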
37 | direct_streams = self.child_streams().union({ 38 | # Data is generated automatically for 'balance_transactions' when 'charges' is created 39 | 'balance_transactions', 40 | # 'charges', 41 | # 'coupons', 42 | # 'customers', 43 | # 'disputes', 44 | # 'invoice_items', 45 | # Data is generated automatically for 'invoice_line_items' when 'invoice_items' is created 46 | 'invoice_line_items', 47 | # 'invoices', 48 | 'payout_transactions', 49 | # 'payouts', 50 | # 'plans', 51 | # 'products', 52 | # 'subscription_items', 53 | # 'subscriptions', 54 | # 'transfers', 55 | }) 56 | tested_streams = incremental_streams.difference(direct_streams) 57 | 58 | # Select all streams and all fields within streams 59 | found_catalogs = self.run_and_verify_check_mode(conn_id) 60 | our_catalogs = get_catalogs(conn_id, incremental_streams) 61 | self.select_all_streams_and_fields(conn_id, our_catalogs, select_all_fields=True) 62 | 63 | # Ensure tested streams have a record count which exceeds the API LIMIT 64 | LOGGER.info("Checking record counts for tested streams...") 65 | streams_to_create = {} 66 | for stream in tested_streams: 67 | records = list_all_object(stream) 68 | record_count = len(records) 69 | # Only track streams with record_count <= 100 (the API limit); records are created for these below, which prevents an index out of range error on "new_objects[stream][0].keys()" 70 | if record_count <= self.API_LIMIT: 71 | streams_to_create[stream] = record_count 72 | LOGGER.info("Stream %s has %s records created today", stream, record_count) 73 | 74 | LOGGER.info("Creating records for tested streams...") 75 | new_objects = {stream: [] for stream in streams_to_create} 76 | for stream in streams_to_create: 77 | if stream != "events" and streams_to_create[stream] <= self.API_LIMIT: 78 | while streams_to_create[stream] <= self.API_LIMIT: 79 | LOGGER.info("Creating a record for %s | %s records created today ", 80 | stream, streams_to_create[stream]) 81 | new_objects[stream].append(create_object(stream)) 82 | streams_to_create[stream] += 1 83 | records = list_all_object(stream) 84 | self.assertEqual(100, len(records)) 85 | LOGGER.info("Stream %s has at least %s records created today", stream, len(records) + 1) 86 | 87 | # Run a sync job using orchestrator 88 | record_count_by_stream = self.run_and_verify_sync(conn_id) 89 | synced_records = runner.get_records_from_target_output() 90 | 91 | actual_fields_by_stream = runner.examine_target_output_for_fields() 92 | stream_primary_keys = self.expected_primary_keys() 93 | 94 | for stream in tested_streams: 95 | with self.subTest(stream=stream): 96 | 97 | # verify that we can paginate with all fields selected 98 | self.assertGreater( 99 | record_count_by_stream.get(stream, -1), 100 | self.expected_metadata().get(stream, {}).get(self.API_LIMIT, 0), 101 | msg="The number of records is not over the stream max limit") 102 | 103 | # verify that the automatic fields are sent to the target 104 | actual = actual_fields_by_stream.get(stream) or set() 105 | expected = self.expected_automatic_fields().get(stream, set()) 106 | self.assertTrue(actual.issuperset(expected), 107 | msg="The fields sent to the target don't include all automatic fields. " 108 | "Expected: {}, Actual: {}".
format(expected, actual) 109 | ) 110 | 111 | # verify we have more fields sent to the target than just automatic fields 112 | # SKIP THIS ASSERTION IF ALL FIELDS ARE INTENTIONALLY AUTOMATIC FOR THIS STREAM 113 | actual = actual_fields_by_stream.get(stream) or set() 114 | expected = self.expected_automatic_fields().get(stream, set()) 115 | self.assertTrue(actual.symmetric_difference(expected), 116 | msg="The fields sent to the target don't include any non-automatic fields" 117 | ) 118 | 119 | actual_record_message = synced_records.get(stream).get('messages') 120 | 121 | # Primary keys list of the actual stream records which would have `updated_by_event_type` as None 122 | non_events_primary_keys_list = [tuple([message.get('data').get(expected_pk) for expected_pk in stream_primary_keys[stream]]) 123 | for message in actual_record_message 124 | if message.get('action') == 'upsert' and not message.get('data').get('updated_by_event_type', None)] 125 | 126 | 127 | primary_keys_list_1 = non_events_primary_keys_list[:self.API_LIMIT] 128 | primary_keys_list_2 = non_events_primary_keys_list[self.API_LIMIT:2*self.API_LIMIT] 129 | 130 | # Verify by primary keys that data is unique for page 131 | self.assertTrue( 132 | set(primary_keys_list_1).isdisjoint(set(primary_keys_list_2))) 133 | 134 | # Verify we did not duplicate any records across pages 135 | self.assertCountEqual(set(non_events_primary_keys_list), non_events_primary_keys_list, 136 | msg=f"We have duplicate records for {stream}") 137 | 138 | # Updated condition here because for some streams data is generated directly when the create call for the parent stream is made 139 | if stream != "events" and stream in streams_to_create: 140 | actual = actual_fields_by_stream.get(stream, set()) 141 | expected = set(new_objects[stream][0].keys()) 142 | # TODO uncomment when feature is added (https://stitchdata.atlassian.net/browse/SRCE-2466) 143 | # verify the target receives all possible fields for a given stream 144 | # self.assertEqual( 145 | # actual, expected, msg="The fields sent to the target have an extra or missing field" 146 | # ) 147 | 148 | # Primary keys list of the event based stream records which would have `updated_by_event_type` as a string 149 | events_based_primary_keys_list = [tuple([message.get('data').get(expected_pk) for expected_pk in stream_primary_keys[stream]]) 150 | for message in actual_record_message 151 | if message.get('action') == 'upsert' and message.get('data').get('updated_by_event_type', None)] 152 | 153 | primary_keys_list_1 = events_based_primary_keys_list[:self.API_LIMIT] 154 | primary_keys_list_2 = events_based_primary_keys_list[self.API_LIMIT:2*self.API_LIMIT] 155 | 156 | # Verify by primary keys that data is unique for page 157 | self.assertTrue( 158 | set(primary_keys_list_1).isdisjoint(set(primary_keys_list_2))) 159 | 160 | # Verify we did not duplicate any records across pages 161 | self.assertCountEqual(set(events_based_primary_keys_list), events_based_primary_keys_list, 162 | msg=f"We have duplicate records for {stream}") 163 | -------------------------------------------------------------------------------- /tests/test_start_date.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test that the start_date configuration is respected 3 | """ 4 | 5 | from functools import reduce 6 | from datetime import datetime as dt 7 | from datetime import timedelta 8 | from dateutil.parser import parse 9 | 10 | from tap_tester import menagerie, runner, connections, LOGGER 11 | 12 | from
base import BaseTapTest 13 | from utils import create_object, update_object, update_payment_intent, delete_object, get_catalogs 14 | 15 | 16 | class StartDateTest(BaseTapTest): 17 | """ 18 | Test that the start_date configuration is respected 19 | 20 | • verify that a sync with a later start date has at least one record synced 21 | and less records than the 1st sync with a previous start date 22 | • verify that each stream has less records than the earlier start date sync 23 | • verify all data from later start data has bookmark values >= start_date 24 | • verify that the minimum bookmark sent to the target for the later start_date sync 25 | is greater than or equal to the start date 26 | • verify by primary key values, that all records in the 1st sync are included in the 2nd sync. 27 | """ 28 | 29 | @staticmethod 30 | def name(): 31 | return "tt_stripe_start_date" 32 | 33 | def test_run(self): 34 | """Test we get a lot of data back based on the start date configured in base""" 35 | conn_id = connections.ensure_connection(self) 36 | self.conn_id = conn_id 37 | 38 | # Select all streams and all fields within streams 39 | found_catalogs = self.run_and_verify_check_mode(conn_id) 40 | incremental_streams = {key for key, value in self.expected_replication_method().items() 41 | if value == self.INCREMENTAL} 42 | 43 | # IF THERE ARE STREAMS THAT SHOULD NOT BE TESTED 44 | # REPLACE THE EMPTY SET BELOW WITH THOSE STREAMS 45 | untested_streams = self.child_streams().union({ 46 | 'events', 47 | 'payout_transactions' 48 | }) 49 | our_catalogs = get_catalogs(conn_id, incremental_streams.difference(untested_streams)) 50 | 51 | self.select_all_streams_and_fields(conn_id, our_catalogs, select_all_fields=True) 52 | 53 | # Create a record for each stream under test prior to the first sync 54 | new_objects = { 55 | stream: create_object(stream) 56 | for stream in incremental_streams.difference(untested_streams) 57 | } 58 | 59 | # Run a sync job using orchestrator 60 | first_sync_record_count = self.run_and_verify_sync(conn_id) 61 | first_total_records = reduce(lambda a, b: a + b, first_sync_record_count.values()) 62 | 63 | # Count actual rows synced 64 | first_sync_records = runner.get_records_from_target_output() 65 | first_sync_created, _ = self.split_records_into_created_and_updated(first_sync_records) 66 | 67 | # set the start date for a new connection based off bookmarks largest value 68 | first_max_bookmarks = self.max_bookmarks_by_stream(first_sync_records) 69 | 70 | bookmark_list = [next(iter(book.values())) for stream, book in first_max_bookmarks.items()] 71 | bookmark_dates = [] 72 | for bookmark in bookmark_list: 73 | try: 74 | bookmark_dates.append(parse(bookmark)) 75 | except (ValueError, OverflowError, TypeError): 76 | pass 77 | 78 | if not bookmark_dates: 79 | # THERE WERE NO BOOKMARKS THAT ARE DATES. 
80 | # REMOVE CODE TO FIND A START DATE AND ENTER ONE MANUALLY 81 | raise ValueError("no bookmarks could be parsed as dates, enter a start date manually") 82 | 83 | # largest_bookmark = reduce(lambda a, b: a if a > b else b, bookmark_dates) 84 | # self.start_date = self.local_to_utc(largest_bookmark).strftime(self.START_DATE_FORMAT) 85 | 86 | self.start_date = dt.strftime(dt.today() - timedelta(days=1), self.START_DATE_FORMAT) 87 | 88 | # create a new connection with the new start_date 89 | 90 | conn_id = connections.ensure_connection(self, original_properties=False) 91 | self.conn_id = conn_id 92 | 93 | # Select all streams and all fields within streams 94 | found_catalogs = self.run_and_verify_check_mode(conn_id) 95 | our_catalogs = [catalog for catalog in found_catalogs if 96 | catalog.get('tap_stream_id') in incremental_streams.difference( 97 | untested_streams)] 98 | self.select_all_streams_and_fields(conn_id, our_catalogs, select_all_fields=True) 99 | 100 | # Update a record for each stream under test prior to the 2nd sync 101 | updated = {} # holds id for updated objects in each stream 102 | for stream in new_objects: 103 | if stream == 'payment_intents': 104 | # updating the PaymentIntent object may require multiple attempts 105 | record = update_payment_intent(stream) 106 | else: 107 | # There needs to be some test data for each stream, otherwise this will break 108 | record = first_sync_created[stream]["messages"][0]["data"] 109 | update_object(stream, record["id"]) 110 | updated[stream] = record["id"] 111 | 112 | # Run a sync job using orchestrator 113 | second_sync_record_count = self.run_and_verify_sync(conn_id, clear_state=True) 114 | 115 | # tap-stripe uses events for updates, so updates need to be filtered out to validate the bookmark 116 | second_sync_records = runner.get_records_from_target_output() 117 | second_sync_created, second_sync_updated = self.split_records_into_created_and_updated(second_sync_records) 118 | second_total_records = reduce(lambda a, b: a + b, second_sync_record_count.values(), 0) 119 | 120 | # Only examine bookmarks for "created" objects, not updates 121 | second_min_bookmarks = self.min_bookmarks_by_stream(second_sync_created) 122 | 123 | # verify that at least one record synced; since records were created and updated between syncs, the 2nd sync should return more records than the 1st 124 | self.assertGreater(second_total_records, 0) 125 | self.assertLess(first_total_records, second_total_records) 126 | stream_primary_keys = self.expected_primary_keys() 127 | 128 | # validate that all newly created records have bookmark values greater than or equal to the start_date 129 | for stream in incremental_streams.difference(untested_streams): 130 | with self.subTest(stream=stream): 131 | 132 | # verify that each stream has more or equal records in the second sync than in the first 133 | self.assertGreaterEqual( 134 | second_sync_record_count.get(stream, 0), 135 | first_sync_record_count.get(stream, 0), 136 | msg="first had more records, start_date usage not verified") 137 | 138 | # verify all data from 2nd sync >= start_date 139 | target_mark = second_min_bookmarks.get(stream, {"mark": None}) 140 | target_value = next(iter(target_mark.values())) # there should be only one 141 | 142 | record_count_1 = first_sync_record_count[stream] 143 | record_count_2 = second_sync_record_count[stream] 144 | primary_keys_list_1 = [tuple(message.get('data').get(expected_pk) for expected_pk in stream_primary_keys[stream]) 145 | for message in first_sync_records.get(stream).get('messages') 146 | if message.get('action') == 'upsert'] 147 | primary_keys_list_2 = [tuple(message.get('data').get(expected_pk) for expected_pk in stream_primary_keys[stream]) 148 | for message in second_sync_records.get(stream).get('messages') 149 | if message.get('action') == 'upsert'] 150 | primary_keys_sync_1 = set(primary_keys_list_1) 151 | primary_keys_sync_2 = set(primary_keys_list_2) 152 | 153 | # Verify by primary key values, that all records in the 1st sync are included in the 2nd sync. 154 | self.assertTrue( 155 | primary_keys_sync_1.issubset(primary_keys_sync_2)) 156 | 157 | if target_value: 158 | 159 | # it's okay if there isn't target data for a stream 160 | try: 161 | target_value = self.local_to_utc(parse(target_value)) 162 | expected_value = self.local_to_utc(parse(self.start_date)) 163 | # verify that the minimum bookmark sent to the target for the second sync 164 | # is greater than or equal to the start date 165 | 166 | # TODO - BUG https://jira.talendforge.org/browse/TDL-20911 167 | # A lookback window is applied to the start_date, but the lookback window should not go beyond the start_date 168 | if stream in ('balance_transactions', 'events'): 169 | expected_value = expected_value - timedelta(minutes=10) 170 | self.assertGreaterEqual(target_value, expected_value) 171 | 172 | except (OverflowError, ValueError, TypeError): 173 | LOGGER.warning("bookmarks cannot be converted to dates, can't test start_date for %s", stream) 174 | 175 | if stream in updated: 176 | delete_object(stream, updated[stream]) 177 | --------------------------------------------------------------------------------
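The bookmark assertion above normalizes both timestamps to UTC before comparing and, for the two streams affected by the lookback-window bug (TDL-20911), widens the expected floor by ten minutes. A condensed sketch of that comparison with illustrative timestamps follows; local_to_utc belongs to the suite's base class, so plain dateutil and the standard library stand in for it here.

```python
from datetime import timedelta, timezone

from dateutil.parser import parse

start_date = "2023-01-01T00:00:00Z"    # illustrative configured start date
min_bookmark = "2023-01-01T08:30:00Z"  # illustrative minimum bookmark from the 2nd sync

expected_value = parse(start_date).astimezone(timezone.utc)
actual_value = parse(min_bookmark).astimezone(timezone.utc)

# balance_transactions and events may legitimately dip up to 10 minutes
# before the start date until TDL-20911 is fixed
expected_value -= timedelta(minutes=10)
assert actual_value >= expected_value
```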
/tests/test_discovery.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test tap discovery 3 | """ 4 | import re 5 | 6 | from tap_tester import menagerie, connections 7 | 8 | from base import BaseTapTest 9 | 10 | 11 | class DiscoveryTest(BaseTapTest): 12 | """ Test the tap discovery """ 13 | 14 | @staticmethod 15 | def name(): 16 | return "tt_stripe_discovery" 17 | 18 | def test_run(self): 19 | """ 20 | Verify that discover creates the appropriate catalog, schema, metadata, etc. 21 | 22 | • Verify the number of actual streams discovered matches the expected number 23 | • Verify the stream names discovered were what we expect 24 | • Verify stream names follow naming convention 25 | streams should only have lowercase alphas and underscores 26 | • verify there is only 1 top level breadcrumb 27 | • verify replication key(s) 28 | • verify primary key(s) 29 | • verify that if there is a replication key we are doing INCREMENTAL otherwise FULL 30 | • verify the actual replication matches our expected replication method 31 | • verify that primary, replication and foreign keys 32 | are given the inclusion of automatic (metadata and annotated schema). 
33 | • verify that all other fields have inclusion of available (metadata and schema) 34 | • verify there are no duplicate metadata entries 35 | """ 36 | conn_id = connections.ensure_connection(self) 37 | self.conn_id = conn_id 38 | 39 | # Verify the number of actual streams discovered matches expected 40 | found_catalogs = self.run_and_verify_check_mode(conn_id) 41 | self.assertGreater(len(found_catalogs), 0, 42 | msg="unable to locate schemas for connection {}".format(conn_id)) 43 | self.assertEqual(len(found_catalogs), 44 | len(self.expected_streams()), 45 | msg="Expected {} streams, actual was {} for connection {}," 46 | " actual catalogs {}".format( 47 | len(self.expected_streams()), 48 | len(found_catalogs), 49 | conn_id, 50 | found_catalogs)) 51 | 52 | # Verify the stream names discovered were what we expect 53 | found_catalog_names = {c['tap_stream_id'] for c in found_catalogs} 54 | self.assertEqual(set(self.expected_streams()), 55 | set(found_catalog_names), 56 | msg="Expected streams don't match actual streams") 57 | 58 | # Verify stream names follow naming convention 59 | # streams should only have lowercase alphas and underscores 60 | self.assertTrue(all([re.fullmatch(r"[a-z_]+", name) for name in found_catalog_names]), 61 | msg="One or more streams don't follow standard naming") 62 | 63 | for stream in self.expected_streams(): 64 | with self.subTest(stream=stream): 65 | catalog = next(iter([catalog for catalog in found_catalogs 66 | if catalog["stream_name"] == stream])) 67 | assert catalog # based on previous tests this should always be found 68 | 69 | schema_and_metadata = menagerie.get_annotated_schema(conn_id, catalog['stream_id']) 70 | metadata = schema_and_metadata["metadata"] 71 | schema = schema_and_metadata["annotated-schema"] 72 | 73 | # verify the stream level properties are as expected 74 | # verify there is only 1 top level breadcrumb 75 | stream_properties = [item for item in metadata if item.get("breadcrumb") == []] 76 | self.assertEqual(len(stream_properties), 1, 77 | msg="Expected exactly one top level breadcrumb") 78 | 79 | actual_fields = [md_entry.get("breadcrumb")[1] for md_entry in metadata if md_entry.get("breadcrumb") != []] 80 | 81 | # verify there are no duplicate metadata entries 82 | self.assertEqual(len(actual_fields), len(set(actual_fields)), msg="duplicate metadata entries found in the fields retrieved") 83 | 84 | # verify replication key(s) 85 | actual = set(stream_properties[0].get("metadata", {self.REPLICATION_KEYS: []}).get(self.REPLICATION_KEYS) or []) 86 | expected = self.expected_replication_keys()[stream] or set() 87 | self.assertEqual( 88 | actual, 89 | expected, 90 | msg="expected replication key {} but actual is {}".format( 91 | expected, actual)) 92 | 93 | # verify primary key(s) 94 | self.assertEqual( 95 | set(stream_properties[0].get( 96 | "metadata", {self.PRIMARY_KEYS: []}).get(self.PRIMARY_KEYS, [])), 97 | self.expected_primary_keys()[stream], 98 | msg="expected primary key {} but actual is {}".format( 99 | self.expected_primary_keys()[stream], 100 | set(stream_properties[0].get( 101 | "metadata", {self.PRIMARY_KEYS: None}).get(self.PRIMARY_KEYS, [])))) 102 | 103 | # verify that if there is a replication key we are doing INCREMENTAL otherwise FULL 104 | actual_replication_method = stream_properties[0].get( 105 | "metadata", {self.REPLICATION_METHOD: None}).get(self.REPLICATION_METHOD) 106 | 107 | # invoice_line_items has no bookmark, but is an incremental child of invoices 108 | if stream == 'invoice_line_items' or stream_properties[0].get( 109 | "metadata", 
{self.REPLICATION_KEYS: []}).get(self.REPLICATION_KEYS, []): 110 | 111 | self.assertTrue(actual_replication_method == self.INCREMENTAL, 112 | msg="Expected INCREMENTAL replication " 113 | "since there is a replication key") 114 | else: 115 | self.assertTrue(actual_replication_method == self.FULL, 116 | msg="Expected FULL replication " 117 | "since there is no replication key") 118 | 119 | # verify the actual replication matches our expected replication method 120 | self.assertEqual( 121 | self.expected_replication_method().get(stream, None), 122 | actual_replication_method, 123 | msg="The actual replication method {} doesn't match the expected {}".format( 124 | actual_replication_method, 125 | self.expected_replication_method().get(stream, None))) 126 | 127 | expected_automatic_fields = self.expected_automatic_fields()[stream] or set() 128 | 129 | # verify that primary, replication and foreign keys 130 | # are given the inclusion of automatic in annotated schema. 131 | actual_automatic_fields = {key for key, value in schema["properties"].items() 132 | if value.get("inclusion") == "automatic"} 133 | self.assertEqual(expected_automatic_fields, 134 | actual_automatic_fields, 135 | msg="expected {} automatic fields but got {}".format( 136 | expected_automatic_fields, 137 | actual_automatic_fields)) 138 | 139 | # verify that all other fields have inclusion of available 140 | # This assumes there are no unsupported fields for SaaS sources 141 | self.assertTrue( 142 | all({value.get("inclusion") == "available" for key, value 143 | in schema["properties"].items() 144 | if key not in actual_automatic_fields}), 145 | msg="Not all non key properties are set to available in annotated schema") 146 | 147 | # verify that primary, replication and foreign keys 148 | # are given the inclusion of automatic in metadata. 
149 | actual_automatic_fields = \ 150 | {item.get("breadcrumb", ["properties", None])[1] 151 | for item in metadata 152 | if item.get("metadata").get("inclusion") == "automatic"} 153 | self.assertEqual(expected_automatic_fields, 154 | actual_automatic_fields, 155 | msg="expected {} automatic fields but got {}".format( 156 | expected_automatic_fields, 157 | actual_automatic_fields)) 158 | 159 | # verify that all other fields have inclusion of available 160 | # This assumes there are no unsupported fields for SaaS sources 161 | self.assertTrue( 162 | all({item.get("metadata").get("inclusion") == "available" 163 | for item in metadata 164 | if item.get("breadcrumb", []) != [] 165 | and item.get("breadcrumb", ["properties", None])[1] 166 | not in actual_automatic_fields}), 167 | msg="Not all non key properties are set to available in metadata") 168 | 169 | 170 | -------------------------------------------------------------------------------- /tap_stripe/schemas/subscription_items.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": [ 3 | "null", 4 | "object" 5 | ], 6 | "properties": { 7 | "metadata": { 8 | "type": [ 9 | "null", 10 | "object" 11 | ], 12 | "properties": {} 13 | }, 14 | "canceled_at": { 15 | "type": [ 16 | "null", 17 | "string" 18 | ], 19 | "format": "date-time" 20 | }, 21 | "current_period_end": { 22 | "type": [ 23 | "null", 24 | "string" 25 | ], 26 | "format": "date-time" 27 | }, 28 | "plan": {"$ref": "shared/plan.json#/"}, 29 | "tax_rates": { 30 | "type": [ 31 | "null", 32 | "array" 33 | ], 34 | "items": { 35 | "type": [ 36 | "null", 37 | "object" 38 | ], 39 | "properties": { 40 | "id": { 41 | "type": [ 42 | "null", 43 | "string" 44 | ] 45 | }, 46 | "object": { 47 | "type": [ 48 | "null", 49 | "string" 50 | ] 51 | }, 52 | "active": { 53 | "type": [ 54 | "null", 55 | "boolean" 56 | ] 57 | }, 58 | "country": { 59 | "type": [ 60 | "null", 61 | "string" 62 | ] 63 | }, 64 | "created": { 65 | "type": [ 66 | "null", 67 | "string" 68 | ], 69 | "format": "date-time" 70 | }, 71 | "description": { 72 | "type": [ 73 | "null", 74 | "string" 75 | ] 76 | }, 77 | "display_name": { 78 | "type": [ 79 | "null", 80 | "string" 81 | ] 82 | }, 83 | "inclusive": { 84 | "type": [ 85 | "null", 86 | "boolean" 87 | ] 88 | }, 89 | "jurisdiction": { 90 | "type": [ 91 | "null", 92 | "string" 93 | ] 94 | }, 95 | "livemode": { 96 | "type": [ 97 | "null", 98 | "boolean" 99 | ] 100 | }, 101 | "percentage": { 102 | "type": [ 103 | "null", 104 | "string" 105 | ], 106 | "format": "singer.decimal" 107 | }, 108 | "state": { 109 | "type": [ 110 | "null", 111 | "string" 112 | ] 113 | }, 114 | "tax_type": { 115 | "type": [ 116 | "null", 117 | "boolean" 118 | ] 119 | }, 120 | "metadata": { 121 | "type": [ 122 | "null", 123 | "object" 124 | ], 125 | "properties": {} 126 | } 127 | } 128 | } 129 | }, 130 | "price": { 131 | "type": [ 132 | "null", 133 | "object" 134 | ], 135 | "properties": { 136 | "id": { 137 | "type": [ 138 | "null", 139 | "string" 140 | ] 141 | }, 142 | "object": { 143 | "type": [ 144 | "null", 145 | "string" 146 | ] 147 | }, 148 | "active": { 149 | "type": [ 150 | "null", 151 | "boolean" 152 | ] 153 | }, 154 | "billing_scheme": { 155 | "type": [ 156 | "null", 157 | "string" 158 | ] 159 | }, 160 | "created": { 161 | "type": [ 162 | "null", 163 | "string" 164 | ], 165 | "format": "date-time" 166 | }, 167 | "currency": { 168 | "type": [ 169 | "null", 170 | "string" 171 | ] 172 | }, 173 | "lookup_key": { 174 | "type": [ 175 | "null", 176 | "string" 177 | ] 
178 | }, 179 | "nickname": { 180 | "type": [ 181 | "null", 182 | "string" 183 | ] 184 | }, 185 | "product": { 186 | "type": [ 187 | "null", 188 | "string" 189 | ] 190 | }, 191 | "recurring": { 192 | "type": [ 193 | "null", 194 | "object" 195 | ], 196 | "properties": { 197 | "aggregate_usage": { 198 | "type": [ 199 | "null", 200 | "string" 201 | ] 202 | }, 203 | "interval": { 204 | "type": [ 205 | "null", 206 | "string" 207 | ] 208 | }, 209 | "interval_count": { 210 | "type": [ 211 | "null", 212 | "integer" 213 | ] 214 | }, 215 | "usage_type": { 216 | "type": [ 217 | "null", 218 | "string" 219 | ] 220 | } 221 | } 222 | }, 223 | "tax_behavior": { 224 | "type": [ 225 | "null", 226 | "string" 227 | ] 228 | }, 229 | "tiers": { 230 | "type": [ 231 | "null", 232 | "array" 233 | ], 234 | "items": { 235 | "type": [ 236 | "null", 237 | "object" 238 | ], 239 | "properties": { 240 | "flat_amount": { 241 | "type": [ 242 | "null", 243 | "integer" 244 | ] 245 | }, 246 | "flat_amount_decimal": { 247 | "type": [ 248 | "null", 249 | "string" 250 | ] 251 | }, 252 | "unit_amount": { 253 | "type": [ 254 | "null", 255 | "integer" 256 | ] 257 | }, 258 | "unit_amount_decimal": { 259 | "type": [ 260 | "null", 261 | "string" 262 | ] 263 | }, 264 | "up_to": { 265 | "type": [ 266 | "null", 267 | "integer" 268 | ] 269 | } 270 | } 271 | } 272 | }, 273 | "tiers_mode": { 274 | "type": [ 275 | "null", 276 | "string" 277 | ] 278 | }, 279 | "transform_quantity": { 280 | "type": [ 281 | "null", 282 | "object" 283 | ], 284 | "properties": { 285 | "divide_by": { 286 | "type": [ 287 | "null", 288 | "integer" 289 | ] 290 | }, 291 | "round": { 292 | "type": [ 293 | "null", 294 | "string" 295 | ] 296 | } 297 | } 298 | }, 299 | "type": { 300 | "type": [ 301 | "null", 302 | "string" 303 | ] 304 | }, 305 | "unit_amount": { 306 | "type": [ 307 | "null", 308 | "integer" 309 | ] 310 | }, 311 | "unit_amount_decimal": { 312 | "type": [ 313 | "null", 314 | "string" 315 | ] 316 | }, 317 | "livemode": { 318 | "type": [ 319 | "null", 320 | "boolean" 321 | ] 322 | }, 323 | "metadata": { 324 | "type": [ 325 | "null", 326 | "object" 327 | ], 328 | "properties": {} 329 | } 330 | } 331 | }, 332 | "subscription": { 333 | "type": [ 334 | "null", 335 | "string" 336 | ] 337 | }, 338 | "trial_start": { 339 | "type": [ 340 | "null", 341 | "string" 342 | ], 343 | "format": "date-time" 344 | }, 345 | "created": { 346 | "type": [ 347 | "null", 348 | "string" 349 | ], 350 | "format": "date-time" 351 | }, 352 | "cancel_at_period_end": { 353 | "type": [ 354 | "null", 355 | "boolean" 356 | ] 357 | }, 358 | "quantity": { 359 | "type": [ 360 | "null", 361 | "integer" 362 | ] 363 | }, 364 | "tax_percent": { 365 | "type": [ 366 | "null", 367 | "number" 368 | ] 369 | }, 370 | "current_period_start": { 371 | "type": [ 372 | "null", 373 | "string" 374 | ], 375 | "format": "date-time" 376 | }, 377 | "start": { 378 | "type": [ 379 | "null", 380 | "string" 381 | ], 382 | "format": "date-time" 383 | }, 384 | "billing_thresholds": { 385 | "type": [ 386 | "null", 387 | "object" 388 | ], 389 | "properties": { 390 | "usage_gte": { 391 | "type": [ 392 | "null", 393 | "integer" 394 | ] 395 | } 396 | } 397 | }, 398 | "discount": { 399 | "type": [ 400 | "null", 401 | "object" 402 | ], 403 | "properties": {} 404 | }, 405 | "application_fee_percent": { 406 | "type": [ 407 | "null", 408 | "number" 409 | ] 410 | }, 411 | "id": { 412 | "type": [ 413 | "null", 414 | "string" 415 | ] 416 | }, 417 | "status": { 418 | "type": [ 419 | "null", 420 | "string" 421 | ] 422 | }, 423 | 
"customer": { 424 | "type": [ 425 | "null", 426 | "string" 427 | ] 428 | }, 429 | "object": { 430 | "type": [ 431 | "null", 432 | "string" 433 | ] 434 | }, 435 | "livemode": { 436 | "type": [ 437 | "null", 438 | "boolean" 439 | ] 440 | }, 441 | "ended_at": { 442 | "type": [ 443 | "null", 444 | "string" 445 | ], 446 | "format": "date-time" 447 | }, 448 | "trial_end": { 449 | "type": [ 450 | "null", 451 | "string" 452 | ], 453 | "format": "date-time" 454 | } 455 | } 456 | } 457 | --------------------------------------------------------------------------------