├── tests ├── __init__.py ├── push │ ├── __init__.py │ ├── test_segment_worker.py │ └── test_parser.py ├── sync │ ├── __init__.py │ ├── test_impressions_count_synchronizer.py │ └── test_events_synchronizer.py ├── recorder │ └── __init__.py ├── client │ ├── files │ │ ├── file1.split │ │ └── file2.yaml │ └── test_utils.py ├── integration │ └── files │ │ ├── segmentHumanBeignsChanges.json │ │ ├── segmentEmployeesChanges.json │ │ ├── file2.yaml │ │ └── split_changes_temp.json ├── helpers │ └── __init__.py ├── models │ ├── grammar │ │ ├── files │ │ │ ├── equal-to-semver.csv │ │ │ ├── invalid-semantic-versions.csv │ │ │ ├── between-semver.csv │ │ │ ├── valid-semantic-versions.csv │ │ │ └── regex.txt │ │ ├── test_partitions.py │ │ ├── test_semver.py │ │ └── test_conditions.py │ ├── test_token.py │ ├── test_notification.py │ ├── test_fallback.py │ └── test_rule_based_segments.py ├── engine │ ├── files │ │ ├── rule_base_segments3.json │ │ ├── rule_base_segments.json │ │ └── rule_base_segments2.json │ ├── cache │ │ └── test_lru.py │ ├── test_bloom_filter.py │ ├── test_splitter.py │ ├── test_unique_keys_tracker.py │ └── test_hashfns.py ├── util │ ├── test_threadutil.py │ └── test_backoff.py ├── api │ └── test_util.py ├── tasks │ ├── test_telemetry_sync.py │ ├── test_events_sync.py │ └── test_unique_keys_sync.py └── storage │ └── test_flag_sets.py ├── splitio ├── client │ ├── __init__.py │ ├── key.py │ ├── util.py │ ├── localhost.py │ └── listener.py ├── engine │ ├── __init__.py │ ├── cache │ │ ├── __init__.py │ │ └── lru.py │ ├── hashfns │ │ ├── legacy.py │ │ └── __init__.py │ ├── splitters.py │ ├── filters.py │ └── impressions │ │ ├── impressions.py │ │ └── strategies.py ├── optional │ ├── __init__.py │ └── loaders.py ├── recorder │ └── __init__.py ├── sync │ ├── __init__.py │ ├── util.py │ └── event.py ├── util │ ├── __init__.py │ ├── decorators.py │ ├── time.py │ ├── backoff.py │ └── threadutil.py ├── tasks │ ├── util │ │ └── __init__.py │ ├── __init__.py │ ├── segment_sync.py │ ├── split_sync.py │ ├── telemetry_sync.py │ ├── events_sync.py │ ├── impressions_sync.py │ └── unique_keys_sync.py ├── models │ ├── grammar │ │ ├── __init__.py │ │ ├── matchers │ │ │ ├── utils │ │ │ │ └── __init__.py │ │ │ ├── prerequisites.py │ │ │ ├── keys.py │ │ │ ├── rule_based_segment.py │ │ │ ├── misc.py │ │ │ ├── __init__.py │ │ │ └── base.py │ │ ├── partitions.py │ │ └── condition.py │ ├── __init__.py │ ├── events.py │ ├── fallback_treatment.py │ ├── datatypes.py │ ├── impressions.py │ ├── token.py │ ├── segments.py │ └── fallback_config.py ├── storage │ └── adapters │ │ ├── __init__.py │ │ └── util.py ├── spec.py ├── version.py ├── key.py ├── factories.py ├── impressions.py ├── __init__.py ├── exceptions.py ├── push │ └── __init__.py └── api │ ├── __init__.py │ └── auth.py ├── .github ├── CODEOWNERS ├── pull_request_template.md └── workflows │ ├── update-license-year.yml │ └── ci.yml ├── sonar-project.properties ├── doc └── source │ ├── index.rst │ └── flask_support.rst ├── LICENSE.txt ├── .coveragerc ├── setup.cfg ├── .gitignore ├── CONTRIBUTORS-GUIDE.md └── setup.py /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/push/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/sync/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /splitio/client/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /splitio/engine/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /splitio/optional/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /splitio/recorder/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /splitio/sync/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /splitio/util/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/recorder/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /splitio/engine/cache/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /splitio/tasks/util/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @splitio/sdk 2 | -------------------------------------------------------------------------------- /splitio/models/grammar/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /splitio/storage/adapters/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /splitio/spec.py: -------------------------------------------------------------------------------- 1 | SPEC_VERSION = '1.3' 2 | -------------------------------------------------------------------------------- /splitio/version.py: -------------------------------------------------------------------------------- 1 | __version__ = '10.5.1' -------------------------------------------------------------------------------- /splitio/models/grammar/matchers/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/client/files/file1.split: -------------------------------------------------------------------------------- 1 | split1 on 2 | split2 off 3 | -------------------------------------------------------------------------------- /splitio/key.py: -------------------------------------------------------------------------------- 1 | """Compatibility module for key.""" 2 | 3 
| from splitio.client.key import Key 4 | -------------------------------------------------------------------------------- /splitio/factories.py: -------------------------------------------------------------------------------- 1 | """Backwards compatibility module.""" 2 | from splitio.client.factory import get_factory 3 | -------------------------------------------------------------------------------- /splitio/impressions.py: -------------------------------------------------------------------------------- 1 | """Compatibility module for impressions listener.""" 2 | 3 | from splitio.client.listener import ImpressionListener 4 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | # Python SDK 2 | 3 | ## What did you accomplish? 4 | 5 | ## How do we test the changes introduced in this PR? 6 | 7 | ## Extra Notes 8 | -------------------------------------------------------------------------------- /splitio/__init__.py: -------------------------------------------------------------------------------- 1 | from splitio.client.factory import get_factory, get_factory_async 2 | from splitio.client.key import Key 3 | from splitio.version import __version__ 4 | -------------------------------------------------------------------------------- /splitio/exceptions.py: -------------------------------------------------------------------------------- 1 | """This module contains everything related to split.io exceptions""" 2 | from splitio.client.factory import TimeoutException 3 | from splitio.storage.adapters.redis import SentinelConfigurationException 4 | -------------------------------------------------------------------------------- /tests/integration/files/segmentHumanBeignsChanges.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "human_beigns", 3 | "added": [ 4 | "user1", 5 | "user3" 6 | ], 7 | "removed": [], 8 | "since": -1, 9 | "till": 1457102183278 10 | } -------------------------------------------------------------------------------- /tests/integration/files/segmentEmployeesChanges.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "employees", 3 | "added": [ 4 | "employee_3", 5 | "employee_1" 6 | ], 7 | "removed": [], 8 | "since": -1, 9 | "till": 1457474612832 10 | } -------------------------------------------------------------------------------- /splitio/models/__init__.py: -------------------------------------------------------------------------------- 1 | class MatcherNotFoundException(Exception): 2 | """Exception to raise when a matcher is not found.""" 3 | 4 | def __init__(self, custom_message): 5 | """Constructor.""" 6 | Exception.__init__(self, custom_message) -------------------------------------------------------------------------------- /tests/helpers/__init__.py: -------------------------------------------------------------------------------- 1 | """Misc helpers for testing purposes.""" 2 | 3 | 4 | class Any(object): #pylint:disable=too-few-public-methods 5 | """Crap that matches anything.""" 6 | 7 | def __eq__(self, other): 8 | """Match anything.""" 9 | return True 10 | -------------------------------------------------------------------------------- /tests/models/grammar/files/equal-to-semver.csv: -------------------------------------------------------------------------------- 1 | version1,version2,equals 2 | 1.1.1,1.1.1,true 3 | 
1.1.1,1.1.1+metadata,false 4 | 1.1.1,1.1.1-rc.1,false 5 | 88.88.88,88.88.88,true 6 | 1.2.3----RC-SNAPSHOT.12.9.1--.12,1.2.3----RC-SNAPSHOT.12.9.1--.12,true 7 | 10.2.3-DEV-SNAPSHOT,10.2.3-SNAPSHOT-123,false -------------------------------------------------------------------------------- /splitio/util/decorators.py: -------------------------------------------------------------------------------- 1 | """Misc decorators.""" 2 | from abc import abstractmethod 3 | 4 | 5 | def abstract_property(func): 6 | """ 7 | Abstract property decorator. 8 | 9 | :param func: method to decorate 10 | :type func: callable 11 | 12 | :returns: decorated function 13 | :rtype: callable 14 | """ 15 | return property(abstractmethod(func)) 16 | -------------------------------------------------------------------------------- /sonar-project.properties: -------------------------------------------------------------------------------- 1 | sonar.projectName=python-client 2 | sonar.projectKey=python-client 3 | sonar.python.version=3.6 4 | sonar.sources=splitio 5 | sonar.tests=tests 6 | sonar.text.excluded.file.suffixes=.csv 7 | sonar.python.coverage.reportPaths=coverage.xml 8 | sonar.coverage.exclusions=**/__init__.py 9 | sonar.links.ci=https://github.com/splitio/python-client 10 | sonar.links.scm=https://github.com/splitio/python-client/actions 11 | -------------------------------------------------------------------------------- /splitio/models/events.py: -------------------------------------------------------------------------------- 1 | """ 2 | Event DTO and Storage classes. 3 | 4 | The dto is implemented as a namedtuple for performance matters. 5 | """ 6 | from collections import namedtuple 7 | 8 | 9 | Event = namedtuple('Event', [ 10 | 'key', 11 | 'traffic_type_name', 12 | 'event_type_id', 13 | 'value', 14 | 'timestamp', 15 | 'properties', 16 | ]) 17 | 18 | EventWrapper = namedtuple('EventWrapper', [ 19 | 'event', 20 | 'size', 21 | ]) 22 | -------------------------------------------------------------------------------- /splitio/push/__init__.py: -------------------------------------------------------------------------------- 1 | class AuthException(Exception): 2 | """Exception to raise when an API call fails.""" 3 | 4 | def __init__(self, custom_message, status_code=None): 5 | """Constructor.""" 6 | Exception.__init__(self, custom_message) 7 | 8 | class SplitStorageException(Exception): 9 | """Exception to raise when an API call fails.""" 10 | 11 | def __init__(self, custom_message, status_code=None): 12 | """Constructor.""" 13 | Exception.__init__(self, custom_message) 14 | -------------------------------------------------------------------------------- /tests/client/files/file2.yaml: -------------------------------------------------------------------------------- 1 | - my_feature: 2 | treatment: "on" 3 | keys: "key" 4 | config: "{\"desc\" : \"this applies only to ON treatment\"}" 5 | - other_feature_3: 6 | treatment: "off" 7 | - my_feature: 8 | treatment: "off" 9 | keys: "only_key" 10 | config: "{\"desc\" : \"this applies only to OFF and only for only_key. 
The rest will receive ON\"}" 11 | - other_feature_3: 12 | treatment: "on" 13 | keys: "key_whitelist" 14 | - other_feature: 15 | treatment: "on" 16 | keys: ["key2","key3"] 17 | - other_feature_2: 18 | treatment: "on" 19 | -------------------------------------------------------------------------------- /tests/models/grammar/files/invalid-semantic-versions.csv: -------------------------------------------------------------------------------- 1 | invalid 2 | 1 3 | 1.2 4 | 1.alpha.2 5 | +invalid 6 | -invalid 7 | -invalid+invalid 8 | -invalid.01 9 | alpha 10 | alpha.beta 11 | alpha.beta.1 12 | alpha.1 13 | alpha+beta 14 | alpha_beta 15 | alpha. 16 | alpha.. 17 | beta 18 | -alpha. 19 | 1.2 20 | 1.2.3.DEV 21 | 1.2-SNAPSHOT 22 | 1.2.31.2.3----RC-SNAPSHOT.12.09.1--..12+788 23 | 1.2-RC-SNAPSHOT 24 | -1.0.3-gamma+b7718 25 | +justmeta 26 | 1.1.1+ 27 | 1.1.1- 28 | #99999999999999999999999.999999999999999999.99999999999999999----RC-SNAPSHOT.12.09.1--------------------------------..12 -------------------------------------------------------------------------------- /tests/integration/files/file2.yaml: -------------------------------------------------------------------------------- 1 | - my_feature: 2 | treatment: "on" 3 | keys: "key" 4 | config: "{\"desc\" : \"this applies only to ON treatment\"}" 5 | - other_feature_3: 6 | treatment: "off" 7 | - my_feature: 8 | treatment: "off" 9 | keys: "only_key" 10 | config: "{\"desc\" : \"this applies only to OFF and only for only_key. The rest will receive ON\"}" 11 | - other_feature_3: 12 | treatment: "on" 13 | keys: "key_whitelist" 14 | - other_feature: 15 | treatment: "on" 16 | keys: ["key2","key3"] 17 | - other_feature_2: 18 | treatment: "on" 19 | -------------------------------------------------------------------------------- /doc/source/index.rst: -------------------------------------------------------------------------------- 1 | .. splitio_client documentation master file, created by 2 | sphinx-quickstart on Thu Jun 2 14:57:07 2016. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to splitio_client's documentation! 7 | ========================================== 8 | 9 | Contents: 10 | 11 | .. toctree:: 12 | :maxdepth: 2 13 | 14 | introduction 15 | flask_support 16 | 17 | Indices and tables 18 | ================== 19 | 20 | * :ref:`genindex` 21 | * :ref:`modindex` 22 | * :ref:`search` 23 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright © 2025 Split Software, Inc. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | -------------------------------------------------------------------------------- /splitio/client/key.py: -------------------------------------------------------------------------------- 1 | """A module for Split.io SDK API clients.""" 2 | 3 | 4 | class Key(object): 5 | """Key class includes a matching key and bucketing key.""" 6 | 7 | def __init__(self, matching_key, bucketing_key): 8 | """Construct a key object.""" 9 | self._matching_key = matching_key 10 | self._bucketing_key = bucketing_key 11 | 12 | @property 13 | def matching_key(self): 14 | """Return matching key.""" 15 | return self._matching_key 16 | 17 | @property 18 | def bucketing_key(self): 19 | """Return bucketing key.""" 20 | return self._bucketing_key 21 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = 3 | splitio/ 4 | 5 | omit = 6 | tests/* 7 | */__init__.py 8 | 9 | branch = True 10 | 11 | relative_files = True 12 | 13 | [report] 14 | # Regexes for lines to exclude from consideration 15 | exclude_lines = 16 | # Have to re-enable the standard pragma 17 | pragma: no cover 18 | 19 | # Don't complain about missing debug-only code: 20 | def __repr__ 21 | if self\.debug 22 | 23 | # Don't complain if tests don't hit defensive assertion code: 24 | raise AssertionError 25 | raise NotImplementedError 26 | 27 | # Don't complain if non-runnable code isn't run: 28 | if 0: 29 | if __name__ == .__main__.: 30 | 31 | precision = 2 32 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [wheel] 2 | universal = 1 3 | 4 | [metadata] 5 | name = splitio_client 6 | description = This SDK is designed to work with Split, the platform for controlled rollouts, which serves features to your users via a Split feature flag to manage your complete customer experience. 
7 | long_description = file: README.md
8 | long_description_content_type = text/markdown
9 | 
10 | [flake8]
11 | max-line-length=100
12 | exclude=tests/*
13 | 
14 | [aliases]
15 | test=pytest
16 | 
17 | [tool:pytest]
18 | addopts = --verbose --cov=splitio --cov-report xml
19 | python_classes=*Tests
20 | 
21 | [build_sphinx]
22 | source-dir = doc/source
23 | build-dir = doc/build
24 | all_files = 1
25 | 
26 | [upload_sphinx]
27 | upload-dir = doc/build/html
28 | 
--------------------------------------------------------------------------------
/tests/models/grammar/files/between-semver.csv:
--------------------------------------------------------------------------------
1 | version1,version2,version3,expected
2 | 1.1.1,2.2.2,3.3.3,true
3 | 1.1.1-rc.1,1.1.1-rc.2,1.1.1-rc.3,true
4 | 1.0.0-alpha,1.0.0-alpha.1,1.0.0-alpha.beta,true
5 | 1.0.0-alpha.1,1.0.0-alpha.beta,1.0.0-beta,true
6 | 1.0.0-alpha.beta,1.0.0-beta,1.0.0-beta.2,true
7 | 1.0.0-beta,1.0.0-beta.2,1.0.0-beta.11,true
8 | 1.0.0-beta.2,1.0.0-beta.11,1.0.0-rc.1,true
9 | 1.0.0-beta.11,1.0.0-rc.1,1.0.0,true
10 | 1.1.2,1.1.3,1.1.4,true
11 | 1.2.1,1.3.1,1.4.1,true
12 | 2.0.0,3.0.0,4.0.0,true
13 | 2.2.2,2.2.3-rc1,2.2.3,true
14 | 2.2.2,2.3.2-rc100,2.3.3,true
15 | 1.0.0-rc.1+build.1,1.2.3-beta,1.2.3-rc.1+build.123,true
16 | 3.3.3,3.3.3-alpha,3.3.4,false
17 | 2.2.2-rc.1,2.2.2+metadata,2.2.2-rc.10,false
18 | 1.1.1-rc.1,1.1.1-rc.3,1.1.1-rc.2,false
--------------------------------------------------------------------------------
/splitio/util/time.py:
--------------------------------------------------------------------------------
1 | """Utilities."""
2 | from datetime import datetime
3 | import time
4 | 
5 | EPOCH_DATETIME = datetime(1970, 1, 1)
6 | 
7 | def utctime():
8 |     """
9 |     Return the utc time in seconds.
10 | 
11 |     :returns: utc time in seconds.
12 |     :rtype: float
13 |     """
14 |     return (datetime.utcnow() - EPOCH_DATETIME).total_seconds()
15 | 
16 | 
17 | def utctime_ms():
18 |     """
19 |     Return the utc time in milliseconds.
20 | 
21 |     :returns: utc time in milliseconds.
22 | :rtype: int 23 | """ 24 | return int(utctime() * 1000) 25 | 26 | def get_current_epoch_time_ms(): 27 | """ 28 | Get current epoch time in milliseconds 29 | 30 | :return: epoch time 31 | :rtype: int 32 | """ 33 | return int(round(time.time() * 1000)) -------------------------------------------------------------------------------- /tests/models/grammar/test_partitions.py: -------------------------------------------------------------------------------- 1 | """Partitions test module.""" 2 | 3 | from splitio.models.grammar import partitions 4 | 5 | class PartitionTests(object): 6 | """Partition model tests.""" 7 | 8 | raw = { 9 | 'treatment': 'on', 10 | 'size': 50 11 | } 12 | 13 | def test_parse(self): 14 | """Test that the partition is parsed correctly.""" 15 | p = partitions.from_raw(self.raw) 16 | assert isinstance(p, partitions.Partition) 17 | assert p.treatment == 'on' 18 | assert p.size == 50 19 | 20 | def test_to_json(self): 21 | """Test the JSON representation.""" 22 | as_json = partitions.from_raw(self.raw).to_json() 23 | assert as_json['treatment'] == 'on' 24 | assert as_json['size'] == 50 25 | -------------------------------------------------------------------------------- /splitio/tasks/__init__.py: -------------------------------------------------------------------------------- 1 | """Split synchronization tasks module.""" 2 | 3 | import abc 4 | 5 | 6 | class BaseSynchronizationTask(object): 7 | """Syncrhonization task interface.""" 8 | 9 | __metadata__ = abc.ABCMeta 10 | 11 | @abc.abstractmethod 12 | def start(self): 13 | """Start the task.""" 14 | pass 15 | 16 | @abc.abstractmethod 17 | def stop(self, event=None): 18 | """ 19 | Stop the task if running. 20 | 21 | Optionally accept an event to be set when the task finally stops. 22 | 23 | :param event: Event to be set as soon as the task finishes. 24 | :type event: Threading.Event 25 | """ 26 | pass 27 | 28 | @abc.abstractmethod 29 | def is_running(self): 30 | """Return true if the task is running, false otherwise.""" 31 | pass 32 | -------------------------------------------------------------------------------- /splitio/engine/hashfns/legacy.py: -------------------------------------------------------------------------------- 1 | """Legacy hash function module.""" 2 | 3 | 4 | def as_int32(value): 5 | """Handle overflow when working with 32 lower bits of 64 bit ints.""" 6 | if not -2147483649 <= value <= 2147483648: 7 | return (value + 2147483648) % 4294967296 - 2147483648 8 | 9 | return value 10 | 11 | 12 | def legacy_hash(key, seed): 13 | """ 14 | Generate a hash for a key and a feature seed. 15 | 16 | :param key: The key for which to get the hash 17 | :type key: str 18 | :param seed: The feature seed 19 | :type seed: int 20 | :return: The hash for the key and seed 21 | :rtype: int 22 | """ 23 | current_hash = 0 24 | 25 | for char in map(ord, key): 26 | current_hash = as_int32(as_int32(31 * as_int32(current_hash)) + char) 27 | 28 | return int(as_int32(current_hash ^ as_int32(seed))) 29 | -------------------------------------------------------------------------------- /splitio/models/fallback_treatment.py: -------------------------------------------------------------------------------- 1 | """Segment module.""" 2 | import json 3 | 4 | class FallbackTreatment(object): 5 | """FallbackTreatment object class.""" 6 | 7 | def __init__(self, treatment, config=None, label=None): 8 | """ 9 | Class constructor. 10 | 11 | :param treatment: treatment. 12 | :type treatment: str 13 | 14 | :param config: config. 
15 | :type config: json 16 | """ 17 | self._treatment = treatment 18 | self._config = config 19 | self._label = label 20 | 21 | @property 22 | def treatment(self): 23 | """Return treatment.""" 24 | return self._treatment 25 | 26 | @property 27 | def config(self): 28 | """Return config.""" 29 | return self._config 30 | 31 | @property 32 | def label(self): 33 | """Return label prefix.""" 34 | return self._label -------------------------------------------------------------------------------- /tests/engine/files/rule_base_segments3.json: -------------------------------------------------------------------------------- 1 | {"ff": {"d": [], "t": -1, "s": -1}, 2 | "rbs": {"t": -1, "s": -1, "d": [ 3 | { 4 | "changeNumber": 5, 5 | "name": "sample_rule_based_segment", 6 | "status": "ACTIVE", 7 | "trafficTypeName": "user", 8 | "excluded":{ 9 | "keys":["mauro@split.io","gaston@split.io"], 10 | "segments":[{"type":"standard", "name":"segment1"}] 11 | }, 12 | "conditions": [ 13 | { 14 | "matcherGroup": { 15 | "combiner": "AND", 16 | "matchers": [ 17 | { 18 | "keySelector": { 19 | "trafficType": "user", 20 | "attribute": "email" 21 | }, 22 | "matcherType": "ENDS_WITH", 23 | "negate": false, 24 | "whitelistMatcherData": { 25 | "whitelist": [ 26 | "@split.io" 27 | ] 28 | } 29 | } 30 | ] 31 | } 32 | } 33 | ] 34 | } 35 | ]}} 36 | -------------------------------------------------------------------------------- /tests/models/grammar/files/valid-semantic-versions.csv: -------------------------------------------------------------------------------- 1 | higher,lower 2 | 1.1.2,1.1.1 3 | 1.0.0,1.0.0-rc.1 4 | 1.1.0-rc.1,1.0.0-beta.11 5 | 1.0.0-beta.11,1.0.0-beta.2 6 | 1.0.0-beta.2,1.0.0-beta 7 | 1.0.0-beta,1.0.0-alpha.beta 8 | 1.0.0-alpha.beta,1.0.0-alpha.1 9 | 1.0.0-alpha.1,1.0.0-alpha 10 | 2.2.2-rc.2+metadata-lalala,2.2.2-rc.1.2 11 | 1.2.3,0.0.4 12 | 1.1.2+meta,1.1.2-prerelease+meta 13 | 1.0.0-beta,1.0.0-alpha 14 | 1.0.0-alpha0.valid,1.0.0-alpha.0valid 15 | 1.0.0-rc.1+build.1,1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay 16 | 10.2.3-DEV-SNAPSHOT,1.2.3-SNAPSHOT-123 17 | 1.1.1-rc2,1.0.0-0A.is.legal 18 | 1.2.3----RC-SNAPSHOT.12.9.1--.12+788,1.2.3----R-S.12.9.1--.12+meta 19 | 1.2.3----RC-SNAPSHOT.12.9.1--.12.88,1.2.3----RC-SNAPSHOT.12.9.1--.12 20 | 9223372036854775807.9223372036854775807.9223372036854775807,9223372036854775807.9223372036854775807.9223372036854775806 21 | 1.1.1-alpha.beta.rc.build.java.pr.support.10,1.1.1-alpha.beta.rc.build.java.pr.support 22 | 1.1.2,1.1.1 23 | 1.2.1,1.1.1 24 | 2.1.1,1.1.1 25 | 1.1.1-rc.1,1.1.1-rc.0 -------------------------------------------------------------------------------- /splitio/util/backoff.py: -------------------------------------------------------------------------------- 1 | """Exponential Backoff duration calculator.""" 2 | 3 | 4 | class Backoff(object): 5 | """Backoff duration calculator.""" 6 | 7 | MAX_ALLOWED_WAIT = 30 * 60 # half an hour 8 | 9 | def __init__(self, base=1, max_allowed=MAX_ALLOWED_WAIT): 10 | """ 11 | Class constructor. 12 | 13 | :param base: basic unit to be multiplied on each iteration (seconds) 14 | :param base: float 15 | 16 | :param max_allowed: max seconds to wait 17 | :param max_allowed: int 18 | """ 19 | self._base = base 20 | self._max_allowed = max_allowed 21 | self._attempt = 0 22 | 23 | def get(self): 24 | """ 25 | Return the current time to wait and pre-calculate the next one. 26 | 27 | :returns: time to wait until next retry. 
28 | :rtype: float 29 | """ 30 | to_return = min(self._base * (2 ** self._attempt), self._max_allowed) 31 | self._attempt += 1 32 | return to_return 33 | 34 | def reset(self): 35 | """Reset the attempt count.""" 36 | self._attempt = 0 37 | -------------------------------------------------------------------------------- /splitio/optional/loaders.py: -------------------------------------------------------------------------------- 1 | import sys 2 | try: 3 | import asyncio 4 | import aiohttp 5 | import aiofiles 6 | except ImportError: 7 | def missing_asyncio_dependencies(*_, **__): 8 | """Fail if missing dependencies are used.""" 9 | raise NotImplementedError( 10 | 'Missing aiohttp dependency. ' 11 | 'Please use `pip install splitio_client[asyncio]` to install the sdk with asyncio support' 12 | ) 13 | aiohttp = missing_asyncio_dependencies 14 | asyncio = missing_asyncio_dependencies 15 | aiofiles = missing_asyncio_dependencies 16 | 17 | try: 18 | from requests_kerberos import HTTPKerberosAuth, OPTIONAL 19 | except ImportError: 20 | def missing_auth_dependencies(*_, **__): 21 | """Fail if missing dependencies are used.""" 22 | raise NotImplementedError( 23 | 'Missing kerberos auth dependency. ' 24 | 'Please use `pip install splitio_client[kerberos]` to install the sdk with kerberos auth support' 25 | ) 26 | HTTPKerberosAuth = missing_auth_dependencies 27 | OPTIONAL = missing_auth_dependencies 28 | 29 | async def _anext(it): 30 | return await it.__anext__() 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | venv/ 27 | .vscode 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *,cover 48 | .hypothesis/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | 57 | # Sphinx documentation 58 | docs/_build/ 59 | 60 | # PyBuilder 61 | target/ 62 | 63 | # Ipython Notebook 64 | .ipynb_checkpoints 65 | 66 | # PyCharm 67 | .idea 68 | 69 | # rope autocomplete 70 | .ropeproject/ 71 | 72 | # vim backup files 73 | *.swp 74 | 75 | .DS_Store 76 | 77 | # Sonarqube 78 | .scannerwork 79 | -------------------------------------------------------------------------------- /tests/util/test_threadutil.py: -------------------------------------------------------------------------------- 1 | """threading utilities unit tests.""" 2 | 3 | import time 4 | import threading 5 | 6 | from splitio.util.threadutil import EventGroup 7 | 8 | 9 | class EventGroupTests(object): 10 | """EventGroup class test cases.""" 11 | 12 | def test_basic_functionality(self): 13 | """Test basic functionality.""" 14 | 15 | def fun(event): #pylint:disable=missing-docstring 16 | time.sleep(1) 17 | event.set() 18 | 19 | group = EventGroup() 20 | event1 = group.make_event() 21 | event2 = group.make_event() 22 | 23 | task = threading.Thread(target=fun, args=(event1,)) 24 | task.start() 25 | group.wait(3) 26 | assert event1.is_set() 27 | assert not event2.is_set() 28 | 29 | group = EventGroup() 30 | event1 = group.make_event() 31 | event2 = group.make_event() 32 | 33 | task = threading.Thread(target=fun, args=(event2,)) 34 | task.start() 35 | group.wait(3) 36 | assert not event1.is_set() 37 | assert event2.is_set() 38 | 39 | group = EventGroup() 40 | event1 = group.make_event() 41 | event2 = group.make_event() 42 | group.wait(3) 43 | assert not event1.is_set() 44 | assert not event2.is_set() 45 | -------------------------------------------------------------------------------- /.github/workflows/update-license-year.yml: -------------------------------------------------------------------------------- 1 | name: Update License Year 2 | 3 | on: 4 | schedule: 5 | - cron: "0 3 1 1 *" # 03:00 AM on January 1 6 | 7 | permissions: 8 | contents: write 9 | pull-requests: write 10 | 11 | jobs: 12 | test: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - name: Checkout 16 | uses: actions/checkout@v4 17 | with: 18 | fetch-depth: 0 19 | 20 | - name: Set Current year 21 | run: "echo CURRENT=$(date +%Y) >> $GITHUB_ENV" 22 | 23 | - name: Set Previous Year 24 | run: "echo PREVIOUS=$(($CURRENT-1)) >> $GITHUB_ENV" 25 | 26 | - name: Update LICENSE 27 | uses: jacobtomlinson/gha-find-replace@v3 28 | with: 29 | find: ${{ env.PREVIOUS }} 30 | replace: ${{ env.CURRENT }} 31 | include: "LICENSE.txt" 32 | regex: false 33 | 34 | - name: Commit files 35 | run: | 36 | git config user.name 'github-actions[bot]' 37 | git config user.email 'github-actions[bot]@users.noreply.github.com' 38 | git commit -m "Updated License Year" -a 39 | 40 | - name: Create Pull Request 41 | uses: peter-evans/create-pull-request@v5 42 | with: 43 | token: ${{ secrets.GITHUB_TOKEN }} 44 | title: Update License Year 45 | branch: update-license 46 | -------------------------------------------------------------------------------- /splitio/models/grammar/matchers/prerequisites.py: -------------------------------------------------------------------------------- 1 | """Prerequisites matcher classes.""" 
2 | 3 | class PrerequisitesMatcher(object): 4 | 5 | def __init__(self, prerequisites): 6 | """ 7 | Build a PrerequisitesMatcher. 8 | 9 | :param prerequisites: prerequisites 10 | :type raw_matcher: List of Prerequisites 11 | """ 12 | self._prerequisites = prerequisites 13 | 14 | def match(self, key, attributes=None, context=None): 15 | """ 16 | Evaluate user input against a matcher and return whether the match is successful. 17 | 18 | :param key: User key. 19 | :type key: str. 20 | :param attributes: Custom user attributes. 21 | :type attributes: dict. 22 | :param context: Evaluation context 23 | :type context: dict 24 | 25 | :returns: Wheter the match is successful. 26 | :rtype: bool 27 | """ 28 | if self._prerequisites == None: 29 | return True 30 | 31 | evaluator = context.get('evaluator') 32 | bucketing_key = context.get('bucketing_key') 33 | for prerequisite in self._prerequisites: 34 | result = evaluator.eval_with_context(key, bucketing_key, prerequisite.feature_flag_name, attributes, context['ec']) 35 | if result['treatment'] not in prerequisite.treatments: 36 | return False 37 | 38 | return True -------------------------------------------------------------------------------- /tests/integration/files/split_changes_temp.json: -------------------------------------------------------------------------------- 1 | {"ff": {"t": -1, "s": -1, "d": [{"changeNumber": 10, "trafficTypeName": "user", "name": "rbs_feature_flag", "trafficAllocation": 100, "trafficAllocationSeed": 1828377380, "seed": -286617921, "status": "ACTIVE", "killed": false, "defaultTreatment": "off", "algo": 2, "conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "IN_RULE_BASED_SEGMENT", "negate": false, "userDefinedSegmentMatcherData": {"segmentName": "sample_rule_based_segment"}}]}, "partitions": [{"treatment": "on", "size": 100}, {"treatment": "off", "size": 0}], "label": "in rule based segment sample_rule_based_segment"}, {"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "ALL_KEYS", "negate": false}]}, "partitions": [{"treatment": "on", "size": 0}, {"treatment": "off", "size": 100}], "label": "default rule"}], "configurations": {}, "sets": [], "impressionsDisabled": false}]}, "rbs": {"t": 1675259356568, "s": -1, "d": [{"changeNumber": 5, "name": "sample_rule_based_segment", "status": "ACTIVE", "trafficTypeName": "user", "excluded": {"keys": ["mauro@split.io", "gaston@split.io"], "segments": []}, "conditions": [{"matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user", "attribute": "email"}, "matcherType": "ENDS_WITH", "negate": false, "whitelistMatcherData": {"whitelist": ["@split.io"]}}]}}]}]}} -------------------------------------------------------------------------------- /splitio/engine/hashfns/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Hash functions module. 3 | 4 | This module contains hash functions implemented in pure python 5 | as well as the optional import (if installed) of a C compiled murmur hash 6 | function with python bindings. 
7 | """ 8 | from splitio.models.splits import HashAlgorithm 9 | from splitio.engine.hashfns import legacy 10 | 11 | try: 12 | # First attempt to import module with C++ core (faster) 13 | import mmh3cffi 14 | 15 | def _murmur_hash(key, seed): 16 | return mmh3cffi.hash_str(key, seed) 17 | 18 | def _murmur_hash128(key, seed): 19 | return mmh3cffi.hash_str_128(key, seed)[0] 20 | 21 | except ImportError: 22 | # Fallback to interpreted python hash algoritm (slower) 23 | from splitio.engine.hashfns import murmur3py # pylint: disable=ungrouped-imports 24 | _murmur_hash = murmur3py.murmur32_py # pylint: disable=invalid-name 25 | _murmur_hash128 = lambda k, s: murmur3py.hash128_x64(k, s)[0] # pylint: disable=invalid-name 26 | 27 | 28 | _HASH_ALGORITHMS = { 29 | HashAlgorithm.LEGACY: legacy.legacy_hash, 30 | HashAlgorithm.MURMUR: _murmur_hash 31 | } 32 | 33 | murmur_128 = _murmur_hash128 # pylint: disable=invalid-name 34 | 35 | 36 | def get_hash_fn(algo): 37 | """ 38 | Return appropriate hash function for requested algorithm. 39 | 40 | :param algo: Algoritm to use 41 | :type algo: int 42 | :return: Hash function 43 | :rtype: function 44 | """ 45 | return _HASH_ALGORITHMS.get(algo, legacy.legacy_hash) 46 | -------------------------------------------------------------------------------- /tests/client/test_utils.py: -------------------------------------------------------------------------------- 1 | """Split client utilities test module.""" 2 | #pylint: disable=no-self-use,too-few-public-methods 3 | 4 | import socket 5 | 6 | 7 | from splitio.client import util, config 8 | from splitio.version import __version__ 9 | from splitio.client.config import DEFAULT_CONFIG 10 | 11 | class ClientUtilsTests(object): 12 | """Client utilities test cases.""" 13 | 14 | def test_get_metadata(self, mocker): 15 | """Test the get_metadata function.""" 16 | meta = util.get_metadata({'machineIp': 'some_ip', 'machineName': 'some_machine_name'}) 17 | assert meta.instance_ip == 'some_ip' 18 | assert meta.instance_name == 'some_machine_name' 19 | assert meta.sdk_version == 'python-' + __version__ 20 | 21 | cfg = DEFAULT_CONFIG.copy() 22 | cfg.update({'IPAddressesEnabled': False}) 23 | meta = util.get_metadata(cfg) 24 | assert meta.instance_ip == 'NA' 25 | assert meta.instance_name == 'NA' 26 | 27 | meta = util.get_metadata(config.DEFAULT_CONFIG) 28 | ip_address, hostname = util._get_hostname_and_ip(config.DEFAULT_CONFIG) 29 | assert meta.instance_ip != 'NA' 30 | assert meta.instance_name != 'NA' 31 | assert meta.instance_ip == ip_address 32 | assert meta.instance_name == hostname 33 | 34 | self.called = 0 35 | def get_hostname_and_ip_mock(any): 36 | self.called += 0 37 | return mocker.Mock(), mocker.Mock() 38 | mocker.patch('splitio.client.util._get_hostname_and_ip', new=get_hostname_and_ip_mock) 39 | 40 | meta = util.get_metadata(config.DEFAULT_CONFIG) 41 | self.called = 1 -------------------------------------------------------------------------------- /tests/util/test_backoff.py: -------------------------------------------------------------------------------- 1 | """Backoff unit tests.""" 2 | from splitio.util.backoff import Backoff 3 | 4 | 5 | class BackOffTests(object): # pylint:disable=too-few-public-methods 6 | """Backoff test cases.""" 7 | 8 | def test_basic_functionality(self): # pylint:disable=no-self-use 9 | """Test basic working.""" 10 | backoff = Backoff() 11 | assert backoff.get() == 1 12 | assert backoff.get() == 2 13 | assert backoff.get() == 4 14 | assert backoff.get() == 8 15 | assert backoff.get() == 16 16 | 
assert backoff.get() == 32 17 | assert backoff.get() == 64 18 | assert backoff.get() == 128 19 | assert backoff.get() == 256 20 | assert backoff.get() == 512 21 | assert backoff.get() == 1024 22 | 23 | # assert that it's limited to 30 minutes 24 | assert backoff.get() == 1800 25 | assert backoff.get() == 1800 26 | assert backoff.get() == 1800 27 | assert backoff.get() == 1800 28 | 29 | # assert that resetting begins on 1 30 | backoff.reset() 31 | assert backoff.get() == 1 32 | assert backoff.get() == 2 33 | assert backoff.get() == 4 34 | assert backoff.get() == 8 35 | assert backoff.get() == 16 36 | assert backoff.get() == 32 37 | assert backoff.get() == 64 38 | assert backoff.get() == 128 39 | assert backoff.get() == 256 40 | assert backoff.get() == 512 41 | assert backoff.get() == 1024 42 | assert backoff.get() == 1800 43 | assert backoff.get() == 1800 44 | assert backoff.get() == 1800 45 | assert backoff.get() == 1800 46 | -------------------------------------------------------------------------------- /splitio/api/__init__.py: -------------------------------------------------------------------------------- 1 | """Split API module.""" 2 | 3 | 4 | class APIException(Exception): 5 | """Exception to raise when an API call fails.""" 6 | 7 | def __init__(self, custom_message, status_code=None): 8 | """Constructor.""" 9 | Exception.__init__(self, custom_message) 10 | self._status_code = status_code if status_code else -1 11 | 12 | @property 13 | def status_code(self): 14 | """Return HTTP status code.""" 15 | return self._status_code 16 | 17 | class APIUriException(APIException): 18 | """Exception to raise when an API call fails due to 414 http error.""" 19 | 20 | def __init__(self, custom_message, status_code=None): 21 | """Constructor.""" 22 | APIException.__init__(self, custom_message, status_code) 23 | 24 | def headers_from_metadata(sdk_metadata, client_key=None): 25 | """ 26 | Generate a dict with headers required by data-recording API endpoints. 27 | :param sdk_metadata: SDK Metadata object, generated at sdk initialization time. 28 | :type sdk_metadata: splitio.client.util.SdkMetadata 29 | :param client_key: client key. 30 | :type client_key: str 31 | :return: A dictionary with headers. 
32 | :rtype: dict 33 | """ 34 | 35 | metadata = { 36 | 'SplitSDKVersion': sdk_metadata.sdk_version, 37 | 'SplitSDKMachineIP': sdk_metadata.instance_ip, 38 | 'SplitSDKMachineName': sdk_metadata.instance_name 39 | } if sdk_metadata.instance_ip != 'NA' and sdk_metadata.instance_ip != 'unknown' else { 40 | 'SplitSDKVersion': sdk_metadata.sdk_version, 41 | } 42 | 43 | if client_key is not None: 44 | metadata['SplitSDKClientKey'] = client_key 45 | 46 | return metadata -------------------------------------------------------------------------------- /tests/engine/files/rule_base_segments.json: -------------------------------------------------------------------------------- 1 | {"ff": {"d": [], "t": -1, "s": -1}, 2 | "rbs": {"t": -1, "s": -1, "d": 3 | [{ 4 | "changeNumber": 5, 5 | "name": "dependent_rbs", 6 | "status": "ACTIVE", 7 | "trafficTypeName": "user", 8 | "excluded":{"keys":["mauro@split.io","gaston@split.io"],"segments":[]}, 9 | "conditions": [ 10 | { 11 | "conditionType": "WHITELIST", 12 | "matcherGroup": { 13 | "combiner": "AND", 14 | "matchers": [ 15 | { 16 | "keySelector": { 17 | "trafficType": "user", 18 | "attribute": "email" 19 | }, 20 | "matcherType": "ENDS_WITH", 21 | "negate": false, 22 | "whitelistMatcherData": { 23 | "whitelist": [ 24 | "@split.io" 25 | ] 26 | } 27 | } 28 | ] 29 | } 30 | } 31 | ]}, 32 | { 33 | "changeNumber": 5, 34 | "name": "sample_rule_based_segment", 35 | "status": "ACTIVE", 36 | "trafficTypeName": "user", 37 | "excluded": { 38 | "keys": [], 39 | "segments": [] 40 | }, 41 | "conditions": [ 42 | { 43 | "conditionType": "ROLLOUT", 44 | "matcherGroup": { 45 | "combiner": "AND", 46 | "matchers": [ 47 | { 48 | "keySelector": { 49 | "trafficType": "user" 50 | }, 51 | "matcherType": "IN_RULE_BASED_SEGMENT", 52 | "negate": false, 53 | "userDefinedSegmentMatcherData": { 54 | "segmentName": "dependent_rbs" 55 | } 56 | } 57 | ] 58 | } 59 | } 60 | ] 61 | }] 62 | }} 63 | -------------------------------------------------------------------------------- /CONTRIBUTORS-GUIDE.md: -------------------------------------------------------------------------------- 1 | # Contributing to the Split Python SDK 2 | 3 | Split SDK is an open source project and we welcome feedback and contribution. The information below describes how to build the project with your changes, run the tests, and send the Pull Request(PR). 4 | 5 | ## Development 6 | 7 | ### Development process 8 | 9 | 1. Fork the repository and create a topic branch from `development` branch. Please use a descriptive name for your branch. 10 | 2. While developing, use descriptive messages in your commits. Avoid short or meaningless sentences like "fix bug". 11 | 3. Make sure to add tests for both positive and negative cases. 12 | 4. Run the linter script of the project and fix any issues you find. 13 | 5. Run the build script and make sure it runs with no errors. 14 | 6. Run all tests and make sure there are no failures. 15 | 7. `git push` your changes to GitHub within your topic branch. 16 | 8. Open a Pull Request(PR) from your forked repo and into the `development` branch of the original repository. 17 | 9. When creating your PR, please fill out all the fields of the PR template, as applicable, for the project. 18 | 10. Check for conflicts once the pull request is created to make sure your PR can be merged cleanly into `development`. 19 | 11. Keep an eye out for any feedback or comments from Split's SDK team. 20 | 21 | ### Running tests 22 | 23 | To run test you need to execute the following commands: 24 | 1. 
`pip install -U pip setuptools`
25 | 2. `python setup.py install`
26 | 3. `pip install redis pytest pytest-cov pytest-mock`
27 | 4. `python setup.py test`
28 | 
29 | # Contact
30 | 
31 | If you have any other questions or need to contact us directly in a private manner, send us a note at sdks@split.io.
32 | 
--------------------------------------------------------------------------------
/tests/engine/cache/test_lru.py:
--------------------------------------------------------------------------------
1 | """LRU Cache unit tests."""
2 | 
3 | from splitio.engine.cache.lru import SimpleLruCache
4 | 
5 | class SimpleLruCacheTests(object):
6 |     """Test SimpleLruCache."""
7 | 
8 |     def test_basic_usage(self, mocker):
9 |         """Test basic LRU cache set/get behavior."""
10 |         cache = SimpleLruCache(5)
11 |         assert cache.test_and_set('a', 1) is None
12 |         assert cache.test_and_set('b', 2) is None
13 |         assert cache.test_and_set('c', 3) is None
14 |         assert cache.test_and_set('d', 4) is None
15 |         assert cache.test_and_set('e', 5) is None
16 | 
17 |         assert cache.test_and_set('a', 10) is 1
18 |         assert cache.test_and_set('b', 20) is 2
19 |         assert cache.test_and_set('c', 30) is 3
20 |         assert cache.test_and_set('d', 40) is 4
21 |         assert cache.test_and_set('e', 50) is 5
22 |         assert len(cache._data) is 5
23 | 
24 |     def test_lru_eviction(self, mocker):
25 |         """Test that the oldest entries are evicted once capacity is exceeded."""
26 |         cache = SimpleLruCache(5)
27 |         assert cache.test_and_set('a', 1) is None
28 |         assert cache.test_and_set('b', 2) is None
29 |         assert cache.test_and_set('c', 3) is None
30 |         assert cache.test_and_set('d', 4) is None
31 |         assert cache.test_and_set('e', 5) is None
32 |         assert cache.test_and_set('f', 6) is None
33 |         assert cache.test_and_set('g', 7) is None
34 |         assert cache.test_and_set('h', 8) is None
35 |         assert cache.test_and_set('i', 9) is None
36 |         assert cache.test_and_set('j', 0) is None
37 |         assert len(cache._data) is 5
38 |         assert set(cache._data.keys()) == set(['f', 'g', 'h', 'i', 'j'])
39 | 
--------------------------------------------------------------------------------
/splitio/util/threadutil.py:
--------------------------------------------------------------------------------
1 | """Threading utilities."""
2 | from threading import Event, Condition
3 | 
4 | 
5 | class EventGroup(object):
6 |     """EventGroup that can be waited on with an OR condition."""
7 | 
8 |     class Event(Event):  # pylint:disable=too-few-public-methods
9 |         """Threading event meant to be used in a group."""
10 | 
11 |         def __init__(self, shared_condition):
12 |             """
13 |             Construct an event.
14 | 
15 |             :param shared_condition: shared condition variable.
16 |             :type shared_condition: threading.Condition
17 |             """
18 |             Event.__init__(self)
19 |             self._shared_cond = shared_condition
20 | 
21 |         def set(self):
22 |             """Set the event."""
23 |             Event.set(self)
24 |             with self._shared_cond:
25 |                 self._shared_cond.notify()
26 | 
27 |     def __init__(self):
28 |         """Construct an event group."""
29 |         self._cond = Condition()
30 | 
31 |     def make_event(self):
32 |         """
33 |         Make a new event associated to this waitable group.
34 | 
35 |         :returns: an event that can be awaited as part of a group
36 |         :rtype: EventGroup.Event
37 |         """
38 |         return EventGroup.Event(self._cond)
39 | 
40 |     def wait(self, timeout=None):
41 |         """
42 |         Wait until one of the events is triggered.
43 | 
44 |         :param timeout: how many seconds to wait. None means forever.
45 |         :type timeout: int
46 | 
47 |         :returns: True if the condition was notified within the specified timeout. False otherwise.
48 | :rtype: bool 49 | """ 50 | with self._cond: 51 | return self._cond.wait(timeout) 52 | -------------------------------------------------------------------------------- /splitio/models/grammar/partitions.py: -------------------------------------------------------------------------------- 1 | """Split partition module.""" 2 | 3 | 4 | class Partition(object): 5 | """Partition object class.""" 6 | 7 | def __init__(self, treatment, size): 8 | """ 9 | Class constructor. 10 | 11 | :param treatment: The treatment for the partition 12 | :type treatment: str 13 | :param size: A number between 0 a 100 14 | :type size: float 15 | """ 16 | if size < 0 or size > 100: 17 | raise ValueError('size MUST BE between 0 and 100') 18 | 19 | self._treatment = treatment 20 | self._size = size 21 | 22 | @property 23 | def treatment(self): 24 | """Return the treatment associated with this partition.""" 25 | return self._treatment 26 | 27 | @property 28 | def size(self): 29 | """Return the percentage owned by this partition.""" 30 | return self._size 31 | 32 | def to_json(self): 33 | """Return a JSON representation of a partition.""" 34 | return { 35 | 'treatment': self._treatment, 36 | 'size': self._size 37 | } 38 | 39 | def __str__(self): 40 | """Return string representation of a partition.""" 41 | return '{size}%:{treatment}'.format(size=self._size, 42 | treatment=self._treatment) 43 | 44 | 45 | def from_raw(raw_partition): 46 | """ 47 | Build a partition object from a splitChanges partition portion. 48 | 49 | :param raw_partition: JSON snippet of a partition. 50 | :type raw_partition: dict 51 | 52 | :return: New partition object. 53 | :rtype: Partition 54 | """ 55 | return Partition(raw_partition['treatment'], raw_partition['size']) 56 | -------------------------------------------------------------------------------- /splitio/client/util.py: -------------------------------------------------------------------------------- 1 | """General purpose SDK utilities.""" 2 | 3 | import socket 4 | from collections import namedtuple 5 | from splitio.version import __version__ 6 | 7 | SdkMetadata = namedtuple( 8 | 'SdkMetadata', 9 | ['sdk_version', 'instance_name', 'instance_ip'] 10 | ) 11 | 12 | 13 | def _get_ip(): 14 | sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) 15 | try: 16 | # doesn't even have to be reachable 17 | sock.connect(('10.255.255.255', 1)) 18 | ip_address = sock.getsockname()[0] 19 | except Exception: # pylint: disable=broad-except 20 | ip_address = 'unknown' 21 | finally: 22 | sock.close() 23 | return ip_address 24 | 25 | 26 | def _get_hostname(ip_address): 27 | return 'unknown' if ip_address == 'unknown' else 'ip-' + ip_address.replace('.', '-') 28 | 29 | 30 | def _get_hostname_and_ip(config): 31 | if config.get('IPAddressesEnabled') is False: 32 | return 'NA', 'NA' 33 | 34 | ip_from_config = config.get('machineIp') 35 | machine_from_config = config.get('machineName') 36 | ip_address = ip_from_config if ip_from_config is not None else _get_ip() 37 | hostname = machine_from_config if machine_from_config is not None else _get_hostname(ip_address) 38 | return ip_address, hostname 39 | 40 | 41 | def get_metadata(config): 42 | """ 43 | Gather SDK metadata and return a tuple with such info. 44 | 45 | :param config: User supplied config augmented with defaults. 46 | :type config: dict 47 | 48 | :return: SDK Metadata information. 
49 | :rtype: SdkMetadata 50 | """ 51 | version = 'python-%s' % __version__ 52 | ip_address, hostname = _get_hostname_and_ip(config) 53 | return SdkMetadata(version, hostname, ip_address) -------------------------------------------------------------------------------- /splitio/models/datatypes.py: -------------------------------------------------------------------------------- 1 | """Datatypes converters for matchers.""" 2 | 3 | 4 | def ts_truncate_seconds(timestamp): 5 | """ 6 | Set seconds to zero in a timestamp. 7 | 8 | :param ts: Timestamp in seconds. 9 | :type ts: int 10 | 11 | :return: Timestamp in seconds, but without counting them (ie: DD-MM-YY HH:MM:00) 12 | :rtype: int 13 | """ 14 | return timestamp - (timestamp % 60) 15 | 16 | 17 | def ts_truncate_time(timestamp): 18 | """ 19 | Set time to zero in a timestamp. 20 | 21 | :param ts: Timestamp in seconds. 22 | :type ts: int 23 | 24 | :return: Timestamp in seconds, without counting time (ie: DD-MM-YYYY 00:00:00) 25 | :rtype: int 26 | """ 27 | return timestamp - (timestamp % 86400) 28 | 29 | 30 | def java_ts_to_secs(java_ts): 31 | """ 32 | Convert java timestamp into unix timestamp. 33 | 34 | :param java_ts: java timestamp in milliseconds. 35 | :type java_ts: int 36 | 37 | :return: Timestamp in seconds. 38 | :rtype: int 39 | """ 40 | return java_ts / 1000 41 | 42 | 43 | def java_ts_truncate_seconds(java_ts): 44 | """ 45 | Set seconds to zero in a timestamp. 46 | 47 | :param ts: Timestamp in seconds. 48 | :type ts: int 49 | 50 | :return: Timestamp in seconds, but without counting them (ie: DD-MM-YY HH:MM:00) 51 | :rtype: int 52 | """ 53 | return ts_truncate_seconds(java_ts_to_secs(java_ts)) 54 | 55 | 56 | def java_ts_truncate_time(java_ts): 57 | """ 58 | Set time to zero in a timestamp. 59 | 60 | :param ts: Timestamp in seconds. 61 | :type ts: int 62 | 63 | :return: Timestamp in seconds, without counting time (ie: DD-MM-YYYY 00:00:00) 64 | :rtype: int 65 | """ 66 | return ts_truncate_time(java_ts_to_secs(java_ts)) 67 | -------------------------------------------------------------------------------- /doc/source/flask_support.rst: -------------------------------------------------------------------------------- 1 | Flask support 2 | ============= 3 | 4 | The `Split.io `_ SDK API Python client works with `Flask `_ out of the box. Both our in-memory and `Redis `_ backed clients work well with Flask. 5 | 6 | The following section shows how to use Split.io in a simple one-view Flask app. 7 | 8 | A simple Flask App 9 | ------------------ 10 | 11 | This example assumes that the Split.io configuration is save in a file called ``splitio-config.json``. 
:: 12 | 13 | import logging 14 | from flask import Flask, render_template, request 15 | 16 | from splitio import get_factory 17 | 18 | logging.basicConfig(level=logging.INFO) 19 | 20 | app = Flask(__name__) 21 | 22 | factory = get_factory('SOME-API-KEY', config_file='splitio-config.json') 23 | # Redis: The redisHost key must be set into config-file 24 | # In-memory: Don't set redis configurations 25 | client = factory.client() 26 | 27 | 28 | @app.route('/') 29 | def index(): 30 | user = request.args.get('user', '') 31 | 32 | context['some_treatment'] = client.get_treatment(user, 'some_feature') 33 | context['some_other_treatment'] = client.get_treatment(user, 'some_other_feature', 34 | {'number_attribute': 42, 'date_attribute': 1466185587010}) 35 | 36 | return render_template('index.html', **context) 37 | 38 | When using the Redis client the update scripts need to be run periodically, otherwise there won't be any data available to the client. 39 | 40 | As mentioned before, if the API key is set to ``'localhost'`` a localhost environment client is generated and no connections to Split.io are made as everything is read from ``.split`` file (you can read about this feature in the Localhost Environment section of the :doc:`/introduction`.) 41 | -------------------------------------------------------------------------------- /tests/api/test_util.py: -------------------------------------------------------------------------------- 1 | """Split API tests module.""" 2 | 3 | import pytest 4 | import unittest.mock as mock 5 | 6 | from splitio.api import headers_from_metadata 7 | from splitio.client.util import SdkMetadata 8 | from splitio.engine.telemetry import TelemetryStorageProducer 9 | from splitio.storage.inmemmory import InMemoryTelemetryStorage 10 | from splitio.models.telemetry import HTTPExceptionsAndLatencies 11 | 12 | 13 | class UtilTests(object): 14 | """Util test cases.""" 15 | 16 | def test_headers_from_metadata(self, mocker): 17 | """Test headers from metadata call.""" 18 | metadata = headers_from_metadata(SdkMetadata('1.0', 'some', '1.2.3.4')) 19 | assert metadata['SplitSDKVersion'] == '1.0' 20 | assert metadata['SplitSDKMachineIP'] == '1.2.3.4' 21 | assert metadata['SplitSDKMachineName'] == 'some' 22 | assert 'SplitSDKClientKey' not in metadata 23 | 24 | metadata = headers_from_metadata(SdkMetadata('1.0', 'some', '1.2.3.4'), 'abcd') 25 | assert metadata['SplitSDKVersion'] == '1.0' 26 | assert metadata['SplitSDKMachineIP'] == '1.2.3.4' 27 | assert metadata['SplitSDKMachineName'] == 'some' 28 | assert metadata['SplitSDKClientKey'] == 'abcd' 29 | 30 | metadata = headers_from_metadata(SdkMetadata('1.0', 'some', 'NA')) 31 | assert metadata['SplitSDKVersion'] == '1.0' 32 | assert 'SplitSDKMachineIP' not in metadata 33 | assert 'SplitSDKMachineName' not in metadata 34 | assert 'SplitSDKClientKey' not in metadata 35 | 36 | metadata = headers_from_metadata(SdkMetadata('1.0', 'some', 'unknown')) 37 | assert metadata['SplitSDKVersion'] == '1.0' 38 | assert 'SplitSDKMachineIP' not in metadata 39 | assert 'SplitSDKMachineName' not in metadata 40 | assert 'SplitSDKClientKey' not in metadata 41 | -------------------------------------------------------------------------------- /tests/engine/files/rule_base_segments2.json: -------------------------------------------------------------------------------- 1 | {"ff": {"d": [], "t": -1, "s": -1}, 2 | "rbs": {"t": -1, "s": -1, "d": [ 3 | { 4 | "changeNumber": 5, 5 | "name": "sample_rule_based_segment", 6 | "status": "ACTIVE", 7 | "trafficTypeName": 
"user", 8 | "excluded":{ 9 | "keys":["mauro@split.io","gaston@split.io"], 10 | "segments":[{"type":"rule-based", "name":"no_excludes"}] 11 | }, 12 | "conditions": [ 13 | { 14 | "matcherGroup": { 15 | "combiner": "AND", 16 | "matchers": [ 17 | { 18 | "keySelector": { 19 | "trafficType": "user", 20 | "attribute": "email" 21 | }, 22 | "matcherType": "STARTS_WITH", 23 | "negate": false, 24 | "whitelistMatcherData": { 25 | "whitelist": [ 26 | "bilal" 27 | ] 28 | } 29 | } 30 | ] 31 | } 32 | } 33 | ] 34 | }, 35 | { 36 | "changeNumber": 5, 37 | "name": "no_excludes", 38 | "status": "ACTIVE", 39 | "trafficTypeName": "user", 40 | "excluded":{ 41 | "keys":["bilal2@split.io"], 42 | "segments":[] 43 | }, 44 | "conditions": [ 45 | { 46 | "matcherGroup": { 47 | "combiner": "AND", 48 | "matchers": [ 49 | { 50 | "keySelector": { 51 | "trafficType": "user", 52 | "attribute": "email" 53 | }, 54 | "matcherType": "ENDS_WITH", 55 | "negate": false, 56 | "whitelistMatcherData": { 57 | "whitelist": [ 58 | "@split.io" 59 | ] 60 | } 61 | } 62 | ] 63 | } 64 | } 65 | ] 66 | } 67 | ]}} 68 | -------------------------------------------------------------------------------- /splitio/models/impressions.py: -------------------------------------------------------------------------------- 1 | """Impressions model module.""" 2 | from collections import namedtuple 3 | 4 | 5 | Impression = namedtuple( 6 | 'Impression', 7 | [ 8 | 'matching_key', 9 | 'feature_name', 10 | 'treatment', 11 | 'label', 12 | 'change_number', 13 | 'bucketing_key', 14 | 'time', 15 | 'previous_time', 16 | 'properties' 17 | ] 18 | ) 19 | 20 | ImpressionDecorated = namedtuple( 21 | 'ImpressionDecorated', 22 | [ 23 | 'Impression', 24 | 'disabled' 25 | ] 26 | ) 27 | 28 | # pre-python3.7 hack to make previous_time optional 29 | Impression.__new__.__defaults__ = (None,) 30 | 31 | 32 | class Label(object): # pylint: disable=too-few-public-methods 33 | """Impressions labels.""" 34 | 35 | # Condition: Split Was Killed 36 | # Treatment: Default treatment 37 | # Label: killed 38 | KILLED = 'killed' 39 | 40 | # Condition: No condition matched 41 | # Treatment: Default Treatment 42 | # Label: no condition matched 43 | NO_CONDITION_MATCHED = 'default rule' 44 | 45 | # Condition: Split definition was not found 46 | # Treatment: control 47 | # Label: split not found 48 | SPLIT_NOT_FOUND = 'definition not found' 49 | 50 | # Condition: Traffic allocation failed 51 | # Treatment: Default Treatment 52 | # Label: not in split 53 | NOT_IN_SPLIT = 'not in split' 54 | 55 | # Condition: There was an exception 56 | # Treatment: control 57 | # Label: exception 58 | EXCEPTION = 'exception' 59 | 60 | # Condition: Evaluation requested while client not ready 61 | # Treatment: control 62 | # Label: not ready 63 | NOT_READY = 'not ready' 64 | 65 | # Condition: Prerequisites not met 66 | # Treatment: Default treatment 67 | # Label: prerequisites not met 68 | PREREQUISITES_NOT_MET = "prerequisites not met" 69 | -------------------------------------------------------------------------------- /tests/engine/test_bloom_filter.py: -------------------------------------------------------------------------------- 1 | """BloomFilter unit tests.""" 2 | 3 | from random import random 4 | import uuid 5 | from splitio.engine.filters import BloomFilter 6 | 7 | class BloomFilterTests(object): 8 | """StandardRecorderTests test cases.""" 9 | 10 | def test_bloom_filter_methods(self, mocker): 11 | bloom_filter = BloomFilter() 12 | key1 = str(uuid.uuid4()) 13 | key2 = str(uuid.uuid4()) 14 | 
bloom_filter.add(key1) 15 | 16 | assert(bloom_filter.contains(key1)) 17 | assert(not bloom_filter.contains(key2)) 18 | 19 | bloom_filter.clear() 20 | assert(not bloom_filter.contains(key1)) 21 | 22 | bloom_filter.add(key1) 23 | bloom_filter.add(key2) 24 | assert(bloom_filter.contains(key1)) 25 | assert(bloom_filter.contains(key2)) 26 | 27 | def test_bloom_filter_error_percentage(self, mocker): 28 | arr_storage = [] 29 | total_sample = 20000 30 | error_rate = 0.01 31 | bloom_filter = BloomFilter(total_sample, error_rate) 32 | 33 | for x in range(1, total_sample): 34 | myuuid = str(uuid.uuid4()) 35 | bloom_filter.add(myuuid) 36 | arr_storage.append(myuuid) 37 | 38 | false_positive_count = 0 39 | for x in range(1, total_sample): 40 | y = int(random()*total_sample*5) 41 | if y > total_sample - 2: 42 | myuuid = str(uuid.uuid4()) 43 | if myuuid in arr_storage: 44 | # False Negative 45 | assert(bloom_filter.contains(myuuid)) 46 | else: 47 | if bloom_filter.contains(myuuid): 48 | # False Positive 49 | false_positive_count = false_positive_count + 1 50 | else: 51 | myuuid = arr_storage[y] 52 | assert(bloom_filter.contains(myuuid)) 53 | # False Negative 54 | 55 | assert(false_positive_count/total_sample <= error_rate) -------------------------------------------------------------------------------- /splitio/tasks/segment_sync.py: -------------------------------------------------------------------------------- 1 | """Segment syncrhonization module.""" 2 | 3 | import logging 4 | from splitio.tasks import BaseSynchronizationTask 5 | from splitio.tasks.util import asynctask 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | 10 | 11 | class SegmentSynchronizationTaskBase(BaseSynchronizationTask): 12 | """Segment Syncrhonization base class.""" 13 | 14 | def start(self): 15 | """Start segment synchronization.""" 16 | self._task.start() 17 | 18 | def stop(self, event=None): 19 | """Stop segment synchronization.""" 20 | pass 21 | 22 | def is_running(self): 23 | """ 24 | Return whether the task is running or not. 25 | 26 | :return: True if the task is running. False otherwise. 27 | :rtype: bool 28 | """ 29 | return self._task.running() 30 | 31 | 32 | class SegmentSynchronizationTask(SegmentSynchronizationTaskBase): 33 | """Segment Syncrhonization class.""" 34 | 35 | def __init__(self, synchronize_segments, period): 36 | """ 37 | Clas constructor. 38 | 39 | :param synchronize_segments: handler for syncing segments 40 | :type synchronize_segments: func 41 | 42 | """ 43 | self._task = asynctask.AsyncTask(synchronize_segments, period, on_init=None) 44 | 45 | def stop(self, event=None): 46 | """Stop segment synchronization.""" 47 | self._task.stop(event) 48 | 49 | 50 | class SegmentSynchronizationTaskAsync(SegmentSynchronizationTaskBase): 51 | """Segment Syncrhonization async class.""" 52 | 53 | def __init__(self, synchronize_segments, period): 54 | """ 55 | Clas constructor. 
56 | 57 | :param synchronize_segments: handler for syncing segments 58 | :type synchronize_segments: func 59 | 60 | """ 61 | self._task = asynctask.AsyncTaskAsync(synchronize_segments, period, on_init=None) 62 | 63 | async def stop(self): 64 | """Stop segment synchronization.""" 65 | await self._task.stop(True) 66 | -------------------------------------------------------------------------------- /splitio/engine/splitters.py: -------------------------------------------------------------------------------- 1 | """A module for implementation of the Splitter engine.""" 2 | from splitio.engine.evaluator import CONTROL 3 | from splitio.engine.hashfns import get_hash_fn 4 | 5 | 6 | class Splitter(object): 7 | """Class responsible for choosing the right partition.""" 8 | 9 | def get_treatment(self, key, seed, partitions, algo): 10 | """ 11 | Return the appropriate treatment or CONTROL if no partitions are found. 12 | 13 | :param key: The key for which to determine the treatment 14 | :type key: str 15 | :param seed: The feature seed 16 | :type seed: int 17 | :param partitions: The condition partitions 18 | :type partitions: list 19 | :return: The treatment 20 | :rtype: str 21 | """ 22 | if not partitions: 23 | return CONTROL 24 | 25 | if len(partitions) == 1 and partitions[0].size == 100: 26 | return partitions[0].treatment 27 | 28 | return self.get_treatment_for_bucket( 29 | self.get_bucket(key, seed, algo), 30 | partitions 31 | ) 32 | 33 | @staticmethod 34 | def get_bucket(key, seed, algo): 35 | """ 36 | Get the bucket for a key hash. 37 | 38 | :param key_hash: The hash for a key 39 | :type key_hash: int 40 | :return: The bucked for a hash 41 | :rtype: int 42 | """ 43 | hashfn = get_hash_fn(algo) 44 | key_hash = hashfn(key, seed) 45 | return abs(key_hash) % 100 + 1 46 | 47 | @staticmethod 48 | def get_treatment_for_bucket(bucket, partitions): 49 | """ 50 | Get the treatment for a given bucket and partitions. 
51 | 52 | :param bucket: The bucket number generated by get_bucket 53 | :type bucket: int 54 | :param partitions: The condition partitions 55 | :type partitions: list 56 | :return: The treatment 57 | :rtype: str 58 | """ 59 | covered_buckets = 0 60 | 61 | for partition in partitions: 62 | covered_buckets += partition.size 63 | 64 | if covered_buckets >= bucket: 65 | return partition.treatment 66 | 67 | return CONTROL 68 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """Setup module.""" 2 | # !/usr/bin/env python 3 | 4 | from os import path 5 | from setuptools import setup, find_packages 6 | 7 | TESTS_REQUIRES = [ 8 | 'flake8', 9 | 'pytest==7.0.1', 10 | 'pytest-mock==3.11.1', 11 | 'coverage==7.0.0', 12 | 'pytest-cov==4.1.0', 13 | 'importlib-metadata==6.7', 14 | 'tomli==1.2.3', 15 | 'iniconfig==1.1.1', 16 | 'attrs==22.1.0', 17 | 'pytest-asyncio==0.21.0', 18 | 'aiohttp>=3.8.4', 19 | 'aiofiles>=23.1.0', 20 | 'requests-kerberos>=0.15.0', 21 | 'urllib3==2.0.7' 22 | ] 23 | 24 | INSTALL_REQUIRES = [ 25 | 'requests', 26 | 'pyyaml', 27 | 'docopt>=0.6.2', 28 | 'enum34;python_version<"3.4"', 29 | 'bloom-filter2>=2.0.0' 30 | ] 31 | 32 | with open(path.join(path.abspath(path.dirname(__file__)), 'splitio', 'version.py')) as f: 33 | exec(f.read()) # pylint: disable=exec-used 34 | 35 | setup( 36 | name='splitio_client', 37 | version=__version__, # pylint: disable=undefined-variable 38 | description='Split.io Python Client', 39 | author='Patricio Echague, Sebastian Arrubia', 40 | author_email='pato@split.io, sebastian@split.io', 41 | url='https://github.com/splitio/python-client', 42 | download_url=('https://github.com/splitio/python-client/tarball/' + __version__), # pylint: disable=undefined-variable 43 | license='Apache License 2.0', 44 | install_requires=INSTALL_REQUIRES, 45 | tests_require=TESTS_REQUIRES, 46 | extras_require={ 47 | 'test': TESTS_REQUIRES, 48 | 'redis': ['redis>=2.10.5,<7.0.0'], 49 | 'uwsgi': ['uwsgi>=2.0.0'], 50 | 'cpphash': ['mmh3cffi==0.2.1'], 51 | 'asyncio': ['aiohttp>=3.8.4', 'aiofiles>=23.1.0'], 52 | 'kerberos': ['requests-kerberos>=0.15.0'] 53 | }, 54 | setup_requires=['pytest-runner', 'pluggy==1.0.0;python_version<"3.8"'], 55 | classifiers=[ 56 | 'Environment :: Console', 57 | 'Intended Audience :: Developers', 58 | 'Programming Language :: Python', 59 | 'Programming Language :: Python :: 2', 60 | 'Programming Language :: Python :: 3', 61 | 'Topic :: Software Development :: Libraries' 62 | ], 63 | packages=find_packages(exclude=('tests', 'tests.*')) 64 | ) 65 | -------------------------------------------------------------------------------- /tests/engine/test_splitter.py: -------------------------------------------------------------------------------- 1 | """Splitter test module.""" 2 | 3 | from splitio.models.grammar.partitions import Partition 4 | from splitio.engine.splitters import Splitter, CONTROL 5 | 6 | 7 | class SplitterTests(object): 8 | """Tests for engine/splitter.""" 9 | 10 | def test_get_treatment(self, mocker): 11 | """Test get_treatment method on all possible outputs.""" 12 | splitter = Splitter() 13 | 14 | # no partitions returns control 15 | assert splitter.get_treatment('key', 123, [], 1) == CONTROL 16 | # single partition returns that treatment 17 | assert splitter.get_treatment('key', 123, [Partition('on', 100)], 1) == 'on' 18 | # multiple partitions call hash_functions 19 | splitter.get_treatment_for_bucket = lambda x,y: 'on' 
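# get_treatment_for_bucket is stubbed out above, so the 50/50 partition list below exercises the multi-partition branch deterministically.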
20 | partitions = [Partition('on', 50), Partition('off', 50)] 21 | assert splitter.get_treatment('key', 123, partitions, 1) == 'on' 22 | 23 | def test_get_bucket(self, mocker): 24 | """Test get_bucket method.""" 25 | get_hash_fn_mock = mocker.Mock() 26 | hash_fn = mocker.Mock() 27 | hash_fn.return_value = 1 28 | get_hash_fn_mock.side_effect = lambda x: hash_fn 29 | mocker.patch('splitio.engine.splitters.get_hash_fn', new=get_hash_fn_mock) 30 | splitter = Splitter() 31 | splitter.get_bucket(1, 123, 1) 32 | assert get_hash_fn_mock.mock_calls == [mocker.call(1)] 33 | assert hash_fn.mock_calls == [mocker.call(1, 123)] 34 | 35 | def test_treatment_for_bucket(self, mocker): 36 | """Test treatment for bucket method.""" 37 | splitter = Splitter() 38 | assert splitter.get_treatment_for_bucket(0, []) == CONTROL 39 | assert splitter.get_treatment_for_bucket(-1, []) == CONTROL 40 | assert splitter.get_treatment_for_bucket(101, [Partition('a', 100)]) == CONTROL 41 | assert splitter.get_treatment_for_bucket(1, [Partition('a', 100)]) == 'a' 42 | assert splitter.get_treatment_for_bucket(100, [Partition('a', 100)]) == 'a' 43 | assert splitter.get_treatment_for_bucket(50, [Partition('a', 50), Partition('b', 50)]) == 'a' 44 | assert splitter.get_treatment_for_bucket(51, [Partition('a', 50), Partition('b', 50)]) == 'b' 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | -------------------------------------------------------------------------------- /splitio/tasks/split_sync.py: -------------------------------------------------------------------------------- 1 | """Split Synchronization task.""" 2 | 3 | import logging 4 | from splitio.tasks import BaseSynchronizationTask 5 | from splitio.tasks.util.asynctask import AsyncTask, AsyncTaskAsync 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | 10 | 11 | class SplitSynchronizationTaskBase(BaseSynchronizationTask): 12 | """Split Synchronization task class.""" 13 | 14 | def start(self): 15 | """Start the task.""" 16 | self._task.start() 17 | 18 | def stop(self, event=None): 19 | """Stop the task. Accept an optional event to set when the task has finished.""" 20 | pass 21 | 22 | def is_running(self): 23 | """ 24 | Return whether the task is running. 25 | 26 | :return: True if the task is running. False otherwise. 27 | :rtype bool 28 | """ 29 | return self._task.running() 30 | 31 | 32 | class SplitSynchronizationTask(SplitSynchronizationTaskBase): 33 | """Split Synchronization task class.""" 34 | 35 | def __init__(self, synchronize_splits, period): 36 | """ 37 | Class constructor. 38 | 39 | :param synchronize_splits: Handler 40 | :type synchronize_splits: func 41 | :param period: Period of task 42 | :type period: int 43 | """ 44 | self._period = period 45 | self._task = AsyncTask(synchronize_splits, period, on_init=None) 46 | 47 | def stop(self, event=None): 48 | """Stop the task. Accept an optional event to set when the task has finished.""" 49 | self._task.stop(event) 50 | 51 | 52 | class SplitSynchronizationTaskAsync(SplitSynchronizationTaskBase): 53 | """Split Synchronization async task class.""" 54 | 55 | def __init__(self, synchronize_splits, period): 56 | """ 57 | Class constructor. 58 | 59 | :param synchronize_splits: Handler 60 | :type synchronize_splits: func 61 | :param period: Period of task 62 | :type period: int 63 | """ 64 | self._period = period 65 | self._task = AsyncTaskAsync(synchronize_splits, period, on_init=None) 66 | 67 | async def stop(self, event=None): 68 | """Stop the task. 
Accept an optional event to set when the task has finished.""" 69 | await self._task.stop(True) 70 | -------------------------------------------------------------------------------- /tests/models/test_token.py: -------------------------------------------------------------------------------- 1 | """Split model tests module.""" 2 | 3 | from splitio.models import token 4 | from splitio.models.grammar.condition import Condition 5 | 6 | 7 | class TokenTests(object): 8 | """Token model tests.""" 9 | raw_false = {'pushEnabled': False} 10 | 11 | def test_from_raw_false(self): 12 | """Test token model parsing.""" 13 | parsed = token.from_raw(self.raw_false) 14 | assert parsed.push_enabled == False 15 | assert parsed.iat == None 16 | assert parsed.channels == None 17 | assert parsed.exp == None 18 | assert parsed.token == None 19 | 20 | raw_empty = { 21 | 'pushEnabled': True, 22 | 'token': '', 23 | } 24 | 25 | def test_from_raw_empty(self): 26 | """Test token model parsing.""" 27 | parsed = token.from_raw(self.raw_empty) 28 | assert parsed.push_enabled == False 29 | assert parsed.iat == None 30 | assert parsed.channels == None 31 | assert parsed.exp == None 32 | assert parsed.token == None 33 | 34 | raw_ok = { 35 | 'pushEnabled': True, 36 | 'token': 'eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk56TTJNREk1TXpjMF9NVGd5TlRnMU1UZ3dOZz09X3NlZ21lbnRzXCI6W1wic3Vic2NyaWJlXCJdLFwiTnpNMk1ESTVNemMwX01UZ3lOVGcxTVRnd05nPT1fc3BsaXRzXCI6W1wic3Vic2NyaWJlXCJdLFwiY29udHJvbF9wcmlcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXSxcImNvbnRyb2xfc2VjXCI6W1wic3Vic2NyaWJlXCIsXCJjaGFubmVsLW1ldGFkYXRhOnB1Ymxpc2hlcnNcIl19IiwieC1hYmx5LWNsaWVudElkIjoiY2xpZW50SWQiLCJleHAiOjE2MDIwODgxMjcsImlhdCI6MTYwMjA4NDUyN30.5_MjWonhs6yoFhw44hNJm3H7_YMjXpSW105DwjjppqE', 37 | } 38 | 39 | def test_from_raw(self): 40 | """Test token model parsing.""" 41 | parsed = token.from_raw(self.raw_ok) 42 | assert isinstance(parsed, token.Token) 43 | assert parsed.push_enabled == True 44 | assert parsed.iat == 1602084527 45 | assert parsed.exp == 1602088127 46 | assert parsed.channels['NzM2MDI5Mzc0_MTgyNTg1MTgwNg==_segments'] == ['subscribe'] 47 | assert parsed.channels['NzM2MDI5Mzc0_MTgyNTg1MTgwNg==_splits'] == ['subscribe'] 48 | assert parsed.channels['control_pri'] == ['subscribe', 'channel-metadata:publishers'] 49 | assert parsed.channels['control_sec'] == ['subscribe', 'channel-metadata:publishers'] 50 | -------------------------------------------------------------------------------- /splitio/models/token.py: -------------------------------------------------------------------------------- 1 | """Token module""" 2 | 3 | import base64 4 | import json 5 | 6 | 7 | class Token(object): 8 | """Token object class.""" 9 | 10 | def __init__(self, push_enabled, token, channels, exp, iat): 11 | """ 12 | Class constructor. 13 | 14 | :param push_enabled: flag push enabled. 15 | :type push_enabled: bool 16 | 17 | :param token: Token from auth. 18 | :type token: str 19 | 20 | :param channels: Channels parsed from token. 21 | :type channels: str 22 | 23 | :param exp: exp parsed from token. 24 | :type exp: int 25 | 26 | :param iat: iat parsed from token. 
27 | :type iat: int 28 | """ 29 | self._push_enabled = push_enabled 30 | self._token = token 31 | self._channels = channels 32 | self._exp = exp 33 | self._iat = iat 34 | 35 | @property 36 | def push_enabled(self): 37 | """Return push_enabled""" 38 | return self._push_enabled 39 | 40 | @property 41 | def token(self): 42 | """Return token""" 43 | return self._token 44 | 45 | @property 46 | def channels(self): 47 | """Return channels""" 48 | return self._channels 49 | 50 | @property 51 | def exp(self): 52 | """Return exp""" 53 | return self._exp 54 | 55 | @property 56 | def iat(self): 57 | """Return iat""" 58 | return self._iat 59 | 60 | 61 | def from_raw(raw_token): 62 | """ 63 | Parse a new token from a raw token response. 64 | 65 | :param raw_token: Token parsed from auth response. 66 | :type raw_token: dict 67 | 68 | :return: New token model object 69 | :rtype: splitio.models.token.Token 70 | """ 71 | if not 'pushEnabled' in raw_token or not 'token' in raw_token: 72 | return Token(False, None, None, None, None) 73 | 74 | token = raw_token['token'] 75 | push_enabled = raw_token['pushEnabled'] 76 | token_parts = token.strip().split('.') 77 | 78 | if not push_enabled or len(token_parts) < 2: 79 | return Token(False, None, None, None, None) 80 | 81 | to_decode = token_parts[1] 82 | decoded_token = json.loads(base64.b64decode(to_decode + '='*(-len(to_decode) % 4))) 83 | return Token(push_enabled, token, json.loads(decoded_token['x-ably-capability']), decoded_token['exp'], decoded_token['iat']) -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | on: 3 | push: 4 | branches: 5 | - master 6 | - development 7 | pull_request: 8 | branches: 9 | - master 10 | - development 11 | 12 | concurrency: 13 | group: ${{ github.workflow }}-${{ github.event.pull_request.number }} 14 | cancel-in-progress: true 15 | 16 | jobs: 17 | test: 18 | name: Test 19 | runs-on: ubuntu-22.04 20 | services: 21 | redis: 22 | image: redis 23 | ports: 24 | - 6379:6379 25 | steps: 26 | - name: Checkout code 27 | uses: actions/checkout@v3 28 | with: 29 | fetch-depth: 0 30 | 31 | - name: Setup Python 32 | uses: actions/setup-python@v3 33 | with: 34 | python-version: '3.7.16' 35 | 36 | - name: Install dependencies 37 | run: | 38 | sudo apt update 39 | sudo apt-get install -y libkrb5-dev 40 | pip install -U setuptools pip wheel 41 | pip install -e .[cpphash,redis,uwsgi] 42 | 43 | - name: Run tests 44 | run: python setup.py test 45 | 46 | - name: Set VERSION env 47 | run: echo "VERSION=$(cat splitio/version.py | grep "__version__" | awk -F\' '{print $2}')" >> $GITHUB_ENV 48 | 49 | - name: SonarQube Scan (Push) 50 | if: github.event_name == 'push' 51 | uses: SonarSource/sonarcloud-github-action@v1.9 52 | env: 53 | SONAR_TOKEN: ${{ secrets.SONARQUBE_TOKEN }} 54 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 55 | with: 56 | projectBaseDir: . 57 | args: > 58 | -Dsonar.host.url=${{ secrets.SONARQUBE_HOST }} 59 | -Dsonar.projectVersion=${{ env.VERSION }} 60 | 61 | - name: SonarQube Scan (Pull Request) 62 | if: github.event_name == 'pull_request' 63 | uses: SonarSource/sonarcloud-github-action@v1.9 64 | env: 65 | SONAR_TOKEN: ${{ secrets.SONARQUBE_TOKEN }} 66 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 67 | with: 68 | projectBaseDir: . 
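# Same scanner as the push job; the sonar.pullrequest.* args below let SonarQube decorate the pull request.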
69 | args: > 70 | -Dsonar.host.url=${{ secrets.SONARQUBE_HOST }} 71 | -Dsonar.projectVersion=${{ env.VERSION }} 72 | -Dsonar.pullrequest.key=${{ github.event.pull_request.number }} 73 | -Dsonar.pullrequest.branch=${{ github.event.pull_request.head.ref }} 74 | -Dsonar.pullrequest.base=${{ github.event.pull_request.base.ref }} 75 | -------------------------------------------------------------------------------- /splitio/engine/filters.py: -------------------------------------------------------------------------------- 1 | import abc 2 | import threading 3 | 4 | from bloom_filter2 import BloomFilter as BloomFilter2 5 | 6 | class BaseFilter(object, metaclass=abc.ABCMeta): 7 | """Impressions Filter interface.""" 8 | 9 | @abc.abstractmethod 10 | def add(self, data): 11 | """ 12 | Return a boolean flag 13 | 14 | """ 15 | pass 16 | 17 | @abc.abstractmethod 18 | def contains(self, data): 19 | """ 20 | Return a boolean flag 21 | 22 | """ 23 | pass 24 | 25 | @abc.abstractmethod 26 | def clear(self): 27 | """ 28 | No return 29 | 30 | """ 31 | pass 32 | 33 | class BloomFilter(BaseFilter): 34 | """Optimized mode strategy.""" 35 | 36 | def __init__(self, max_elements=5000, error_rate=0.01): 37 | """ 38 | Construct a bloom filter instance. 39 | 40 | :param max_element: maximum elements in the filter 41 | :type string: 42 | 43 | :param error_rate: error rate for the false positives, reduce it will consume more memory 44 | :type numeric: 45 | """ 46 | self._max_elements = max_elements 47 | self._error_rate = error_rate 48 | self._imps_bloom_filter = BloomFilter2(max_elements=self._max_elements, error_rate=self._error_rate) 49 | self._lock = threading.RLock() 50 | 51 | def add(self, data): 52 | """ 53 | Add an item to the bloom filter instance. 54 | 55 | :param data: element to be added 56 | :type string: 57 | 58 | :return: True if successful 59 | :rtype: boolean 60 | """ 61 | with self._lock: 62 | self._imps_bloom_filter.add(data) 63 | return data in self._imps_bloom_filter 64 | 65 | def contains(self, data): 66 | """ 67 | Check if an item exist in the bloom filter instance. 68 | 69 | :param data: element to be checked 70 | :type string: 71 | 72 | :return: True if exist 73 | :rtype: boolean 74 | """ 75 | with self._lock: 76 | return data in self._imps_bloom_filter 77 | 78 | def clear(self): 79 | """ 80 | Destroy the current filter instance and create new one. 81 | 82 | """ 83 | with self._lock: 84 | self._imps_bloom_filter.close() 85 | self._imps_bloom_filter = BloomFilter2(max_elements=self._max_elements, error_rate=self._error_rate) 86 | -------------------------------------------------------------------------------- /splitio/models/segments.py: -------------------------------------------------------------------------------- 1 | """Segment module.""" 2 | 3 | 4 | class Segment(object): 5 | """Segment object class.""" 6 | 7 | def __init__(self, name, keys, change_number): 8 | """ 9 | Class constructor. 10 | 11 | :param name: Segment name. 12 | :type name: str 13 | 14 | :param keys: List of keys belonging to the segment. 15 | :type keys: List 16 | """ 17 | self._name = name 18 | self._keys = set(keys) 19 | self._change_number = change_number 20 | 21 | @property 22 | def name(self): 23 | """Return segment name.""" 24 | return self._name 25 | 26 | def contains(self, key): 27 | """ 28 | Return whether the supplied key belongs to the segment. 29 | 30 | :param key: User key. 31 | :type key: str 32 | 33 | :return: True if the user is in the segment. False otherwise. 
34 | :rtype: bool 35 | """ 36 | return key in self._keys 37 | 38 | def update(self, to_add, to_remove): 39 | """ 40 | Add supplied keys to the segment. 41 | 42 | :param to_add: List of keys to add. 43 | :type to_add: list 44 | :param to_remove: List of keys to remove. 45 | :type to_remove: list 46 | """ 47 | self._keys = self._keys.union(set(to_add)).difference(to_remove) 48 | 49 | @property 50 | def keys(self): 51 | """ 52 | Return the segment keys. 53 | 54 | :return: A set of the segment keys 55 | :rtype: set 56 | """ 57 | return self._keys 58 | 59 | @property 60 | def change_number(self): 61 | """Return segment change number.""" 62 | return self._change_number 63 | 64 | @change_number.setter 65 | def change_number(self, new_value): 66 | """ 67 | Set new change number. 68 | 69 | :param new_value: New change number. 70 | :type new_value: int 71 | """ 72 | self._change_number = new_value 73 | 74 | 75 | def from_raw(raw_segment): 76 | """ 77 | Parse a new segment from a raw segment_changes response. 78 | 79 | :param raw_segment: Segment parsed from segment changes response. 80 | :type raw_segment: dict 81 | 82 | :return: New segment model object 83 | :rtype: splitio.models.segment.Segment 84 | """ 85 | keys = set(raw_segment['added']).difference(raw_segment['removed']) 86 | return Segment(raw_segment['name'], keys, raw_segment['till']) 87 | -------------------------------------------------------------------------------- /splitio/engine/impressions/impressions.py: -------------------------------------------------------------------------------- 1 | """Split evaluator module.""" 2 | from enum import Enum 3 | 4 | class ImpressionsMode(Enum): 5 | """Impressions tracking mode.""" 6 | 7 | OPTIMIZED = "OPTIMIZED" 8 | DEBUG = "DEBUG" 9 | NONE = "NONE" 10 | 11 | class Manager(object): # pylint:disable=too-few-public-methods 12 | """Impression manager.""" 13 | 14 | def __init__(self, strategy, none_strategy, telemetry_runtime_producer): 15 | """ 16 | Construct a manger to track and forward impressions to the queue. 17 | 18 | :param listener: Optional impressions listener that will capture all seen impressions. 19 | :type listener: splitio.client.listener.ImpressionListenerWrapper 20 | 21 | :param strategy: Impressions stragetgy instance 22 | :type strategy: (BaseStrategy) 23 | """ 24 | 25 | self._strategy = strategy 26 | self._none_strategy = none_strategy 27 | self._telemetry_runtime_producer = telemetry_runtime_producer 28 | 29 | def process_impressions(self, impressions_decorated): 30 | """ 31 | Process impressions. 32 | 33 | Impressions are analyzed to see if they've been seen before and counted. 34 | 35 | :param impressions_decorated: List of impression objects with attributes 36 | :type impressions_decorated: list[tuple[splitio.models.impression.ImpressionDecorated, dict]] 37 | 38 | :return: processed and deduped impressions. 
39 | :rtype: tuple(list[tuple[splitio.models.impression.Impression, dict]], list(int)) 40 | """ 41 | for_listener_all = [] 42 | for_log_all = [] 43 | for_counter_all = [] 44 | for_unique_keys_tracker_all = [] 45 | for impression_decorated, att in impressions_decorated: 46 | if impression_decorated.disabled: 47 | for_log, for_listener, for_counter, for_unique_keys_tracker = self._none_strategy.process_impressions([(impression_decorated.Impression, att)]) 48 | else: 49 | for_log, for_listener, for_counter, for_unique_keys_tracker = self._strategy.process_impressions([(impression_decorated.Impression, att)]) 50 | for_listener_all.extend(for_listener) 51 | for_log_all.extend(for_log) 52 | for_counter_all.extend(for_counter) 53 | for_unique_keys_tracker_all.extend(for_unique_keys_tracker) 54 | 55 | return for_log_all, len(impressions_decorated) - len(for_log_all), for_listener_all, for_counter_all, for_unique_keys_tracker_all 56 | -------------------------------------------------------------------------------- /splitio/client/localhost.py: -------------------------------------------------------------------------------- 1 | """Localhost client mocked components.""" 2 | import logging 3 | import re 4 | 5 | from splitio.storage import ImpressionStorage, EventStorage 6 | 7 | _LEGACY_COMMENT_LINE_RE = re.compile(r'^#.*$') 8 | _LEGACY_DEFINITION_LINE_RE = re.compile(r'^(?P<feature>[\w_-]+)\s+(?P<treatment>[\w_-]+)$') 9 | 10 | 11 | _LOGGER = logging.getLogger(__name__) 12 | 13 | 14 | class LocalhostImpressionsStorage(ImpressionStorage): 15 | """Impression storage that doesn't cache anything.""" 16 | 17 | def put(self, *_, **__): # pylint: disable=arguments-differ 18 | """Accept any arguments and do nothing.""" 19 | pass 20 | 21 | def pop_many(self, *_, **__): # pylint: disable=arguments-differ 22 | """Accept any arguments and do nothing.""" 23 | pass 24 | 25 | def clear(self, *_, **__): # pylint: disable=arguments-differ 26 | """Accept any arguments and do nothing.""" 27 | pass 28 | 29 | 30 | class LocalhostEventsStorage(EventStorage): 31 | """Events storage that doesn't cache anything.""" 32 | 33 | def put(self, *_, **__): # pylint: disable=arguments-differ 34 | """Accept any arguments and do nothing.""" 35 | pass 36 | 37 | def pop_many(self, *_, **__): # pylint: disable=arguments-differ 38 | """Accept any arguments and do nothing.""" 39 | pass 40 | 41 | def clear(self, *_, **__): # pylint: disable=arguments-differ 42 | """Accept any arguments and do nothing.""" 43 | pass 44 | 45 | class LocalhostImpressionsStorageAsync(ImpressionStorage): 46 | """Impression storage that doesn't cache anything.""" 47 | 48 | async def put(self, *_, **__): # pylint: disable=arguments-differ 49 | """Accept any arguments and do nothing.""" 50 | pass 51 | 52 | async def pop_many(self, *_, **__): # pylint: disable=arguments-differ 53 | """Accept any arguments and do nothing.""" 54 | pass 55 | 56 | async def clear(self, *_, **__): # pylint: disable=arguments-differ 57 | """Accept any arguments and do nothing.""" 58 | pass 59 | 60 | 61 | class LocalhostEventsStorageAsync(EventStorage): 62 | """Events storage that doesn't cache anything.""" 63 | 64 | async def put(self, *_, **__): # pylint: disable=arguments-differ 65 | """Accept any arguments and do nothing.""" 66 | pass 67 | 68 | async def pop_many(self, *_, **__): # pylint: disable=arguments-differ 69 | """Accept any arguments and do nothing.""" 70 | pass 71 | 72 | async def clear(self, *_, **__): # pylint: disable=arguments-differ 73 | """Accept any arguments and do nothing.""" 74 |
pass 75 | -------------------------------------------------------------------------------- /splitio/tasks/telemetry_sync.py: -------------------------------------------------------------------------------- 1 | """Telemetry syncrhonization task.""" 2 | import logging 3 | 4 | from splitio.tasks import BaseSynchronizationTask 5 | from splitio.tasks.util.asynctask import AsyncTask, AsyncTaskAsync 6 | 7 | _LOGGER = logging.getLogger(__name__) 8 | 9 | class TelemetrySyncTaskBase(BaseSynchronizationTask): 10 | """Telemetry synchronization task uses an asynctask.AsyncTask to send MTKs.""" 11 | 12 | def start(self): 13 | """Start executing the telemetry synchronization task.""" 14 | self._task.start() 15 | 16 | def stop(self, event=None): 17 | """Stop executing the unique telemetry synchronization task.""" 18 | pass 19 | 20 | def is_running(self): 21 | """ 22 | Return whether the task is running or not. 23 | 24 | :return: True if the task is running. False otherwise. 25 | :rtype: bool 26 | """ 27 | return self._task.running() 28 | 29 | def flush(self): 30 | """Flush unique keys.""" 31 | _LOGGER.debug('Forcing flush execution for telemetry') 32 | self._task.force_execution() 33 | 34 | 35 | class TelemetrySyncTask(TelemetrySyncTaskBase): 36 | """Unique Telemetry task uses an asynctask.AsyncTask to send MTKs.""" 37 | 38 | def __init__(self, synchronize_telemetry, period): 39 | """ 40 | Class constructor. 41 | 42 | :param synchronize_telemetry: sender 43 | :type synchronize_telemetry: func 44 | :param period: How many seconds to wait between subsequent unique keys pushes to the BE. 45 | :type period: int 46 | """ 47 | 48 | self._task = AsyncTask(synchronize_telemetry, period, 49 | on_stop=synchronize_telemetry) 50 | 51 | def stop(self, event=None): 52 | """Stop executing the unique telemetry synchronization task.""" 53 | self._task.stop(event) 54 | 55 | 56 | class TelemetrySyncTaskAsync(TelemetrySyncTaskBase): 57 | """Telemetry synchronization task uses an asynctask.AsyncTask to send MTKs.""" 58 | 59 | def __init__(self, synchronize_telemetry, period): 60 | """ 61 | Class constructor. 62 | 63 | :param synchronize_telemetry: sender 64 | :type synchronize_telemetry: func 65 | :param period: How many seconds to wait between subsequent unique keys pushes to the BE. 
66 | :type period: int 67 | """ 68 | 69 | self._task = AsyncTaskAsync(synchronize_telemetry, period, 70 | on_stop=synchronize_telemetry) 71 | 72 | async def stop(self): 73 | """Stop executing the unique telemetry synchronization task.""" 74 | await self._task.stop(True) 75 | -------------------------------------------------------------------------------- /tests/models/test_notification.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from splitio.models.notification import wrap_notification, SplitChangeNotification, SplitKillNotification, SegmentChangeNotification, ControlNotification 4 | 5 | class NotificationTests(object): 6 | """Notification model tests.""" 7 | 8 | def test_wrap_notification(self): 9 | with pytest.raises(ValueError): 10 | wrap_notification('{"type":"WRONG","controlType":"STREAMING_PAUSED"}', 'control_pri') 11 | 12 | with pytest.raises(ValueError): 13 | wrap_notification('sadasd', 'control_pri') 14 | 15 | with pytest.raises(TypeError): 16 | wrap_notification(None, 'control_pri') 17 | 18 | with pytest.raises(ValueError): 19 | wrap_notification('{"type":"SPLIT_UPDATE","changeNumber":1591996754396}', None) 20 | 21 | with pytest.raises(KeyError): 22 | wrap_notification('{"type":"SPLIT_UPDATE"}', 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_splits') 23 | 24 | with pytest.raises(ValueError): 25 | wrap_notification('{"type":"CONTROL","controlType":"STREAMING_PAUSEDD"}', 'control_pri') 26 | 27 | n0 = wrap_notification('{"type":"SPLIT_UPDATE","changeNumber":1591996754396}', 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_splits') 28 | assert isinstance(n0, SplitChangeNotification) 29 | assert n0.channel == 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_splits' 30 | assert n0.notification_type.name == 'SPLIT_UPDATE' 31 | 32 | n1 = wrap_notification('{"type":"SPLIT_KILL","changeNumber":1591996754396,"defaultTreatment":"some","splitName":"test"}', 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_splits') 33 | assert isinstance(n1, SplitKillNotification) 34 | assert n1.channel == 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_splits' 35 | assert n1.change_number == 1591996754396 36 | assert n1.default_treatment == 'some' 37 | assert n1.split_name == 'test' 38 | assert n1.notification_type.name == 'SPLIT_KILL' 39 | 40 | n2 = wrap_notification('{"type":"SEGMENT_UPDATE","changeNumber":1591996754396,"segmentName":"some"}', 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_segments') 41 | assert isinstance(n2, SegmentChangeNotification) 42 | assert n2.channel == 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_segments' 43 | assert n2.change_number == 1591996754396 44 | assert n2.segment_name == 'some' 45 | assert n2.notification_type.name == 'SEGMENT_UPDATE' 46 | 47 | n3 = wrap_notification('{"type":"CONTROL","controlType":"STREAMING_PAUSED"}', 'control_pri') 48 | assert isinstance(n3, ControlNotification) 49 | assert n3.channel == 'control_pri' 50 | assert n3.control_type.name == 'STREAMING_PAUSED' 51 | assert n3.notification_type.name == 'CONTROL' 52 | -------------------------------------------------------------------------------- /splitio/tasks/events_sync.py: -------------------------------------------------------------------------------- 1 | """Events syncrhonization task.""" 2 | import logging 3 | 4 | from splitio.tasks import BaseSynchronizationTask 5 | from splitio.tasks.util.asynctask import AsyncTask, AsyncTaskAsync 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | 10 | 11 | class EventsSyncTaskBase(BaseSynchronizationTask): 12 | """Events synchronization task base uses an asynctask.AsyncTask to send 
events.""" 13 | 14 | def start(self): 15 | """Start executing the events synchronization task.""" 16 | self._task.start() 17 | 18 | def stop(self, event=None): 19 | """Stop executing the events synchronization task.""" 20 | pass 21 | 22 | def flush(self): 23 | """Flush events in storage.""" 24 | _LOGGER.debug('Forcing flush execution for events') 25 | self._task.force_execution() 26 | 27 | def is_running(self): 28 | """ 29 | Return whether the task is running or not. 30 | 31 | :return: True if the task is running. False otherwise. 32 | :rtype: bool 33 | """ 34 | return self._task.running() 35 | 36 | 37 | class EventsSyncTask(EventsSyncTaskBase): 38 | """Events synchronization task uses an asynctask.AsyncTask to send events.""" 39 | 40 | def __init__(self, synchronize_events, period): 41 | """ 42 | Class constructor. 43 | 44 | :param synchronize_events: Events Api object to send data to the backend 45 | :type synchronize_events: splitio.api.events.EventsAPI 46 | :param period: How many seconds to wait between subsequent event pushes to the BE. 47 | :type period: int 48 | 49 | """ 50 | self._period = period 51 | self._task = AsyncTask(synchronize_events, self._period, on_stop=synchronize_events) 52 | 53 | def stop(self, event=None): 54 | """Stop executing the events synchronization task.""" 55 | self._task.stop(event) 56 | 57 | 58 | class EventsSyncTaskAsync(EventsSyncTaskBase): 59 | """Events synchronization task uses an asynctask.AsyncTaskAsync to send events.""" 60 | 61 | def __init__(self, synchronize_events, period): 62 | """ 63 | Class constructor. 64 | 65 | :param synchronize_events: Events Api object to send data to the backend 66 | :type synchronize_events: splitio.api.events.EventsAPIAsync 67 | :param period: How many seconds to wait between subsequent event pushes to the BE. 68 | :type period: int 69 | 70 | """ 71 | self._period = period 72 | self._task = AsyncTaskAsync(synchronize_events, self._period, on_stop=synchronize_events) 73 | 74 | async def stop(self, event=None): 75 | """Stop executing the events synchronization task.""" 76 | await self._task.stop(True) 77 | -------------------------------------------------------------------------------- /splitio/models/grammar/matchers/keys.py: -------------------------------------------------------------------------------- 1 | """Keys matchers module.""" 2 | from splitio.models.grammar.matchers.base import Matcher 3 | 4 | 5 | class AllKeysMatcher(Matcher): 6 | """A matcher that always returns True.""" 7 | 8 | def _build(self, raw_matcher): 9 | """ 10 | Build an AllKeysMatcher. 11 | 12 | :param raw_matcher: raw matcher as fetched from splitChanges response. 13 | :type raw_matcher: dict 14 | """ 15 | pass 16 | 17 | def _match(self, key, attributes=None, context=None): 18 | """ 19 | Evaluate user input against a matcher and return whether the match is successful. 20 | 21 | :param key: User key. 22 | :type key: str. 23 | :param attributes: Custom user attributes. 24 | :type attributes: dict. 25 | :param context: Evaluation context 26 | :type context: dict 27 | 28 | :returns: Wheter the match is successful. 
29 | :rtype: bool 30 | """ 31 | return key is not None 32 | 33 | def __str__(self): 34 | """Return string Representation.""" 35 | return 'in segment all' 36 | 37 | def _add_matcher_specific_properties_to_json(self): 38 | """Add matcher specific properties to base dict before returning it.""" 39 | return {} 40 | 41 | 42 | class UserDefinedSegmentMatcher(Matcher): 43 | """Matcher that returns true when the submitted key belongs to a segment.""" 44 | 45 | def _build(self, raw_matcher): 46 | """ 47 | Build an UserDefinedSegmentMatcher. 48 | 49 | :param raw_matcher: raw matcher as fetched from splitChanges response. 50 | :type raw_matcher: dict 51 | """ 52 | self._segment_name = raw_matcher['userDefinedSegmentMatcherData']['segmentName'] 53 | 54 | def _match(self, key, attributes=None, context=None): 55 | """ 56 | Evaluate user input against a matcher and return whether the match is successful. 57 | 58 | :param key: User key. 59 | :type key: str. 60 | :param attributes: Custom user attributes. 61 | :type attributes: dict. 62 | :param context: Evaluation context 63 | :type context: dict 64 | 65 | :returns: Wheter the match is successful. 66 | :rtype: bool 67 | """ 68 | matching_data = self._get_matcher_input(key, attributes) 69 | if matching_data is None: 70 | return False 71 | 72 | return context['ec'].segment_memberships[self._segment_name] 73 | 74 | def _add_matcher_specific_properties_to_json(self): 75 | """Return UserDefinedSegment specific properties.""" 76 | return { 77 | 'userDefinedSegmentMatcherData': { 78 | 'segmentName': self._segment_name 79 | } 80 | } 81 | 82 | def __str__(self): 83 | """Return string Representation.""" 84 | return 'in segment {segment_name}'.format( 85 | segment_name=self._segment_name 86 | ) 87 | -------------------------------------------------------------------------------- /tests/sync/test_impressions_count_synchronizer.py: -------------------------------------------------------------------------------- 1 | """Split Worker tests.""" 2 | 3 | import threading 4 | import time 5 | import pytest 6 | 7 | from splitio.api.client import HttpResponse 8 | from splitio.api import APIException 9 | from splitio.engine.impressions.impressions import Manager as ImpressionsManager 10 | from splitio.engine.impressions.manager import Counter 11 | from splitio.engine.impressions.strategies import StrategyOptimizedMode 12 | from splitio.sync.impression import ImpressionsCountSynchronizer, ImpressionsCountSynchronizerAsync 13 | from splitio.api.impressions import ImpressionsAPI 14 | 15 | 16 | class ImpressionsCountSynchronizerTests(object): 17 | """ImpressionsCount synchronizer test cases.""" 18 | 19 | def test_synchronize_impressions_counts(self, mocker): 20 | counter = mocker.Mock(spec=Counter) 21 | 22 | counters = [ 23 | Counter.CountPerFeature('f1', 123, 2), 24 | Counter.CountPerFeature('f2', 123, 123), 25 | Counter.CountPerFeature('f1', 456, 111), 26 | Counter.CountPerFeature('f2', 456, 222) 27 | ] 28 | 29 | counter.pop_all.return_value = counters 30 | api = mocker.Mock(spec=ImpressionsAPI) 31 | api.flush_counters.return_value = HttpResponse(200, '', {}) 32 | impression_count_synchronizer = ImpressionsCountSynchronizer(api, counter) 33 | impression_count_synchronizer.synchronize_counters() 34 | 35 | assert counter.pop_all.mock_calls[0] == mocker.call() 36 | assert api.flush_counters.mock_calls[0] == mocker.call(counters) 37 | 38 | assert len(api.flush_counters.mock_calls) == 1 39 | 40 | 41 | class ImpressionsCountSynchronizerAsyncTests(object): 42 | """ImpressionsCount 
synchronizer test cases.""" 43 | 44 | @pytest.mark.asyncio 45 | async def test_synchronize_impressions_counts(self, mocker): 46 | counter = mocker.Mock(spec=Counter) 47 | 48 | self.called = 0 49 | def pop_all(): 50 | self.called += 1 51 | return [ 52 | Counter.CountPerFeature('f1', 123, 2), 53 | Counter.CountPerFeature('f2', 123, 123), 54 | Counter.CountPerFeature('f1', 456, 111), 55 | Counter.CountPerFeature('f2', 456, 222) 56 | ] 57 | counter.pop_all = pop_all 58 | 59 | self.counters = None 60 | async def flush_counters(counters): 61 | self.counters = counters 62 | return HttpResponse(200, '', {}) 63 | api = mocker.Mock(spec=ImpressionsAPI) 64 | api.flush_counters = flush_counters 65 | 66 | impression_count_synchronizer = ImpressionsCountSynchronizerAsync(api, counter) 67 | await impression_count_synchronizer.synchronize_counters() 68 | 69 | assert self.counters == [ 70 | Counter.CountPerFeature('f1', 123, 2), 71 | Counter.CountPerFeature('f2', 123, 123), 72 | Counter.CountPerFeature('f1', 456, 111), 73 | Counter.CountPerFeature('f2', 456, 222) 74 | ] 75 | assert self.called == 1 76 | -------------------------------------------------------------------------------- /splitio/models/grammar/matchers/rule_based_segment.py: -------------------------------------------------------------------------------- 1 | """Rule based segment matcher classes.""" 2 | from splitio.models.grammar.matchers.base import Matcher 3 | from splitio.models.rule_based_segments import SegmentType 4 | 5 | class RuleBasedSegmentMatcher(Matcher): 6 | 7 | def _build(self, raw_matcher): 8 | """ 9 | Build an RuleBasedSegmentMatcher. 10 | 11 | :param raw_matcher: raw matcher as fetched from splitChanges response. 12 | :type raw_matcher: dict 13 | """ 14 | self._rbs_segment_name = raw_matcher['userDefinedSegmentMatcherData']['segmentName'] 15 | 16 | def _match(self, key, attributes=None, context=None): 17 | """ 18 | Evaluate user input against a matcher and return whether the match is successful. 19 | 20 | :param key: User key. 21 | :type key: str. 22 | :param attributes: Custom user attributes. 23 | :type attributes: dict. 24 | :param context: Evaluation context 25 | :type context: dict 26 | 27 | :returns: Wheter the match is successful. 
28 | :rtype: bool 29 | """ 30 | if self._rbs_segment_name == None: 31 | return False 32 | 33 | rb_segment = context['ec'].rbs_segments.get(self._rbs_segment_name) 34 | 35 | if key in rb_segment.excluded.get_excluded_keys(): 36 | return False 37 | 38 | if self._match_dep_rb_segments(rb_segment.excluded.get_excluded_segments(), key, attributes, context): 39 | return False 40 | 41 | return self._match_conditions(rb_segment.conditions, key, attributes, context) 42 | 43 | def _add_matcher_specific_properties_to_json(self): 44 | """Return UserDefinedSegment specific properties.""" 45 | return { 46 | 'userDefinedSegmentMatcherData': { 47 | 'segmentName': self._rbs_segment_name 48 | } 49 | } 50 | 51 | def _match_conditions(self, rbs_segment_conditions, key, attributes, context): 52 | for parsed_condition in rbs_segment_conditions: 53 | if parsed_condition.matches(key, attributes, context): 54 | return True 55 | 56 | return False 57 | 58 | def _match_dep_rb_segments(self, excluded_rb_segments, key, attributes, context): 59 | for excluded_rb_segment in excluded_rb_segments: 60 | if excluded_rb_segment.type == SegmentType.STANDARD: 61 | if context['ec'].segment_memberships[excluded_rb_segment.name]: 62 | return True 63 | else: 64 | excluded_segment = context['ec'].rbs_segments.get(excluded_rb_segment.name) 65 | if key in excluded_segment.excluded.get_excluded_keys(): 66 | return False 67 | 68 | if self._match_dep_rb_segments(excluded_segment.excluded.get_excluded_segments(), key, attributes, context) \ 69 | or self._match_conditions(excluded_segment.conditions, key, attributes, context): 70 | return True 71 | 72 | return False 73 | -------------------------------------------------------------------------------- /splitio/sync/util.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import logging 3 | 4 | _LOGGER = logging.getLogger(__name__) 5 | 6 | def _get_sha(fetched): 7 | """ 8 | Return sha256 of given string. 9 | 10 | :param fetched: string variable 11 | :type fetched: str 12 | 13 | :return: hex representation of sha256 14 | :rtype: str 15 | """ 16 | return hashlib.sha256(fetched.encode()).hexdigest() 17 | 18 | def _sanitize_object_element(object, object_name, element_name, default_value, lower_value=None, upper_value=None, in_list=None, not_in_list=None): 19 | """ 20 | Sanitize specific object element. 
21 | 22 | :param object: split or segment dict object 23 | :type object: Dict 24 | :param element_name: element name 25 | :type element_name: str 26 | :param default_value: element default value 27 | :type default_value: any 28 | :param lower_value: Optional, element lower value limit 29 | :type lower_value: any 30 | :param upper_value: Optional, element upper value limit 31 | :type upper_value: any 32 | :param in_list: Optional, list of values expected in element 33 | :type in_list: [any] 34 | :param not_in_list: Optional, list of values not expected in element 35 | :type not_in_list: [any] 36 | 37 | :return: sanitized object 38 | :rtype: Dict 39 | """ 40 | if element_name not in object or object[element_name] is None: 41 | object[element_name] = default_value 42 | _LOGGER.debug("Sanitized element [%s] to '%s' in %s: %s.", element_name, default_value, object_name, object['name']) 43 | if lower_value is not None and upper_value is not None: 44 | if object[element_name] < lower_value or object[element_name] > upper_value: 45 | object[element_name] = default_value 46 | _LOGGER.debug("Sanitized element [%s] to '%s' in %s: %s.", element_name, default_value, object_name, object['name']) 47 | elif lower_value is not None: 48 | if object[element_name] < lower_value: 49 | object[element_name] = default_value 50 | _LOGGER.debug("Sanitized element [%s] to '%s' in %s: %s.", element_name, default_value, object_name, object['name']) 51 | elif upper_value is not None: 52 | if object[element_name] > upper_value: 53 | object[element_name] = default_value 54 | _LOGGER.debug("Sanitized element [%s] to '%s' in %s: %s.", element_name, default_value, object_name, object['name']) 55 | if in_list is not None: 56 | if object[element_name] not in in_list: 57 | object[element_name] = default_value 58 | _LOGGER.debug("Sanitized element [%s] to '%s' in %s: %s.", element_name, default_value, object_name, object['name']) 59 | if not_in_list is not None: 60 | if object[element_name] in not_in_list: 61 | object[element_name] = default_value 62 | _LOGGER.debug("Sanitized element [%s] to '%s' in %s: %s.", element_name, default_value, object_name, object['name']) 63 | 64 | return object 65 | 66 | def convert_to_new_spec(body): 67 | return {"ff": {"d": body["splits"], "s": body["since"], "t": body["till"]}, 68 | "rbs": {"d": [], "s": -1, "t": -1}} 69 | -------------------------------------------------------------------------------- /tests/tasks/test_telemetry_sync.py: -------------------------------------------------------------------------------- 1 | """Impressions synchronization task test module.""" 2 | import pytest 3 | import threading 4 | import time 5 | from splitio.api.client import HttpResponse 6 | from splitio.tasks.telemetry_sync import TelemetrySyncTask, TelemetrySyncTaskAsync 7 | from splitio.api.telemetry import TelemetryAPI, TelemetryAPIAsync 8 | from splitio.sync.telemetry import TelemetrySynchronizer, TelemetrySynchronizerAsync, InMemoryTelemetrySubmitter, InMemoryTelemetrySubmitterAsync 9 | from splitio.storage.inmemmory import InMemoryTelemetryStorage, InMemoryTelemetryStorageAsync 10 | from splitio.engine.telemetry import TelemetryStorageConsumer, TelemetryStorageConsumerAsync 11 | from splitio.optional.loaders import asyncio 12 | 13 | 14 | class TelemetrySyncTaskTests(object): 15 | """Unique Keys Syncrhonization task test cases.""" 16 | 17 | def test_record_stats(self, mocker): 18 | """Test that the task works properly under normal circumstances.""" 19 | api = mocker.Mock(spec=TelemetryAPI) 20 | 
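# Stub the telemetry API so the periodic task observes a successful (HTTP 200) flush without making real network calls.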
api.record_stats.return_value = HttpResponse(200, '', {}) 21 | telemetry_storage = InMemoryTelemetryStorage() 22 | telemetry_consumer = TelemetryStorageConsumer(telemetry_storage) 23 | telemetry_submitter = InMemoryTelemetrySubmitter(telemetry_consumer, mocker.Mock(), mocker.Mock(), api) 24 | def _build_stats(): 25 | return {} 26 | telemetry_submitter._build_stats = _build_stats 27 | 28 | telemetry_synchronizer = TelemetrySynchronizer(telemetry_submitter) 29 | task = TelemetrySyncTask(telemetry_synchronizer.synchronize_stats, 1) 30 | task.start() 31 | time.sleep(2) 32 | assert task.is_running() 33 | assert len(api.record_stats.mock_calls) >= 1 34 | stop_event = threading.Event() 35 | task.stop(stop_event) 36 | stop_event.wait(5) 37 | assert stop_event.is_set() 38 | 39 | 40 | class TelemetrySyncTaskAsyncTests(object): 41 | """Unique Keys Syncrhonization task test cases.""" 42 | 43 | @pytest.mark.asyncio 44 | async def test_record_stats(self, mocker): 45 | """Test that the task works properly under normal circumstances.""" 46 | api = mocker.Mock(spec=TelemetryAPIAsync) 47 | self.called = False 48 | async def record_stats(stats): 49 | self.called = True 50 | return HttpResponse(200, '', {}) 51 | api.record_stats = record_stats 52 | 53 | telemetry_storage = await InMemoryTelemetryStorageAsync.create() 54 | telemetry_consumer = TelemetryStorageConsumerAsync(telemetry_storage) 55 | telemetry_submitter = InMemoryTelemetrySubmitterAsync(telemetry_consumer, mocker.Mock(), mocker.Mock(), api) 56 | async def _build_stats(): 57 | return {} 58 | telemetry_submitter._build_stats = _build_stats 59 | 60 | telemetry_synchronizer = TelemetrySynchronizerAsync(telemetry_submitter) 61 | task = TelemetrySyncTaskAsync(telemetry_synchronizer.synchronize_stats, 1) 62 | task.start() 63 | await asyncio.sleep(2) 64 | assert task.is_running() 65 | assert self.called 66 | await task.stop() 67 | assert not task.is_running() 68 | -------------------------------------------------------------------------------- /tests/storage/test_flag_sets.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from splitio.storage import FlagSetsFilter 4 | from splitio.storage.inmemmory import FlagSets 5 | 6 | class FlagSetsFilterTests(object): 7 | """Flag sets filter storage tests.""" 8 | def test_without_initial_set(self): 9 | flag_set = FlagSets() 10 | assert flag_set.sets_feature_flag_map == {} 11 | 12 | flag_set._add_flag_set('set1') 13 | assert flag_set.get_flag_set('set1') == set({}) 14 | assert flag_set.flag_set_exist('set1') == True 15 | assert flag_set.flag_set_exist('set2') == False 16 | 17 | flag_set.add_feature_flag_to_flag_set('set1', 'split1') 18 | assert flag_set.get_flag_set('set1') == {'split1'} 19 | flag_set.add_feature_flag_to_flag_set('set1', 'split2') 20 | assert flag_set.get_flag_set('set1') == {'split1', 'split2'} 21 | flag_set.remove_feature_flag_to_flag_set('set1', 'split1') 22 | assert flag_set.get_flag_set('set1') == {'split2'} 23 | flag_set._remove_flag_set('set2') 24 | assert flag_set.sets_feature_flag_map == {'set1': set({'split2'})} 25 | flag_set._remove_flag_set('set1') 26 | assert flag_set.sets_feature_flag_map == {} 27 | assert flag_set.flag_set_exist('set1') == False 28 | 29 | def test_with_initial_set(self): 30 | flag_set = FlagSets(['set1', 'set2']) 31 | assert flag_set.sets_feature_flag_map == {'set1': set(), 'set2': set()} 32 | 33 | flag_set._add_flag_set('set1') 34 | assert flag_set.get_flag_set('set1') == set({}) 35 | assert 
flag_set.flag_set_exist('set1') == True 36 | assert flag_set.flag_set_exist('set2') == True 37 | 38 | flag_set.add_feature_flag_to_flag_set('set1', 'split1') 39 | assert flag_set.get_flag_set('set1') == {'split1'} 40 | flag_set.add_feature_flag_to_flag_set('set1', 'split2') 41 | assert flag_set.get_flag_set('set1') == {'split1', 'split2'} 42 | flag_set.remove_feature_flag_to_flag_set('set1', 'split1') 43 | assert flag_set.get_flag_set('set1') == {'split2'} 44 | flag_set._remove_flag_set('set2') 45 | assert flag_set.sets_feature_flag_map == {'set1': set({'split2'})} 46 | flag_set._remove_flag_set('set1') 47 | assert flag_set.sets_feature_flag_map == {} 48 | assert flag_set.flag_set_exist('set1') == False 49 | 50 | def test_flag_set_filter(self): 51 | flag_set_filter = FlagSetsFilter() 52 | assert flag_set_filter.flag_sets == set() 53 | assert not flag_set_filter.should_filter 54 | 55 | flag_set_filter = FlagSetsFilter(['set1', 'set2']) 56 | assert flag_set_filter.flag_sets == set({'set1', 'set2'}) 57 | assert flag_set_filter.should_filter 58 | assert flag_set_filter.intersect(set({'set1', 'set2'})) 59 | assert flag_set_filter.intersect(set({'set1', 'set2', 'set5'})) 60 | assert not flag_set_filter.intersect(set({'set4'})) 61 | assert not flag_set_filter.set_exist('set4') 62 | assert flag_set_filter.set_exist('set1') 63 | 64 | flag_set_filter = FlagSetsFilter(['set5', 'set2', 'set6', 'set1']) 65 | assert flag_set_filter.sorted_flag_sets == ['set1', 'set2', 'set5', 'set6'] -------------------------------------------------------------------------------- /tests/models/grammar/test_semver.py: -------------------------------------------------------------------------------- 1 | """Condition model tests module.""" 2 | import csv 3 | import os 4 | 5 | from splitio.models.grammar.matchers.utils.utils import build_semver_or_none 6 | 7 | valid_versions = os.path.join(os.path.dirname(__file__), 'files', 'valid-semantic-versions.csv') 8 | invalid_versions = os.path.join(os.path.dirname(__file__), 'files', 'invalid-semantic-versions.csv') 9 | equalto_versions = os.path.join(os.path.dirname(__file__), 'files', 'equal-to-semver.csv') 10 | between_versions = os.path.join(os.path.dirname(__file__), 'files', 'between-semver.csv') 11 | 12 | class SemverTests(object): 13 | """Test the semver object model.""" 14 | 15 | def test_valid_versions(self): 16 | with open(valid_versions) as csvfile: 17 | reader = csv.DictReader(csvfile) 18 | for row in reader: 19 | assert build_semver_or_none(row['higher']) is not None 20 | assert build_semver_or_none(row['lower']) is not None 21 | 22 | def test_invalid_versions(self): 23 | with open(invalid_versions) as csvfile: 24 | reader = csv.DictReader(csvfile) 25 | for row in reader: 26 | assert build_semver_or_none(row['invalid']) is None 27 | 28 | def test_compare(self): 29 | with open(valid_versions) as csvfile: 30 | reader = csv.DictReader(csvfile) 31 | for row in reader: 32 | higher = build_semver_or_none(row['higher']) 33 | lower = build_semver_or_none(row['lower']) 34 | assert higher is not None 35 | assert lower is not None 36 | assert higher.compare(lower) == 1 37 | assert lower.compare(higher) == -1 38 | 39 | with open(equalto_versions) as csvfile: 40 | reader = csv.DictReader(csvfile) 41 | for row in reader: 42 | version1 = build_semver_or_none(row['version1']) 43 | version2 = build_semver_or_none(row['version2']) 44 | assert version1 is not None 45 | assert version2 is not None 46 | if row['equals'] == "true": 47 | assert version1.version == version2.version 48 | else: 
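# Rows not marked as equal must still normalize to different version strings.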
49 | assert version1.version != version2.version 50 | 51 | with open(between_versions) as csvfile: 52 | reader = csv.DictReader(csvfile) 53 | for row in reader: 54 | version1 = build_semver_or_none(row['version1']) 55 | version2 = build_semver_or_none(row['version2']) 56 | version3 = build_semver_or_none(row['version3']) 57 | assert version1 is not None 58 | assert version2 is not None 59 | assert version3 is not None 60 | if row['expected'] == "true": 61 | assert version2.compare(version1) >= 0 and version3.compare(version2) >= 0 62 | else: 63 | assert version2.compare(version1) < 0 or version3.compare(version2) < 0 64 | 65 | def test_leading_zeros(self): 66 | semver = build_semver_or_none('1.01.2') 67 | assert semver is not None 68 | assert semver.version == '1.1.2' 69 | semver2 = build_semver_or_none('1.01.2-rc.01') 70 | assert semver2 is not None 71 | assert semver2.version == '1.1.2-rc.1' 72 | -------------------------------------------------------------------------------- /tests/models/test_fallback.py: -------------------------------------------------------------------------------- 1 | from splitio.models.fallback_treatment import FallbackTreatment 2 | from splitio.models.fallback_config import FallbackTreatmentsConfiguration, FallbackTreatmentCalculator 3 | 4 | class FallbackTreatmentModelTests(object): 5 | """Fallback treatment model tests.""" 6 | 7 | def test_working(self): 8 | fallback_treatment = FallbackTreatment("on", '{"prop": "val"}') 9 | assert fallback_treatment.config == '{"prop": "val"}' 10 | assert fallback_treatment.treatment == 'on' 11 | 12 | fallback_treatment = FallbackTreatment("off") 13 | assert fallback_treatment.config == None 14 | assert fallback_treatment.treatment == 'off' 15 | 16 | class FallbackTreatmentsConfigModelTests(object): 17 | """Fallback treatment configuration model tests.""" 18 | 19 | def test_working(self): 20 | global_fb = FallbackTreatment("on") 21 | flag_fb = FallbackTreatment("off") 22 | fallback_config = FallbackTreatmentsConfiguration(global_fb, {"flag1": flag_fb}) 23 | assert fallback_config.global_fallback_treatment == global_fb 24 | assert fallback_config.by_flag_fallback_treatment == {"flag1": flag_fb} 25 | 26 | fallback_config.global_fallback_treatment = None 27 | assert fallback_config.global_fallback_treatment == None 28 | 29 | fallback_config.by_flag_fallback_treatment["flag2"] = flag_fb 30 | assert fallback_config.by_flag_fallback_treatment == {"flag1": flag_fb, "flag2": flag_fb} 31 | 32 | fallback_config = FallbackTreatmentsConfiguration("on", {"flag1": "off"}) 33 | assert isinstance(fallback_config.global_fallback_treatment, FallbackTreatment) 34 | assert fallback_config.global_fallback_treatment.treatment == "on" 35 | 36 | assert isinstance(fallback_config.by_flag_fallback_treatment["flag1"], FallbackTreatment) 37 | assert fallback_config.by_flag_fallback_treatment["flag1"].treatment == "off" 38 | 39 | 40 | class FallbackTreatmentCalculatorTests(object): 41 | """Fallback treatment calculator model tests.""" 42 | 43 | def test_working(self): 44 | fallback_config = FallbackTreatmentsConfiguration(FallbackTreatment("on" ,"{}"), None) 45 | fallback_calculator = FallbackTreatmentCalculator(fallback_config) 46 | assert fallback_calculator.fallback_treatments_configuration == fallback_config 47 | assert fallback_calculator._label_prefix == "fallback - " 48 | 49 | fallback_treatment = fallback_calculator.resolve("feature", "not ready") 50 | assert fallback_treatment.treatment == "on" 51 | assert fallback_treatment.label == "fallback - 
not ready" 52 | assert fallback_treatment.config == "{}" 53 | 54 | fallback_calculator._fallback_treatments_configuration = FallbackTreatmentsConfiguration(FallbackTreatment("on" ,"{}"), {'feature': FallbackTreatment("off" , '{"prop": "val"}')}) 55 | fallback_treatment = fallback_calculator.resolve("feature", "not ready") 56 | assert fallback_treatment.treatment == "off" 57 | assert fallback_treatment.label == "fallback - not ready" 58 | assert fallback_treatment.config == '{"prop": "val"}' 59 | 60 | fallback_treatment = fallback_calculator.resolve("feature2", "not ready") 61 | assert fallback_treatment.treatment == "on" 62 | assert fallback_treatment.label == "fallback - not ready" 63 | assert fallback_treatment.config == "{}" 64 | -------------------------------------------------------------------------------- /tests/models/grammar/test_conditions.py: -------------------------------------------------------------------------------- 1 | """Condition model tests module.""" 2 | 3 | from splitio.models.grammar import condition 4 | from splitio.models.grammar import partitions 5 | from splitio.models.grammar import matchers 6 | 7 | class ConditionTests(object): 8 | """Test the condition object model.""" 9 | 10 | raw = { 11 | 'partitions': [ 12 | {'treatment': 'on', 'size': 50}, 13 | {'treatment': 'off', 'size': 50} 14 | ], 15 | 'contitionType': 'WHITELIST', 16 | 'label': 'some_label', 17 | 'matcherGroup': { 18 | 'matchers': [ 19 | { 20 | 'matcherType': 'ALL_KEYS', 21 | 'negate': False, 22 | } 23 | ], 24 | 'combiner': 'AND' 25 | } 26 | } 27 | 28 | def test_parse(self): 29 | """Test parsing from raw dict.""" 30 | parsed = condition.from_raw(self.raw) 31 | assert isinstance(parsed, condition.Condition) 32 | assert parsed.label == 'some_label' 33 | assert parsed.condition_type == condition.ConditionType.WHITELIST 34 | assert isinstance(parsed.matchers[0], matchers.AllKeysMatcher) 35 | assert isinstance(parsed.partitions[0], partitions.Partition) 36 | assert parsed.partitions[0].treatment == 'on' 37 | assert parsed.partitions[0].size == 50 38 | assert parsed.partitions[1].treatment == 'off' 39 | assert parsed.partitions[1].size == 50 40 | assert parsed._combiner == condition._MATCHER_COMBINERS['AND'] 41 | 42 | def test_segment_names(self, mocker): 43 | """Test fetching segment_names.""" 44 | matcher1 = mocker.Mock(spec=matchers.UserDefinedSegmentMatcher) 45 | matcher2 = mocker.Mock(spec=matchers.UserDefinedSegmentMatcher) 46 | matcher1._segment_name = 'segment1' 47 | matcher2._segment_name = 'segment2' 48 | cond = condition.Condition([matcher1, matcher2], condition._MATCHER_COMBINERS['AND'], [], 'some_label') 49 | assert cond.get_segment_names() == ['segment1', 'segment2'] 50 | 51 | def test_to_json(self): 52 | """Test JSON serialization of a condition.""" 53 | as_json = condition.from_raw(self.raw).to_json() 54 | assert as_json['partitions'] == [ 55 | {'treatment': 'on', 'size': 50}, 56 | {'treatment': 'off', 'size': 50} 57 | ] 58 | assert as_json['conditionType'] == 'WHITELIST' 59 | assert as_json['label'] == 'some_label' 60 | assert as_json['matcherGroup']['matchers'][0]['matcherType'] == 'ALL_KEYS' 61 | assert as_json['matcherGroup']['matchers'][0]['negate'] == False 62 | assert as_json['matcherGroup']['combiner'] == 'AND' 63 | 64 | def test_matches(self, mocker): 65 | """Test that matches works properly.""" 66 | matcher1_mock = mocker.Mock(spec=matchers.base.Matcher) 67 | matcher2_mock = mocker.Mock(spec=matchers.base.Matcher) 68 | matcher1_mock.evaluate.return_value = True 69 | 
matcher2_mock.evaluate.return_value = True 70 | cond = condition.Condition( 71 | [matcher1_mock, matcher2_mock], 72 | condition._MATCHER_COMBINERS['AND'], 73 | [partitions.Partition('on', 50), partitions.Partition('off', 50)], 74 | 'some_label' 75 | ) 76 | assert cond.matches('some_key', {'a': 1}, {'some_context_option': 0}) == True 77 | assert matcher1_mock.evaluate.mock_calls == [mocker.call('some_key', {'a': 1}, {'some_context_option': 0})] 78 | assert matcher2_mock.evaluate.mock_calls == [mocker.call('some_key', {'a': 1}, {'some_context_option': 0})] 79 | -------------------------------------------------------------------------------- /splitio/models/grammar/matchers/misc.py: -------------------------------------------------------------------------------- 1 | """Miscellaneous matchers that don't fall into other categories.""" 2 | import json 3 | 4 | from splitio.models.grammar.matchers.base import Matcher 5 | 6 | 7 | class DependencyMatcher(Matcher): 8 | """Matcher that returns true if the result of evaluating another feature flag for the user's key is one of the configured treatments.""" 9 | 10 | def _build(self, raw_matcher): 11 | """ 12 | Build a DependencyMatcher. 13 | 14 | :param raw_matcher: raw matcher as fetched from splitChanges response. 15 | :type raw_matcher: dict 16 | """ 17 | self._split_name = raw_matcher['dependencyMatcherData']['split'] 18 | self._treatments = raw_matcher['dependencyMatcherData']['treatments'] 19 | 20 | def _match(self, key, attributes=None, context=None): 21 | """ 22 | Evaluate user input against a matcher and return whether the match is successful. 23 | 24 | :param key: User key. 25 | :type key: str. 26 | :param attributes: Custom user attributes. 27 | :type attributes: dict. 28 | :param context: Evaluation context 29 | :type context: dict 30 | 31 | :returns: Whether the match is successful. 32 | :rtype: bool 33 | """ 34 | evaluator = context.get('evaluator') 35 | assert evaluator is not None 36 | 37 | bucketing_key = context.get('bucketing_key') 38 | result = evaluator.eval_with_context(key, bucketing_key, self._split_name, attributes, context['ec']) 39 | return result['treatment'] in self._treatments 40 | 41 | def _add_matcher_specific_properties_to_json(self): 42 | """Return Dependency specific properties.""" 43 | return { 44 | 'dependencyMatcherData': { 45 | 'split': self._split_name, 46 | 'treatments': self._treatments 47 | } 48 | } 49 | 50 | 51 | class BooleanMatcher(Matcher): 52 | """Matcher that returns true if the user-submitted value matches the stored boolean.""" 53 | 54 | def _build(self, raw_matcher): 55 | """ 56 | Build a BooleanMatcher. 57 | 58 | :param raw_matcher: raw matcher as fetched from splitChanges response. 59 | :type raw_matcher: dict 60 | """ 61 | self._data = raw_matcher['booleanMatcherData'] 62 | 63 | def _match(self, key, attributes=None, context=None): 64 | """ 65 | Evaluate user input against a matcher and return whether the match is successful. 66 | 67 | :param key: User key. 68 | :type key: str. 69 | :param attributes: Custom user attributes. 70 | :type attributes: dict. 71 | :param context: Evaluation context 72 | :type context: dict 73 | 74 | :returns: Whether the match is successful.
75 | :rtype: bool 76 | """ 77 | matching_data = self._get_matcher_input(key, attributes) 78 | if matching_data is None: 79 | return False 80 | 81 | if isinstance(matching_data, bool): 82 | decoded = matching_data 83 | elif isinstance(matching_data, str): 84 | try: 85 | decoded = json.loads(matching_data.lower()) 86 | if not isinstance(decoded, bool): 87 | return False 88 | 89 | except ValueError: 90 | return False 91 | 92 | else: 93 | return False 94 | 95 | return decoded == self._data 96 | 97 | def _add_matcher_specific_properties_to_json(self): 98 | """Return Boolean specific properties.""" 99 | return {'booleanMatcherData': self._data} 100 | -------------------------------------------------------------------------------- /tests/tasks/test_events_sync.py: -------------------------------------------------------------------------------- 1 | """Impressions synchronization task test module.""" 2 | 3 | import threading 4 | import time 5 | import pytest 6 | 7 | from splitio.api.client import HttpResponse 8 | from splitio.tasks import events_sync 9 | from splitio.storage import EventStorage 10 | from splitio.models.events import Event 11 | from splitio.api.events import EventsAPI 12 | from splitio.sync.event import EventSynchronizer, EventSynchronizerAsync 13 | from splitio.optional.loaders import asyncio 14 | 15 | 16 | class EventsSyncTests(object): 17 | """Impressions Syncrhonization task test cases.""" 18 | 19 | def test_normal_operation(self, mocker): 20 | """Test that the task works properly under normal circumstances.""" 21 | storage = mocker.Mock(spec=EventStorage) 22 | events = [ 23 | Event('key1', 'user', 'purchase', 5.3, 123456, None), 24 | Event('key2', 'user', 'purchase', 5.3, 123456, None), 25 | Event('key3', 'user', 'purchase', 5.3, 123456, None), 26 | Event('key4', 'user', 'purchase', 5.3, 123456, None), 27 | Event('key5', 'user', 'purchase', 5.3, 123456, None), 28 | ] 29 | 30 | storage.pop_many.return_value = events 31 | api = mocker.Mock(spec=EventsAPI) 32 | api.flush_events.return_value = HttpResponse(200, '', {}) 33 | event_synchronizer = EventSynchronizer(api, storage, 5) 34 | task = events_sync.EventsSyncTask(event_synchronizer.synchronize_events, 1) 35 | task.start() 36 | time.sleep(2) 37 | assert task.is_running() 38 | assert storage.pop_many.mock_calls[0] == mocker.call(5) 39 | assert api.flush_events.mock_calls[0] == mocker.call(events) 40 | stop_event = threading.Event() 41 | calls_now = len(api.flush_events.mock_calls) 42 | task.stop(stop_event) 43 | stop_event.wait(5) 44 | assert stop_event.is_set() 45 | assert len(api.flush_events.mock_calls) > calls_now 46 | 47 | 48 | class EventsSyncAsyncTests(object): 49 | """Impressions Syncrhonization task async test cases.""" 50 | 51 | @pytest.mark.asyncio 52 | async def test_normal_operation(self, mocker): 53 | """Test that the task works properly under normal circumstances.""" 54 | self.events = [ 55 | Event('key1', 'user', 'purchase', 5.3, 123456, None), 56 | Event('key2', 'user', 'purchase', 5.3, 123456, None), 57 | Event('key3', 'user', 'purchase', 5.3, 123456, None), 58 | Event('key4', 'user', 'purchase', 5.3, 123456, None), 59 | Event('key5', 'user', 'purchase', 5.3, 123456, None), 60 | ] 61 | storage = mocker.Mock(spec=EventStorage) 62 | self.called = False 63 | async def pop_many(*args): 64 | self.called = True 65 | return self.events 66 | storage.pop_many = pop_many 67 | 68 | api = mocker.Mock(spec=EventsAPI) 69 | self.flushed_events = None 70 | self.count = 0 71 | async def flush_events(events): 72 | self.count += 1 
73 | self.flushed_events = events 74 | return HttpResponse(200, '', {}) 75 | api.flush_events = flush_events 76 | 77 | event_synchronizer = EventSynchronizerAsync(api, storage, 5) 78 | task = events_sync.EventsSyncTaskAsync(event_synchronizer.synchronize_events, 1) 79 | task.start() 80 | await asyncio.sleep(2) 81 | 82 | assert task.is_running() 83 | assert self.called 84 | assert self.flushed_events == self.events 85 | 86 | calls_now = self.count 87 | await task.stop() 88 | assert not task.is_running() 89 | assert self.count > calls_now 90 | -------------------------------------------------------------------------------- /splitio/storage/adapters/util.py: -------------------------------------------------------------------------------- 1 | """Custom utilities.""" 2 | 3 | 4 | class DynamicDecorator(object): # pylint: disable=too-few-public-methods 5 | """ 6 | Decorator that will inject a decorator during class construction. 7 | 8 | This decorator will intercept the __init__(self, *args, **kwargs) call, 9 | and decorate specified methods by instantiating the supplied decorators, 10 | with arguments extracted and mapped from the constructor call. 11 | For example: 12 | 13 | def decorator(pos_arg_1, keyword_arg_1=3): 14 | pass 15 | 16 | @DynamicDecorator( 17 | decorator, 18 | ['method1', 'method2'], 19 | lambda *p, **_: p[0], 20 | keyword_arg=lambda *_, **kw: kw.get('arg2') 21 | ) 22 | class SomeClass: 23 | def __init__(self, arg1, arg2=3): 24 | pass 25 | 26 | def method1(self, x): 27 | pass 28 | 29 | def method2(self, x): 30 | pass 31 | """ 32 | 33 | def __init__(self, decorator, methods_to_decorate, *pos_arg_map, **kw_arg_map): 34 | """ 35 | Construct a decorator with its mappings. 36 | 37 | :param decorator: Original decorator to apply to specified methods. 38 | :type decorator: callable 39 | :param methods_to_decorate: List of methods (strings) where the decorator should be applied 40 | :type methods_to_decorate: list(string) 41 | :param pos_arg_map: lambdas to be called with __init__ arguments for the decorator's 42 | positional arguments. 43 | :type pos_arg_map: expanded list 44 | :param kw_arg_map: lambdas to be called with __init__ arguments for the decorator's keyword 45 | arguments. 46 | :type kw_arg_map: expanded dict 47 | """ 48 | self._decorator = decorator 49 | self._methods = methods_to_decorate 50 | self._positional_args_lambdas = pos_arg_map 51 | self._keyword_args_lambdas = kw_arg_map 52 | 53 | def __call__(self, to_decorate): 54 | """ 55 | Apply the decorator to the specified class. 56 | 57 | :param to_decorate: Class to which the decorator will be applied. 58 | :type to_decorate: class 59 | 60 | :return: a decorated class, which inherits from `to_decorate` 61 | :rtype: to_decorate 62 | """ 63 | decorator = self._decorator 64 | methods = self._methods 65 | positional_args_lambdas = self._positional_args_lambdas 66 | keyword_args_lambdas = self._keyword_args_lambdas 67 | 68 | class _decorated(to_decorate): # pylint: disable=too-few-public-methods 69 | """ 70 | Decorated class wrapper. 71 | 72 | This wrapper uses the __init__ to catch required arguments, 73 | instantiate the decorator with the appropriate parameters and then create a child 74 | class with decorated behaviour. 75 | """ 76 | 77 | def __init__(self, *args, **kwargs): 78 | """Decorate class constructor.""" 79 | # calculate positional and keyword arguments needed to build the decorator.
80 | positional = [pos_func(*args, **kwargs) for pos_func in positional_args_lambdas] 81 | keyword = { 82 | key: func(*args, **kwargs) 83 | for (key, func) in keyword_args_lambdas.items() 84 | } 85 | 86 | # call original class constructor 87 | to_decorate.__init__(self, *args, **kwargs) 88 | 89 | # decorate specified methods 90 | for method in methods: 91 | decorated_method = decorator(*positional, **keyword)(getattr(self, method)) 92 | setattr(to_decorate, method, decorated_method) 93 | 94 | return _decorated 95 | -------------------------------------------------------------------------------- /tests/push/test_segment_worker.py: -------------------------------------------------------------------------------- 1 | """Split Worker tests.""" 2 | import time 3 | import queue 4 | import pytest 5 | 6 | from splitio.api import APIException 7 | from splitio.push.workers import SegmentWorker, SegmentWorkerAsync 8 | from splitio.models.notification import SegmentChangeNotification 9 | from splitio.optional.loaders import asyncio 10 | 11 | change_number_received = None 12 | segment_name_received = None 13 | 14 | 15 | def handler_sync(segment_name, change_number): 16 | global change_number_received 17 | global segment_name_received 18 | change_number_received = change_number 19 | segment_name_received = segment_name 20 | return 21 | 22 | 23 | class SegmentWorkerTests(object): 24 | def test_on_error(self): 25 | q = queue.Queue() 26 | 27 | def handler_sync(change_number): 28 | raise APIException('some') 29 | 30 | segment_worker = SegmentWorker(handler_sync, q) 31 | segment_worker.start() 32 | assert segment_worker.is_running() 33 | 34 | q.put(SegmentChangeNotification('some', 'SEGMENT_UPDATE', 123456789, 'some')) 35 | 36 | with pytest.raises(Exception): 37 | segment_worker._handler() 38 | 39 | assert segment_worker.is_running() 40 | assert segment_worker._worker.is_alive() 41 | segment_worker.stop() 42 | time.sleep(1) 43 | assert not segment_worker.is_running() 44 | assert not segment_worker._worker.is_alive() 45 | 46 | def test_handler(self): 47 | q = queue.Queue() 48 | segment_worker = SegmentWorker(handler_sync, q) 49 | global change_number_received 50 | assert not segment_worker.is_running() 51 | segment_worker.start() 52 | assert segment_worker.is_running() 53 | 54 | q.put(SegmentChangeNotification('some', 'SEGMENT_UPDATE', 123456789, 'some')) 55 | 56 | time.sleep(0.1) 57 | assert change_number_received == 123456789 58 | assert segment_name_received == 'some' 59 | 60 | segment_worker.stop() 61 | assert not segment_worker.is_running() 62 | 63 | class SegmentWorkerAsyncTests(object): 64 | 65 | @pytest.mark.asyncio 66 | async def test_on_error(self): 67 | q = asyncio.Queue() 68 | 69 | def handler_sync(change_number): 70 | raise APIException('some') 71 | 72 | segment_worker = SegmentWorkerAsync(handler_sync, q) 73 | segment_worker.start() 74 | assert segment_worker.is_running() 75 | 76 | await q.put(SegmentChangeNotification('some', 'SEGMENT_UPDATE', 123456789, 'some')) 77 | 78 | with pytest.raises(Exception): 79 | segment_worker._handler() 80 | 81 | assert segment_worker.is_running() 82 | assert(self._worker_running()) 83 | await segment_worker.stop() 84 | await asyncio.sleep(.1) 85 | assert not segment_worker.is_running() 86 | assert(not self._worker_running()) 87 | 88 | def _worker_running(self): 89 | worker_running = False 90 | for task in asyncio.all_tasks(): 91 | if task._coro.cr_code.co_name == '_run' and not task.done(): 92 | worker_running = True 93 | break 94 | return worker_running 95 | 96 | 
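    # Note: the _worker_running helper above determines whether the async worker is
    # still alive by scanning asyncio.all_tasks() for a pending task whose coroutine
    # is named '_run'; the async tests below rely on it instead of the
    # _worker.is_alive() thread check used by the synchronous SegmentWorker tests.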
@pytest.mark.asyncio 97 | async def test_handler(self): 98 | q = asyncio.Queue() 99 | segment_worker = SegmentWorkerAsync(handler_sync, q) 100 | global change_number_received 101 | assert not segment_worker.is_running() 102 | segment_worker.start() 103 | assert segment_worker.is_running() 104 | 105 | await q.put(SegmentChangeNotification('some', 'SEGMENT_UPDATE', 123456789, 'some')) 106 | 107 | await asyncio.sleep(.1) 108 | assert change_number_received == 123456789 109 | assert segment_name_received == 'some' 110 | 111 | await segment_worker.stop() 112 | await asyncio.sleep(.1) 113 | assert(not self._worker_running()) 114 | -------------------------------------------------------------------------------- /tests/models/grammar/files/regex.txt: -------------------------------------------------------------------------------- 1 | abc#abc#true 2 | abc#zabcd#true 3 | abc#bc#false 4 | abc#ab#false 5 | ^abc#abc#true 6 | ^abc#abcbdc#true 7 | ^abc#abcabc#true 8 | ^abc#zabcabc#false 9 | abc$#abcabc#true 10 | abc$#zabcabc#true 11 | abc$#abcabcz#false 12 | a|b#abcabcz#true 13 | a|b#zczcz#false 14 | ^abc|abc$#abcabc#true 15 | ^abc|abc$#zabcab#false 16 | ab{2,4}c#abbc#true 17 | ab{2,4}c#abbbc#true 18 | ab{2,4}c#abbbbc#true 19 | ab{2,4}c#abc#false 20 | ab{2,4}c#abzbbc#false 21 | ab{2,4}c#abbbbbbbbbbc#false 22 | ab{2,}c#abbc#true 23 | ab{2,}c#abbbc#true 24 | ab{2,}c#abbbbc#true 25 | ab{2,}c#abc#false 26 | ab{2,}c#abzbbc#false 27 | ab{2,}c#abbbbbbbbbbc#true 28 | ab*c#ac#true 29 | ab*c#abc#true 30 | ab*c#abbc#true 31 | ab*c#abbbc#true 32 | ab*c#ab#false 33 | ab*c#bc#false 34 | ab+c#ac#false 35 | ab+c#abc#true 36 | ab+c#abbc#true 37 | ab+c#abbbc#true 38 | ab+c#ab#false 39 | ab+c#bc#false 40 | ab?c#ac#true 41 | ab?c#abc#true 42 | ab?c#abbc#false 43 | ab?c#abbbc#false 44 | ab?c#ab#false 45 | ab?c#bc#false 46 | a.c#abc#true 47 | a.c#adc#true 48 | a.c#azc#true 49 | a.c#xdc#false 50 | a.c#ac#false 51 | a\.c#abc#false 52 | a\.c#adc#false 53 | a\.c#azc#false 54 | a\.c#xdc#false 55 | a\.c#ac#false 56 | a\.c#a.c#true 57 | [abc]#a#true 58 | [abc]#b#true 59 | [abc]#c#true 60 | [abc]#z#false 61 | [abc]#ab#true 62 | [abc]#ac#true 63 | [Aa]bc#a#false 64 | [Aa]bc#b#false 65 | [Aa]bc#c#false 66 | [Aa]bc#z#false 67 | [Aa]bc#ab#false 68 | [Aa]bc#ac#false 69 | [Aa]bc#abc#true 70 | [Aa]bc#Abc#true 71 | [abc]+#a#true 72 | [abc]+#aba#true 73 | [abc]+#abba#true 74 | [abc]+#acbabcacaa#true 75 | [abc]+#axbaxcaxax#true 76 | [abc]+#xxzyxzyxyx#false 77 | [^abc]+#acbaccacaa#false 78 | [^abc]+#acbacaaa#false 79 | [^abc]+#aa#false 80 | [^abc]+#xzy#true 81 | \d\d#11#true 82 | \d\d#a1#false 83 | \d\d#1b1a1#false 84 | \d\d#1a1#false 85 | \w+#foo#true 86 | \w+#12bar8#true 87 | \w+#foo_1#true 88 | \w+#foo-1#true 89 | \w+#foo- 1#true 90 | \w+#foo- %$1#true 91 | \w+#%$#false 92 | \W+#foo#false 93 | \W+#12bar8#false 94 | \W+#foo_1#false 95 | \W+#foo-1#true 96 | \W+#foo_ 1#true 97 | \W+#foo1#false 98 | \W+#%$#true 99 | 100\s*mk#100mk#true 100 | 100\s*mk#100 mk#true 101 | 100\s*mk#100 X mk#false 102 | abc\b#abc!#true 103 | abc\b#abcd#false 104 | perl\B#perlert#true 105 | perl\B#perl stuff#false 106 | (abc){3}#abcabcabc#true 107 | (abc){3}#abcacabc#false 108 | (abc){3}#abc#false 109 | ^[a-z0-9_-]{3,16}$#my-us3r_n4m3#true 110 | ^[a-z0-9_-]{3,16}$#commonusername#true 111 | ^[a-z0-9_-]{3,16}$#n0#false 112 | ^[a-z0-9_-]{3,16}$#th1s1s-wayt00_l0ngt0beausername#false 113 | ^[a-z0-9-]+$#my-title-here#true 114 | ^[a-z0-9-]+$#my_title_here#false 115 | ^([a-z0-9_\.-]+)@([\da-z\.-]+)\.([a-z\.]{2,6})$#john@doe.com#true 116 | 
^([a-z0-9_\.-]+)@([\da-z\.-]+)\.([a-z\.]{2,6})$#john@doe.something#false 117 | ^([a-z0-9_\.-]+)@([\da-z\.-]+)\.([a-z\.]{2,6})$#johndoe.sg#false 118 | ^(https?:\/\/)?([\da-z\.-]+)\.([a-z\.]{2,6})([\/\w \.-]*)*\/?$#http://split.io/about#true 119 | ^(https?:\/\/)?([\da-z\.-]+)\.([a-z\.]{2,6})([\/\w \.-]*)*\/?$#http://google.com/some/file!.html#false 120 | ^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$#73.60.124.136#true 121 | ^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$#256.60.124.136#false 122 | ^\d+$#123#true 123 | ^\d+$#4323#true 124 | ^\d+$#4566663#true 125 | ^\d+$#-10#false 126 | ^\d+$#456.666.3#false 127 | ^-\d+$#4566663#false 128 | ^-\d+$#-10#true 129 | ^-\d+$#456.666.3#false 130 | ^-?\d+$#3534#true 131 | ^-?\d+$#-3534#true 132 | ^-?\d+$#35.34#false 133 | ^-?\d+$#-35.34#false 134 | ^\d*\.?\d+$#12.3#true 135 | ^\d*\.?\d+$#-12.3#false 136 | ^-\d*\.?\d+$#12.3#false 137 | ^-\d*\.?\d+$#-12.3#true 138 | ^-?\d*\.?\d+$#12.3#true 139 | ^-?\d*\.?\d+$#-12.3#true 140 | ^-?\d*\.?\d+$#-1a2.a3#false 141 | ^(19|20)\d{2}$#1900#true 142 | ^(19|20)\d{2}$#2005#true 143 | ^(19|20)\d{2}$#1810#false 144 | ^([1-9]|0[1-9]|[12][0-9]|3[01])\D([1-9]|0[1-9]|1[012])\D(19[0-9][0-9]|20[0-9][0-9])$#11/11/2011#true 145 | ^([1-9]|0[1-9]|[12][0-9]|3[01])\D([1-9]|0[1-9]|1[012])\D(19[0-9][0-9]|20[0-9][0-9])$#13/13/2011#false 146 | -------------------------------------------------------------------------------- /splitio/models/grammar/matchers/__init__.py: -------------------------------------------------------------------------------- 1 | """Matchers entrypoint module.""" 2 | from splitio.models import MatcherNotFoundException 3 | from splitio.models.grammar.matchers.keys import AllKeysMatcher, UserDefinedSegmentMatcher 4 | from splitio.models.grammar.matchers.numeric import BetweenMatcher, EqualToMatcher, \ 5 | GreaterThanOrEqualMatcher, LessThanOrEqualMatcher 6 | from splitio.models.grammar.matchers.sets import ContainsAllOfSetMatcher, \ 7 | ContainsAnyOfSetMatcher, EqualToSetMatcher, PartOfSetMatcher 8 | from splitio.models.grammar.matchers.string import ContainsStringMatcher, \ 9 | EndsWithMatcher, RegexMatcher, StartsWithMatcher, WhitelistMatcher 10 | from splitio.models.grammar.matchers.misc import BooleanMatcher, DependencyMatcher 11 | from splitio.models.grammar.matchers.semver import EqualToSemverMatcher, GreaterThanOrEqualToSemverMatcher, LessThanOrEqualToSemverMatcher, \ 12 | BetweenSemverMatcher, InListSemverMatcher 13 | from splitio.models.grammar.matchers.rule_based_segment import RuleBasedSegmentMatcher 14 | 15 | 16 | MATCHER_TYPE_ALL_KEYS = 'ALL_KEYS' 17 | MATCHER_TYPE_IN_SEGMENT = 'IN_SEGMENT' 18 | MATCHER_TYPE_WHITELIST = 'WHITELIST' 19 | MATCHER_TYPE_EQUAL_TO = 'EQUAL_TO' 20 | MATCHER_TYPE_GREATER_THAN_OR_EQUAL_TO = 'GREATER_THAN_OR_EQUAL_TO' 21 | MATCHER_TYPE_LESS_THAN_OR_EQUAL_TO = 'LESS_THAN_OR_EQUAL_TO' 22 | MATCHER_TYPE_BETWEEN = 'BETWEEN' 23 | MATCHER_TYPE_EQUAL_TO_SET = 'EQUAL_TO_SET' 24 | MATCHER_TYPE_PART_OF_SET = 'PART_OF_SET' 25 | MATCHER_TYPE_CONTAINS_ALL_OF_SET = 'CONTAINS_ALL_OF_SET' 26 | MATCHER_TYPE_CONTAINS_ANY_OF_SET = 'CONTAINS_ANY_OF_SET' 27 | MATCHER_TYPE_STARTS_WITH = 'STARTS_WITH' 28 | MATCHER_TYPE_ENDS_WITH = 'ENDS_WITH' 29 | MATCHER_TYPE_CONTAINS_STRING = 'CONTAINS_STRING' 30 | MATCHER_TYPE_IN_SPLIT_TREATMENT = 'IN_SPLIT_TREATMENT' 31 | MATCHER_TYPE_EQUAL_TO_BOOLEAN = 'EQUAL_TO_BOOLEAN' 32 | MATCHER_TYPE_MATCHES_STRING = 'MATCHES_STRING' 33 | MATCHER_TYPE_EQUAL_TO_SEMVER = 
'EQUAL_TO_SEMVER' 34 | MATCHER_GREATER_THAN_OR_EQUAL_TO_SEMVER = 'GREATER_THAN_OR_EQUAL_TO_SEMVER' 35 | MATCHER_LESS_THAN_OR_EQUAL_TO_SEMVER = 'LESS_THAN_OR_EQUAL_TO_SEMVER' 36 | MATCHER_BETWEEN_SEMVER = 'BETWEEN_SEMVER' 37 | MATCHER_INLIST_SEMVER = 'IN_LIST_SEMVER' 38 | MATCHER_IN_RULE_BASED_SEGMENT = 'IN_RULE_BASED_SEGMENT' 39 | 40 | 41 | _MATCHER_BUILDERS = { 42 | MATCHER_TYPE_ALL_KEYS: AllKeysMatcher, 43 | MATCHER_TYPE_IN_SEGMENT: UserDefinedSegmentMatcher, 44 | MATCHER_TYPE_WHITELIST: WhitelistMatcher, 45 | MATCHER_TYPE_EQUAL_TO: EqualToMatcher, 46 | MATCHER_TYPE_GREATER_THAN_OR_EQUAL_TO: GreaterThanOrEqualMatcher, 47 | MATCHER_TYPE_LESS_THAN_OR_EQUAL_TO: LessThanOrEqualMatcher, 48 | MATCHER_TYPE_BETWEEN: BetweenMatcher, 49 | MATCHER_TYPE_EQUAL_TO_SET: EqualToSetMatcher, 50 | MATCHER_TYPE_PART_OF_SET: PartOfSetMatcher, 51 | MATCHER_TYPE_CONTAINS_ALL_OF_SET: ContainsAllOfSetMatcher, 52 | MATCHER_TYPE_CONTAINS_ANY_OF_SET: ContainsAnyOfSetMatcher, 53 | MATCHER_TYPE_STARTS_WITH: StartsWithMatcher, 54 | MATCHER_TYPE_ENDS_WITH: EndsWithMatcher, 55 | MATCHER_TYPE_CONTAINS_STRING: ContainsStringMatcher, 56 | MATCHER_TYPE_IN_SPLIT_TREATMENT: DependencyMatcher, 57 | MATCHER_TYPE_EQUAL_TO_BOOLEAN: BooleanMatcher, 58 | MATCHER_TYPE_MATCHES_STRING: RegexMatcher, 59 | MATCHER_TYPE_EQUAL_TO_SEMVER: EqualToSemverMatcher, 60 | MATCHER_GREATER_THAN_OR_EQUAL_TO_SEMVER: GreaterThanOrEqualToSemverMatcher, 61 | MATCHER_LESS_THAN_OR_EQUAL_TO_SEMVER: LessThanOrEqualToSemverMatcher, 62 | MATCHER_BETWEEN_SEMVER: BetweenSemverMatcher, 63 | MATCHER_INLIST_SEMVER: InListSemverMatcher, 64 | MATCHER_IN_RULE_BASED_SEGMENT: RuleBasedSegmentMatcher 65 | } 66 | 67 | def from_raw(raw_matcher): 68 | """ 69 | Parse a condition from a JSON portion of splitChanges. 70 | 71 | :param raw_matcher: JSON object extracted from a condition's matcher array. 72 | :type raw_matcher: dict 73 | 74 | :return: A concrete Matcher object. 75 | :rtype: Matcher 76 | """ 77 | matcher_type = raw_matcher['matcherType'] 78 | try: 79 | builder = _MATCHER_BUILDERS[matcher_type] 80 | except KeyError: 81 | raise MatcherNotFoundException('Invalid matcher type %s' % matcher_type) 82 | return builder(raw_matcher) 83 | -------------------------------------------------------------------------------- /splitio/engine/impressions/strategies.py: -------------------------------------------------------------------------------- 1 | import abc 2 | 3 | from splitio.engine.impressions.manager import Observer, truncate_time 4 | from splitio.util.time import utctime_ms 5 | 6 | _IMPRESSION_OBSERVER_CACHE_SIZE = 500000 7 | 8 | class BaseStrategy(object, metaclass=abc.ABCMeta): 9 | """Strategy interface.""" 10 | 11 | @abc.abstractmethod 12 | def process_impressions(self): 13 | """ 14 | Return a list(impressions) object 15 | 16 | """ 17 | pass 18 | 19 | class StrategyDebugMode(BaseStrategy): 20 | """Debug mode strategy.""" 21 | 22 | def __init__(self): 23 | """ 24 | Construct a strategy instance for debug mode. 25 | 26 | """ 27 | self._observer = Observer(_IMPRESSION_OBSERVER_CACHE_SIZE) 28 | 29 | def process_impressions(self, impressions): 30 | """ 31 | Process impressions. 32 | 33 | Impressions are analyzed to see if they've been seen before. 
34 | 35 | :param impressions: List of impression objects with attributes 36 | :type impressions: list[tuple[splitio.models.impression.Impression, dict]] 37 | 38 | :returns: Tuple of to be stored, observed and counted impressions, and unique keys tuple 39 | :rtype: list[tuple[splitio.models.impression.Impression, dict]], list[], list[], list[] 40 | """ 41 | imps = [] 42 | for imp, attrs in impressions: 43 | if imp.properties is not None: 44 | imps.append((imp, attrs)) 45 | continue 46 | 47 | imps.append((self._observer.test_and_set(imp), attrs)) 48 | 49 | return [i for i, _ in imps], imps, [], [] 50 | 51 | class StrategyNoneMode(BaseStrategy): 52 | """Debug mode strategy.""" 53 | 54 | def process_impressions(self, impressions): 55 | """ 56 | Process impressions. 57 | 58 | Impressions are analyzed to see if they've been seen before and counted. 59 | Unique keys tracking are updated. 60 | 61 | :param impressions: List of impression objects with attributes 62 | :type impressions: list[tuple[splitio.models.impression.Impression, dict]] 63 | 64 | :returns: Tuple of to be stored, observed and counted impressions, and unique keys tuple 65 | :rtype: list[[], dict]], list[splitio.models.impression.Impression], list[splitio.models.impression.Impression], list[(str, str)] 66 | """ 67 | counter_imps = [imp for imp, _ in impressions] 68 | unique_keys_tracker = [] 69 | for i, _ in impressions: 70 | unique_keys_tracker.append((i.matching_key, i.feature_name)) 71 | return [], impressions, counter_imps, unique_keys_tracker 72 | 73 | class StrategyOptimizedMode(BaseStrategy): 74 | """Optimized mode strategy.""" 75 | 76 | def __init__(self): 77 | """ 78 | Construct a strategy instance for optimized mode. 79 | 80 | """ 81 | self._observer = Observer(_IMPRESSION_OBSERVER_CACHE_SIZE) 82 | 83 | def process_impressions(self, impressions): 84 | """ 85 | Process impressions. 86 | 87 | Impressions are analyzed to see if they've been seen before and counted. 
88 | 89 | :param impressions: List of impression objects with attributes 90 | :type impressions: list[tuple[splitio.models.impression.Impression, dict]] 91 | 92 | :returns: Tuple of to be stored, observed and counted impressions, and unique keys tuple 93 | :rtype: list[tuple[splitio.models.impression.Impression, dict]], list[splitio.models.impression.Impression], list[splitio.models.impression.Impression], list[] 94 | """ 95 | imps = [] 96 | for imp, attrs in impressions: 97 | if imp.properties is not None: 98 | imps.append((imp, attrs)) 99 | continue 100 | 101 | imps.append((self._observer.test_and_set(imp), attrs)) 102 | 103 | counter_imps = [imp for imp, _ in imps if imp.previous_time != None] 104 | this_hour = truncate_time(utctime_ms()) 105 | return [i for i, _ in imps if i.previous_time is None or i.previous_time < this_hour], imps, counter_imps, [] 106 | -------------------------------------------------------------------------------- /tests/models/test_rule_based_segments.py: -------------------------------------------------------------------------------- 1 | """Split model tests module.""" 2 | import copy 3 | from splitio.models import rule_based_segments 4 | from splitio.models import splits 5 | from splitio.models.grammar.condition import Condition 6 | from splitio.models.grammar.matchers.rule_based_segment import RuleBasedSegmentMatcher 7 | 8 | class RuleBasedSegmentModelTests(object): 9 | """Rule based segment model tests.""" 10 | 11 | raw = { 12 | "changeNumber": 123, 13 | "name": "sample_rule_based_segment", 14 | "status": "ACTIVE", 15 | "trafficTypeName": "user", 16 | "excluded":{ 17 | "keys":["mauro@split.io","gaston@split.io"], 18 | "segments":[] 19 | }, 20 | "conditions": [ 21 | { 22 | "matcherGroup": { 23 | "combiner": "AND", 24 | "matchers": [ 25 | { 26 | "keySelector": { 27 | "trafficType": "user", 28 | "attribute": "email" 29 | }, 30 | "matcherType": "ENDS_WITH", 31 | "negate": False, 32 | "whitelistMatcherData": { 33 | "whitelist": [ 34 | "@split.io" 35 | ] 36 | } 37 | } 38 | ] 39 | } 40 | } 41 | ] 42 | } 43 | 44 | def test_from_raw(self): 45 | """Test split model parsing.""" 46 | parsed = rule_based_segments.from_raw(self.raw) 47 | assert isinstance(parsed, rule_based_segments.RuleBasedSegment) 48 | assert parsed.change_number == 123 49 | assert parsed.name == 'sample_rule_based_segment' 50 | assert parsed.status == splits.Status.ACTIVE 51 | assert len(parsed.conditions) == 1 52 | assert parsed.excluded.get_excluded_keys() == ["mauro@split.io","gaston@split.io"] 53 | assert parsed.excluded.get_excluded_segments() == [] 54 | conditions = parsed.conditions[0].to_json() 55 | assert conditions['matcherGroup']['matchers'][0] == { 56 | 'betweenMatcherData': None, 'booleanMatcherData': None, 'dependencyMatcherData': None, 57 | 'stringMatcherData': None, 'unaryNumericMatcherData': None, 'userDefinedSegmentMatcherData': None, 58 | "keySelector": { 59 | "attribute": "email" 60 | }, 61 | "matcherType": "ENDS_WITH", 62 | "negate": False, 63 | "whitelistMatcherData": { 64 | "whitelist": [ 65 | "@split.io" 66 | ] 67 | } 68 | } 69 | 70 | def test_incorrect_matcher(self): 71 | """Test incorrect matcher in split model parsing.""" 72 | rbs = copy.deepcopy(self.raw) 73 | rbs['conditions'][0]['matcherGroup']['matchers'][0]['matcherType'] = 'INVALID_MATCHER' 74 | rbs = rule_based_segments.from_raw(rbs) 75 | assert rbs.conditions[0].to_json() == splits._DEFAULT_CONDITIONS_TEMPLATE 76 | 77 | # using multiple conditions 78 | rbs = copy.deepcopy(self.raw) 79 | 
rbs['conditions'].append(rbs['conditions'][0]) 80 | rbs['conditions'][0]['matcherGroup']['matchers'][0]['matcherType'] = 'INVALID_MATCHER' 81 | parsed = rule_based_segments.from_raw(rbs) 82 | assert parsed.conditions[0].to_json() == splits._DEFAULT_CONDITIONS_TEMPLATE 83 | 84 | def test_get_condition_segment_names(self): 85 | rbs = copy.deepcopy(self.raw) 86 | rbs['conditions'].append( 87 | {"matcherGroup": { 88 | "combiner": "AND", 89 | "matchers": [ 90 | { 91 | "matcherType": "IN_SEGMENT", 92 | "negate": False, 93 | "userDefinedSegmentMatcherData": { 94 | "segmentName": "employees" 95 | }, 96 | "whitelistMatcherData": None 97 | } 98 | ] 99 | }, 100 | }) 101 | rbs = rule_based_segments.from_raw(rbs) 102 | 103 | assert rbs.get_condition_segment_names() == {"employees"} -------------------------------------------------------------------------------- /splitio/models/grammar/matchers/base.py: -------------------------------------------------------------------------------- 1 | """Abstract matcher module.""" 2 | import abc 3 | 4 | from splitio.client.key import Key 5 | 6 | 7 | class Matcher(object, metaclass=abc.ABCMeta): 8 | """Matcher abstract class.""" 9 | 10 | def __init__(self, raw_matcher): 11 | """ 12 | Initialize generic data and call matcher-specific parser. 13 | 14 | :param raw_matcher: raw matcher as read from splitChanges response. 15 | :type raw_matcher: dict 16 | 17 | :returns: A concrete matcher object. 18 | :rtype: Matcher 19 | """ 20 | self._negate = raw_matcher['negate'] 21 | self._matcher_type = raw_matcher['matcherType'] 22 | key_selector = raw_matcher.get('keySelector') 23 | if key_selector is not None and 'attribute' in key_selector: 24 | self._attribute_name = raw_matcher['keySelector']['attribute'] 25 | else: 26 | self._attribute_name = None 27 | self._build(raw_matcher) 28 | 29 | def _get_matcher_input(self, key, attributes=None): 30 | """ 31 | Examine split, attributes & key, and return the appropriate matching input. 32 | 33 | :param key: User-submitted key 34 | :type key: str | Key 35 | :param attributes: User-submitted attributes 36 | :type attributes: dict 37 | 38 | :returns: data to use when matching 39 | :rtype: str | set | int | bool 40 | """ 41 | if self._attribute_name is not None: 42 | if attributes is not None and attributes.get(self._attribute_name) is not None: 43 | return attributes[self._attribute_name] 44 | 45 | return None 46 | 47 | if isinstance(key, Key): 48 | return key.matching_key 49 | 50 | return key 51 | 52 | @abc.abstractmethod 53 | def _build(self, raw_matcher): 54 | """ 55 | Build the final matcher according to matcher specific data. 56 | 57 | :param raw_matcher: raw matcher as read from splitChanges response. 58 | :type raw_matcher: dict 59 | """ 60 | pass 61 | 62 | @abc.abstractmethod 63 | def _match(self, key, attributes=None, context=None): 64 | """ 65 | Evaluate user input against matcher and return whether the match is successful. 66 | 67 | :param key: User key. 68 | :type key: str. 69 | :param attributes: Custom user attributes. 70 | :type attributes: dict. 71 | :param context: Evaluation context 72 | :type context: dict 73 | 74 | :returns: Wheter the match is successful. 75 | :rtype: bool 76 | """ 77 | pass 78 | 79 | def evaluate(self, key, attributes=None, context=None): 80 | """ 81 | Perform the actual evaluation taking into account possible matcher negation. 82 | 83 | :param key: User key. 84 | :type key: str. 85 | :param attributes: Custom user attributes. 86 | :type attributes: dict. 
87 | :param context: Evaluation context 88 | :type context: dict 89 | """ 90 | return self._negate ^ self._match(key, attributes, context) 91 | 92 | @abc.abstractmethod 93 | def _add_matcher_specific_properties_to_json(self): 94 | """ 95 | Add matcher specific properties to base dict before returning it. 96 | 97 | :return: Dictionary with matcher specific prooperties. 98 | :rtype: dict 99 | """ 100 | pass 101 | 102 | def to_json(self): 103 | """ 104 | Reconstruct the original JSON representation of the matcher. 105 | 106 | :return: JSON representation of a matcher. 107 | :rtype: dict 108 | """ 109 | base = { 110 | "keySelector": {'attribute': self._attribute_name} if self._attribute_name else None, 111 | "matcherType": self._matcher_type, 112 | "negate": self._negate, 113 | "userDefinedSegmentMatcherData": None, 114 | "whitelistMatcherData": None, 115 | "unaryNumericMatcherData": None, 116 | "betweenMatcherData": None, 117 | "dependencyMatcherData": None, 118 | "booleanMatcherData": None, 119 | "stringMatcherData": None, 120 | } 121 | base.update(self._add_matcher_specific_properties_to_json()) 122 | return base 123 | -------------------------------------------------------------------------------- /splitio/engine/cache/lru.py: -------------------------------------------------------------------------------- 1 | """Simple test-and-set LRU Cache.""" 2 | import threading 3 | 4 | 5 | DEFAULT_MAX_SIZE = 5000 6 | 7 | 8 | class SimpleLruCache(object): # pylint: disable=too-many-instance-attributes 9 | """ 10 | Key/Value local memory cache. with expiration & LRU eviction. 11 | 12 | LRU double-linked-list format: 13 | 14 | { 15 | 'key1'--------------------------------------------------------------- 16 | 'key2'------------------------------------ | 17 | 'key3'------------ | | 18 | } | | | 19 | V V V 20 | || MRU || -previous-> || X || ... -previous-> || LRU || -previous-> None 21 | None <---next--- || node || <---next--- || node || ... <---next--- || node || 22 | """ 23 | 24 | class _Node(object): # pylint: disable=too-few-public-methods 25 | """Links to previous an next items in the circular list.""" 26 | 27 | def __init__(self, key, value, previous_element, next_element): 28 | """Class constructor.""" 29 | self.key = key # we also keep the key for O(1) access when removing the LRU. 30 | self.value = value 31 | self.previous = previous_element 32 | self.next = next_element 33 | 34 | def __str__(self): 35 | """Return string representation.""" 36 | return '(%s, %s)' % (self.key, self.value) 37 | 38 | def __init__(self, max_size=DEFAULT_MAX_SIZE): 39 | """Class constructor.""" 40 | self._data = {} 41 | self._lock = threading.Lock() 42 | self._max_size = max_size 43 | self._lru = None 44 | self._mru = None 45 | 46 | def test_and_set(self, key, value): 47 | """ 48 | Set an item in the cache and return the previous value. 49 | 50 | :param key: object key 51 | :type args: object 52 | :param value: object value 53 | :type kwargs: object 54 | 55 | :return: previous value if any. 
None otherwise 56 | :rtype: object 57 | """ 58 | with self._lock: 59 | node = self._data.get(key) 60 | to_return = node.value if node else None 61 | if node is None: 62 | node = SimpleLruCache._Node(key, value, None, None) 63 | node = self._bubble_up(node) 64 | self._data[key] = node 65 | self._rollover() 66 | return to_return 67 | 68 | def clear(self): 69 | """Clear the cache.""" 70 | self._data = {} 71 | self._lru = None 72 | self._mru = None 73 | 74 | def _bubble_up(self, node): 75 | """Send node to the top of the list (mark it as the MRU).""" 76 | if node is None: 77 | return None 78 | 79 | # First item, just set lru & mru 80 | if not self._data: 81 | self._lru = node 82 | self._mru = node 83 | return node 84 | 85 | # MRU, just return it 86 | if node is self._mru: 87 | return node 88 | 89 | # LRU, update pointer and end-of-list 90 | if node is self._lru: 91 | self._lru = node.next 92 | self._lru.previous = None 93 | 94 | if node.previous is not None: 95 | node.previous.next = node.next 96 | if node.next is not None: 97 | node.next.previous = node.previous 98 | 99 | node.previous = self._mru 100 | node.previous.next = node 101 | node.next = None 102 | self._mru = node 103 | 104 | return node 105 | 106 | def _rollover(self): 107 | """Check we're within the size limit. Otherwise drop the LRU.""" 108 | if len(self._data) > self._max_size: 109 | next_item = self._lru.next 110 | del self._data[self._lru.key] 111 | self._lru = next_item 112 | self._lru.previous = None 113 | 114 | def __str__(self): 115 | """User friendly representation of cache.""" 116 | nodes = [] 117 | node = self._mru 118 | while node is not None: 119 | nodes.append('\t<%s: %s> -->' % (node.key, node.value)) 120 | node = node.previous 121 | return '\n' + '\n'.join(nodes) + '\n' 122 | -------------------------------------------------------------------------------- /splitio/models/fallback_config.py: -------------------------------------------------------------------------------- 1 | """Segment module.""" 2 | from splitio.models.fallback_treatment import FallbackTreatment 3 | from splitio.client.client import CONTROL 4 | 5 | class FallbackTreatmentsConfiguration(object): 6 | """FallbackTreatmentsConfiguration object class.""" 7 | 8 | def __init__(self, global_fallback_treatment=None, by_flag_fallback_treatment=None): 9 | """ 10 | Class constructor. 11 | 12 | :param global_fallback_treatment: global FallbackTreatment. 
13 | :type global_fallback_treatment: FallbackTreatment 14 | 15 | :param by_flag_fallback_treatment: Dict of flags and their fallback treatment 16 | :type by_flag_fallback_treatment: {str: FallbackTreatment} 17 | """ 18 | self._global_fallback_treatment = self._build_global_fallback(global_fallback_treatment) 19 | self._by_flag_fallback_treatment = self._build_by_flag_fallback(by_flag_fallback_treatment) 20 | 21 | @property 22 | def global_fallback_treatment(self): 23 | """Return global fallback treatment.""" 24 | return self._global_fallback_treatment 25 | 26 | @global_fallback_treatment.setter 27 | def global_fallback_treatment(self, new_value): 28 | """Set global fallback treatment.""" 29 | self._global_fallback_treatment = new_value 30 | 31 | @property 32 | def by_flag_fallback_treatment(self): 33 | """Return by flag fallback treatment.""" 34 | return self._by_flag_fallback_treatment 35 | 36 | @by_flag_fallback_treatment.setter 37 | def by_flag_fallback_treatment(self, new_value): 38 | """Set by-flag fallback treatment.""" 39 | self._by_flag_fallback_treatment = new_value 40 | 41 | def _build_global_fallback(self, global_fallback_treatment): 42 | if isinstance(global_fallback_treatment, str): 43 | return FallbackTreatment(global_fallback_treatment) 44 | 45 | return global_fallback_treatment 46 | 47 | def _build_by_flag_fallback(self, by_flag_fallback_treatment): 48 | if not isinstance(by_flag_fallback_treatment, dict): 49 | return by_flag_fallback_treatment 50 | 51 | parsed_by_flag_fallback = {} 52 | for key, value in by_flag_fallback_treatment.items(): 53 | if isinstance(value, str): 54 | parsed_by_flag_fallback[key] = FallbackTreatment(value) 55 | else: 56 | parsed_by_flag_fallback[key] = value 57 | 58 | return parsed_by_flag_fallback 59 | 60 | class FallbackTreatmentCalculator(object): 61 | """FallbackTreatmentCalculator object class.""" 62 | 63 | def __init__(self, fallback_treatment_configuration): 64 | """ 65 | Class constructor.
66 | 67 | :param fallback_treatment_configuration: fallback treatment configuration 68 | :type fallback_treatment_configuration: FallbackTreatmentsConfiguration 69 | """ 70 | self._label_prefix = "fallback - " 71 | self._fallback_treatments_configuration = fallback_treatment_configuration 72 | 73 | @property 74 | def fallback_treatments_configuration(self): 75 | """Return fallback treatment configuration.""" 76 | return self._fallback_treatments_configuration 77 | 78 | def resolve(self, flag_name, label): 79 | if self._fallback_treatments_configuration != None: 80 | if self._fallback_treatments_configuration.by_flag_fallback_treatment != None \ 81 | and self._fallback_treatments_configuration.by_flag_fallback_treatment.get(flag_name) != None: 82 | return self._copy_with_label(self._fallback_treatments_configuration.by_flag_fallback_treatment.get(flag_name), \ 83 | self._resolve_label(label)) 84 | 85 | if self._fallback_treatments_configuration.global_fallback_treatment != None: 86 | return self._copy_with_label(self._fallback_treatments_configuration.global_fallback_treatment, \ 87 | self._resolve_label(label)) 88 | 89 | return FallbackTreatment(CONTROL, None, label) 90 | 91 | def _resolve_label(self, label): 92 | if label == None: 93 | return None 94 | 95 | return self._label_prefix + label 96 | 97 | def _copy_with_label(self, fallback_treatment, label): 98 | return FallbackTreatment(fallback_treatment.treatment, fallback_treatment.config, label) 99 | 100 | -------------------------------------------------------------------------------- /tests/tasks/test_unique_keys_sync.py: -------------------------------------------------------------------------------- 1 | """Impressions synchronization task test module.""" 2 | import asyncio 3 | import threading 4 | import time 5 | import pytest 6 | 7 | from splitio.api.client import HttpResponse 8 | from splitio.tasks.unique_keys_sync import UniqueKeysSyncTask, ClearFilterSyncTask,\ 9 | ClearFilterSyncTaskAsync, UniqueKeysSyncTaskAsync 10 | from splitio.api.telemetry import TelemetryAPI 11 | from splitio.sync.unique_keys import UniqueKeysSynchronizer, ClearFilterSynchronizer,\ 12 | UniqueKeysSynchronizerAsync, ClearFilterSynchronizerAsync 13 | from splitio.engine.impressions.unique_keys_tracker import UniqueKeysTracker, UniqueKeysTrackerAsync 14 | 15 | 16 | class UniqueKeysSyncTests(object): 17 | """Unique Keys Syncrhonization task test cases.""" 18 | 19 | def test_normal_operation(self, mocker): 20 | """Test that the task works properly under normal circumstances.""" 21 | api = mocker.Mock(spec=TelemetryAPI) 22 | api.record_unique_keys.return_value = HttpResponse(200, '', {}) 23 | 24 | unique_keys_tracker = UniqueKeysTracker() 25 | unique_keys_tracker.track("key1", "split1") 26 | unique_keys_tracker.track("key2", "split1") 27 | 28 | unique_keys_sync = UniqueKeysSynchronizer(mocker.Mock(), unique_keys_tracker) 29 | task = UniqueKeysSyncTask(unique_keys_sync.send_all, 1) 30 | task.start() 31 | time.sleep(2) 32 | assert task.is_running() 33 | assert api.record_unique_keys.mock_calls == mocker.call() 34 | stop_event = threading.Event() 35 | task.stop(stop_event) 36 | stop_event.wait(5) 37 | assert stop_event.is_set() 38 | 39 | class ClearFilterSyncTests(object): 40 | """Clear Filter Syncrhonization task test cases.""" 41 | 42 | def test_normal_operation(self, mocker): 43 | """Test that the task works properly under normal circumstances.""" 44 | 45 | unique_keys_tracker = UniqueKeysTracker() 46 | unique_keys_tracker.track("key1", "split1") 47 | 
unique_keys_tracker.track("key2", "split1") 48 | 49 | clear_filter_sync = ClearFilterSynchronizer(unique_keys_tracker) 50 | task = ClearFilterSyncTask(clear_filter_sync.clear_all, 1) 51 | task.start() 52 | time.sleep(2) 53 | assert task.is_running() 54 | assert not unique_keys_tracker._filter.contains("split1key1") 55 | assert not unique_keys_tracker._filter.contains("split1key2") 56 | stop_event = threading.Event() 57 | task.stop(stop_event) 58 | stop_event.wait(5) 59 | assert stop_event.is_set() 60 | 61 | class UniqueKeysSyncAsyncTests(object): 62 | """Unique Keys Synchronization async task test cases.""" 63 | 64 | @pytest.mark.asyncio 65 | async def test_normal_operation(self, mocker): 66 | """Test that the task works properly under normal circumstances.""" 67 | api = mocker.Mock(spec=TelemetryAPI) 68 | api.record_unique_keys.return_value = HttpResponse(200, '', {}) 69 | 70 | unique_keys_tracker = UniqueKeysTrackerAsync() 71 | await unique_keys_tracker.track("key1", "split1") 72 | await unique_keys_tracker.track("key2", "split1") 73 | 74 | unique_keys_sync = UniqueKeysSynchronizerAsync(mocker.Mock(), unique_keys_tracker) 75 | task = UniqueKeysSyncTaskAsync(unique_keys_sync.send_all, 1) 76 | task.start() 77 | await asyncio.sleep(2) 78 | assert task.is_running() 79 | assert api.record_unique_keys.mock_calls == mocker.call() 80 | await task.stop() 81 | assert not task.is_running() 82 | 83 | class ClearFilterSyncAsyncTests(object): 84 | """Clear Filter Synchronization async task test cases.""" 85 | 86 | @pytest.mark.asyncio 87 | async def test_normal_operation(self, mocker): 88 | """Test that the task works properly under normal circumstances.""" 89 | 90 | unique_keys_tracker = UniqueKeysTrackerAsync() 91 | await unique_keys_tracker.track("key1", "split1") 92 | await unique_keys_tracker.track("key2", "split1") 93 | 94 | clear_filter_sync = ClearFilterSynchronizerAsync(unique_keys_tracker) 95 | task = ClearFilterSyncTaskAsync(clear_filter_sync.clear_all, 1) 96 | task.start() 97 | await asyncio.sleep(2) 98 | assert task.is_running() 99 | assert not unique_keys_tracker._filter.contains("split1key1") 100 | assert not unique_keys_tracker._filter.contains("split1key2") 101 | await task.stop() 102 | assert not task.is_running() 103 | -------------------------------------------------------------------------------- /tests/push/test_parser.py: -------------------------------------------------------------------------------- 1 | """SSE Parser unit tests.""" 2 | import json 3 | import pytest 4 | 5 | from splitio.push.sse import SSEEvent 6 | from splitio.push.parser import parse_incoming_event, BaseUpdate, AblyError, OccupancyMessage, \ 7 | SegmentChangeUpdate, SplitChangeUpdate, SplitKillUpdate, EventParsingException 8 | 9 | 10 | def make_message(channel, data): 11 | return SSEEvent('123', 'message', None, json.dumps({ 12 | 'id':'ZlalwoKlXW:0:0', 13 | 'timestamp':1591996755043, 14 | 'encoding':'json', 15 | 'channel': channel, 16 | 'data': json.dumps(data) 17 | })) 18 | 19 | def make_occupancy(channel, data): 20 | return SSEEvent('123', 'message', None, json.dumps({ 21 | 'id':'ZlalwoKlXW:0:0', 22 | 'timestamp':1591996755043, 23 | 'encoding':'json', 24 | 'channel': channel, 25 | 'name': '[meta]occupancy', 26 | 'data': json.dumps(data) 27 | })) 28 | 29 | 30 | def make_error(payload): 31 | return SSEEvent('123', 'error', None, json.dumps(payload)) 32 | 33 | 34 | class ParserTests(object): 35 | """Parser tests.""" 36 | 37 | def test_exception(self): 38 | """Test exceptions.""" 39 | assert parse_incoming_event(None) is None
40 | 41 | with pytest.raises(EventParsingException): 42 | parse_incoming_event(json.dumps({ 43 | 'data': {'a':1}, 44 | 'event': 'some' 45 | })) 46 | 47 | def test_event_parsing(self): 48 | """Test parse Update event.""" 49 | 50 | e0 = make_message( 51 | 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_splits', 52 | {'type':'SPLIT_KILL','changeNumber':1591996754396,'defaultTreatment':'some','splitName':'test'}, 53 | ) 54 | parsed0 = parse_incoming_event(e0) 55 | assert isinstance(parsed0, SplitKillUpdate) 56 | assert parsed0.default_treatment == 'some' 57 | assert parsed0.change_number == 1591996754396 58 | assert parsed0.feature_flag_name == 'test' 59 | 60 | e1 = make_message( 61 | 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_splits', 62 | {'type':'SPLIT_UPDATE','changeNumber':1591996685190, 'pcn': 12, 'c': 2, 'd': 'eJzEUtFu2kAQ/BU0z4d0hw2Be0MFRVGJIx'}, 63 | ) 64 | parsed1 = parse_incoming_event(e1) 65 | assert isinstance(parsed1, SplitChangeUpdate) 66 | assert parsed1.change_number == 1591996685190 67 | assert parsed1.previous_change_number == 12 68 | assert parsed1.compression == 2 69 | assert parsed1.object_definition == 'eJzEUtFu2kAQ/BU0z4d0hw2Be0MFRVGJIx' 70 | 71 | e1 = make_message( 72 | 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_splits', 73 | {'type':'SPLIT_UPDATE','changeNumber':1591996685190}, 74 | ) 75 | parsed1 = parse_incoming_event(e1) 76 | assert isinstance(parsed1, SplitChangeUpdate) 77 | assert parsed1.change_number == 1591996685190 78 | assert parsed1.previous_change_number == None 79 | assert parsed1.compression == None 80 | assert parsed1.object_definition == None 81 | 82 | e2 = make_message( 83 | 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_segments', 84 | {'type':'SEGMENT_UPDATE','changeNumber':1591988398533,'segmentName':'some'}, 85 | ) 86 | parsed2 = parse_incoming_event(e2) 87 | assert isinstance(parsed2, SegmentChangeUpdate) 88 | assert parsed2.change_number == 1591988398533 89 | assert parsed2.segment_name == 'some' 90 | 91 | def test_error_parsing(self): 92 | """Test parse AblyError event.""" 93 | e0 = make_error({ 94 | 'code': 40142, 95 | 'message': 'Token expired', 96 | 'statusCode': 401, 97 | 'href': 'https://help.io/error/40142', 98 | }) 99 | parsed = parse_incoming_event(e0) 100 | assert isinstance(parsed, AblyError) 101 | assert parsed.code == 40142 102 | assert parsed.status_code == 401 103 | assert parsed.href == 'https://help.io/error/40142' 104 | assert parsed.message == 'Token expired' 105 | assert not parsed.should_be_ignored() 106 | assert parsed.is_retryable() 107 | 108 | def test_occupancy_parsing(self): 109 | """Test parse Occupancy event.""" 110 | e0 = make_occupancy('[?occupancy=metrics.publishers]control_sec', 111 | {'metrics': {'publishers': 1}}) 112 | parsed = parse_incoming_event(e0) 113 | assert isinstance(parsed, OccupancyMessage) 114 | assert parsed.publishers == 1 115 | assert parsed.channel == 'control_sec' 116 | -------------------------------------------------------------------------------- /tests/sync/test_events_synchronizer.py: -------------------------------------------------------------------------------- 1 | """Split Worker tests.""" 2 | 3 | import threading 4 | import time 5 | import pytest 6 | 7 | from splitio.api.client import HttpResponse 8 | from splitio.api import APIException 9 | from splitio.storage import EventStorage 10 | from splitio.models.events import Event 11 | from splitio.sync.event import EventSynchronizer, EventSynchronizerAsync 12 | 13 | 14 | class EventsSynchronizerTests(object): 15 | """Events synchronizer test cases.""" 16 | 17 | def 
test_synchronize_events_error(self, mocker): 18 | storage = mocker.Mock(spec=EventStorage) 19 | storage.pop_many.return_value = [ 20 | Event('key1', 'user', 'purchase', 5.3, 123456, None), 21 | Event('key2', 'user', 'purchase', 5.3, 123456, None), 22 | ] 23 | 24 | api = mocker.Mock() 25 | 26 | def run(x): 27 | raise APIException("something broke") 28 | 29 | api.flush_events.side_effect = run 30 | event_synchronizer = EventSynchronizer(api, storage, 5) 31 | event_synchronizer.synchronize_events() 32 | assert event_synchronizer._failed.qsize() == 2 33 | 34 | def test_synchronize_events_empty(self, mocker): 35 | storage = mocker.Mock(spec=EventStorage) 36 | storage.pop_many.return_value = [] 37 | 38 | api = mocker.Mock() 39 | 40 | def run(x): 41 | run._called += 1 42 | 43 | run._called = 0 44 | api.flush_events.side_effect = run 45 | event_synchronizer = EventSynchronizer(api, storage, 5) 46 | event_synchronizer.synchronize_events() 47 | assert run._called == 0 48 | 49 | def test_synchronize_impressions(self, mocker): 50 | storage = mocker.Mock(spec=EventStorage) 51 | storage.pop_many.return_value = [ 52 | Event('key1', 'user', 'purchase', 5.3, 123456, None), 53 | Event('key2', 'user', 'purchase', 5.3, 123456, None), 54 | ] 55 | 56 | api = mocker.Mock() 57 | 58 | def run(x): 59 | run._called += 1 60 | return HttpResponse(200, '', {}) 61 | 62 | api.flush_events.side_effect = run 63 | run._called = 0 64 | 65 | event_synchronizer = EventSynchronizer(api, storage, 5) 66 | event_synchronizer.synchronize_events() 67 | assert run._called == 1 68 | assert event_synchronizer._failed.qsize() == 0 69 | 70 | 71 | class EventsSynchronizerAsyncTests(object): 72 | """Events synchronizer async test cases.""" 73 | 74 | @pytest.mark.asyncio 75 | async def test_synchronize_events_error(self, mocker): 76 | storage = mocker.Mock(spec=EventStorage) 77 | async def pop_many(*args): 78 | return [ 79 | Event('key1', 'user', 'purchase', 5.3, 123456, None), 80 | Event('key2', 'user', 'purchase', 5.3, 123456, None), 81 | ] 82 | storage.pop_many = pop_many 83 | 84 | api = mocker.Mock() 85 | async def run(x): 86 | raise APIException("something broke") 87 | 88 | api.flush_events = run 89 | event_synchronizer = EventSynchronizerAsync(api, storage, 5) 90 | await event_synchronizer.synchronize_events() 91 | assert event_synchronizer._failed.qsize() == 2 92 | 93 | @pytest.mark.asyncio 94 | async def test_synchronize_events_empty(self, mocker): 95 | storage = mocker.Mock(spec=EventStorage) 96 | async def pop_many(*args): 97 | return [] 98 | storage.pop_many = pop_many 99 | 100 | api = mocker.Mock() 101 | async def run(x): 102 | run._called += 1 103 | 104 | run._called = 0 105 | api.flush_events = run 106 | event_synchronizer = EventSynchronizerAsync(api, storage, 5) 107 | await event_synchronizer.synchronize_events() 108 | assert run._called == 0 109 | 110 | @pytest.mark.asyncio 111 | async def test_synchronize_impressions(self, mocker): 112 | storage = mocker.Mock(spec=EventStorage) 113 | async def pop_many(*args): 114 | return [ 115 | Event('key1', 'user', 'purchase', 5.3, 123456, None), 116 | Event('key2', 'user', 'purchase', 5.3, 123456, None), 117 | ] 118 | storage.pop_many = pop_many 119 | 120 | api = mocker.Mock() 121 | async def run(x): 122 | run._called += 1 123 | return HttpResponse(200, '', {}) 124 | 125 | api.flush_events.side_effect = run 126 | run._called = 0 127 | 128 | event_synchronizer = EventSynchronizerAsync(api, storage, 5) 129 | await event_synchronizer.synchronize_events() 130 | assert run._called == 1 131 
| assert event_synchronizer._failed.qsize() == 0 132 | -------------------------------------------------------------------------------- /splitio/api/auth.py: -------------------------------------------------------------------------------- 1 | """Auth API module.""" 2 | 3 | import logging 4 | import json 5 | 6 | from splitio.api import APIException, headers_from_metadata 7 | from splitio.api.commons import headers_from_metadata, record_telemetry 8 | from splitio.spec import SPEC_VERSION 9 | from splitio.util.time import get_current_epoch_time_ms 10 | from splitio.api.client import HttpClientException 11 | from splitio.models.token import from_raw 12 | from splitio.models.telemetry import HTTPExceptionsAndLatencies 13 | 14 | _LOGGER = logging.getLogger(__name__) 15 | 16 | 17 | class AuthAPI(object):  # pylint: disable=too-few-public-methods 18 | """Class that uses an httpClient to communicate with the SDK Auth Service API.""" 19 | 20 | def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): 21 | """ 22 | Class constructor. 23 | 24 | :param client: HTTP Client responsible for issuing calls to the backend. 25 | :type client: HttpClient 26 | :param sdk_key: User sdk key. 27 | :type sdk_key: string 28 | :param sdk_metadata: SDK version & machine name & IP. 29 | :type sdk_metadata: splitio.client.util.SdkMetadata 30 | """ 31 | self._client = client 32 | self._sdk_key = sdk_key 33 | self._metadata = headers_from_metadata(sdk_metadata) 34 | self._telemetry_runtime_producer = telemetry_runtime_producer 35 | self._client.set_telemetry_data(HTTPExceptionsAndLatencies.TOKEN, self._telemetry_runtime_producer) 36 | 37 | def authenticate(self): 38 | """ 39 | Perform authentication. 40 | 41 | :return: Json representation of an authentication. 42 | :rtype: splitio.models.token.Token 43 | """ 44 | try: 45 | response = self._client.get( 46 | 'auth', 47 | 'v2/auth?s=' + SPEC_VERSION, 48 | self._sdk_key, 49 | extra_headers=self._metadata, 50 | ) 51 | if 200 <= response.status_code < 300: 52 | payload = json.loads(response.body) 53 | return from_raw(payload) 54 | 55 | else: 56 | if (response.status_code >= 400 and response.status_code < 500): 57 | self._telemetry_runtime_producer.record_auth_rejections() 58 | raise APIException(response.body, response.status_code) 59 | except HttpClientException as exc: 60 | _LOGGER.error('Exception raised while authenticating') 61 | _LOGGER.debug('Exception information: ', exc_info=True) 62 | raise APIException('Could not perform authentication.') from exc 63 | 64 | class AuthAPIAsync(object):  # pylint: disable=too-few-public-methods 65 | """Async Class that uses an httpClient to communicate with the SDK Auth Service API.""" 66 | 67 | def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): 68 | """ 69 | Class constructor. 70 | 71 | :param client: HTTP Client responsible for issuing calls to the backend. 72 | :type client: HttpClient 73 | :param sdk_key: User sdk key. 74 | :type sdk_key: string 75 | :param sdk_metadata: SDK version & machine name & IP. 76 | :type sdk_metadata: splitio.client.util.SdkMetadata 77 | """ 78 | self._client = client 79 | self._sdk_key = sdk_key 80 | self._metadata = headers_from_metadata(sdk_metadata) 81 | self._telemetry_runtime_producer = telemetry_runtime_producer 82 | self._client.set_telemetry_data(HTTPExceptionsAndLatencies.TOKEN, self._telemetry_runtime_producer) 83 | 84 | async def authenticate(self): 85 | """ 86 | Perform authentication. 87 | 88 | :return: Json representation of an authentication.
89 | :rtype: splitio.models.token.Token 90 | """ 91 | try: 92 | response = await self._client.get( 93 | 'auth', 94 | 'v2/auth?s=' + SPEC_VERSION, 95 | self._sdk_key, 96 | extra_headers=self._metadata, 97 | ) 98 | if 200 <= response.status_code < 300: 99 | payload = json.loads(response.body) 100 | return from_raw(payload) 101 | 102 | else: 103 | if (response.status_code >= 400 and response.status_code < 500): 104 | await self._telemetry_runtime_producer.record_auth_rejections() 105 | raise APIException(response.body, response.status_code) 106 | except HttpClientException as exc: 107 | _LOGGER.error('Exception raised while authenticating') 108 | _LOGGER.debug('Exception information: ', exc_info=True) 109 | raise APIException('Could not perform authentication.') from exc 110 | -------------------------------------------------------------------------------- /splitio/models/grammar/condition.py: -------------------------------------------------------------------------------- 1 | """Split conditions module.""" 2 | 3 | from enum import Enum 4 | 5 | from splitio.models import MatcherNotFoundException 6 | from splitio.models.grammar import matchers 7 | from splitio.models.grammar import partitions 8 | 9 | _MATCHER_COMBINERS = { 10 | 'AND': lambda ms, k, a, c: all(m.evaluate(k, a, c) for m in ms) 11 | } 12 | 13 | 14 | class ConditionType(Enum): 15 | """Feature Flag possible condition types.""" 16 | 17 | WHITELIST = 'WHITELIST' 18 | ROLLOUT = 'ROLLOUT' 19 | 20 | 21 | class Condition(object): 22 | """Condition object class.""" 23 | 24 | def __init__( # pylint: disable=too-many-arguments 25 | self, 26 | matcher_list, 27 | combiner, parts, label, 28 | condition_type=ConditionType.WHITELIST 29 | ): 30 | """ 31 | Class constructor. 32 | 33 | :param matcher: A combining matcher 34 | :type matcher: CombiningMatcher 35 | :param parts: A list of partitions 36 | :type parts: list 37 | """ 38 | self._matchers = matcher_list 39 | self._combiner = combiner 40 | self._partitions = tuple(parts) 41 | self._label = label 42 | self._condition_type = condition_type 43 | 44 | @property 45 | def matchers(self): 46 | """Return the list of matchers associated to the condition.""" 47 | return self._matchers 48 | 49 | @property 50 | def partitions(self): 51 | """Return the list of partitions associated with the condition.""" 52 | return self._partitions 53 | 54 | @property 55 | def label(self): 56 | """Return the label of this condition.""" 57 | return self._label 58 | 59 | @property 60 | def condition_type(self): 61 | """Return the condition type.""" 62 | return self._condition_type 63 | 64 | def matches(self, key, attributes=None, context=None): 65 | """ 66 | Check whether the condition matches against user submitted input. 67 | 68 | :param key: User key 69 | :type key: splitio.client.key.Key 70 | :param attributes: User custom attributes. 71 | :type attributes: dict 72 | :param context: Evaluation context 73 | :type context: dict 74 | """ 75 | return self._combiner(self._matchers, key, attributes, context) 76 | 77 | def get_segment_names(self): 78 | """ 79 | Fetch segment names for all IN_SEGMENT matchers. 
80 | 81 | :return: List of segment names 82 | :rtype: list(str) 83 | """ 84 | return [ 85 | matcher._segment_name for matcher in self.matchers  # pylint: disable=protected-access 86 | if isinstance(matcher, matchers.UserDefinedSegmentMatcher) 87 | ] 88 | 89 | def __str__(self): 90 | """Return the string representation of the condition.""" 91 | return '{matcher} then split {parts}'.format( 92 | matcher=self._matchers, parts=','.join( 93 | '{size}:{treatment}'.format(size=partition.size, 94 | treatment=partition.treatment) 95 | for partition in self._partitions)) 96 | 97 | def to_json(self): 98 | """Return the JSON representation of this condition.""" 99 | return { 100 | 'conditionType': self._condition_type.name, 101 | 'label': self._label, 102 | 'matcherGroup': { 103 | 'combiner': next( 104 | (k, v) for k, v in _MATCHER_COMBINERS.items() if v == self._combiner 105 | )[0], 106 | 'matchers': [m.to_json() for m in self.matchers] 107 | }, 108 | 'partitions': [p.to_json() for p in self.partitions] 109 | } 110 | 111 | 112 | def from_raw(raw_condition): 113 | """ 114 | Parse a condition from a JSON portion of splitChanges. 115 | 116 | :param raw_condition: JSON object extracted from a feature flag's conditions array. 117 | :type raw_condition: dict 118 | 119 | :return: A condition object. 120 | :rtype: Condition 121 | """ 122 | parsed_partitions = [] 123 | if raw_condition.get("partitions") is not None: 124 | parsed_partitions = [ 125 | partitions.from_raw(raw_partition) 126 | for raw_partition in raw_condition['partitions'] 127 | ] 128 | 129 | matcher_objects = [matchers.from_raw(x) for x in raw_condition['matcherGroup']['matchers']] 130 | 131 | combiner = _MATCHER_COMBINERS[raw_condition['matcherGroup']['combiner']] 132 | label = raw_condition.get('label') 133 | 134 | condition_type = ConditionType(raw_condition.get('conditionType', ConditionType.WHITELIST)) 135 | 136 | return Condition(matcher_objects, combiner, parsed_partitions, label, condition_type) 137 | -------------------------------------------------------------------------------- /splitio/sync/event.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import queue 3 | 4 | from splitio.api import APIException 5 | from splitio.optional.loaders import asyncio 6 | 7 | _LOGGER = logging.getLogger(__name__) 8 | 9 | 10 | class EventSynchronizer(object): 11 | """Event Synchronizer class""" 12 | def __init__(self, events_api, storage, bulk_size): 13 | """ 14 | Class constructor. 15 | 16 | :param events_api: Events Api object to send data to the backend 17 | :type events_api: splitio.api.events.EventsAPI 18 | :param storage: Events Storage 19 | :type storage: splitio.storage.EventStorage 20 | :param bulk_size: How many events to send per push. 21 | :type bulk_size: int 22 | 23 | """ 24 | self._api = events_api 25 | self._event_storage = storage 26 | self._bulk_size = bulk_size 27 | self._failed = queue.Queue() 28 | 29 | def _get_failed(self): 30 | """Return up to bulk_size events stored in the failed events queue.""" 31 | events = [] 32 | count = 0 33 | while count < self._bulk_size: 34 | try: 35 | events.append(self._failed.get(False)) 36 | count += 1 37 | except queue.Empty: 38 | # If no more items in queue, break the loop 39 | break 40 | return events 41 | 42 | def _add_to_failed_queue(self, events): 43 | """ 44 | Add events that were about to be sent to a secondary queue for failed sends. 45 | 46 | :param events: List of events that failed to be pushed.
47 | :type events: list 48 | """ 49 | for event in events: 50 | self._failed.put(event, False) 51 | 52 | def synchronize_events(self): 53 | """Send events from both the failed and new queues.""" 54 | to_send = self._get_failed() 55 | if len(to_send) < self._bulk_size: 56 | # If the amount of previously failed items is less than the bulk 57 | # size, try to complete with new events from storage 58 | to_send.extend(self._event_storage.pop_many(self._bulk_size - len(to_send))) 59 | 60 | if not to_send: 61 | return 62 | 63 | try: 64 | self._api.flush_events(to_send) 65 | except APIException: 66 | _LOGGER.error('Exception raised while reporting events') 67 | _LOGGER.debug('Exception information: ', exc_info=True) 68 | self._add_to_failed_queue(to_send) 69 | 70 | 71 | class EventSynchronizerAsync(object): 72 | """Event Synchronizer async class""" 73 | def __init__(self, events_api, storage, bulk_size): 74 | """ 75 | Class constructor. 76 | 77 | :param events_api: Events Api object to send data to the backend 78 | :type events_api: splitio.api.events.EventsAPI 79 | :param storage: Events Storage 80 | :type storage: splitio.storage.EventStorage 81 | :param bulk_size: How many events to send per push. 82 | :type bulk_size: int 83 | 84 | """ 85 | self._api = events_api 86 | self._event_storage = storage 87 | self._bulk_size = bulk_size 88 | self._failed = asyncio.Queue() 89 | 90 | async def _get_failed(self): 91 | """Return up to bulk_size events stored in the failed events queue.""" 92 | events = [] 93 | count = 0 94 | while count < self._bulk_size and self._failed.qsize() > 0: 95 | try: 96 | events.append(await self._failed.get()) 97 | count += 1 98 | except asyncio.QueueEmpty: 99 | # If no more items in queue, break the loop 100 | break 101 | return events 102 | 103 | async def _add_to_failed_queue(self, events): 104 | """ 105 | Add events that were about to be sent to a secondary queue for failed sends. 106 | 107 | :param events: List of events that failed to be pushed. 108 | :type events: list 109 | """ 110 | for event in events: 111 | await self._failed.put(event) 112 | 113 | async def synchronize_events(self): 114 | """Send events from both the failed and new queues.""" 115 | to_send = await self._get_failed() 116 | if len(to_send) < self._bulk_size: 117 | # If the amount of previously failed items is less than the bulk 118 | # size, try to complete with new events from storage 119 | to_send.extend(await self._event_storage.pop_many(self._bulk_size - len(to_send))) 120 | 121 | if not to_send: 122 | return 123 | 124 | try: 125 | await self._api.flush_events(to_send) 126 | except APIException: 127 | _LOGGER.error('Exception raised while reporting events') 128 | _LOGGER.debug('Exception information: ', exc_info=True) 129 | await self._add_to_failed_queue(to_send) 130 | -------------------------------------------------------------------------------- /splitio/client/listener.py: -------------------------------------------------------------------------------- 1 | """Impression listener module.""" 2 | 3 | import abc 4 | 5 | 6 | class ImpressionListenerException(Exception): 7 | """Custom Exception for Impression Listener.""" 8 | 9 | pass 10 | 11 | class ImpressionListener(object, metaclass=abc.ABCMeta): 12 | """Impression listener interface.""" 13 | 14 | @abc.abstractmethod 15 | def log_impression(self, data): 16 | """ 17 | Accept an impression generated after an evaluation for custom user handling. 18 | 19 | :param data: Impression data in a dictionary format.
20 | :type data: dict 21 | """ 22 | pass 23 | 24 | class ImpressionListenerBase(ImpressionListener):  # pylint: disable=too-few-public-methods 25 | """ 26 | Impression listener safe-execution wrapper. 27 | 28 | Wrapper in charge of building all the data that client would require in case 29 | of adding some logic with the treatment and impression results. 30 | """ 31 | 32 | impression_listener = None 33 | 34 | def __init__(self, impression_listener, sdk_metadata): 35 | """ 36 | Class Constructor. 37 | 38 | :param impression_listener: User provided impression listener. 39 | :type impression_listener: ImpressionListener 40 | :param sdk_metadata: SDK version, instance name & IP 41 | :type sdk_metadata: splitio.client.util.SdkMetadata 42 | """ 43 | self.impression_listener = impression_listener 44 | self._metadata = sdk_metadata 45 | 46 | def _construct_data(self, impression, attributes): 47 | data = {} 48 | data['impression'] = impression 49 | data['attributes'] = attributes 50 | data['sdk-language-version'] = self._metadata.sdk_version 51 | data['instance-id'] = self._metadata.instance_name 52 | return data 53 | 54 | def log_impression(self, impression, attributes=None): 55 | pass 56 | 57 | class ImpressionListenerWrapper(ImpressionListenerBase):  # pylint: disable=too-few-public-methods 58 | """ 59 | Impression listener safe-execution wrapper. 60 | 61 | Wrapper in charge of building all the data that client would require in case 62 | of adding some logic with the treatment and impression results. 63 | """ 64 | def __init__(self, impression_listener, sdk_metadata): 65 | """ 66 | Class Constructor. 67 | 68 | :param impression_listener: User provided impression listener. 69 | :type impression_listener: ImpressionListener 70 | :param sdk_metadata: SDK version, instance name & IP 71 | :type sdk_metadata: splitio.client.util.SdkMetadata 72 | """ 73 | ImpressionListenerBase.__init__(self, impression_listener, sdk_metadata) 74 | 75 | def log_impression(self, impression, attributes=None): 76 | """ 77 | Send an impression to the user-provided listener. 78 | 79 | :param impression: Impression data 80 | :type impression: dict 81 | :param attributes: User provided attributes when calling get_treatment(s) 82 | :type attributes: dict 83 | """ 84 | data = self._construct_data(impression, attributes) 85 | try: 86 | self.impression_listener.log_impression(data) 87 | except Exception as exc:  # pylint: disable=broad-except 88 | raise ImpressionListenerException('Error in log_impression user\'s method is throwing exceptions') from exc 89 | 90 | 91 | class ImpressionListenerWrapperAsync(ImpressionListenerBase):  # pylint: disable=too-few-public-methods 92 | """ 93 | Impression listener safe-execution wrapper. 94 | 95 | Wrapper in charge of building all the data that client would require in case 96 | of adding some logic with the treatment and impression results. 97 | """ 98 | def __init__(self, impression_listener, sdk_metadata): 99 | """ 100 | Class Constructor. 101 | 102 | :param impression_listener: User provided impression listener. 103 | :type impression_listener: ImpressionListener 104 | :param sdk_metadata: SDK version, instance name & IP 105 | :type sdk_metadata: splitio.client.util.SdkMetadata 106 | """ 107 | ImpressionListenerBase.__init__(self, impression_listener, sdk_metadata) 108 | 109 | async def log_impression(self, impression, attributes=None): 110 | """ 111 | Send an impression to the user-provided listener.
112 | 113 | :param impression: Impression data 114 | :type impression: dict 115 | :param attributes: User provided attributes when calling get_treatment(s) 116 | :type attributes: dict 117 | """ 118 | data = self._construct_data(impression, attributes) 119 | try: 120 | await self.impression_listener.log_impression(data) 121 | except Exception as exc:  # pylint: disable=broad-except 122 | raise ImpressionListenerException('Error in log_impression user\'s method is throwing exceptions') from exc 123 | -------------------------------------------------------------------------------- /splitio/tasks/impressions_sync.py: -------------------------------------------------------------------------------- 1 | """Impressions synchronization task.""" 2 | import logging 3 | 4 | from splitio.tasks import BaseSynchronizationTask 5 | from splitio.tasks.util.asynctask import AsyncTask, AsyncTaskAsync 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | 10 | 11 | class ImpressionsSyncTaskBase(BaseSynchronizationTask): 12 | """Impressions synchronization task uses an asynctask.AsyncTask to send impressions.""" 13 | 14 | def start(self): 15 | """Start executing the impressions synchronization task.""" 16 | self._task.start() 17 | 18 | def stop(self, event=None): 19 | """Stop executing the impressions synchronization task.""" 20 | pass 21 | 22 | def is_running(self): 23 | """ 24 | Return whether the task is running or not. 25 | 26 | :return: True if the task is running. False otherwise. 27 | :rtype: bool 28 | """ 29 | return self._task.running() 30 | 31 | def flush(self): 32 | """Flush impressions in storage.""" 33 | _LOGGER.debug('Forcing flush execution for impressions') 34 | self._task.force_execution() 35 | 36 | 37 | class ImpressionsSyncTask(ImpressionsSyncTaskBase): 38 | """Impressions synchronization task uses an asynctask.AsyncTask to send impressions.""" 39 | 40 | def __init__(self, synchronize_impressions, period): 41 | """ 42 | Class constructor. 43 | 44 | :param synchronize_impressions: sender 45 | :type synchronize_impressions: func 46 | :param period: How many seconds to wait between subsequent impressions pushes to the BE. 47 | :type period: int 48 | 49 | """ 50 | self._period = period 51 | self._task = AsyncTask(synchronize_impressions, self._period, 52 | on_stop=synchronize_impressions) 53 | 54 | def stop(self, event=None): 55 | """Stop executing the impressions synchronization task.""" 56 | self._task.stop(event) 57 | 58 | 59 | class ImpressionsSyncTaskAsync(ImpressionsSyncTaskBase): 60 | """Impressions synchronization task uses an asynctask.AsyncTask to send impressions.""" 61 | 62 | def __init__(self, synchronize_impressions, period): 63 | """ 64 | Class constructor. 65 | 66 | :param synchronize_impressions: sender 67 | :type synchronize_impressions: func 68 | :param period: How many seconds to wait between subsequent impressions pushes to the BE.
69 | :type period: int 70 | 71 | """ 72 | self._period = period 73 | self._task = AsyncTaskAsync(synchronize_impressions, self._period, 74 | on_stop=synchronize_impressions) 75 | 76 | async def stop(self, event=None): 77 | """Stop executing the impressions synchronization task.""" 78 | await self._task.stop(True) 79 | 80 | 81 | class ImpressionsCountSyncTaskBase(BaseSynchronizationTask): 82 | """Impressions synchronization task uses an asynctask.AsyncTask to send impressions.""" 83 | 84 | _PERIOD = 1800 # 30 * 60 # 30 minutes 85 | 86 | def start(self): 87 | """Start executing the impressions synchronization task.""" 88 | self._task.start() 89 | 90 | def stop(self, event=None): 91 | """Stop executing the impressions synchronization task.""" 92 | pass 93 | 94 | def is_running(self): 95 | """ 96 | Return whether the task is running or not. 97 | 98 | :return: True if the task is running. False otherwise. 99 | :rtype: bool 100 | """ 101 | return self._task.running() 102 | 103 | def flush(self): 104 | """Flush impressions in storage.""" 105 | self._task.force_execution() 106 | 107 | 108 | class ImpressionsCountSyncTask(ImpressionsCountSyncTaskBase): 109 | """Impressions synchronization task uses an asynctask.AsyncTask to send impressions.""" 110 | 111 | def __init__(self, synchronize_counters): 112 | """ 113 | Class constructor. 114 | 115 | :param synchronize_counters: Handler 116 | :type synchronize_counters: func 117 | 118 | """ 119 | self._task = AsyncTask(synchronize_counters, self._PERIOD, on_stop=synchronize_counters) 120 | 121 | def stop(self, event=None): 122 | """Stop executing the impressions synchronization task.""" 123 | self._task.stop(event) 124 | 125 | 126 | class ImpressionsCountSyncTaskAsync(ImpressionsCountSyncTaskBase): 127 | """Impressions synchronization task uses an asynctask.AsyncTask to send impressions.""" 128 | 129 | def __init__(self, synchronize_counters): 130 | """ 131 | Class constructor. 
132 | 133 | :param synchronize_counters: Handler 134 | :type synchronize_counters: func 135 | 136 | """ 137 | self._task = AsyncTaskAsync(synchronize_counters, self._PERIOD, on_stop=synchronize_counters) 138 | 139 | async def stop(self): 140 | """Stop executing the impressions synchronization task.""" 141 | await self._task.stop(True) 142 | -------------------------------------------------------------------------------- /tests/engine/test_unique_keys_tracker.py: -------------------------------------------------------------------------------- 1 | """BloomFilter unit tests.""" 2 | import pytest 3 | 4 | from splitio.engine.impressions.unique_keys_tracker import UniqueKeysTracker, UniqueKeysTrackerAsync 5 | from splitio.engine.filters import BloomFilter 6 | 7 | class UniqueKeysTrackerTests(object): 8 | """StandardRecorderTests test cases.""" 9 | 10 | def test_adding_and_removing_keys(self, mocker): 11 | tracker = UniqueKeysTracker() 12 | 13 | assert(tracker._cache_size > 0) 14 | assert(tracker._current_cache_size == 0) 15 | assert(tracker._cache == {}) 16 | assert(isinstance(tracker._filter, BloomFilter)) 17 | 18 | key1 = 'key1' 19 | key2 = 'key2' 20 | key3 = 'key3' 21 | split1= 'feature1' 22 | split2= 'feature2' 23 | 24 | assert(tracker.track(key1, split1)) 25 | assert(tracker.track(key3, split1)) 26 | assert(not tracker.track(key1, split1)) 27 | assert(tracker.track(key2, split2)) 28 | 29 | assert(tracker._filter.contains(split1+key1)) 30 | assert(not tracker._filter.contains(split1+key2)) 31 | assert(tracker._filter.contains(split2+key2)) 32 | assert(not tracker._filter.contains(split2+key1)) 33 | assert(key1 in tracker._cache[split1]) 34 | assert(key3 in tracker._cache[split1]) 35 | assert(key2 in tracker._cache[split2]) 36 | assert(not key3 in tracker._cache[split2]) 37 | 38 | tracker.clear_filter() 39 | assert(not tracker._filter.contains(split1+key1)) 40 | assert(not tracker._filter.contains(split2+key2)) 41 | 42 | cache_backup = tracker._cache.copy() 43 | cache_size_backup = tracker._current_cache_size 44 | cache, cache_size = tracker.get_cache_info_and_pop_all() 45 | assert(cache_backup == cache) 46 | assert(cache_size_backup == cache_size) 47 | assert(tracker._current_cache_size == 0) 48 | assert(tracker._cache == {}) 49 | 50 | def test_cache_size(self, mocker): 51 | cache_size = 10 52 | tracker = UniqueKeysTracker(cache_size) 53 | 54 | split1= 'feature1' 55 | for x in range(1, cache_size + 1): 56 | tracker.track('key' + str(x), split1) 57 | split2= 'feature2' 58 | for x in range(1, int(cache_size / 2) + 1): 59 | tracker.track('key' + str(x), split2) 60 | 61 | assert(tracker._current_cache_size == (cache_size + (cache_size / 2))) 62 | assert(len(tracker._cache[split1]) == cache_size) 63 | assert(len(tracker._cache[split2]) == cache_size / 2) 64 | 65 | 66 | class UniqueKeysTrackerAsyncTests(object): 67 | """StandardRecorderTests test cases.""" 68 | 69 | @pytest.mark.asyncio 70 | async def test_adding_and_removing_keys(self, mocker): 71 | tracker = UniqueKeysTrackerAsync() 72 | 73 | assert(tracker._cache_size > 0) 74 | assert(tracker._current_cache_size == 0) 75 | assert(tracker._cache == {}) 76 | assert(isinstance(tracker._filter, BloomFilter)) 77 | 78 | key1 = 'key1' 79 | key2 = 'key2' 80 | key3 = 'key3' 81 | split1= 'feature1' 82 | split2= 'feature2' 83 | 84 | assert(await tracker.track(key1, split1)) 85 | assert(await tracker.track(key3, split1)) 86 | assert(not await tracker.track(key1, split1)) 87 | assert(await tracker.track(key2, split2)) 88 | 89 | 
assert(tracker._filter.contains(split1+key1)) 90 | assert(not tracker._filter.contains(split1+key2)) 91 | assert(tracker._filter.contains(split2+key2)) 92 | assert(not tracker._filter.contains(split2+key1)) 93 | assert(key1 in tracker._cache[split1]) 94 | assert(key3 in tracker._cache[split1]) 95 | assert(key2 in tracker._cache[split2]) 96 | assert(not key3 in tracker._cache[split2]) 97 | 98 | await tracker.clear_filter() 99 | assert(not tracker._filter.contains(split1+key1)) 100 | assert(not tracker._filter.contains(split2+key2)) 101 | 102 | cache_backup = tracker._cache.copy() 103 | cache_size_backup = tracker._current_cache_size 104 | cache, cache_size = await tracker.get_cache_info_and_pop_all() 105 | assert(cache_backup == cache) 106 | assert(cache_size_backup == cache_size) 107 | assert(tracker._current_cache_size == 0) 108 | assert(tracker._cache == {}) 109 | 110 | @pytest.mark.asyncio 111 | async def test_cache_size(self, mocker): 112 | cache_size = 10 113 | tracker = UniqueKeysTrackerAsync(cache_size) 114 | 115 | split1= 'feature1' 116 | for x in range(1, cache_size + 1): 117 | await tracker.track('key' + str(x), split1) 118 | split2= 'feature2' 119 | for x in range(1, int(cache_size / 2) + 1): 120 | await tracker.track('key' + str(x), split2) 121 | 122 | assert(tracker._current_cache_size == (cache_size + (cache_size / 2))) 123 | assert(len(tracker._cache[split1]) == cache_size) 124 | assert(len(tracker._cache[split2]) == cache_size / 2) 125 | -------------------------------------------------------------------------------- /tests/engine/test_hashfns.py: -------------------------------------------------------------------------------- 1 | """Hash function test module.""" 2 | #pylint: disable=no-self-use,protected-access 3 | import io 4 | import json 5 | import os 6 | 7 | import pytest 8 | from splitio.engine import hashfns, splitters 9 | from splitio.engine.hashfns.murmur3py import hash128_x64 as murmur3_128_py 10 | from splitio.models import splits 11 | 12 | 13 | class HashFunctionsTests(object): 14 | """Hash functions test cases.""" 15 | 16 | def test_get_hash_function(self): 17 | """Test that the correct hash function is returned.""" 18 | assert hashfns.get_hash_fn(splits.HashAlgorithm.LEGACY) == hashfns.legacy.legacy_hash 19 | assert hashfns.get_hash_fn(splits.HashAlgorithm.MURMUR) == hashfns._murmur_hash 20 | 21 | def test_legacy_hash_ascii_data(self): 22 | """Test legacy hash function against known results.""" 23 | splitter = splitters.Splitter() 24 | file_name = os.path.join(os.path.dirname(__file__), 'files', 'sample-data.jsonl') 25 | with open(file_name, 'r') as flo: 26 | lines = flo.read().split('\n') 27 | 28 | for line in lines: 29 | if line is None or line == '': 30 | continue 31 | seed, key, hashed, bucket = json.loads(line) 32 | assert hashfns.legacy.legacy_hash(key, seed) == hashed 33 | assert splitter.get_bucket(key, seed, splits.HashAlgorithm.LEGACY) == bucket 34 | 35 | def test_murmur_hash_ascii_data(self): 36 | """Test legacy hash function against known results.""" 37 | splitter = splitters.Splitter() 38 | file_name = os.path.join(os.path.dirname(__file__), 'files', 'murmur3-sample-data-v2.csv') 39 | with open(file_name, 'r') as flo: 40 | lines = flo.read().split('\n') 41 | 42 | for line in lines: 43 | if line is None or line == '': 44 | continue 45 | seed, key, hashed, bucket = line.split(',') 46 | seed = int(seed) 47 | bucket = int(bucket) 48 | hashed = int(hashed) 49 | assert hashfns._murmur_hash(key, seed) == hashed 50 | assert splitter.get_bucket(key, seed, 
splits.HashAlgorithm.MURMUR) == bucket 51 | 52 | def test_murmur_more_ascii_data(self): 53 | """Test legacy hash function against known results.""" 54 | splitter = splitters.Splitter() 55 | file_name = os.path.join(os.path.dirname(__file__), 'files', 'murmur3-custom-uuids.csv') 56 | with open(file_name, 'r') as flo: 57 | lines = flo.read().split('\n') 58 | 59 | for line in lines: 60 | if line is None or line == '': 61 | continue 62 | seed, key, hashed, bucket = line.split(',') 63 | seed = int(seed) 64 | bucket = int(bucket) 65 | hashed = int(hashed) 66 | assert hashfns._murmur_hash(key, seed) == hashed 67 | assert splitter.get_bucket(key, seed, splits.HashAlgorithm.MURMUR) == bucket 68 | 69 | def test_murmur_hash_non_ascii_data(self): 70 | """Test legacy hash function against known results.""" 71 | splitter = splitters.Splitter() 72 | file_name = os.path.join( 73 | os.path.dirname(__file__), 74 | 'files', 75 | 'murmur3-sample-data-non-alpha-numeric-v2.csv' 76 | ) 77 | with io.open(file_name, 'r', encoding='utf-8') as flo: 78 | lines = flo.read().split('\n') 79 | 80 | for line in lines: 81 | if line is None or line == '': 82 | continue 83 | seed, key, hashed, bucket = line.split(',') 84 | seed = int(seed) 85 | bucket = int(bucket) 86 | hashed = int(hashed) 87 | assert hashfns._murmur_hash(key, seed) == hashed 88 | assert splitter.get_bucket(key, seed, splits.HashAlgorithm.MURMUR) == bucket 89 | 90 | def test_murmur128(self): 91 | """Test legacy hash function against known results.""" 92 | file_name = os.path.join(os.path.dirname(__file__), 'files', 'murmur128_test_suite.csv') 93 | with io.open(file_name, 'r', encoding='utf-8') as flo: 94 | lines = flo.read().split('\n') 95 | 96 | for line in lines: 97 | if line is None or line == '': 98 | continue 99 | key, seed, hashed = line.split(',') 100 | seed = int(seed) 101 | hashed = int(hashed) 102 | assert hashfns.murmur_128(key, seed) == hashed 103 | 104 | def test_murmur128_pure_python(self): 105 | """Test legacy hash function against known results.""" 106 | file_name = os.path.join(os.path.dirname(__file__), 'files', 'murmur128_test_suite.csv') 107 | with io.open(file_name, 'r', encoding='utf-8') as flo: 108 | lines = flo.read().split('\n') 109 | 110 | for line in lines: 111 | if line is None or line == '': 112 | continue 113 | key, seed, hashed = line.split(',') 114 | seed = int(seed) 115 | hashed = int(hashed) 116 | assert murmur3_128_py(key, seed)[0] == hashed 117 | -------------------------------------------------------------------------------- /splitio/tasks/unique_keys_sync.py: -------------------------------------------------------------------------------- 1 | """Unique keys synchronization task.""" 2 | import logging 3 | 4 | from splitio.tasks import BaseSynchronizationTask 5 | from splitio.tasks.util.asynctask import AsyncTask, AsyncTaskAsync 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | _UNIQUE_KEYS_SYNC_PERIOD = 15 * 60  # 15 minutes 10 | _CLEAR_FILTER_SYNC_PERIOD = 60 * 60 * 24  # 24 hours 11 | 12 | 13 | class UniqueKeysSyncTaskBase(BaseSynchronizationTask): 14 | """Unique Keys synchronization task uses an asynctask.AsyncTask to send MTKs.""" 15 | 16 | def start(self): 17 | """Start executing the unique keys synchronization task.""" 18 | self._task.start() 19 | 20 | def stop(self, event=None): 21 | """Stop executing the unique keys synchronization task.""" 22 | pass 23 | 24 | def is_running(self): 25 | """ 26 | Return whether the task is running or not. 27 | 28 | :return: True if the task is running. False otherwise.
29 | :rtype: bool 30 | """ 31 | return self._task.running() 32 | 33 | def flush(self): 34 | """Flush unique keys.""" 35 | _LOGGER.debug('Forcing flush execution for unique keys') 36 | self._task.force_execution() 37 | 38 | 39 | class UniqueKeysSyncTask(UniqueKeysSyncTaskBase): 40 | """Unique Keys synchronization task uses an asynctask.AsyncTask to send MTKs.""" 41 | 42 | def __init__(self, synchronize_unique_keys, period = _UNIQUE_KEYS_SYNC_PERIOD): 43 | """ 44 | Class constructor. 45 | 46 | :param synchronize_unique_keys: sender 47 | :type synchronize_unique_keys: func 48 | :param period: How many seconds to wait between subsequent unique keys pushes to the BE. 49 | :type period: int 50 | """ 51 | self._task = AsyncTask(synchronize_unique_keys, period, 52 | on_stop=synchronize_unique_keys) 53 | 54 | def stop(self, event=None): 55 | """Stop executing the unique keys synchronization task.""" 56 | self._task.stop(event) 57 | 58 | 59 | class UniqueKeysSyncTaskAsync(UniqueKeysSyncTaskBase): 60 | """Unique Keys synchronization task uses an asynctask.AsyncTask to send MTKs.""" 61 | 62 | def __init__(self, synchronize_unique_keys, period = _UNIQUE_KEYS_SYNC_PERIOD): 63 | """ 64 | Class constructor. 65 | 66 | :param synchronize_unique_keys: sender 67 | :type synchronize_unique_keys: func 68 | :param period: How many seconds to wait between subsequent unique keys pushes to the BE. 69 | :type period: int 70 | """ 71 | self._task = AsyncTaskAsync(synchronize_unique_keys, period, 72 | on_stop=synchronize_unique_keys) 73 | 74 | async def stop(self): 75 | """Stop executing the unique keys synchronization task.""" 76 | await self._task.stop(True) 77 | 78 | 79 | class ClearFilterSyncTaskBase(BaseSynchronizationTask): 80 | """Clear Filter synchronization task uses an asynctask.AsyncTask to clear the bloom filter.""" 81 | 82 | def start(self): 83 | """Start executing the unique keys synchronization task.""" 84 | self._task.start() 85 | 86 | def stop(self, event=None): 87 | """Stop executing the unique keys synchronization task.""" 88 | pass 89 | 90 | def is_running(self): 91 | """ 92 | Return whether the task is running or not. 93 | 94 | :return: True if the task is running. False otherwise. 95 | :rtype: bool 96 | """ 97 | return self._task.running() 98 | 99 | 100 | class ClearFilterSyncTask(ClearFilterSyncTaskBase): 101 | """Clear Filter synchronization task uses an asynctask.AsyncTask to clear the bloom filter.""" 102 | 103 | def __init__(self, clear_filter, period = _CLEAR_FILTER_SYNC_PERIOD): 104 | """ 105 | Class constructor. 106 | 107 | :param clear_filter: filter-clearing handler 108 | :type clear_filter: func 109 | :param period: How many seconds to wait between subsequent clearings of the bloom filter 110 | :type period: int 111 | """ 112 | self._task = AsyncTask(clear_filter, period, 113 | on_stop=clear_filter) 114 | 115 | def stop(self, event=None): 116 | """Stop executing the unique keys synchronization task.""" 117 | self._task.stop(event) 118 | 119 | 120 | class ClearFilterSyncTaskAsync(ClearFilterSyncTaskBase): 121 | """Clear Filter synchronization task uses an asynctask.AsyncTask to clear the bloom filter.""" 122 | 123 | def __init__(self, clear_filter, period = _CLEAR_FILTER_SYNC_PERIOD): 124 | """ 125 | Class constructor.
126 | 127 | :param clear_filter: filter-clearing handler 128 | :type clear_filter: func 129 | :param period: How many seconds to wait between subsequent clearings of the bloom filter 130 | :type period: int 131 | """ 132 | self._task = AsyncTaskAsync(clear_filter, period, 133 | on_stop=clear_filter) 134 | 135 | async def stop(self): 136 | """Stop executing the unique keys synchronization task.""" 137 | await self._task.stop(True) 138 | --------------------------------------------------------------------------------
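
Usage sketch (illustrative, not one of the repository files above): the snippet below wires the unique-keys pieces together the same way the synchronous test in tests/tasks/test_unique_keys_sync.py does. The splitio.sync.unique_keys import path for the two synchronizers and the sender_api object are assumptions (the tests construct them from mocks), so treat this as a sketch of the flow rather than the SDK's documented setup code.

    # Minimal wiring sketch based on the tests above; sender_api and the
    # splitio.sync.unique_keys import path are assumptions, not verified API.
    import threading
    from unittest import mock

    from splitio.engine.impressions.unique_keys_tracker import UniqueKeysTracker
    from splitio.sync.unique_keys import UniqueKeysSynchronizer, ClearFilterSynchronizer  # assumed module path
    from splitio.tasks.unique_keys_sync import UniqueKeysSyncTask, ClearFilterSyncTask

    tracker = UniqueKeysTracker()            # bloom-filter-backed tracker of (feature, key) pairs
    tracker.track("user-1", "my_feature")    # returns False if this pair was already seen

    # sender_api stands in for whatever sender object the SDK builds internally; the tests pass a mock here.
    sender_api = mock.Mock()
    unique_keys_sync = UniqueKeysSynchronizer(sender_api, tracker)
    send_task = UniqueKeysSyncTask(unique_keys_sync.send_all)        # default period: 15 minutes
    send_task.start()

    clear_filter_sync = ClearFilterSynchronizer(tracker)
    clear_task = ClearFilterSyncTask(clear_filter_sync.clear_all)    # default period: 24 hours
    clear_task.start()

    # Stopping mirrors the synchronous test: hand the task a threading.Event and wait on it.
    stop_event = threading.Event()
    send_task.stop(stop_event)
    stop_event.wait(5)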