├── timeline
│   ├── templatetags
│   │   ├── __init__.py
│   │   └── event_tags.py
│   ├── models.py
│   ├── __init__.py
│   ├── base.py
│   └── tests.py
├── .gitignore
├── setup.py
└── readme.md

/timeline/templatetags/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
*.py?
.DS_Store
*.egg-info

--------------------------------------------------------------------------------
/timeline/templatetags/event_tags.py:
--------------------------------------------------------------------------------
from django import template

from templatetag_sugar.register import tag
from templatetag_sugar.parser import Variable

register = template.Library()

@tag(register, [Variable("event")])
def render_event(context, event):
    # Delegate to the event cluster's own render(), passing the current
    # template context through.
    return event.render(context)

--------------------------------------------------------------------------------
/timeline/models.py:
--------------------------------------------------------------------------------
from django.db import models

class StreamItem(models.Model):
    # The JSON-serialized event context, mirroring the record kept in redis.
    context = models.TextField()
    remove = models.BooleanField()
    clusters = models.ManyToManyField("StreamCluster", related_name="items")

class StreamCluster(models.Model):
    event_type = models.CharField(max_length=64)
    clustered_on = models.CharField(max_length=64)

--------------------------------------------------------------------------------
/timeline/__init__.py:
--------------------------------------------------------------------------------
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule

def autodiscover():
    """
    Automatically import the events module for all INSTALLED_APPS. Based on
    ``django.contrib.admin.autodiscover``.
    """

    for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        try:
            import_module("%s.events" % app)
        except ImportError:
            # Only swallow the error when the app simply has no events
            # module; re-raise if the module exists but fails to import.
            if module_has_submodule(mod, "events"):
                raise

--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
from setuptools import setup, find_packages

setup(
    name='django-timeline',
    version='0.5',
    description='a Django timeline (activity stream) using redis',
    author='Chris Drackett',
    author_email='chris@tiltshiftstudio.com',
    url="https://github.com/tiltshift/django-timeline.git",
    packages=find_packages(),
    include_package_data=True,
    install_requires=[
        'django>=1.3.1',
        'redis>=2.0.0',
    ],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Framework :: Django",
    ],
)

--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
Timeline (event streams)
========================

Timeline is a flexible event stream app that uses Redis as its backend to keep things fast.
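
Timeline connects to Redis through a `REDIS_SETTINGS` dict in your Django settings; the whole dict is passed to the `redis.Redis()` constructor (see `get_redis_connection` in `timeline/base.py`). A minimal sketch, with assumed values:

``` python
# settings.py -- a sketch; every key here is forwarded as a keyword
# argument to redis.Redis(), so any of its connection options can be used.
REDIS_SETTINGS = {
    "host": "localhost",  # assumed values; match your redis server
    "port": 6379,
    "db": 0,
}
```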

Installation
------------

1. Make sure you have Redis installed.
2. `pip install django-timeline` (or clone the source from https://github.com/tiltshift/django-timeline).
3. Add `timeline` to the `INSTALLED_APPS` list in your project's settings.py file.
4. Add a `REDIS_SETTINGS` dict to your settings, as shown above.

How to Use Timeline
-------------------

First you need to define an event type. Generally the best practice is to add an `events.py` file in the
application related to the event you are creating, but you can define the event anywhere. (If you follow the
`events.py` convention, calling `timeline.autodiscover()` imports the `events` module of every installed app,
in the same way `django.contrib.admin.autodiscover` works.)

The following will walk you through the steps needed to create a new event definition, add an event of that type, and then display the resulting event in a timeline (stream).

Here is a generic example of an event definition where a user is adding an item:

``` python
from django.contrib.auth.models import User

from timeline.base import EventType

from yourapp.apps.items.models import Item

class UserAddedItem(EventType):
    slug = "user_added_item"
    context_shape = {
        "user": User,
        "item": Item,
    }
    queryable_by = ["user", "item"]
    default_cluster_by = "user"
```

As you can see, the event definition (`UserAddedItem`) is made up of the following:

- `slug`: a unique ID for the event type.
- `context_shape`: think of this as the variables for your event type. This is the data you'll be storing for each event of this type.
- `queryable_by`: the variables from the `context_shape` that can be used to find this event. We'll go over this later on.
- `default_cluster_by`: the `context_shape` variable this event is clustered on in the site-wide stream.

Next you'll need to write some code to create actual event objects:

``` python
from yourapp.apps.items.events import UserAddedItem

def add_item(user, item):
    # app code for adding the item here.

    UserAddedItem({
        'user': user,
        'item': item
    }).save()
```

This code is hopefully pretty self-explanatory. To create an event of the type `UserAddedItem` you pass your variables (`user` and `item` in this case) and then save the event.

Now that you've got an event saved, let's look at how to display it:

``` python
from timeline.base import Stream

events = Stream(request.user)
```

Also hopefully pretty simple. By default the stream takes the given query object, `request.user` in this case, and finds all events that reference it in one of their event type's `queryable_by` fields.

`Stream` can take any number of positional arguments, and it will combine their streams.

It also takes a number of keyword arguments (see the example below):

- `event_type`: only return events of the given type; pass the `EventType` subclass itself.
- `limit`: how many entries should be included; defaults to 20.
- `offset`: how many entries to skip from the top of the stream; defaults to 0. Useful for pagination.

Note that clustering is controlled on the event type rather than on `Stream`: it is on by default, and can be turned off for a given event type by setting `cluster = False` on its class. A stream always yields clusters (iterable lists of related `Events`); with clustering turned off, each cluster holds a single `Event`.
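
For example, here is a sketch (assuming the `UserAddedItem` type from above, inside a view where `request` is available) that fetches the second page of a user's `user_added_item` events:

``` python
from timeline.base import Stream

from yourapp.apps.items.events import UserAddedItem

# Ten events per page; offset=10 skips the first page.
events = Stream(
    request.user,
    event_type=UserAddedItem,  # the EventType subclass, not its slug
    limit=10,
    offset=10,
)
```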

In your templates you use the `render_event` template tag to render your events. Here is an example:

``` html
{% load event_tags %}

{% for event in events %}
    {% render_event event %}
{% endfor %}
```

The last step is to add a template representing each of your event types. The app looks for templates using the event slug: `events/event/user_added_item.html` would be the template you add for an event with the slug `user_added_item`.

Here is what `user_added_item.html` might look like:

``` html

{{ event.user }} added an item {{ event.item }}.

```

You can also access the query object that was used to look up this event with the `{{ query_object }}` variable.
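
When a cluster contains more than one event, the template is rendered with an `events` variable (the iterable cluster) instead of `event`, and whichever of the two is unused is set to `None` (see `StreamCluster.render` in `timeline/base.py`). A sketch of a template that handles both cases:

``` html
{% if event %}
    {{ event.user }} added an item {{ event.item }}.
{% else %}
    {{ query_object }} added {{ events|length }} items.
{% endif %}
```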

--------------------------------------------------------------------------------
/timeline/base.py:
--------------------------------------------------------------------------------
import hashlib
import time
import uuid
from collections import defaultdict, namedtuple
from datetime import datetime, timedelta

import redis

from django.conf import settings
from django.db.models import Model
from django.template import Context
from django.template.loader import render_to_string
from django.utils import simplejson as json

from .models import (StreamItem as StreamItemModel,
                     StreamCluster as StreamClusterModel)

# TODO: ...
def get_redis_connection():
    return redis.Redis(**settings.REDIS_SETTINGS)

class EventTypeMetaclass(type):
    def __new__(cls, name, bases, attrs):
        new_cls = super(EventTypeMetaclass, cls).__new__(cls, name, bases, attrs)
        if hasattr(new_cls, "context_shape"):
            # Swap bare model classes in the context shape for
            # ModelContextItemType subclasses, so every context item exposes
            # the same interface.
            for k, v in new_cls.context_shape.iteritems():
                if issubclass(v, Model):
                    class Klass(ModelContextItemType):
                        model = v
                    new_cls.context_shape[k] = Klass
        if hasattr(new_cls, "slug"):
            new_cls.registry[new_cls.slug] = new_cls
            assert new_cls.slug is not None
            assert new_cls.context_shape is not None
            assert new_cls.queryable_by is not None
            assert new_cls.default_cluster_by is not None

        return new_cls

class EventType(object):
    __metaclass__ = EventTypeMetaclass

    registry = {}

    cluster = True

    def __init__(self, context, timestamp=None, remove=False):
        if timestamp is None:
            timestamp = datetime.now()
        self.timestamp = timestamp
        for key, spec in self.context_shape.iteritems():
            if key not in context:
                raise ValueError("Missing value from context: %s" % key)
            if not spec.valid_obj(context[key]):
                raise TypeError("Invalid context item for %s: %s" % (key, context[key]))
        self.context = context
        self.remove = remove

    def serialize_context(self, context):
        result = {}
        for key, spec in self.context_shape.iteritems():
            result[key] = spec.serialize(context[key])
        return result

    @property
    def redis(self):
        if not hasattr(self, "_redis"):
            self._redis = get_redis_connection()
        return self._redis

    def save(self):
        context = self.serialize_context(self.context)
        s = StreamItemModel.objects.create(
            context=json.dumps(context),
            remove=self.remove,
        )

        t = self.timestamp
        timestamp = time.mktime(t.timetuple()) + 1e-6 * t.microsecond

        record = {
            "id": s.pk,
            "context": context,
            "remove": self.remove,
            # (year, month, day, hour, minute, second)
            "timestamp": tuple(t.timetuple())[:-3],
        }
        # Index the record under every queryable field, both on the bare
        # object key and on a per-event-type variant of it.
        for field in self.queryable_by:
            obj_key = self.context_shape[field](self.context[field]).lookup_key()
            keys = [obj_key, "%s:%s" % (obj_key, self.slug)]
            c = None
            for key in keys:
                c = self._add_to_key(field, key, timestamp, record, c, s)
        # The site-wide stream clusters on the event type's default field.
        self._add_to_key(
            self.default_cluster_by, "ALL_EVENTS", timestamp, record, c, s
        )

    def _add_to_key(self, field, key, timestamp, record, c, s):
        # Walk the most recent entries on this key; if one is a cluster of
        # the same event type, on the same value, and less than five minutes
        # old, append to it. Otherwise start a new cluster.
        for item, score in self.redis.zrevrange(key, 0, 5, withscores=True):
            cluster_timestamp = datetime.fromtimestamp(score)
            data = json.loads(item)
            if (data["slug"] == self.slug and self.cluster and
                data["items"][0]["context"][data["clustered_on"]] == self.context_shape[field].serialize(self.context[field]) and
                self.timestamp - cluster_timestamp < timedelta(minutes=5)):
                c = StreamClusterModel.objects.get(pk=data["cluster_id"])
                c.items.add(s)
                data["items"].append(record)
                self.redis.zrem(key, item)
                self.redis.zadd(key, json.dumps(data), score)
                break
        else:
            if c is None:
                c = StreamClusterModel.objects.create(
                    event_type=self.slug,
                    clustered_on=field,
                )
            c.items.add(s)
            data = json.dumps({
                "slug": self.slug,
                "items": [record],
                "clustered_on": field,
                "cluster_id": c.pk,
            })
            self.redis.zadd(key, data, timestamp)
        return c
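
# ContextItemType and its subclasses adapt context values for storage:
# valid_obj() validates a raw value, serialize()/deserialize() convert it to
# and from its redis-storable form, deserialize_bulk() rehydrates many values
# at once (a single query for model-backed types), unique_key() groups values
# that can be bulk-deserialized together, and lookup_key() names the redis
# sorted set a value's events are indexed under.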
class ContextItemType(object):
    def __init__(self, obj):
        self.obj = obj

    def lookup_key(self):
        return self.obj

    @classmethod
    def unique_key(cls):
        return cls

    @classmethod
    def valid_obj(cls, obj):
        return True

    @classmethod
    def serialize(cls, obj):
        return obj

    @classmethod
    def deserialize(cls, obj):
        return obj

    @classmethod
    def deserialize_bulk(cls, objs):
        return dict(
            (obj, cls.deserialize(obj))
            for obj in objs
        )

class ModelContextItemType(ContextItemType):
    model = None

    def lookup_key(self):
        return "%s:%s:%s" % (
            self.model._meta.app_label,
            self.model._meta.object_name,
            self.obj.pk
        )

    @classmethod
    def unique_key(cls):
        return cls.model

    @classmethod
    def valid_obj(cls, obj):
        return isinstance(obj, cls.model) or isinstance(obj, (int, long))

    @classmethod
    def serialize(cls, obj):
        return obj.pk

    @classmethod
    def deserialize(cls, obj):
        return cls.model._default_manager.get(pk=obj)

    @classmethod
    def deserialize_bulk(cls, objs):
        return cls.model._default_manager.in_bulk(objs)

class StreamItem(object):
    def __init__(self, slug, timestamp, context, item_id, cluster_id):
        self.slug = slug
        self.timestamp = timestamp
        self.context = context
        self.item_id = item_id
        self.cluster_id = cluster_id

    def __getattr__(self, name):
        # Expose context values as attributes, e.g. event.user.
        return self.context[name]

class StreamCluster(object):
    def __init__(self, slug, date_added, events, clustered_on=None,
                 cluster_id=None):
        self.slug = slug
        self.date_added = date_added
        self.events = events
        self.clustered_on = clustered_on
        self.cluster_id = cluster_id

    def __iter__(self):
        return iter(self.events)

    def __len__(self):
        return len(self.events)

    def __unicode__(self):
        return self.render()

    @property
    def date_updated(self):
        return max(e.timestamp for e in self.events)

    def render(self, context=None):
        if context is None:
            context = Context()
        context.push()
        context["query_object"] = self.clustered_on
        context["cluster_id"] = self.cluster_id
        if len(self.events) == 1:
            context["event"] = self.events[0]
            # The context variable for a full stream is often named events;
            # since we're providing the full context, set this to None so the
            # event template isn't confused (since events means something
            # different). This is safe since we push and pop the context
            # before and after modifying it.
            context["events"] = None
        else:
            context["events"] = self
            # Same reason as above.
            context["event"] = None
        try:
            return render_to_string("events/event/%s.html" % self.slug, context)
        finally:
            context.pop()

RawResults = namedtuple("RawResults", ["field", "vals"])
Status = namedtuple("Status", ["adds", "removes"])

class Stream(object):
    def __init__(self, *objs, **kwargs):
        event_type = kwargs.pop("event_type", None)
        limit = kwargs.pop("limit", 20)
        offset = kwargs.pop("offset", 0)

        if kwargs:
            raise TypeError("Unexpected keyword arguments: %s" % kwargs)

        # Wrap bare model instances in a ModelContextItemType subclass so
        # everything exposes lookup_key().
        final_objs = []
        for obj in objs:
            if isinstance(obj, Model):
                class Klass(ModelContextItemType):
                    model = type(obj)
                final_objs.append(Klass(obj))
            else:
                final_objs.append(obj)

        self.objs = final_objs
        self.event_type = event_type
        self.limit = limit
        self.offset = offset

    def __iter__(self):
        redis = get_redis_connection()

        postfix = ""
        if self.event_type is not None:
            postfix += ":%s" % (self.event_type.slug)
        lookup_keys = [
            obj.lookup_key() + postfix
            for obj in self.objs
        ]

        if len(lookup_keys) >= 2:
            # Combine multiple streams into one sorted set whose name is a
            # hash of the source keys.
            s = hashlib.sha1()
            for lookup_key in lookup_keys:
                s.update(lookup_key)
            key = s.hexdigest()
            redis.zunionstore(key, lookup_keys, aggregate="MIN")
            # Expire it in 5 minutes, long enough that paginating shouldn't
            # require a recompute, but short enough to not clutter the place
            # up.
            redis.expire(key, 60 * 5)
        elif len(lookup_keys) == 1:
            key = lookup_keys[0]
        else:
            assert not self.event_type
            key = "ALL_EVENTS"

        statuses = defaultdict(lambda: Status(0, 0))
        # zrevrange bounds are inclusive, hence the - 1 on the end index.
        items = list(redis.zrevrange(key, self.offset,
                                     self.offset + self.limit - 1,
                                     withscores=True))
        parsed_items = []
        context_items = {}
        for cluster, score in items:
            data = json.loads(cluster)
            parsed_items.append((data, score))
            for o in data["items"]:
                status_key = self._status_key(data["slug"], o)
                status = statuses[status_key]
                if o["remove"]:
                    statuses[status_key] = status._replace(removes=status.removes + 1)
                else:
                    statuses[status_key] = status._replace(adds=status.adds + 1)
                # Collect every serialized context value, grouped by type, so
                # they can be deserialized in bulk below.
                for ckey, val in o["context"].iteritems():
                    field = EventType.registry[data["slug"]].context_shape[ckey]
                    unique = field.unique_key()
                    if unique not in context_items:
                        context_items[unique] = RawResults(field, set())
                    context_items[unique].vals.add(val)

        final_context_items = {}
        for unique, (field, vals) in context_items.iteritems():
            final_context_items[unique] = field.deserialize_bulk(vals)

        for data, score in parsed_items:
            cluster_items = []
            timestamp = datetime.fromtimestamp(score)
            for o in data["items"]:
                item = self._convert_item(
                    data["slug"], o, timestamp, statuses, final_context_items, data["cluster_id"]
                )
                if item is not None:
                    cluster_items.append(item)
            if cluster_items:
                clustered_on = None
                if data["clustered_on"] is not None:
                    clustered_on = cluster_items[0].context[data["clustered_on"]]
                yield StreamCluster(
                    data["slug"],
                    timestamp,
                    cluster_items,
                    clustered_on,
                    data["cluster_id"]
                )
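
    # _convert_item pairs add events with remove events that share the same
    # (slug, context) status key: a record is only emitted while its own kind
    # (adds or removes) is in surplus, so a follow followed by an unfollow
    # cancels out. Surviving records are rehydrated into StreamItems using
    # the bulk-deserialized context values computed in __iter__.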
    def _convert_item(self, slug, data, timestamp, statuses, context_items,
                      cluster_id):
        status_key = self._status_key(slug, data)
        status = statuses[status_key]
        if data["remove"]:
            current_attr, other_attr = "removes", "adds"
        else:
            current_attr, other_attr = "adds", "removes"

        if getattr(status, current_attr) <= getattr(status, other_attr):
            return
        statuses[status_key] = status._replace(**{
            current_attr: getattr(status, current_attr) - 1
        })

        context = {}
        for key, value in data["context"].iteritems():
            field = EventType.registry[slug].context_shape[key]
            context[key] = context_items[field.unique_key()][value]

        return StreamItem(
            slug,
            datetime(*data["timestamp"]),
            context,
            data["id"],
            cluster_id,
        )

    def _status_key(self, slug, data):
        return (
            slug,
            tuple(sorted(data["context"].items()))
        )

--------------------------------------------------------------------------------
/timeline/tests.py:
--------------------------------------------------------------------------------
from contextlib import contextmanager
from datetime import datetime, timedelta

from django.conf import settings
from django.contrib.auth.models import User as UserModel
from django.db import connection, connections, DEFAULT_DB_ALIAS
from django.template import TemplateDoesNotExist
from django.test import TestCase

from .base import get_redis_connection, EventType, ContextItemType, Stream, StreamCluster
from .models import StreamItem, StreamCluster as StreamClusterModel

class EventTestCase(TestCase):
    @contextmanager
    def assert_raises(self, error_type):
        exc_info = {}
        try:
            yield exc_info
        except error_type, e:
            exc_info["exception"] = e
        except Exception, e:
            self.fail("Exception of type %s expected, got %s" % (error_type, e))
        else:
            self.fail("Exception of type %s expected, but not raised" % error_type)
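

# Minimal event types used as fixtures for the tests below. User is a
# ContextItemType that accepts plain strings, so most tests don't need
# database-backed context values.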
class User(ContextItemType):
    @classmethod
    def valid_obj(cls, obj):
        return isinstance(obj, basestring)


class Follow(EventType):
    slug = "follow"
    context_shape = {
        "follower": User,
        "following": User,
    }
    queryable_by = ["follower", "following"]
    default_cluster_by = "follower"

class Poke(EventType):
    slug = "poke"
    context_shape = {
        "poker": User,
        "pokee": User,
    }
    queryable_by = ["poker", "pokee"]
    default_cluster_by = "poker"

class SomeEvent(EventType):
    slug = "some-event"
    context_shape = {
        "user": UserModel
    }
    queryable_by = ["user"]
    default_cluster_by = "user"

class AnotherEvent(EventType):
    slug = "another-event"
    context_shape = {
        "user": UserModel
    }
    queryable_by = ["user"]
    default_cluster_by = "user"

class Review(EventType):
    slug = "review"
    context_shape = {
        "reviewer": User,
    }
    queryable_by = ["reviewer"]
    cluster = False
    default_cluster_by = "reviewer"

_missing = object()

class EventTests(EventTestCase):
    def setUp(self):
        self.original_REDIS_SETTINGS = getattr(settings, "REDIS_SETTINGS", _missing)
        settings.REDIS_SETTINGS = {
            "db": 9,
        }

    def tearDown(self):
        get_redis_connection().flushdb()
        if self.original_REDIS_SETTINGS is _missing:
            del settings.REDIS_SETTINGS
        else:
            settings.REDIS_SETTINGS = self.original_REDIS_SETTINGS

    def assert_stream_equal(self, stream, objs):
        got = list(stream)
        self.assertEqual(len(got), len(objs))
        for cluster, expected in zip(got, objs):
            self.assertEqual(len(cluster), len(expected))
            self.assertEqual(cluster.slug, expected.slug)
            self.assertEqual(cluster.date_added, expected.date_added)
            self.assertEqual(cluster.date_updated, expected.date_updated)
            # This isn't relevant to most tests, so we make it optional.
            if expected.clustered_on is not None:
                self.assertEqual(cluster.clustered_on, expected.clustered_on)
            for res, obj in zip(cluster, expected):
                self.assertEqual(res.timestamp, obj.timestamp)
                self.assertEqual(res.context, obj.context)
                self.assertEqual(res.slug, obj.slug)


    def test_context_item_coercion(self):
        with self.assert_raises(ValueError):
            Follow({
                "follower": "alex",
            }, timestamp=datetime(2010, 10, 8))
        with self.assert_raises(TypeError):
            Follow({
                "following": "alex",
                "follower": 2
            }, timestamp=datetime(2010, 10, 8))

    def test_event_save(self):
        event = Follow({
            "follower": "alex",
            "following": "einstein",
        })
        event.save()

        self.assertEqual(StreamItem.objects.count(), 1)
        # One for the follower, one for the following.
        self.assertEqual(StreamClusterModel.objects.count(), 2)
        s = Stream(User("alex"))
        c = iter(s).next()
        self.assertEqual(
            c.cluster_id,
            StreamClusterModel.objects.get(clustered_on="follower").pk
        )

    def test_event_stream_single(self):
        event = Follow({
            "following": "alex",
            "follower": "einstein",
        }, timestamp=datetime(2010, 10, 8))
        event.save()

        self.assert_stream_equal(Stream(User("alex")), [
            StreamCluster("follow", datetime(2010, 10, 8), [
                Follow({
                    "following": "alex",
                    "follower": "einstein",
                }, datetime(2010, 10, 8)),
            ])
        ])

    def test_event_stream_multiple(self):
        event = Follow({
            "following": "alex",
            "follower": "jacob",
        }, timestamp=datetime(2010, 10, 8, 12, 30, 12))
        event.save()

        self.assert_stream_equal(Stream(User("alex"), User("jacob")), [
            StreamCluster("follow", datetime(2010, 10, 8, 12, 30, 12), [
                Follow({
                    "following": "alex",
                    "follower": "jacob",
                }, datetime(2010, 10, 8, 12, 30, 12)),
            ]),
            StreamCluster("follow", datetime(2010, 10, 8, 12, 30, 12), [
                Follow({
                    "following": "alex",
                    "follower": "jacob",
                }, datetime(2010, 10, 8, 12, 30, 12)),
            ]),
        ])

    def test_cluster(self):
        Follow({
            "following": "alex",
            "follower": "jacob",
        }, timestamp=datetime(2010, 10, 8, 12, 30)).save()
        Follow({
            "following": "alex",
            "follower": "daniel",
        }, timestamp=datetime(2010, 10, 8, 12, 31)).save()
        Follow({
            "following": "alex",
            "follower": "james",
        }, timestamp=datetime(2010, 10, 8, 13, 30)).save()

        self.assert_stream_equal(Stream(User("alex")), [
            StreamCluster("follow", datetime(2010, 10, 8, 13, 30), [
                Follow({
                    "following": "alex",
                    "follower": "james",
                }, datetime(2010, 10, 8, 13, 30))
            ]),
            StreamCluster("follow", datetime(2010, 10, 8, 12, 30), [
                Follow({
                    "following": "alex",
                    "follower": "jacob",
                }, datetime(2010, 10, 8, 12, 30)),
                Follow({
                    "following": "alex",
                    "follower": "daniel",
                }, datetime(2010, 10, 8, 12, 31)),
            ]),
        ])

    def test_model(self):
        u = UserModel.objects.create_user("joe", "joe@schmoe.net", "abc123")
        d = datetime(2010, 10, 21, 15, 56, 22)
        SomeEvent({
            "user": u,
        }, timestamp=d).save()

        self.assert_stream_equal(Stream(u), [
            StreamCluster("some-event", d, [
                SomeEvent({
                    "user": u,
                }, d)
            ])
        ])

    def test_model_clustered_on(self):
        u = UserModel.objects.create_user("joe", "joe@schmoe.net", "abc123")
        d = datetime(2010, 10, 21, 15, 56, 22, 330000)
        SomeEvent({
            "user": u,
        }, timestamp=d).save()

        s = Stream(u)
        c = iter(s).next()
        self.assertEqual(c.clustered_on, u)

    def test_render_cluster(self):
        Follow({
            "following": "alex",
            "follower": "daniel",
        }).save()
        Follow({
            "following": "alex",
            "follower": "aaron",
        }).save()
        s = Stream(User("alex"))
        f = iter(s).next()

        with self.assert_raises(TemplateDoesNotExist) as exc_info:
            f.render()
        self.assertEqual(exc_info["exception"].args, ("events/event/follow.html",))

    def test_remove(self):
        d1 = datetime(2010, 10, 8, 9, 32)
        d2 = datetime(2010, 10, 8, 9, 30)
        c = {
            "following": "alex",
            "follower": "daniel",
        }
        Follow(c, d1).save()
        Follow(c, d2, remove=True).save()

        self.assert_stream_equal(Stream(User("alex")), [])

    # TODO: Design decision
    # def test_remove_first(self):
    #     '''
    #     A case where an event is removed, but there was no paired event
    #     before it. This can happen if the event for following was not
    #     created before unfollowing.
    #     '''
    #     d1 = datetime(2010, 10, 8, 9, 32)
    #     d2 = datetime(2010, 10, 8, 9, 30)
    #     c = {
    #         "following": "alex",
    #         "follower": "daniel",
    #     }
    #     Follow(c, d1, remove=True).save()
    #     # Follow(c, d2).save()
    #
    #     self.assert_stream_equal(Stream(User("alex")), [])

    def test_remove_cluster(self):
        d1 = datetime(2010, 10, 8, 9, 30)
        d2 = datetime(2010, 10, 8, 9, 32)
        c = {
            "following": "alex",
            "follower": "daniel",
        }
        Follow(c, d1).save()
        Follow(c, d1, remove=True).save()
        Follow({
            "following": "alex",
            "follower": "jacob",
        }, d2).save()

        self.assert_stream_equal(Stream(User("alex")), [
            StreamCluster("follow", d1, [
                Follow({
                    "following": "alex",
                    "follower": "jacob",
                }, d2)
            ]),
        ])

    def test_cluster_types(self):
        d1 = datetime(2010, 10, 8, 9, 32)
        d2 = datetime(2010, 10, 8, 9, 33)
        fc1 = {
            "following": "alex",
            "follower": "daniel"
        }
        fc2 = {
            "following": "alex",
            "follower": "ryan"
        }
        pc1 = {
            "pokee": "alex",
            "poker": "michael"
        }
        pc2 = {
            "pokee": "alex",
            "poker": "ralph"
        }
        Follow(fc1, d1).save()
        Follow(fc2, d2).save()
        Poke(pc1, d1).save()
        Poke(pc2, d2).save()

        self.assert_stream_equal(Stream(User("alex")), [
            StreamCluster("poke", d1, [
                Poke(pc1, d1),
                Poke(pc2, d2),
            ]),
            StreamCluster("follow", d1, [
                Follow(fc1, d1),
                Follow(fc2, d2),
            ])
        ])

    def test_unpaired_remove(self):
        d = datetime(2010, 10, 8, 9, 32)
        c = {
            "following": "alex",
            "follower": "daniel"
        }
        Follow(c, d, remove=True).save()
        self.assert_stream_equal(Stream(User("alex")), [
            StreamCluster("follow", d, [
                Follow(c, d, remove=True)
            ]),
        ])

    def test_clustered_on(self):
        d = datetime(2010, 10, 8, 9, 32)
        c1 = {
            "following": "alex",
            "follower": "daniel"
        }
        Follow(c1, d).save()
        s = Stream(User("alex"))
        c = iter(s).next()
        self.assertEqual(c.clustered_on, "alex")

    def test_no_cluster(self):
        d = datetime(2010, 10, 8, 9, 32)
        c = {"reviewer": "Chris"}
        Review(c, d).save()
        Review(c, d).save()
        self.assert_stream_equal(Stream(User("Chris")), [
            StreamCluster("review", d, [
                Review(c, d),
            ]),
            StreamCluster("review", d, [
                Review(c, d),
            ]),
        ])

    def test_efficient_context_deserialize(self):
        d1 = datetime(2010, 10, 8, 9, 32)
        d2 = datetime(2010, 10, 8, 9, 33)
        u1 = UserModel.objects.create_user("joe", "joe@schmoe.net", "abc123")
        u2 = UserModel.objects.create_user("bob", "bob@schmoe.net", "123abc")
        SomeEvent({"user": u1}, d1).save()
        SomeEvent({"user": u2}, d2).save()

        # 1 query to get all of the users, that's it.
        with self.assertNumQueries(1):
            list(Stream(u1, u2))

    def test_efficient_context_deserialize_different_events(self):
        d1 = datetime(2010, 10, 8, 9, 32)
        d2 = datetime(2010, 10, 8, 9, 33)
        u1 = UserModel.objects.create_user("joe", "joe@schmoe.net", "abc123")
        u2 = UserModel.objects.create_user("bob", "bob@schmoe.net", "123abc")
        SomeEvent({"user": u1}, d1).save()
        AnotherEvent({"user": u2}, d2).save()

        # 1 query to get all of the users, even though they're on different
        # event types.
        with self.assertNumQueries(1):
            list(Stream(u1, u2))

    def test_multiple_create_remove(self):
        c = {
            "follower": "alex",
            "following": "daniel"
        }
        ds = [
            datetime(2010, 10, 8, 12) + timedelta(minutes=1) * i for i in xrange(4)
        ]
        for d in ds:
            Follow(c, d).save()
            Follow(c, d + timedelta(seconds=30), remove=True).save()
        Follow(c, datetime(2010, 10, 8, 12, 4, 30)).save()

        self.assert_stream_equal(Stream(User("alex")), [
            StreamCluster("follow", datetime(2010, 10, 8, 12), [
                Follow(c, datetime(2010, 10, 8, 12)),
            ])
        ])

    def test_cluster_db(self):
        c = {
            "follower": "alex",
            "following": "daniel"
        }
        ds = [
            datetime(2010, 10, 8, 12) + timedelta(minutes=1) * i for i in xrange(4)
        ]
        for d in ds:
            Follow(c, d).save()

        self.assertEqual(StreamClusterModel.objects.count(), 2)
        s = StreamClusterModel.objects.get(clustered_on="follower")
        self.assertEqual(s.event_type, "follow")
        self.assertEqual(s.items.count(), 4)

    def test_all_stream(self):
        d1 = datetime(2010, 10, 8, 12, 30)
        d2 = datetime(2010, 10, 8, 12, 33)

        c1 = {
            "follower": "alex",
            "following": "daniel"
        }
        c2 = {
            "follower": "aaron",
            "following": "charlie",
        }

        Follow(c1, d1).save()
        Follow(c2, d2).save()

        self.assert_stream_equal(Stream(), [
            StreamCluster("follow", d2, [
                Follow(c2, d2),
            ], clustered_on="aaron"),
            StreamCluster("follow", d1, [
                Follow(c1, d1),
            ], clustered_on="alex")
        ])

    def test_shared_key(self):
        d1 = datetime(2010, 10, 8, 12, 30)
        d2 = datetime(2010, 10, 8, 12, 33)
        c1 = {
            "follower": "alex",
            "following": "daniel"
        }
        c2 = {
            "follower": "aaron",
            "following": "charlie",
        }
        Follow(c1, d1).save()
        Follow(c2, d2).save()

        redis = get_redis_connection()
        # 1 - ALL_EVENTS
        # 8 - each username + each username:follow
        # 9 total
        self.assertEqual(len(redis.keys()), 9)

        list(Stream(User("alex"), User("aaron")))
        self.assertEqual(len(redis.keys()), 10)
        list(Stream(User("alex"), User("aaron")))
        self.assertEqual(len(redis.keys()), 10)

    def test_offset(self):
        d1 = datetime(2010, 10, 8, 12, 30)
        d2 = datetime(2010, 10, 8, 12, 33)
        c1 = {
            "follower": "alex",
            "following": "daniel"
        }
        c2 = {
            "follower": "aaron",
            "following": "charlie",
        }
        Follow(c1, d1).save()
        Follow(c2, d2).save()

        self.assert_stream_equal(Stream(offset=1), [
            StreamCluster("follow", d1, [
                Follow(c1, d1)
            ])
        ])

    def test_cluster_by_model(self):
        u = UserModel.objects.create_user("me", "me@me.com", "hi")

        d1 = datetime(2010, 10, 8, 12, 30)
        d2 = datetime(2010, 10, 8, 12, 33)
        c = {"user": u}
        SomeEvent(c, d1).save()
        SomeEvent(c, d2).save()

        self.assert_stream_equal(Stream(u), [
            StreamCluster("some-event", d1, [
                SomeEvent(c, d1),
                SomeEvent(c, d2),
            ])
        ])

    def test_cluster_id(self):
        d1 = datetime(2010, 10, 8, 12, 30)
        d2 = datetime(2010, 10, 8, 12, 33)
        c1 = {
            "follower": "alex",
            "following": "daniel"
        }
        c2 = {
            "follower": "alex",
            "following": "aaron",
        }
        Follow(c1, d1).save()

        s = iter(Stream(User("alex"))).next()
        self.assertEqual(
            s.cluster_id,
            StreamClusterModel.objects.get(clustered_on="follower").pk
        )
        self.assertEqual(s.events[0].cluster_id, s.cluster_id)

        Follow(c2, d2).save()
        s = iter(Stream(User("alex"))).next()
        self.assertEqual(
            s.cluster_id,
            StreamClusterModel.objects.get(clustered_on="follower").pk
        )
        self.assertEqual(s.events[0].cluster_id, s.cluster_id)
        self.assertEqual(s.events[1].cluster_id, s.cluster_id)

    def test_times(self):
        d1 = datetime(2010, 10, 8, 12, 30)
        d2 = datetime(2010, 10, 8, 12, 33)
        c1 = {
            "follower": "alex",
            "following": "daniel"
        }
        c2 = {
            "follower": "alex",
            "following": "aaron",
        }
        Follow(c1, d1).save()
        Follow(c2, d2).save()

        s = iter(Stream(User("alex"))).next()
        self.assertEqual(s.date_added, d1)
        self.assertEqual(s.date_updated, d2)
--------------------------------------------------------------------------------