From 09d6d192f8db0f68f6ac4dc01fb4848f04def668 Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Tue, 10 Oct 2017 10:42:02 -0700 Subject: [PATCH 01/56] Updates python dateutil install requirements --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index ad645f1..b193186 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ test_suite='nose.collector', install_requires=[ 'riak==2.5.4', - 'python-dateutil==1.5', + 'python-dateutil>=1.5, != 2.0', 'protobuf==2.6.1', ], options={'easy_install': {'allow_hosts': 'pypi.python.org'}}, From e666d7a0ea36478bc8f5e4e73dcb8504f73758e9 Mon Sep 17 00:00:00 2001 From: Jonas Trappenberg Date: Sun, 19 Nov 2017 16:09:11 -0800 Subject: [PATCH 02/56] Let riak handle its dependencies --- setup.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setup.py b/setup.py index b193186..d8af902 100644 --- a/setup.py +++ b/setup.py @@ -20,9 +20,8 @@ packages=find_packages(exclude=['tests']), test_suite='nose.collector', install_requires=[ - 'riak==2.5.4', + 'riak', 'python-dateutil>=1.5, != 2.0', - 'protobuf==2.6.1', ], options={'easy_install': {'allow_hosts': 'pypi.python.org'}}, tests_require=tests_require, From f85a00e1084201bac5e5ba441333add7f34cdcc2 Mon Sep 17 00:00:00 2001 From: Jonas Trappenberg Date: Sun, 19 Nov 2017 16:52:38 -0800 Subject: [PATCH 03/56] Update exception-catching syntax to support python 3 --- sunspear/backends/riak.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index b3271f9..0444b92 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -570,7 +570,7 @@ def _dehydrate_sub_activity(self, sub_activity, obj_list, skip_sub_activities=Fa for i, item in enumerate(sub_activity[collection]['items']): try: dehydrated_sub_items.append(self._dehydrate_sub_activity(item, obj_list)) - except KeyError, e: + except KeyError as e: pass sub_activity[collection]['items'] = 
dehydrated_sub_items sub_activity[collection]['totalItems'] = len(dehydrated_sub_items) From eabb687918396dca121bef60a59fa5f830dafee3 Mon Sep 17 00:00:00 2001 From: Jonas Trappenberg Date: Thu, 28 Dec 2017 15:31:37 -0800 Subject: [PATCH 04/56] Pass through unrecognized keyword arguments to riak client --- sunspear/backends/riak.py | 42 +++++++++++++++++---------------------- sunspear/lib/rfc3339.py | 1 - 2 files changed, 18 insertions(+), 25 deletions(-) diff --git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index 0444b92..553bb66 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -149,38 +149,32 @@ class RiakBackend(BaseBackend): def __init__( self, protocol="pbc", nodes=[], objects_bucket_name="objects", - activities_bucket_name="activities", **kwargs): - - self._riak_backend = RiakClient(protocol=protocol, nodes=nodes) - - r_value = kwargs.get("r") - w_value = kwargs.get("w") - dw_value = kwargs.get("dw") - pr_value = kwargs.get("pr") - pw_value = kwargs.get("pw") + activities_bucket_name="activities", r=None, w=None, dw=None, + pr=None, pw=None, **kwargs): + self._riak_backend = RiakClient(protocol=protocol, nodes=nodes, **kwargs) self._objects = self._riak_backend.bucket(objects_bucket_name) self._activities = self._riak_backend.bucket(activities_bucket_name) - if r_value: - self._objects.r = r_value - self._activities.r = r_value + if r: + self._objects.r = r + self._activities.r = r - if w_value: - self._objects.w = w_value - self._activities.w = w_value + if w: + self._objects.w = w + self._activities.w = w - if dw_value: - self._objects.dw = dw_value - self._activities.dw = dw_value + if dw: + self._objects.dw = dw + self._activities.dw = dw - if pr_value: - self._objects.pr = pr_value - self._activities.pr = pr_value + if pr: + self._objects.pr = pr + self._activities.pr = pr - if pw_value: - self._objects.pw = pw_value - self._activities.pw = pw_value + if pw: + self._objects.pw = pw + self._activities.pw = pw def 
clear_all(self, **kwargs): """ diff --git a/sunspear/lib/rfc3339.py b/sunspear/lib/rfc3339.py index bfca1f4..a0057e5 100644 --- a/sunspear/lib/rfc3339.py +++ b/sunspear/lib/rfc3339.py @@ -28,7 +28,6 @@ import datetime import time -import unittest def _timezone(utc_offset): From 4f3aad88cdc31894fefe9c58c9f95a18161107b4 Mon Sep 17 00:00:00 2001 From: Jonas Trappenberg Date: Sun, 19 Nov 2017 16:53:45 -0800 Subject: [PATCH 05/56] Check for string in python3-compatible way --- setup.py | 3 +- sunspear/activitystreams/models.py | 10 +++---- sunspear/backends/riak.py | 25 ++++++++-------- tests/test_activitystreams.py | 17 +++++------ tests/test_backend.py | 46 +++++++++++++++--------------- 5 files changed, 52 insertions(+), 49 deletions(-) diff --git a/setup.py b/setup.py index d8af902..df455e1 100644 --- a/setup.py +++ b/setup.py @@ -20,8 +20,9 @@ packages=find_packages(exclude=['tests']), test_suite='nose.collector', install_requires=[ - 'riak', 'python-dateutil>=1.5, != 2.0', + 'riak', + 'six', ], options={'easy_install': {'allow_hosts': 'pypi.python.org'}}, tests_require=tests_require, diff --git a/sunspear/activitystreams/models.py b/sunspear/activitystreams/models.py index 02ca5e4..213a158 100644 --- a/sunspear/activitystreams/models.py +++ b/sunspear/activitystreams/models.py @@ -1,10 +1,10 @@ -from sunspear.exceptions import SunspearValidationException - -from sunspear.lib.rfc3339 import rfc3339 +import datetime +import six from dateutil.parser import parse -import datetime +from sunspear.exceptions import SunspearValidationException +from sunspear.lib.rfc3339 import rfc3339 __all__ = ('Model', 'Activity', 'ReplyActivity', 'LikeActivity', 'Object', 'MediaLink', ) @@ -113,7 +113,7 @@ def get_dict(self): def _parse_date(self, date=None, utc=True, use_system_timezone=False): dt = None if date is None or not isinstance(date, datetime.datetime): - if isinstance(date, basestring): + if isinstance(date, six.string_types): try: dt = parse(date) except ValueError: diff 
--git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index 553bb66..7f6e4a9 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -17,16 +17,17 @@ """ from __future__ import absolute_import -from sunspear.activitystreams.models import Object, Activity, Model -from sunspear.exceptions import (SunspearValidationException) -from sunspear.backends.base import BaseBackend, SUB_ACTIVITY_MAP +import calendar +import copy +import datetime +import uuid +import six from riak import RiakClient -import uuid -import copy -import datetime -import calendar +from sunspear.activitystreams.models import Activity, Model, Object +from sunspear.backends.base import SUB_ACTIVITY_MAP, BaseBackend +from sunspear.exceptions import SunspearValidationException __all__ = ('RiakBackend', ) @@ -586,9 +587,9 @@ def _extract_object_keys(self, activity, skip_sub_activities=False): for in_reply_to_obj in objects['inReplyTo']] if isinstance(objects, list): for item in objects: - if isinstance(item, basestring): + if isinstance(item, six.string_types): keys.append(item) - if isinstance(objects, basestring): + if isinstance(objects, six.string_types): keys.append(objects) if not skip_sub_activities: @@ -614,9 +615,9 @@ def _dehydrate_object_keys(self, activity, objects_dict, skip_sub_activities=Fal objects_dict, skip_sub_activities=skip_sub_activities) if isinstance(activity_objects, list): for i, obj_id in enumerate(activity_objects): - if isinstance(activity[object_key][i], basestring): + if isinstance(activity[object_key][i], six.string_types): activity[object_key][i] = objects_dict.get(obj_id, {}) - if isinstance(activity_objects, basestring): + if isinstance(activity_objects, six.string_types): activity[object_key] = objects_dict.get(activity_objects, {}) if not skip_sub_activities: @@ -678,7 +679,7 @@ def _extract_id(self, activity_or_id): Helper that returns an id if the activity has one. 
""" this_id = None - if isinstance(activity_or_id, basestring): + if isinstance(activity_or_id, six.string_types): this_id = activity_or_id elif isinstance(activity_or_id, dict): this_id = activity_or_id.get('id', None) diff --git a/tests/test_activitystreams.py b/tests/test_activitystreams.py index 7f492f0..0df9f24 100644 --- a/tests/test_activitystreams.py +++ b/tests/test_activitystreams.py @@ -1,12 +1,13 @@ from __future__ import absolute_import -from nose.tools import ok_, eq_, raises, set_trace -from mock import MagicMock, call, ANY +import datetime -from sunspear.activitystreams.models import Activity, MediaLink, Object, Model, ReplyActivity, LikeActivity -from sunspear.exceptions import SunspearValidationException +import six +from mock import MagicMock +from nose.tools import eq_, ok_, raises -import datetime +from sunspear.activitystreams.models import Activity, MediaLink, Model, Object +from sunspear.exceptions import SunspearValidationException class TestActivityModel(object): @@ -214,7 +215,7 @@ def test__set_defaults(self): obj = Model({}, backend=MagicMock()) obj_dict = obj._set_defaults({'id': 12}) - ok_(isinstance(obj_dict.get('id'), basestring)) + ok_(isinstance(obj_dict.get('id'), six.string_types)) def test__set_defaults_no_id_does_not_fail(self): obj = Model({}, backend=MagicMock()) @@ -229,7 +230,7 @@ def test__parse_date(self): eq_(obj._parse_date(d), d.strftime('%Y-%m-%dT%H:%M:%S') + "Z") #badly formatted string date - ok_(isinstance(obj._parse_date(date="qwerty"), basestring)) + ok_(isinstance(obj._parse_date(date="qwerty"), six.string_types)) #no date passed - ok_(isinstance(obj._parse_date(date=None), basestring)) + ok_(isinstance(obj._parse_date(date=None), six.string_types)) diff --git a/tests/test_backend.py b/tests/test_backend.py index 7b160ef..2f8ea21 100644 --- a/tests/test_backend.py +++ b/tests/test_backend.py @@ -1,13 +1,14 @@ from __future__ import absolute_import -from nose.tools import ok_, eq_, set_trace, raises -from mock 
import MagicMock, call, ANY +import datetime + +import six +from mock import ANY, MagicMock, call +from nose.tools import eq_, ok_, raises, set_trace -from sunspear.exceptions import SunspearValidationException from sunspear.aggregators.property import PropertyAggregator from sunspear.backends.riak import RiakBackend - -import datetime +from sunspear.exceptions import SunspearValidationException riak_connection_options = { "nodes": [ @@ -172,7 +173,7 @@ def test_create_activity_stored_as_sparse(self): riak_obj = self._backend._activities.get('5') riak_obj_data = riak_obj.data - ok_(isinstance(riak_obj_data.get("target"), basestring)) + ok_(isinstance(riak_obj_data.get("target"), six.string_types)) def test_delete_activity(self): self._backend._activities.get('5').delete() @@ -730,8 +731,8 @@ def test_create_reply_maintains_dehydrate_state(self): self._backend.create_activity({"id": 5, "title": "Stream Item", "verb": "post", "actor": actor, "object": obj}) riak_obj_data = self._backend._activities.get(key="5").data - ok_(isinstance(riak_obj_data.get("actor"), basestring)) - ok_(isinstance(riak_obj_data.get("object"), basestring)) + ok_(isinstance(riak_obj_data.get("actor"), six.string_types)) + ok_(isinstance(riak_obj_data.get("object"), six.string_types)) #now create a reply for the activity reply_activity_dict, activity_obj_dict = self._backend.sub_activity_create( @@ -739,8 +740,8 @@ def test_create_reply_maintains_dehydrate_state(self): sub_activity_verb='reply') riak_obj_data = self._backend._activities.get(key="5").data - ok_(isinstance(riak_obj_data.get("actor"), basestring)) - ok_(isinstance(riak_obj_data.get("object"), basestring)) + ok_(isinstance(riak_obj_data.get("actor"), six.string_types)) + ok_(isinstance(riak_obj_data.get("object"), six.string_types)) def test_create_reply_with_extra_data(self): self._backend._activities.get('5').delete() @@ -843,8 +844,8 @@ def test_create_like_maintains_dehydrate_state(self): self._backend.create_activity({"id": 5, 
"title": "Stream Item", "verb": "post", "actor": actor, "object": obj}) riak_obj_data = self._backend._activities.get(key="5").data - ok_(isinstance(riak_obj_data.get("actor"), basestring)) - ok_(isinstance(riak_obj_data.get("object"), basestring)) + ok_(isinstance(riak_obj_data.get("actor"), six.string_types)) + ok_(isinstance(riak_obj_data.get("object"), six.string_types)) #now create a reply for the activity like_activity_dict, activity_obj_dict = self._backend.sub_activity_create( @@ -852,8 +853,8 @@ def test_create_like_maintains_dehydrate_state(self): sub_activity_verb='like') riak_obj_data = self._backend._activities.get(key="5").data - ok_(isinstance(riak_obj_data.get("actor"), basestring)) - ok_(isinstance(riak_obj_data.get("object"), basestring)) + ok_(isinstance(riak_obj_data.get("actor"), six.string_types)) + ok_(isinstance(riak_obj_data.get("object"), six.string_types)) def test_create_like(self): self._backend._activities.get('5').delete() @@ -950,15 +951,15 @@ def test_delete_like_maintains_dehydrated_state(self): 5, actor2_id, "", sub_activity_verb='like') riak_obj_data = self._backend._activities.get(key="5").data - ok_(isinstance(riak_obj_data.get("actor"), basestring)) - ok_(isinstance(riak_obj_data.get("object"), basestring)) + ok_(isinstance(riak_obj_data.get("actor"), six.string_types)) + ok_(isinstance(riak_obj_data.get("object"), six.string_types)) #now delete the like and make sure everything is ok: self._backend.sub_activity_delete(like_activity_dict['id'], 'like') riak_obj_data = self._backend._activities.get(key="5").data - ok_(isinstance(riak_obj_data.get("actor"), basestring)) - ok_(isinstance(riak_obj_data.get("object"), basestring)) + ok_(isinstance(riak_obj_data.get("actor"), six.string_types)) + ok_(isinstance(riak_obj_data.get("object"), six.string_types)) def test_reply_delete_maintains_dehydrated_state(self): self._backend._activities.get('5').delete() @@ -983,16 +984,16 @@ def test_reply_delete_maintains_dehydrated_state(self): 
5, actor2_id, "This is a reply.", sub_activity_verb='reply') riak_obj_data = self._backend._activities.get(key="5").data - ok_(isinstance(riak_obj_data.get("actor"), basestring)) - ok_(isinstance(riak_obj_data.get("object"), basestring)) + ok_(isinstance(riak_obj_data.get("actor"), six.string_types)) + ok_(isinstance(riak_obj_data.get("object"), six.string_types)) #now delete the reply and make sure everything is ok: self._backend.sub_activity_delete( reply_activity_dict['id'], 'reply') riak_obj_data = self._backend._activities.get(key="5").data - ok_(isinstance(riak_obj_data.get("actor"), basestring)) - ok_(isinstance(riak_obj_data.get("object"), basestring)) + ok_(isinstance(riak_obj_data.get("actor"), six.string_types)) + ok_(isinstance(riak_obj_data.get("object"), six.string_types)) def test_reply_delete(self): self._backend._activities.get('5').delete() @@ -1719,4 +1720,3 @@ def test_create_sub_activity_indexes(self): eq_(filter(lambda x: x[0] == 'actor_bin', riak_obj.indexes)[0][1], actor2_id) eq_(filter(lambda x: x[0] == 'object_bin', riak_obj.indexes)[0][1], like_activity_dict['object']['id']) eq_(filter(lambda x: x[0] == 'inreplyto_bin', riak_obj.indexes)[0][1], '5') - From db3d0f6a026332f76b3abb29e7e07bf81fe22f96 Mon Sep 17 00:00:00 2001 From: Jonas Trappenberg Date: Sun, 19 Nov 2017 16:54:36 -0800 Subject: [PATCH 06/56] long doesn't exist in python 3 --- sunspear/backends/riak.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index 7f6e4a9..3a7d864 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -699,7 +699,7 @@ def _get_timestamp(self): returns a unix timestamp representing the ``datetime`` object """ dt_obj = datetime.datetime.utcnow() - return long((calendar.timegm(dt_obj.utctimetuple()) * 1000)) + (dt_obj.microsecond / 1000) + return int((calendar.timegm(dt_obj.utctimetuple()) * 1000)) + (dt_obj.microsecond / 1000) def get_new_id(self): """ @@ -709,5 
+709,3 @@ def get_new_id(self): :return: a new id """ return uuid.uuid1().hex - # now = datetime.datetime.utcnow() - # return str(long(calendar.timegm(now.utctimetuple()) - calendar.timegm(self.custom_epoch.utctimetuple())) + now.microsecond) From 3d8392d47a2157ef5c646aeca40296810734c996 Mon Sep 17 00:00:00 2001 From: Jonas Trappenberg Date: Sun, 26 Nov 2017 11:44:48 -0800 Subject: [PATCH 07/56] Add isort configuration --- setup.cfg | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/setup.cfg b/setup.cfg index ce26f6a..b6c1cfb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,23 @@ [nosetests] where=tests + +[isort] +line_length=120 +known_standard_library= +known_third_party= +known_first_party=sunspear +balanced_wrapping=true +combine_star=true +# 0: grid +# 1: vertical +# 2: hanging +# 3: vert-hanging +# 4: vert-grid +# 5: vert-grid-grouped +multi_line_output=4 +not_skip=__init__.py + +# Don't sort one-letter classes (like Q) first +order_by_type=false + +enforce_white_space=true From f0031ccd24ae92c7c2f98555e12ef84dcf16f6d1 Mon Sep 17 00:00:00 2001 From: Jonas Trappenberg Date: Sun, 26 Nov 2017 11:51:29 -0800 Subject: [PATCH 08/56] Add safe future imports --- docs/source/conf.py | 5 ++++- setup.py | 4 +++- sunspear/activitystreams/models.py | 2 ++ sunspear/aggregators/base.py | 3 +++ sunspear/aggregators/property.py | 9 +++++---- sunspear/backends/base.py | 10 ++++++---- sunspear/backends/riak.py | 4 ++-- sunspear/clients.py | 3 +++ sunspear/exceptions.py | 5 ++++- sunspear/lib/dotdict.py | 1 + sunspear/lib/rfc3339.py | 1 + tests/test_activitystreams.py | 2 +- tests/test_aggregators.py | 8 ++++---- tests/test_backend.py | 4 ++-- tests/test_client.py | 11 +++++------ tests/test_dotdict.py | 6 +++--- tests/test_rfc3339.py | 8 ++++---- 17 files changed, 53 insertions(+), 33 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index b435b95..48c79ac 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -11,7 +11,10 @@ # 
All configuration values have a default; values that are commented out # serve to show the default. -import sys, os +from __future__ import absolute_import, division, print_function + +import os +import sys FILE_ROOT = os.path.abspath(os.path.dirname(__file__)) #add the apps dir to the python path. diff --git a/setup.py b/setup.py index df455e1..955390b 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,8 @@ #!/usr/bin/python -from setuptools import setup, find_packages +from __future__ import absolute_import, division, print_function + +from setuptools import find_packages, setup tests_require=[ 'nose', diff --git a/sunspear/activitystreams/models.py b/sunspear/activitystreams/models.py index 213a158..7cc434c 100644 --- a/sunspear/activitystreams/models.py +++ b/sunspear/activitystreams/models.py @@ -1,3 +1,5 @@ +from __future__ import absolute_import, division, print_function + import datetime import six diff --git a/sunspear/aggregators/base.py b/sunspear/aggregators/base.py index a2d8991..669ed48 100644 --- a/sunspear/aggregators/base.py +++ b/sunspear/aggregators/base.py @@ -1,3 +1,6 @@ +from __future__ import absolute_import, division, print_function + + class BaseAggregator(object): def __init__(self, *args, **kwargs): pass diff --git a/sunspear/aggregators/property.py b/sunspear/aggregators/property.py index 277743f..fbaaacb 100644 --- a/sunspear/aggregators/property.py +++ b/sunspear/aggregators/property.py @@ -1,10 +1,11 @@ -from sunspear.aggregators.base import BaseAggregator -from sunspear.lib.dotdict import dotdictify - -from itertools import groupby +from __future__ import absolute_import, division, print_function import copy import re +from itertools import groupby + +from sunspear.aggregators.base import BaseAggregator +from sunspear.lib.dotdict import dotdictify class PropertyAggregator(BaseAggregator): diff --git a/sunspear/backends/base.py b/sunspear/backends/base.py index 3e415f9..6f75e60 100644 --- a/sunspear/backends/base.py +++ 
b/sunspear/backends/base.py @@ -1,9 +1,11 @@ -from sunspear.activitystreams.models import Activity, ReplyActivity, LikeActivity -from sunspear.exceptions import (SunspearDuplicateEntryException, SunspearInvalidActivityException, - SunspearInvalidObjectException) +from __future__ import absolute_import, division, print_function -import uuid import copy +import uuid + +from sunspear.activitystreams.models import Activity, LikeActivity, ReplyActivity +from sunspear.exceptions import ( + SunspearDuplicateEntryException, SunspearInvalidActivityException, SunspearInvalidObjectException) __all__ = ('BaseBackend', 'SUB_ACTIVITY_MAP') diff --git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index 3a7d864..7637373 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -15,7 +15,7 @@ specific language governing permissions and limitations under the License. """ -from __future__ import absolute_import +from __future__ import absolute_import, print_function import calendar import copy @@ -26,7 +26,7 @@ from riak import RiakClient from sunspear.activitystreams.models import Activity, Model, Object -from sunspear.backends.base import SUB_ACTIVITY_MAP, BaseBackend +from sunspear.backends.base import BaseBackend, SUB_ACTIVITY_MAP from sunspear.exceptions import SunspearValidationException __all__ = ('RiakBackend', ) diff --git a/sunspear/clients.py b/sunspear/clients.py index 491e4ac..3348539 100644 --- a/sunspear/clients.py +++ b/sunspear/clients.py @@ -1,3 +1,6 @@ +from __future__ import absolute_import, division, print_function + + class SunspearClient(object): """ The class is used to create, delete, remove and update activity stream items. 
diff --git a/sunspear/exceptions.py b/sunspear/exceptions.py index 47eb1bc..b3b2792 100644 --- a/sunspear/exceptions.py +++ b/sunspear/exceptions.py @@ -1,3 +1,6 @@ +from __future__ import absolute_import, division, print_function, unicode_literals + + class SunspearBaseException(Exception): pass @@ -26,4 +29,4 @@ class SunspearInvalidActivityException(SunspearBaseException): pass class SunspearInvalidObjectException(SunspearBaseException): - pass \ No newline at end of file + pass diff --git a/sunspear/lib/dotdict.py b/sunspear/lib/dotdict.py index 50e4ce6..a09e6fa 100644 --- a/sunspear/lib/dotdict.py +++ b/sunspear/lib/dotdict.py @@ -1,5 +1,6 @@ #Originally 'borrowed' from http://stackoverflow.com/questions/3797957/python-easily-access-deeply-nested-dict-get-and-set #Some modifications mad to suit the needs of this project +from __future__ import absolute_import, division, print_function class dotdictify(dict): diff --git a/sunspear/lib/rfc3339.py b/sunspear/lib/rfc3339.py index a0057e5..1c3c938 100644 --- a/sunspear/lib/rfc3339.py +++ b/sunspear/lib/rfc3339.py @@ -20,6 +20,7 @@ .. 
_BitBucket: https://bitbucket.org/henry/clan.cx/issues ''' +from __future__ import absolute_import, division, print_function __author__ = 'Henry Precheur ' __license__ = 'ISCL' diff --git a/tests/test_activitystreams.py b/tests/test_activitystreams.py index 0df9f24..20d54c6 100644 --- a/tests/test_activitystreams.py +++ b/tests/test_activitystreams.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import +from __future__ import absolute_import, division, print_function import datetime diff --git a/tests/test_aggregators.py b/tests/test_aggregators.py index d493b62..139fa77 100644 --- a/tests/test_aggregators.py +++ b/tests/test_aggregators.py @@ -1,10 +1,10 @@ -from __future__ import absolute_import +from __future__ import absolute_import, division, print_function -from nose.tools import ok_, eq_, raises, set_trace +from itertools import groupby -from sunspear.aggregators.property import PropertyAggregator +from nose.tools import eq_, ok_, raises, set_trace -from itertools import groupby +from sunspear.aggregators.property import PropertyAggregator class TestPropertyAggregator(object): diff --git a/tests/test_backend.py b/tests/test_backend.py index 2f8ea21..c826e02 100644 --- a/tests/test_backend.py +++ b/tests/test_backend.py @@ -1,9 +1,9 @@ -from __future__ import absolute_import +from __future__ import absolute_import, division, print_function import datetime import six -from mock import ANY, MagicMock, call +from mock import ANY, call, MagicMock from nose.tools import eq_, ok_, raises, set_trace from sunspear.aggregators.property import PropertyAggregator diff --git a/tests/test_client.py b/tests/test_client.py index 26ec940..ea69e8e 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -1,14 +1,14 @@ -from __future__ import absolute_import +from __future__ import absolute_import, division, print_function -from nose.tools import ok_, eq_, set_trace, raises -from mock import MagicMock, call, ANY +import datetime + +from mock import ANY, call, 
MagicMock +from nose.tools import eq_, ok_, raises, set_trace from sunspear.aggregators.property import PropertyAggregator from sunspear.backends.riak import RiakBackend from sunspear.clients import SunspearClient -import datetime - riak_connection_options = { "nodes": [ {'http_port': 8098, 'host': '127.0.0.1'}], @@ -615,4 +615,3 @@ def test_get_activities_with_aggregation_pipline(self): activities = self._client.get_activities(activity_ids=activity_ids, aggregation_pipeline=[PropertyAggregator(properties=['verb', 'actor'])]) eq_([{u'id': u'7779', u'verb': u'like', u'target': {u'objectType': u'something', u'id': u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'inReplyTo': [], u'objectType': u'like', u'id': u'6669', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}}, {u'id': u'8889', u'verb': u'reply', u'target': {u'objectType': u'something', u'id': u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my first reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9999', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}}, {'grouped_by_attributes': ['verb', 'actor'], u'title': [u'Stream Item', u'Stream Item'], u'object': [{u'objectType': u'something', u'id': u'4353', u'published': u'2012-07-05T12:00:00Z'}, {u'published': u'2012-07-05T12:00:00Z', u'id': u'4353', u'objectType': u'something'}], u'actor': {u'published': u'2012-07-05T12:00:00Z', u'id': u'4321', u'objectType': u'something'}, u'verb': u'post', u'replies': [{u'totalItems': 2, u'items': [{u'verb': u'reply', u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'target': {u'objectType': u'something', u'id': u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my first reply', u'inReplyTo': [], u'objectType': 
u'reply', u'id': u'9999', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'verb': u'reply', u'id': u'8889', u'objectType': u'activity'}}, {u'verb': u'reply', u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'target': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my second reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9998', u'published': u'2012-08-05T12:05:00Z'}, u'actor': {u'objectType': u'something', u'id': u'4321', u'published': u'2012-07-05T12:00:00Z'}, u'verb': u'reply', u'id': u'8888', u'objectType': u'activity'}}]}, {u'totalItems': 2, u'items': [{u'verb': u'reply', u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'target': {u'objectType': u'something', u'id': u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my first reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9999', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'verb': u'reply', u'id': u'8889', u'objectType': u'activity'}}, {u'verb': u'reply', u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'target': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my second reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9998', u'published': u'2012-08-05T12:05:00Z'}, u'actor': {u'objectType': u'something', u'id': u'4321', u'published': u'2012-07-05T12:00:00Z'}, u'verb': u'reply', u'id': u'8888', u'objectType': u'activity'}}]}], u'id': [u'5555', u'5556'], 'grouped_by_values': [u'post', {u'published': u'2012-07-05T12:00:00Z', u'id': u'4321', 
u'objectType': u'something'}]}, {u'id': u'7778', u'verb': u'like', u'target': {u'objectType': u'something', u'id': u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'inReplyTo': [], u'objectType': u'like', u'id': u'6669', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}}, {u'id': u'8888', u'verb': u'reply', u'target': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my second reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9998', u'published': u'2012-08-05T12:05:00Z'}, u'actor': {u'objectType': u'something', u'id': u'4321', u'published': u'2012-07-05T12:00:00Z'}}], activities) - diff --git a/tests/test_dotdict.py b/tests/test_dotdict.py index 602704e..14c74ff 100644 --- a/tests/test_dotdict.py +++ b/tests/test_dotdict.py @@ -1,8 +1,8 @@ -from __future__ import absolute_import +from __future__ import absolute_import, division, print_function -from sunspear.lib.dotdict import dotdictify +from nose.tools import eq_, ok_, raises -from nose.tools import ok_, eq_, raises +from sunspear.lib.dotdict import dotdictify class TestDotDictify(object): diff --git a/tests/test_rfc3339.py b/tests/test_rfc3339.py index a7bc723..7bb6725 100644 --- a/tests/test_rfc3339.py +++ b/tests/test_rfc3339.py @@ -1,11 +1,11 @@ -from __future__ import absolute_import +from __future__ import absolute_import, division, print_function -from sunspear.lib.rfc3339 import rfc3339, _timezone, _utc_offset, _timedelta_to_seconds +import datetime +import time from nose.tools import eq_, ok_ -import datetime -import time +from sunspear.lib.rfc3339 import _timedelta_to_seconds, _timezone, _utc_offset, rfc3339 class TestRFC3339(object): From a28268259e597799bdf6a20559ccd1ae72efb2cd Mon Sep 17 00:00:00 2001 From: Jonas Trappenberg Date: Sun, 26 Nov 2017 11:52:24 -0800 Subject: [PATCH 09/56] Add other future imports --- 
docs/source/conf.py | 2 +- setup.py | 2 +- sunspear/activitystreams/models.py | 2 +- sunspear/aggregators/base.py | 2 +- sunspear/aggregators/property.py | 2 +- sunspear/backends/base.py | 2 +- sunspear/backends/riak.py | 2 +- sunspear/clients.py | 2 +- sunspear/lib/dotdict.py | 2 +- sunspear/lib/rfc3339.py | 2 +- tests/test_activitystreams.py | 2 +- tests/test_aggregators.py | 2 +- tests/test_backend.py | 2 +- tests/test_client.py | 2 +- tests/test_dotdict.py | 2 +- tests/test_rfc3339.py | 2 +- 16 files changed, 16 insertions(+), 16 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 48c79ac..c75a7ba 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -11,7 +11,7 @@ # All configuration values have a default; values that are commented out # serve to show the default. -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys diff --git a/setup.py b/setup.py index 955390b..c38d4ac 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ #!/usr/bin/python -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, print_function, unicode_literals from setuptools import find_packages, setup diff --git a/sunspear/activitystreams/models.py b/sunspear/activitystreams/models.py index 7cc434c..00d4845 100644 --- a/sunspear/activitystreams/models.py +++ b/sunspear/activitystreams/models.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, print_function, unicode_literals import datetime diff --git a/sunspear/aggregators/base.py b/sunspear/aggregators/base.py index 669ed48..49b9b3d 100644 --- a/sunspear/aggregators/base.py +++ b/sunspear/aggregators/base.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, 
print_function, unicode_literals class BaseAggregator(object): diff --git a/sunspear/aggregators/property.py b/sunspear/aggregators/property.py index fbaaacb..71e3edc 100644 --- a/sunspear/aggregators/property.py +++ b/sunspear/aggregators/property.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, print_function, unicode_literals import copy import re diff --git a/sunspear/backends/base.py b/sunspear/backends/base.py index 6f75e60..6ffae46 100644 --- a/sunspear/backends/base.py +++ b/sunspear/backends/base.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, print_function, unicode_literals import copy import uuid diff --git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index 7637373..0a51034 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -15,7 +15,7 @@ specific language governing permissions and limitations under the License. 
""" -from __future__ import absolute_import, print_function +from __future__ import absolute_import, division, print_function, unicode_literals import calendar import copy diff --git a/sunspear/clients.py b/sunspear/clients.py index 3348539..88c56ff 100644 --- a/sunspear/clients.py +++ b/sunspear/clients.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, print_function, unicode_literals class SunspearClient(object): diff --git a/sunspear/lib/dotdict.py b/sunspear/lib/dotdict.py index a09e6fa..78576c1 100644 --- a/sunspear/lib/dotdict.py +++ b/sunspear/lib/dotdict.py @@ -1,6 +1,6 @@ #Originally 'borrowed' from http://stackoverflow.com/questions/3797957/python-easily-access-deeply-nested-dict-get-and-set #Some modifications mad to suit the needs of this project -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, print_function, unicode_literals class dotdictify(dict): diff --git a/sunspear/lib/rfc3339.py b/sunspear/lib/rfc3339.py index 1c3c938..46e525d 100644 --- a/sunspear/lib/rfc3339.py +++ b/sunspear/lib/rfc3339.py @@ -20,7 +20,7 @@ .. 
_BitBucket: https://bitbucket.org/henry/clan.cx/issues ''' -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, print_function, unicode_literals __author__ = 'Henry Precheur ' __license__ = 'ISCL' diff --git a/tests/test_activitystreams.py b/tests/test_activitystreams.py index 20d54c6..b741b2c 100644 --- a/tests/test_activitystreams.py +++ b/tests/test_activitystreams.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, print_function, unicode_literals import datetime diff --git a/tests/test_aggregators.py b/tests/test_aggregators.py index 139fa77..91ece85 100644 --- a/tests/test_aggregators.py +++ b/tests/test_aggregators.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, print_function, unicode_literals from itertools import groupby diff --git a/tests/test_backend.py b/tests/test_backend.py index c826e02..51968ff 100644 --- a/tests/test_backend.py +++ b/tests/test_backend.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, print_function, unicode_literals import datetime diff --git a/tests/test_client.py b/tests/test_client.py index ea69e8e..ed0df43 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, print_function, unicode_literals import datetime diff --git a/tests/test_dotdict.py b/tests/test_dotdict.py index 14c74ff..f387db5 100644 --- a/tests/test_dotdict.py +++ b/tests/test_dotdict.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, print_function, unicode_literals from nose.tools import eq_, ok_, raises diff --git 
a/tests/test_rfc3339.py b/tests/test_rfc3339.py index 7bb6725..75a612b 100644 --- a/tests/test_rfc3339.py +++ b/tests/test_rfc3339.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, division, print_function +from __future__ import absolute_import, division, print_function, unicode_literals import datetime import time From e5c0df7696df09b78f77bc4fdc740b1db4eeb166 Mon Sep 17 00:00:00 2001 From: Jonas Trappenberg Date: Sun, 26 Nov 2017 11:57:00 -0800 Subject: [PATCH 10/56] Automatic 2to3 transformation --- docs/source/conf.py | 16 ++++++++-------- sunspear/activitystreams/models.py | 2 +- sunspear/aggregators/property.py | 8 ++++---- sunspear/backends/base.py | 2 +- sunspear/backends/riak.py | 10 ++++------ tests/test_backend.py | 14 +++++++------- tests/test_client.py | 2 +- tests/test_rfc3339.py | 4 ++-- 8 files changed, 28 insertions(+), 30 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index c75a7ba..96c6570 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -49,8 +49,8 @@ master_doc = 'index' # General information about the project. -project = u'Sunspear' -copyright = u'2013, Numan Sachwani' +project = 'Sunspear' +copyright = '2013, Numan Sachwani' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -192,8 +192,8 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'Sunspear.tex', u'Sunspear Documentation', - u'Numan Sachwani', 'manual'), + ('index', 'Sunspear.tex', 'Sunspear Documentation', + 'Numan Sachwani', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -222,8 +222,8 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ - ('index', 'sunspear', u'Sunspear Documentation', - [u'Numan Sachwani'], 1) + ('index', 'sunspear', 'Sunspear Documentation', + ['Numan Sachwani'], 1) ] # If true, show URL addresses after external links. @@ -236,8 +236,8 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'Sunspear', u'Sunspear Documentation', - u'Numan Sachwani', 'Sunspear', 'One line description of project.', + ('index', 'Sunspear', 'Sunspear Documentation', + 'Numan Sachwani', 'Sunspear', 'One line description of project.', 'Miscellaneous'), ] diff --git a/sunspear/activitystreams/models.py b/sunspear/activitystreams/models.py index 00d4845..bdec8fc 100644 --- a/sunspear/activitystreams/models.py +++ b/sunspear/activitystreams/models.py @@ -91,7 +91,7 @@ def parse_data(self, data, *args, **kwargs): _parsed_data[c] = _parsed_data[c].parse_data(_parsed_data[c].get_dict()) #parse anything that is a dictionary for things like datetime fields that are datetime objects - for k, v in _parsed_data.items(): + for k, v in list(_parsed_data.items()): if isinstance(v, dict) and k not in self._response_fields: _parsed_data[k] = self.parse_data(v) diff --git a/sunspear/aggregators/property.py b/sunspear/aggregators/property.py index 71e3edc..fcc3389 100644 --- a/sunspear/aggregators/property.py +++ b/sunspear/aggregators/property.py @@ -49,12 +49,12 @@ def _listify_attributes(self, group_by_attributes=[], activity={}): nested_root, rest = attr.split('.', 1) #store a list of nested roots. 
We'll have to be careful not to listify these nested_root_attributes.append(nested_root) - for nested_dict_key, nested_dict_value in activity.get(nested_dict).items(): + for nested_dict_key, nested_dict_value in list(activity.get(nested_dict).items()): if nested_dict_key != deepest_attr: listified_dict['.'.join([nested_dict, nested_dict_key])] = [nested_dict_value] #now we listify all other non nested attributes - for key, val in activity.items(): + for key, val in list(activity.items()): if key not in group_by_attributes and key not in nested_root_attributes: listified_dict[key] = [val] @@ -97,7 +97,7 @@ def _aggregate_activities(self, group_by_attributes=[], grouped_activities=[]): #aggregate the rest of the activities into lists for activity in group_list[1:]: activity = dotdictify(activity) - for key in aggregated_activity.keys(): + for key in list(aggregated_activity.keys()): if key not in group_by_attributes and key not in nested_root_attributes: aggregated_activity[key].append(activity.get(key)) @@ -108,7 +108,7 @@ def _aggregate_activities(self, group_by_attributes=[], grouped_activities=[]): if nested_val is not None: nested_dict, deepest_attr = attr.rsplit('.', 1) - for nested_dict_key, nested_dict_value in activity.get(nested_dict).items(): + for nested_dict_key, nested_dict_value in list(activity.get(nested_dict).items()): if nested_dict_key != deepest_attr: aggregated_activity['.'.join([nested_dict, nested_dict_key])].append(nested_dict_value) diff --git a/sunspear/backends/base.py b/sunspear/backends/base.py index 6ffae46..9c30e97 100644 --- a/sunspear/backends/base.py +++ b/sunspear/backends/base.py @@ -77,7 +77,7 @@ def create_activity(self, activity, **kwargs): objs_created = [] objs_modified = [] - for key, value in activity_copy.items(): + for key, value in list(activity_copy.items()): if key in Activity._object_fields and isinstance(value, dict): if self.obj_exists(value): previous_value = self.get_obj([self._extract_id(value)])[0] diff --git 
a/sunspear/backends/riak.py b/sunspear/backends/riak.py index 0a51034..4aad206 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -232,7 +232,7 @@ def set_general_indexes(self, riak_object): :type riak_object: RiakObject :param riak_object: a RiakObject representing the model of the class """ - if not filter(lambda x: x[0] == "timestamp_int", riak_object.indexes): + if not [x for x in riak_object.indexes if x[0] == "timestamp_int"]: riak_object.add_index("timestamp_int", self._get_timestamp()) riak_object.remove_index('modified_int') @@ -355,7 +355,7 @@ def activity_get( :return: list -- a list of activities matching ``activity_ids``. If the activities is not found, it is not included in the result set. Activities are returned in the order of ids provided. """ - activity_ids = map(self._extract_id, activity_ids) + activity_ids = list(map(self._extract_id, activity_ids)) if not activity_ids: return [] @@ -428,9 +428,7 @@ def sub_activity_delete(self, sub_activity, sub_activity_verb, **kwargs): activity = self._activities.get(key=in_reply_to_key) activity_data = activity.data activity_data[sub_activity_model.sub_item_key]['totalItems'] -= 1 - activity_data[sub_activity_model.sub_item_key]['items'] = filter( - lambda x: x["id"] != sub_activity_id, - activity_data[sub_activity_model.sub_item_key]['items']) + activity_data[sub_activity_model.sub_item_key]['items'] = [x for x in activity_data[sub_activity_model.sub_item_key]['items'] if x["id"] != sub_activity_id] updated_activity = self.update_activity(activity_data, **kwargs) self.delete_activity(sub_activity_id) @@ -669,7 +667,7 @@ def _get_many_activities(self, activity_ids=[], raw_filter="", filters=None, inc #riak does not return the results in any particular order (unless we sort). 
So, #we have to put the objects returned by riak back in order - results_map = dict(map(lambda result: (result['id'], result,), results)) + results_map = dict([(result['id'], result,) for result in results]) reordered_results = [results_map[id] for id in activity_ids if id in results_map] return reordered_results diff --git a/tests/test_backend.py b/tests/test_backend.py index 51968ff..27f3dc0 100644 --- a/tests/test_backend.py +++ b/tests/test_backend.py @@ -1577,7 +1577,7 @@ def test_get_activities_with_aggregation_pipline(self): activities = self._backend.activity_get(activity_ids, aggregation_pipeline=[PropertyAggregator(properties=['verb', 'actor'])]) - eq_([{u'id': u'7779', u'verb': u'like', u'target': {u'objectType': u'something', u'id': u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'inReplyTo': [], u'objectType': u'like', u'id': u'6669', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}}, {u'id': u'8889', u'verb': u'reply', u'target': {u'objectType': u'something', u'id': u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my first reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9999', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}}, {'grouped_by_attributes': ['verb', 'actor'], u'title': [u'Stream Item', u'Stream Item'], u'object': [{u'objectType': u'something', u'id': u'4353', u'published': u'2012-07-05T12:00:00Z'}, {u'published': u'2012-07-05T12:00:00Z', u'id': u'4353', u'objectType': u'something'}], u'actor': {u'published': u'2012-07-05T12:00:00Z', u'id': u'4321', u'objectType': u'something'}, u'verb': u'post', u'replies': [{u'totalItems': 2, u'items': [{u'verb': u'reply', u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'target': {u'objectType': u'something', u'id': 
u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my first reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9999', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'verb': u'reply', u'id': u'8889', u'objectType': u'activity'}}, {u'verb': u'reply', u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'target': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my second reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9998', u'published': u'2012-08-05T12:05:00Z'}, u'actor': {u'objectType': u'something', u'id': u'4321', u'published': u'2012-07-05T12:00:00Z'}, u'verb': u'reply', u'id': u'8888', u'objectType': u'activity'}}]}, {u'totalItems': 2, u'items': [{u'verb': u'reply', u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'target': {u'objectType': u'something', u'id': u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my first reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9999', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'verb': u'reply', u'id': u'8889', u'objectType': u'activity'}}, {u'verb': u'reply', u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'target': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my second reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9998', u'published': u'2012-08-05T12:05:00Z'}, u'actor': {u'objectType': u'something', u'id': u'4321', u'published': u'2012-07-05T12:00:00Z'}, u'verb': u'reply', u'id': u'8888', u'objectType': 
u'activity'}}]}], u'id': [u'5555', u'5556'], 'grouped_by_values': [u'post', {u'published': u'2012-07-05T12:00:00Z', u'id': u'4321', u'objectType': u'something'}]}, {u'id': u'7778', u'verb': u'like', u'target': {u'objectType': u'something', u'id': u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'inReplyTo': [], u'objectType': u'like', u'id': u'6669', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}}, {u'id': u'8888', u'verb': u'reply', u'target': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my second reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9998', u'published': u'2012-08-05T12:05:00Z'}, u'actor': {u'objectType': u'something', u'id': u'4321', u'published': u'2012-07-05T12:00:00Z'}}], activities) + eq_([{'id': '7779', 'verb': 'like', 'target': {'objectType': 'something', 'id': '31415', 'published': '2012-07-05T12:00:00Z'}, 'object': {'inReplyTo': [], 'objectType': 'like', 'id': '6669', 'published': '2012-08-05T12:00:00Z'}, 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}}, {'id': '8889', 'verb': 'reply', 'target': {'objectType': 'something', 'id': '31415', 'published': '2012-07-05T12:00:00Z'}, 'object': {'content': 'This is my first reply', 'inReplyTo': [], 'objectType': 'reply', 'id': '9999', 'published': '2012-08-05T12:00:00Z'}, 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}}, {'grouped_by_attributes': ['verb', 'actor'], 'title': ['Stream Item', 'Stream Item'], 'object': [{'objectType': 'something', 'id': '4353', 'published': '2012-07-05T12:00:00Z'}, {'published': '2012-07-05T12:00:00Z', 'id': '4353', 'objectType': 'something'}], 'actor': {'published': '2012-07-05T12:00:00Z', 'id': '4321', 'objectType': 'something'}, 'verb': 'post', 'replies': [{'totalItems': 2, 'items': [{'verb': 'reply', 
'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'object': {'target': {'objectType': 'something', 'id': '31415', 'published': '2012-07-05T12:00:00Z'}, 'object': {'content': 'This is my first reply', 'inReplyTo': [], 'objectType': 'reply', 'id': '9999', 'published': '2012-08-05T12:00:00Z'}, 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'verb': 'reply', 'id': '8889', 'objectType': 'activity'}}, {'verb': 'reply', 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'object': {'target': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'object': {'content': 'This is my second reply', 'inReplyTo': [], 'objectType': 'reply', 'id': '9998', 'published': '2012-08-05T12:05:00Z'}, 'actor': {'objectType': 'something', 'id': '4321', 'published': '2012-07-05T12:00:00Z'}, 'verb': 'reply', 'id': '8888', 'objectType': 'activity'}}]}, {'totalItems': 2, 'items': [{'verb': 'reply', 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'object': {'target': {'objectType': 'something', 'id': '31415', 'published': '2012-07-05T12:00:00Z'}, 'object': {'content': 'This is my first reply', 'inReplyTo': [], 'objectType': 'reply', 'id': '9999', 'published': '2012-08-05T12:00:00Z'}, 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'verb': 'reply', 'id': '8889', 'objectType': 'activity'}}, {'verb': 'reply', 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'object': {'target': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'object': {'content': 'This is my second reply', 'inReplyTo': [], 'objectType': 'reply', 'id': '9998', 'published': '2012-08-05T12:05:00Z'}, 'actor': {'objectType': 'something', 'id': '4321', 'published': '2012-07-05T12:00:00Z'}, 'verb': 'reply', 'id': '8888', 'objectType': 
'activity'}}]}], 'id': ['5555', '5556'], 'grouped_by_values': ['post', {'published': '2012-07-05T12:00:00Z', 'id': '4321', 'objectType': 'something'}]}, {'id': '7778', 'verb': 'like', 'target': {'objectType': 'something', 'id': '31415', 'published': '2012-07-05T12:00:00Z'}, 'object': {'inReplyTo': [], 'objectType': 'like', 'id': '6669', 'published': '2012-08-05T12:00:00Z'}, 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}}, {'id': '8888', 'verb': 'reply', 'target': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'object': {'content': 'This is my second reply', 'inReplyTo': [], 'objectType': 'reply', 'id': '9998', 'published': '2012-08-05T12:05:00Z'}, 'actor': {'objectType': 'something', 'id': '4321', 'published': '2012-07-05T12:00:00Z'}}], activities) class TestIndexes(object): @@ -1656,8 +1656,8 @@ def test_create_obj_indexes(self): riak_obj = self._backend._objects.get(key=actstream_obj['id']) riak_obj.data - ok_(filter(lambda x: x[0] == 'timestamp_int', riak_obj.indexes) != []) - ok_(filter(lambda x: x[0] == 'modified_int', riak_obj.indexes) != []) + ok_([x for x in riak_obj.indexes if x[0] == 'timestamp_int'] != []) + ok_([x for x in riak_obj.indexes if x[0] == 'modified_int'] != []) def test_create_activity_indexes(self): self._backend._activities.get('5').delete() @@ -1679,8 +1679,8 @@ def test_create_activity_indexes(self): riak_obj = self._backend._activities.get(key=act_obj_dict['id']) riak_obj.data - ok_(filter(lambda x: x[0] == 'timestamp_int', riak_obj.indexes) != []) - ok_(filter(lambda x: x[0] == 'modified_int', riak_obj.indexes) != []) + ok_([x for x in riak_obj.indexes if x[0] == 'timestamp_int'] != []) + ok_([x for x in riak_obj.indexes if x[0] == 'modified_int'] != []) eq_(filter(lambda x: x[0] == 'verb_bin', riak_obj.indexes)[0][1], 'post') eq_(filter(lambda x: x[0] == 'actor_bin', riak_obj.indexes)[0][1], actor_id) eq_(filter(lambda x: x[0] == 'object_bin', 
riak_obj.indexes)[0][1], object_id) @@ -1714,8 +1714,8 @@ def test_create_sub_activity_indexes(self): riak_obj = self._backend._activities.get(key=like_activity_dict['id']) riak_obj.data - ok_(filter(lambda x: x[0] == 'timestamp_int', riak_obj.indexes) != []) - ok_(filter(lambda x: x[0] == 'modified_int', riak_obj.indexes) != []) + ok_([x for x in riak_obj.indexes if x[0] == 'timestamp_int'] != []) + ok_([x for x in riak_obj.indexes if x[0] == 'modified_int'] != []) eq_(filter(lambda x: x[0] == 'verb_bin', riak_obj.indexes)[0][1], 'like') eq_(filter(lambda x: x[0] == 'actor_bin', riak_obj.indexes)[0][1], actor2_id) eq_(filter(lambda x: x[0] == 'object_bin', riak_obj.indexes)[0][1], like_activity_dict['object']['id']) diff --git a/tests/test_client.py b/tests/test_client.py index ed0df43..3da4b3a 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -614,4 +614,4 @@ def test_get_activities_with_aggregation_pipline(self): activities = self._client.get_activities(activity_ids=activity_ids, aggregation_pipeline=[PropertyAggregator(properties=['verb', 'actor'])]) - eq_([{u'id': u'7779', u'verb': u'like', u'target': {u'objectType': u'something', u'id': u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'inReplyTo': [], u'objectType': u'like', u'id': u'6669', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}}, {u'id': u'8889', u'verb': u'reply', u'target': {u'objectType': u'something', u'id': u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my first reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9999', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}}, {'grouped_by_attributes': ['verb', 'actor'], u'title': [u'Stream Item', u'Stream Item'], u'object': [{u'objectType': u'something', u'id': u'4353', u'published': u'2012-07-05T12:00:00Z'}, 
{u'published': u'2012-07-05T12:00:00Z', u'id': u'4353', u'objectType': u'something'}], u'actor': {u'published': u'2012-07-05T12:00:00Z', u'id': u'4321', u'objectType': u'something'}, u'verb': u'post', u'replies': [{u'totalItems': 2, u'items': [{u'verb': u'reply', u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'target': {u'objectType': u'something', u'id': u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my first reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9999', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'verb': u'reply', u'id': u'8889', u'objectType': u'activity'}}, {u'verb': u'reply', u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'target': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my second reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9998', u'published': u'2012-08-05T12:05:00Z'}, u'actor': {u'objectType': u'something', u'id': u'4321', u'published': u'2012-07-05T12:00:00Z'}, u'verb': u'reply', u'id': u'8888', u'objectType': u'activity'}}]}, {u'totalItems': 2, u'items': [{u'verb': u'reply', u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'target': {u'objectType': u'something', u'id': u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my first reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9999', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'verb': u'reply', u'id': u'8889', u'objectType': u'activity'}}, {u'verb': u'reply', u'actor': {u'objectType': u'something', u'id': u'1234', u'published': 
u'2012-07-05T12:00:00Z'}, u'object': {u'target': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my second reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9998', u'published': u'2012-08-05T12:05:00Z'}, u'actor': {u'objectType': u'something', u'id': u'4321', u'published': u'2012-07-05T12:00:00Z'}, u'verb': u'reply', u'id': u'8888', u'objectType': u'activity'}}]}], u'id': [u'5555', u'5556'], 'grouped_by_values': [u'post', {u'published': u'2012-07-05T12:00:00Z', u'id': u'4321', u'objectType': u'something'}]}, {u'id': u'7778', u'verb': u'like', u'target': {u'objectType': u'something', u'id': u'31415', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'inReplyTo': [], u'objectType': u'like', u'id': u'6669', u'published': u'2012-08-05T12:00:00Z'}, u'actor': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}}, {u'id': u'8888', u'verb': u'reply', u'target': {u'objectType': u'something', u'id': u'1234', u'published': u'2012-07-05T12:00:00Z'}, u'object': {u'content': u'This is my second reply', u'inReplyTo': [], u'objectType': u'reply', u'id': u'9998', u'published': u'2012-08-05T12:05:00Z'}, u'actor': {u'objectType': u'something', u'id': u'4321', u'published': u'2012-07-05T12:00:00Z'}}], activities) + eq_([{'id': '7779', 'verb': 'like', 'target': {'objectType': 'something', 'id': '31415', 'published': '2012-07-05T12:00:00Z'}, 'object': {'inReplyTo': [], 'objectType': 'like', 'id': '6669', 'published': '2012-08-05T12:00:00Z'}, 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}}, {'id': '8889', 'verb': 'reply', 'target': {'objectType': 'something', 'id': '31415', 'published': '2012-07-05T12:00:00Z'}, 'object': {'content': 'This is my first reply', 'inReplyTo': [], 'objectType': 'reply', 'id': '9999', 'published': '2012-08-05T12:00:00Z'}, 'actor': {'objectType': 'something', 'id': '1234', 'published': 
'2012-07-05T12:00:00Z'}}, {'grouped_by_attributes': ['verb', 'actor'], 'title': ['Stream Item', 'Stream Item'], 'object': [{'objectType': 'something', 'id': '4353', 'published': '2012-07-05T12:00:00Z'}, {'published': '2012-07-05T12:00:00Z', 'id': '4353', 'objectType': 'something'}], 'actor': {'published': '2012-07-05T12:00:00Z', 'id': '4321', 'objectType': 'something'}, 'verb': 'post', 'replies': [{'totalItems': 2, 'items': [{'verb': 'reply', 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'object': {'target': {'objectType': 'something', 'id': '31415', 'published': '2012-07-05T12:00:00Z'}, 'object': {'content': 'This is my first reply', 'inReplyTo': [], 'objectType': 'reply', 'id': '9999', 'published': '2012-08-05T12:00:00Z'}, 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'verb': 'reply', 'id': '8889', 'objectType': 'activity'}}, {'verb': 'reply', 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'object': {'target': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'object': {'content': 'This is my second reply', 'inReplyTo': [], 'objectType': 'reply', 'id': '9998', 'published': '2012-08-05T12:05:00Z'}, 'actor': {'objectType': 'something', 'id': '4321', 'published': '2012-07-05T12:00:00Z'}, 'verb': 'reply', 'id': '8888', 'objectType': 'activity'}}]}, {'totalItems': 2, 'items': [{'verb': 'reply', 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'object': {'target': {'objectType': 'something', 'id': '31415', 'published': '2012-07-05T12:00:00Z'}, 'object': {'content': 'This is my first reply', 'inReplyTo': [], 'objectType': 'reply', 'id': '9999', 'published': '2012-08-05T12:00:00Z'}, 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'verb': 'reply', 'id': '8889', 'objectType': 'activity'}}, {'verb': 'reply', 'actor': {'objectType': 
'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'object': {'target': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'object': {'content': 'This is my second reply', 'inReplyTo': [], 'objectType': 'reply', 'id': '9998', 'published': '2012-08-05T12:05:00Z'}, 'actor': {'objectType': 'something', 'id': '4321', 'published': '2012-07-05T12:00:00Z'}, 'verb': 'reply', 'id': '8888', 'objectType': 'activity'}}]}], 'id': ['5555', '5556'], 'grouped_by_values': ['post', {'published': '2012-07-05T12:00:00Z', 'id': '4321', 'objectType': 'something'}]}, {'id': '7778', 'verb': 'like', 'target': {'objectType': 'something', 'id': '31415', 'published': '2012-07-05T12:00:00Z'}, 'object': {'inReplyTo': [], 'objectType': 'like', 'id': '6669', 'published': '2012-08-05T12:00:00Z'}, 'actor': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}}, {'id': '8888', 'verb': 'reply', 'target': {'objectType': 'something', 'id': '1234', 'published': '2012-07-05T12:00:00Z'}, 'object': {'content': 'This is my second reply', 'inReplyTo': [], 'objectType': 'reply', 'id': '9998', 'published': '2012-08-05T12:05:00Z'}, 'actor': {'objectType': 'something', 'id': '4321', 'published': '2012-07-05T12:00:00Z'}}], activities) diff --git a/tests/test_rfc3339.py b/tests/test_rfc3339.py index 75a612b..2522222 100644 --- a/tests/test_rfc3339.py +++ b/tests/test_rfc3339.py @@ -91,13 +91,13 @@ def test_timestamp_utc(self): self.local_timezone)) def test_before_1970(self): - d = datetime.date(1885, 01, 04) + d = datetime.date(1885, 0o1, 0o4) ok_(rfc3339(d).startswith('1885-01-04T00:00:00')) eq_(rfc3339(d, utc=True, use_system_timezone=False), '1885-01-04T00:00:00Z') def test_1920(self): - d = datetime.date(1920, 02, 29) + d = datetime.date(1920, 0o2, 29) x = rfc3339(d, utc=False, use_system_timezone=True) ok_(x.startswith('1920-02-29T00:00:00')) From c86563c499ff089b7edc2280601d9975b0521922 Mon Sep 17 00:00:00 2001 From: Jonas Trappenberg Date: 
Sun, 17 Dec 2017 20:00:48 -0800 Subject: [PATCH 11/56] Use integer division --- sunspear/backends/riak.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index 4aad206..30c415e 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -697,7 +697,7 @@ def _get_timestamp(self): returns a unix timestamp representing the ``datetime`` object """ dt_obj = datetime.datetime.utcnow() - return int((calendar.timegm(dt_obj.utctimetuple()) * 1000)) + (dt_obj.microsecond / 1000) + return int((calendar.timegm(dt_obj.utctimetuple()) * 1000)) + (dt_obj.microsecond // 1000) def get_new_id(self): """ From cf8ffedcd0568bbf5441a1a8fc139480dea84f74 Mon Sep 17 00:00:00 2001 From: Jonas Trappenberg Date: Tue, 2 Jan 2018 17:09:36 -0800 Subject: [PATCH 12/56] str in both python2 and 3 --- sunspear/backends/riak.py | 33 +++++++++++++++++---------------- sunspear/compat.py | 25 +++++++++++++++++++++++++ tests/test_backend.py | 22 +++++++++++----------- 3 files changed, 53 insertions(+), 27 deletions(-) create mode 100644 sunspear/compat.py diff --git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index 30c415e..c5e8fca 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -27,6 +27,7 @@ from sunspear.activitystreams.models import Activity, Model, Object from sunspear.backends.base import BaseBackend, SUB_ACTIVITY_MAP +from sunspear.compat import must_be_str from sunspear.exceptions import SunspearValidationException __all__ = ('RiakBackend', ) @@ -154,8 +155,8 @@ def __init__( pr=None, pw=None, **kwargs): self._riak_backend = RiakClient(protocol=protocol, nodes=nodes, **kwargs) - self._objects = self._riak_backend.bucket(objects_bucket_name) - self._activities = self._riak_backend.bucket(activities_bucket_name) + self._objects = self._riak_backend.bucket(must_be_str(objects_bucket_name)) + self._activities = self._riak_backend.bucket(must_be_str(activities_bucket_name)) if r: 
self._objects.r = r @@ -232,11 +233,11 @@ def set_general_indexes(self, riak_object): :type riak_object: RiakObject :param riak_object: a RiakObject representing the model of the class """ - if not [x for x in riak_object.indexes if x[0] == "timestamp_int"]: - riak_object.add_index("timestamp_int", self._get_timestamp()) + if not any(must_be_str(name) == must_be_str('timestamp_int') for name, value in riak_object.indexes): + riak_object.add_index(must_be_str("timestamp_int"), self._get_timestamp()) - riak_object.remove_index('modified_int') - riak_object.add_index("modified_int", self._get_timestamp()) + riak_object.remove_index(must_be_str('modified_int')) + riak_object.add_index(must_be_str("modified_int"), self._get_timestamp()) return riak_object def obj_update(self, obj, **kwargs): @@ -301,15 +302,15 @@ def set_activity_indexes(self, riak_object): """ _dict = riak_object.data - riak_object.remove_index('verb_bin') - riak_object.remove_index('actor_bin') - riak_object.remove_index('object_bin') - riak_object.add_index("verb_bin", self._extract_id(_dict['verb'])) - riak_object.add_index("actor_bin", self._extract_id(_dict['actor'])) - riak_object.add_index("object_bin", self._extract_id(_dict['object'])) + riak_object.remove_index(must_be_str('verb_bin')) + riak_object.remove_index(must_be_str('actor_bin')) + riak_object.remove_index(must_be_str('object_bin')) + riak_object.add_index(must_be_str("verb_bin"), self._extract_id(_dict['verb'])) + riak_object.add_index(must_be_str("actor_bin"), self._extract_id(_dict['actor'])) + riak_object.add_index(must_be_str("object_bin"), self._extract_id(_dict['object'])) if 'target' in _dict and _dict.get("target"): - riak_object.remove_index('target_bin') - riak_object.add_index("target_bin", self._extract_id(_dict['target'])) + riak_object.remove_index(must_be_str('target_bin')) + riak_object.add_index(must_be_str("target_bin"), self._extract_id(_dict['target'])) return riak_object @@ -446,8 +447,8 @@ def 
set_sub_item_indexes(self, riak_object, **kwargs): original_activity_id = kwargs.get('activity_id') if not original_activity_id: raise SunspearValidationException() - riak_object.remove_index('inreplyto_bin') - riak_object.add_index("inreplyto_bin", str(original_activity_id)) + riak_object.remove_index(must_be_str('inreplyto_bin')) + riak_object.add_index(must_be_str("inreplyto_bin"), str(original_activity_id)) return riak_object diff --git a/sunspear/compat.py b/sunspear/compat.py new file mode 100644 index 0000000..61060ca --- /dev/null +++ b/sunspear/compat.py @@ -0,0 +1,25 @@ +from __future__ import absolute_import, division, print_function, unicode_literals + +import traceback +from logging import getLogger + +import six + +logger = getLogger(__name__) + + +def must_be_str(arg): + """ + Some functions require `str` in Python 2, i.e. its binary type, + but also `str` in Python 3, which is its text type... + + Accommodate both. + """ + if six.PY2: + if isinstance(arg, six.text_type): + return arg.encode('utf-8') + # The idea of this function is to simply remove all function calls once we're on python 3, so let's be sure we + # always have the right type passed in in python 2, i.e. `unicode`. 
+ traceback.print_stack() + logger.warn('Unexpectedly got non-unicode in `must_be_str`...', extra={'stack': True}) + return arg diff --git a/tests/test_backend.py b/tests/test_backend.py index 27f3dc0..29143c4 100644 --- a/tests/test_backend.py +++ b/tests/test_backend.py @@ -4,17 +4,17 @@ import six from mock import ANY, call, MagicMock -from nose.tools import eq_, ok_, raises, set_trace +from nose.tools import eq_, ok_, raises from sunspear.aggregators.property import PropertyAggregator from sunspear.backends.riak import RiakBackend +from sunspear.compat import must_be_str from sunspear.exceptions import SunspearValidationException riak_connection_options = { "nodes": [ {'http_port': 8098, 'host': '127.0.0.1'}], 'protocol': 'http', - # "nodes": [{'host': '127.0.0.1', 'pb_port': 10017}, {'host': '127.0.0.1', 'pb_port': 10027}, {'host': '127.0.0.1', 'pb_port': 10037}], } @@ -1592,10 +1592,10 @@ def test_set_sub_item_indexes(self): self._backend.set_activity_indexes(riak_obj_mock) calls = [ - call.add_index('verb_bin', 'post'), - call.add_index('actor_bin', '1234'), - call.add_index('object_bin', '5678'), - call.add_index('target_bin', '4333'), + call.add_index(must_be_str('verb_bin'), 'post'), + call.add_index(must_be_str('actor_bin'), '1234'), + call.add_index(must_be_str('object_bin'), '5678'), + call.add_index(must_be_str('target_bin'), '4333'), ] riak_obj_mock.assert_has_calls(calls, any_order=True) @@ -1608,7 +1608,7 @@ def test_set_sub_item_indexes_reply(self): self._backend.set_sub_item_indexes(riak_obj_mock, activity_id=1234) calls = [ - call.add_index('inreplyto_bin', ANY), + call.add_index(must_be_str('inreplyto_bin'), ANY), ] riak_obj_mock.assert_has_calls(calls, any_order=True) @@ -1628,8 +1628,8 @@ def test_set_general_indexes_not_already_created_set(self): self._backend.set_general_indexes(riak_obj_mock) calls = [ - call.add_index('timestamp_int', ANY), - call.add_index('modified_int', ANY), + call.add_index(must_be_str('timestamp_int'), ANY), + 
call.add_index(must_be_str('modified_int'), ANY), ] riak_obj_mock.assert_has_calls(calls, any_order=True) @@ -1637,12 +1637,12 @@ def test_set_general_indexes_not_already_created_set(self): def test_set_general_indexes_already_created(self): riak_obj_mock = MagicMock() - riak_obj_mock.indexes = [('timestamp_int', 12343214,)] + riak_obj_mock.indexes = [(must_be_str('timestamp_int'), 12343214,)] self._backend.set_general_indexes(riak_obj_mock) calls = [ - call.add_index('modified_int', ANY), + call.add_index(must_be_str('modified_int'), ANY), ] riak_obj_mock.assert_has_calls(calls, any_order=True) From c0de8ea997af8d5c5635bffd3424acfe0cfcb1c2 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Sun, 30 Oct 2016 19:40:43 -0700 Subject: [PATCH 13/56] Update setup.py to include sqlalchemy --- setup.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/setup.py b/setup.py index c38d4ac..1a1e722 100644 --- a/setup.py +++ b/setup.py @@ -25,6 +25,9 @@ 'python-dateutil>=1.5, != 2.0', 'riak', 'six', + 'riak==2.5.4', + 'protobuf==2.6.1', + 'sqlalchemy==1.1.3', ], options={'easy_install': {'allow_hosts': 'pypi.python.org'}}, tests_require=tests_require, From 4ae71cdb5a9d8d6a933b14e0d65f100ad56863f6 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Mon, 31 Oct 2016 21:10:18 -0700 Subject: [PATCH 14/56] Add schema for the database backend --- sunspear/backends/database/__init__.py | 0 sunspear/backends/database/schema.py | 56 ++++++++++++++++++ sunspear/backends/database/types.py | 82 ++++++++++++++++++++++++++ 3 files changed, 138 insertions(+) create mode 100644 sunspear/backends/database/__init__.py create mode 100644 sunspear/backends/database/schema.py create mode 100644 sunspear/backends/database/types.py diff --git a/sunspear/backends/database/__init__.py b/sunspear/backends/database/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sunspear/backends/database/schema.py b/sunspear/backends/database/schema.py new file mode 100644 index 0000000..a2fb6f1 
--- /dev/null +++ b/sunspear/backends/database/schema.py @@ -0,0 +1,56 @@ +from sqlalchemy import Table, Column, DateTime, Integer, String, Text, MetaData, ForeignKey, UniqueConstraint +import types as custom_types + + +metadata = MetaData() + +objects_table = Table('objects', metadata, + Column('id', String, primary_key=True), + Column('object_type', String, nullable=False), + Column('display_name', String), + Column('display_name', String), + Column('content', Text), + Column('published', DateTime, nullable=False), + Column('image'), custom_types.JSONSmallDict(4096), + Column('other_data'), custom_types.JSONDict) + +activities_table = Table('activities', metadata, + Column('id', String, primary_key=True), + Column('verb', String, nullable=False), + Column('actor', ForeignKey('objects.id'), nullable=False), + Column('object', ForeignKey('objects.id')), + Column('target', ForeignKey('objects.id')), + Column('author', ForeignKey('objects.id')), + Column('generator', String), + Column('provider', String), + Column('content', Text), + Column('published', DateTime, nullable=False), + Column('updated', DateTime), + Column('icon'), custom_types.JSONSmallDict(4096), + Column('other_data'), custom_types.JSONDict) + +subitem_fields = (Column('id', String, primary_key=True), + Column('in_reply_to', ForeignKey('activities.id'), nullable=False), + Column('published', DateTime, nullable=False), + Column('actor', ForeignKey('objects.id'), nullable=False), + Column('content', Text), + UniqueConstraint('actor', 'in_reply_to') + ) + +replies_table = Table('replies', metadata, **subitem_fields) + +likes_table = Table('likes', metadata, **subitem_fields) + +shared_with_fields = (Column('id', Integer, primary_key=True), + Column('object', ForeignKey('objects.id')), + Column('activity', ForeignKey('activities.id')), + UniqueConstraint('object', 'activity')) + +to_table = Table('to', metadata, **shared_with_fields) + +bto_table = Table('bto', metadata, **shared_with_fields) + +cc_table 
= Table('cc', metadata, **shared_with_fields) + +bcc_table = Table('bcc', metadata, **shared_with_fields) + diff --git a/sunspear/backends/database/types.py b/sunspear/backends/database/types.py new file mode 100644 index 0000000..94319c7 --- /dev/null +++ b/sunspear/backends/database/types.py @@ -0,0 +1,82 @@ +from sqlalchemy.sql import operators +from sqlalchemy import String +from sqlalchemy.types import TypeDecorator, CHAR, VARCHAR, TEXT +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.ext.mutable import MutableDict + +import json +import uuid + +__all__ = ['GUID', 'JSONDict', 'JSONSmallDict'] + + +class GUID(TypeDecorator): + """Platform-independent GUID type. + + Uses PostgreSQL's UUID type, otherwise uses + CHAR(32), storing as stringified hex values. + + """ + impl = CHAR + + def load_dialect_impl(self, dialect): + if dialect.name == 'postgresql': + return dialect.type_descriptor(UUID()) + else: + return dialect.type_descriptor(CHAR(32)) + + def process_bind_param(self, value, dialect): + if value is None: + return value + elif dialect.name == 'postgresql': + return str(value) + else: + if not isinstance(value, uuid.UUID): + return "%.32x" % uuid.UUID(value).int + else: + # hexstring + return "%.32x" % value.int + + def process_result_value(self, value, dialect): + if value is None: + return value + else: + return uuid.UUID(value) + + +class JSONEncodedSmallDict(TypeDecorator): + """Represents an immutable structure as a json-encoded string. 
+ + Usage:: + + JSONEncodedDict(255) + + """ + + impl = VARCHAR + + def coerce_compared_value(self, op, value): + if op in (operators.like_op, operators.notlike_op): + return String() + else: + return self + + def process_bind_param(self, value, dialect): + if value is not None: + value = json.dumps(value) + + return value + + def process_result_value(self, value, dialect): + if value is not None: + value = json.loads(value) + return value + + +class JSONEncodedBigDict(JSONEncodedSmallDict): + + impl = TEXT + + +JSONDict = MutableDict.as_mutable(JSONEncodedBigDict) +JSONSmallDict = MutableDict.as_mutable(JSONEncodedSmallDict) From baa52257940c8ac259f03c3df51c518bcf7b6b01 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Mon, 31 Oct 2016 21:19:45 -0700 Subject: [PATCH 15/56] Rename test file --- tests/{test_backend.py => test_riak.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/{test_backend.py => test_riak.py} (100%) diff --git a/tests/test_backend.py b/tests/test_riak.py similarity index 100% rename from tests/test_backend.py rename to tests/test_riak.py From 502654070a1133f47ae45f11e2f58dba3e269591 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Mon, 31 Oct 2016 21:33:00 -0700 Subject: [PATCH 16/56] Make the command louder so I can debug properly --- .travis.yml | 2 +- sunspear/backends/database/db.py | 18 ++++++++++++++++++ sunspear/backends/riak.py | 2 +- 3 files changed, 20 insertions(+), 2 deletions(-) create mode 100644 sunspear/backends/database/db.py diff --git a/.travis.yml b/.travis.yml index 6006c82..28158f3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,7 +3,7 @@ python: - "2.7" before_install: - sudo apt-get update - - sudo apt-get -o Dpkg::Options::="--force-confnew" -qq -y upgrade + - sudo apt-get -o Dpkg::Options::="--force-confnew" -y upgrade - sudo apt-get install -qq protobuf-compiler services: - riak diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py new file mode 100644 index 
0000000..b5840ea --- /dev/null +++ b/sunspear/backends/database/db.py @@ -0,0 +1,18 @@ +""" +Copyright 2016 Numan Sachwani + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" +from __future__ import absolute_import \ No newline at end of file diff --git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index c5e8fca..443599f 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -1,5 +1,5 @@ """ -Copyright 2013 Numan Sachwani +Copyright 2016 Numan Sachwani This file is provided to you under the Apache License, Version 2.0 (the "License"); you may not use this file From 9adb21c83c428305dc2000d8e278612f7a1f5b99 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Mon, 31 Oct 2016 21:40:16 -0700 Subject: [PATCH 17/56] Simplify travis.yml --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 28158f3..e5d3064 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,8 +2,7 @@ language: python python: - "2.7" before_install: - - sudo apt-get update - - sudo apt-get -o Dpkg::Options::="--force-confnew" -y upgrade + - sudo apt-get update -qq - sudo apt-get install -qq protobuf-compiler services: - riak From a3b22b45d935efe8aeb29a88e3025a27e94cdbec Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Mon, 31 Oct 2016 21:43:12 -0700 Subject: [PATCH 18/56] Pip no longer has that option --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 
e5d3064..0c912f3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,5 +7,5 @@ before_install: services: - riak install: - - pip install -q -e . --use-mirrors + - pip install -q -e . script: python setup.py nosetests From 17dc92470491fd5a8bdac4ae27b811ff06a95084 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Thu, 3 Nov 2016 13:03:11 -0700 Subject: [PATCH 19/56] Started to add initial scaffolding for testing --- sunspear/backends/base.py | 30 ++++-- sunspear/backends/database/__init__.py | 1 + sunspear/backends/database/db.py | 124 +++++++++++++++++++++- sunspear/backends/database/schema.py | 98 ++++++++++------- sunspear/backends/database/types.py | 4 +- sunspear/backends/riak.py | 10 +- sunspear/exceptions.py | 5 + tests/test_db.py | 139 +++++++++++++++++++++++++ tests/test_rfc3339.py | 2 +- tests/test_riak.py | 1 + 10 files changed, 362 insertions(+), 52 deletions(-) create mode 100644 tests/test_db.py diff --git a/sunspear/backends/base.py b/sunspear/backends/base.py index 9c30e97..abf6e4e 100644 --- a/sunspear/backends/base.py +++ b/sunspear/backends/base.py @@ -386,16 +386,6 @@ def _listify(self, list_or_string): return list_or_string - def _extract_id(self, activity_or_id): - """ - Helper that returns an id if the activity has one. - """ - this_id = activity_or_id - if isinstance(activity_or_id, dict): - this_id = activity_or_id.get('id', None) - - return this_id - def get_new_id(self): """ Generates a new unique ID. The default implementation uses uuid1 to @@ -404,3 +394,23 @@ def get_new_id(self): :return: a new id """ return uuid.uuid1().hex + + def _extract_id(self, activity_or_id): + """ + Helper that returns an id if the activity has one. 
+ """ + this_id = None + if isinstance(activity_or_id, basestring): + this_id = activity_or_id + elif isinstance(activity_or_id, dict): + this_id = activity_or_id.get('id', None) + try: + this_id = str(this_id) + except: + pass + else: + try: + this_id = str(activity_or_id) + except: + pass + return this_id diff --git a/sunspear/backends/database/__init__.py b/sunspear/backends/database/__init__.py index e69de29..eda7f14 100644 --- a/sunspear/backends/database/__init__.py +++ b/sunspear/backends/database/__init__.py @@ -0,0 +1 @@ +from db import DatabaseBackend diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index b5840ea..40cb39c 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -15,4 +15,126 @@ specific language governing permissions and limitations under the License. """ -from __future__ import absolute_import \ No newline at end of file +from __future__ import absolute_import + +import calendar +import copy +import datetime +import uuid + +from dateutil.parser import parse +from dateutil import tz + +from sqlalchemy import create_engine, sql +from sqlalchemy.pool import QueuePool +from sunspear.activitystreams.models import Activity, Model, Object +from sunspear.backends.base import SUB_ACTIVITY_MAP, BaseBackend +from sunspear.exceptions import (SunspearOperationNotSupportedException, + SunspearValidationException) + +from . 
import schema + +DB_OBJ_FIELD_MAPPING = { + 'id': 'id', + 'objectType': 'object_type', + 'displayName': 'display_name', + 'content': 'content', + 'published': 'published', + 'image': 'image', +} + + +class DatabaseBackend(BaseBackend): + + def __init__(self, db_connection_string=None, verbose=False, poolsize=10, + max_overflow=5, **kwargs): + self._engine = create_engine(db_connection_string, echo=verbose, poolclass=QueuePool, + pool_size=poolsize, max_overflow=max_overflow) + + @property + def engine(self): + return self._engine + + def _get_connection(self): + return self.engine.connect() + + def create_tables(self): + schema.metadata.create_all(self.engine) + + def drop_tables(self): + schema.metadata.drop_all(self.engine) + + def clear_all(self): + self.drop_tables() + self.create_tables() + + def clear_all_objects(self): + raise SunspearOperationNotSupportedException() + + def clear_all_activities(self): + self.engine.execute(schema.tables['activities'].delete()) + + def obj_exists(self, obj, **kwargs): + return self.engine.execute(sql.select([sql.exists().where(schema.tables['objects'].c.id == self.test_obj['id'])])) + + def obj_create(self, obj, **kwargs): + obj = Object(obj, backend=self) + + obj.validate() + obj_dict = obj.get_parsed_dict() + + obj_db_schema_dict = self._obj_dict_to_db_schema(obj_dict) + + self.engine.execute(schema.tables['objects'].insert(), [obj_db_schema_dict]) + + return obj_dict + + def _obj_dict_to_db_schema(self, obj): + # we make a copy because we will be mutating the dict. 
+ # we will map official fields to db fields, and put the rest in `other_data` + obj_copy = copy.deepcopy(obj) + schema_dict = {} + + for obj_field, db_schema_field in DB_OBJ_FIELD_MAPPING.items(): + if obj_field in obj_copy: + data = obj_copy.pop(obj_field) + + # SQLAlchemy requires datetime fields to be datetime instances + if obj_field in Model._datetime_fields: + data = self._get_db_compatiable_date_string(data) + + schema_dict[db_schema_field] = data + + # all standard fields should no longer be part of the dictionary + if obj_copy: + schema_dict['other_data'] = obj_copy + + return schema_dict + + def _get_datetime_obj(self, datetime_instance): + if isinstance(datetime_instance, basestring): + datetime_instance = parse(datetime_instance) + utctimezone = tz.tzutc() + + # Assume UTC if we don't have a timezone + if datetime_instance.tzinfo is None: + datetime_instance.replace(tzinfo=utctimezone) + # If we do have a timezone, convert it to UTC + elif datetime.tzinfo != utctimezone: + datetime_instance.astimezone(utctimezone) + + return datetime_instance + + def _get_db_compatiable_date_string(self, datetime_instance): + datetime_instance = self._get_datetime_obj(datetime_instance) + + return datetime_instance.strftime('%Y-%m-%d %H:%M:%S') + + def get_new_id(self): + """ + Generates a new unique ID. The default implementation uses uuid1 to + generate a unique ID. 
+ + :return: a new id + """ + return uuid.uuid1().hex diff --git a/sunspear/backends/database/schema.py b/sunspear/backends/database/schema.py index a2fb6f1..1d5a86c 100644 --- a/sunspear/backends/database/schema.py +++ b/sunspear/backends/database/schema.py @@ -5,52 +5,80 @@ metadata = MetaData() objects_table = Table('objects', metadata, - Column('id', String, primary_key=True), - Column('object_type', String, nullable=False), - Column('display_name', String), - Column('display_name', String), + Column('id', String(32), primary_key=True), + Column('object_type', String(256, convert_unicode=True), nullable=False), + Column('display_name', String(256, convert_unicode=True)), Column('content', Text), - Column('published', DateTime, nullable=False), - Column('image'), custom_types.JSONSmallDict(4096), - Column('other_data'), custom_types.JSONDict) + Column('published', DateTime(timezone=True), nullable=False), + Column('updated', DateTime(timezone=True)), + Column('image', custom_types.JSONSmallDict(4096)), + Column('other_data', custom_types.JSONDict())) activities_table = Table('activities', metadata, - Column('id', String, primary_key=True), - Column('verb', String, nullable=False), - Column('actor', ForeignKey('objects.id'), nullable=False), - Column('object', ForeignKey('objects.id')), - Column('target', ForeignKey('objects.id')), - Column('author', ForeignKey('objects.id')), - Column('generator', String), - Column('provider', String), + Column('id', String(32), primary_key=True), + Column('verb', String(256, convert_unicode=True), nullable=False), + Column('actor', ForeignKey('objects.id', ondelete='CASCADE'), nullable=False), + Column('object', ForeignKey('objects.id', ondelete='CASCADE')), + Column('target', ForeignKey('objects.id', ondelete='CASCADE')), + Column('author', ForeignKey('objects.id', ondelete='CASCADE')), + Column('generator', String(1024, convert_unicode=True)), + Column('provider', String(1024, convert_unicode=True)), Column('content', Text), 
- Column('published', DateTime, nullable=False), - Column('updated', DateTime), - Column('icon'), custom_types.JSONSmallDict(4096), - Column('other_data'), custom_types.JSONDict) + Column('published', DateTime(timezone=True), nullable=False), + Column('updated', DateTime(timezone=True)), + Column('icon', custom_types.JSONSmallDict(4096)), + Column('other_data', custom_types.JSONDict())) -subitem_fields = (Column('id', String, primary_key=True), - Column('in_reply_to', ForeignKey('activities.id'), nullable=False), - Column('published', DateTime, nullable=False), - Column('actor', ForeignKey('objects.id'), nullable=False), - Column('content', Text), - UniqueConstraint('actor', 'in_reply_to') - ) - -replies_table = Table('replies', metadata, **subitem_fields) +replies_table = Table('replies', metadata, + Column('id', String(32), primary_key=True), + Column('in_reply_to', ForeignKey('activities.id', ondelete='CASCADE'), nullable=False), + Column('actor', ForeignKey('objects.id', ondelete='CASCADE'), nullable=False), + Column('published', DateTime(timezone=True), nullable=False), + Column('updated', DateTime(timezone=True)), + Column('content', Text), + Column('other_data', custom_types.JSONDict())) -likes_table = Table('likes', metadata, **subitem_fields) +likes_table = Table('likes', metadata, + Column('id', String(32), primary_key=True), + Column('in_reply_to', ForeignKey('activities.id', ondelete='CASCADE'), nullable=False), + Column('actor', ForeignKey('objects.id', ondelete='CASCADE'), nullable=False), + Column('published', DateTime(timezone=True), nullable=False), + Column('content', Text), + Column('other_data', custom_types.JSONDict()), + UniqueConstraint('actor', 'in_reply_to')) shared_with_fields = (Column('id', Integer, primary_key=True), - Column('object', ForeignKey('objects.id')), - Column('activity', ForeignKey('activities.id')), + Column('object', ForeignKey('objects.id', ondelete='CASCADE')), + Column('activity', ForeignKey('activities.id', 
ondelete='CASCADE')), UniqueConstraint('object', 'activity')) -to_table = Table('to', metadata, **shared_with_fields) +to_table = Table('to', metadata, + Column('id', Integer, primary_key=True), + Column('object', ForeignKey('objects.id', ondelete='CASCADE')), + Column('activity', ForeignKey('activities.id', ondelete='CASCADE'))) -bto_table = Table('bto', metadata, **shared_with_fields) +bto_table = Table('bto', metadata, + Column('id', Integer, primary_key=True), + Column('object', ForeignKey('objects.id', ondelete='CASCADE')), + Column('activity', ForeignKey('activities.id', ondelete='CASCADE'))) -cc_table = Table('cc', metadata, **shared_with_fields) +cc_table = Table('cc', metadata, + Column('id', Integer, primary_key=True), + Column('object', ForeignKey('objects.id', ondelete='CASCADE')), + Column('activity', ForeignKey('activities.id', ondelete='CASCADE'))) -bcc_table = Table('bcc', metadata, **shared_with_fields) +bcc_table = Table('bcc', metadata, + Column('id', Integer, primary_key=True), + Column('object', ForeignKey('objects.id', ondelete='CASCADE')), + Column('activity', ForeignKey('activities.id', ondelete='CASCADE'))) +tables = { + 'objects': objects_table, + 'activities': activities_table, + 'replies': replies_table, + 'likes': likes_table, + 'to': to_table, + 'bto': bto_table, + 'cc': cc_table, + 'bcc': bcc_table, +} diff --git a/sunspear/backends/database/types.py b/sunspear/backends/database/types.py index 94319c7..5dd2ed1 100644 --- a/sunspear/backends/database/types.py +++ b/sunspear/backends/database/types.py @@ -78,5 +78,5 @@ class JSONEncodedBigDict(JSONEncodedSmallDict): impl = TEXT -JSONDict = MutableDict.as_mutable(JSONEncodedBigDict) -JSONSmallDict = MutableDict.as_mutable(JSONEncodedSmallDict) +JSONDict = JSONEncodedBigDict +JSONSmallDict = JSONEncodedSmallDict diff --git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index 443599f..01426c0 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -21,7 +21,6 @@ 
import copy import datetime import uuid - import six from riak import RiakClient @@ -147,8 +146,6 @@ class RiakBackend(BaseBackend): - custom_epoch = datetime.datetime(month=1, day=1, year=2013) - def __init__( self, protocol="pbc", nodes=[], objects_bucket_name="objects", activities_bucket_name="activities", r=None, w=None, dw=None, @@ -673,6 +670,7 @@ def _get_many_activities(self, activity_ids=[], raw_filter="", filters=None, inc return reordered_results +<<<<<<< HEAD def _extract_id(self, activity_or_id): """ Helper that returns an id if the activity has one. @@ -693,6 +691,8 @@ def _extract_id(self, activity_or_id): pass return this_id +======= +>>>>>>> Started to add initial scaffolding for testing def _get_timestamp(self): """ returns a unix timestamp representing the ``datetime`` object @@ -708,3 +708,7 @@ def get_new_id(self): :return: a new id """ return uuid.uuid1().hex +<<<<<<< HEAD +======= + +>>>>>>> Started to add initial scaffolding for testing diff --git a/sunspear/exceptions.py b/sunspear/exceptions.py index b3b2792..89422c6 100644 --- a/sunspear/exceptions.py +++ b/sunspear/exceptions.py @@ -28,5 +28,10 @@ class SunspearDuplicateEntryException(SunspearBaseException): class SunspearInvalidActivityException(SunspearBaseException): pass + class SunspearInvalidObjectException(SunspearBaseException): pass + + +class SunspearOperationNotSupportedException(SunspearBaseException): + pass diff --git a/tests/test_db.py b/tests/test_db.py new file mode 100644 index 0000000..9c79166 --- /dev/null +++ b/tests/test_db.py @@ -0,0 +1,139 @@ +from __future__ import absolute_import + +from nose.tools import ok_, eq_, raises +from sqlalchemy import create_engine, sql + +from sunspear.backends.database.db import * +from sunspear.exceptions import SunspearOperationNotSupportedException +from sunspear.backends.database import schema +from sunspear.activitystreams.models import Model + +import copy +import os +import datetime + + +DB_CONNECTION_STRING = 
os.environ.get('DB_CONNECTION_STRING', 'mysql://root:@localhost') +DB_TYPE = os.environ.get('DB_TYPE', 'mysql') +DB_USER = os.environ.get('DB_USER', 'root') +DB_PASS = os.environ.get('DB_PASSWORD', '') +DB_HOST = os.environ.get('DB_HOST', 'localhost') +DB_PORT = int(os.environ.get('DB_PORT', 3306)) + +DB_NAME = os.environ.get('DB_NAME', 'sunspear_test_database') + + +class TestDatabaseBackend(object): + @classmethod + def setUpClass(cls): + database_name = DB_NAME + cls._setup_db(database_name) + database_connection_string = cls.get_connection_string_with_database(database_name) + + cls._backend = DatabaseBackend(db_connection_string=database_connection_string, verbose=False) + cls._backend.drop_tables() + cls._engine = cls._backend.engine + + @classmethod + def tearDownClass(cls): + database_name = DB_NAME + cls._cleanup_db(database_name) + + @classmethod + def get_connection_string(cls): + return '{0}://{1}:{2}@{3}:{4}'.format(DB_TYPE, DB_USER, DB_PASS, DB_HOST, DB_PORT) + + @classmethod + def get_connection_string_with_database(cls, database_name): + return '{0}/{1}'.format(cls.get_connection_string(), database_name) + + @classmethod + def _cleanup_db(cls, db_name): + connection_string = cls.get_connection_string() + + # This engine just used to query for list of databases + engine = create_engine(connection_string) + + engine.execute("DROP DATABASE {};".format(db_name)) + + @classmethod + def _setup_db(cls, db_name): + connection_string = cls.get_connection_string() + + # This engine just used to query for list of databases + engine = create_engine(connection_string) + conn = engine.connect() + + # Query for existing databases + existing_databases = conn.execute("SHOW DATABASES;") + # Results are a list of single item tuples, so unpack each tuple + existing_databases = [d[0] for d in existing_databases] + + # Create database if not exists + if db_name not in existing_databases: + conn.execute("CREATE DATABASE {0}".format(db_name)) + print("Created database 
{0}".format(db_name)) + + conn.close() + + def setUp(self): + self._backend.create_tables() + self._setup_objs() + + def tearDown(self): + self._backend.drop_tables() + + def _setup_objs(self): + self.test_objs = [{ + 'id': 'AxsdSG244BfduiIZ', + 'objectType': u'use\u0403', + 'displayName': u'\u019duman S', + 'content': u'Foo bar!\u03ee', + 'published': self._datetime_to_string(datetime.datetime.utcnow()), + 'image': { + 'url': 'https://www.google.com/cool_image.png', + 'displayName': u'Cool \u0268mage', + 'width': '500px', + 'height': '500px' + }, + 'foo': 'bar', + 'baz': u'go\u0298', + 'zoo': {'zee': 12, 'tim': {'zde': u'\u0268\u0298'}} + }] + + self.test_obj = self.test_objs[0] + + @raises(SunspearOperationNotSupportedException) + def test_sample_test(self): + self._backend.clear_all_objects() + + def test__obj_dict_to_db_schema(self): + obj_dict = self.test_obj + obj_dict_copy = copy.deepcopy(obj_dict) + + db_schema_dict = self._backend._obj_dict_to_db_schema(obj_dict) + + # Confirm the original dict was not modified + eq_(obj_dict, obj_dict_copy) + + for obj_field, db_schema_field in DB_OBJ_FIELD_MAPPING.items(): + data = obj_dict[obj_field] + if obj_field in Model._datetime_fields: + data = self._backend._get_db_compatiable_date_string(data) + + eq_(data, db_schema_dict[db_schema_field]) + # Remove all "supported" fields. 
What we have left should be what went to `other_data` + obj_dict_copy.pop(obj_field) + + # Everything was placed in other_data + eq_(obj_dict_copy, db_schema_dict['other_data']) + + def test_obj_create(self): + self._backend.obj_create(self.test_obj) + + obj_exists = self._engine.execute(sql.select([sql.exists().where(schema.tables['objects'].c.id == self.test_obj['id'])])) + + ok_(obj_exists) + + def _datetime_to_string(self, datetime_instance): + return datetime_instance.strftime('%Y-%m-%dT%H:%M:%S') + "Z" diff --git a/tests/test_rfc3339.py b/tests/test_rfc3339.py index 2522222..e047981 100644 --- a/tests/test_rfc3339.py +++ b/tests/test_rfc3339.py @@ -22,7 +22,7 @@ def setUp(self): def test_datetime(self): d = datetime.datetime.now() eq_(rfc3339(d), - d.strftime('%Y-%m-%dT%H:%M:%S') + self.local_timezone) + d.strftime('%Y-%m-%dT%H:%M:%S') + self.local_timezone) def test_datetime_timezone(self): diff --git a/tests/test_riak.py b/tests/test_riak.py index 29143c4..7b33fa2 100644 --- a/tests/test_riak.py +++ b/tests/test_riak.py @@ -5,6 +5,7 @@ import six from mock import ANY, call, MagicMock from nose.tools import eq_, ok_, raises +from mock import MagicMock, call, ANY from sunspear.aggregators.property import PropertyAggregator from sunspear.backends.riak import RiakBackend From 0e822ed0c186a869e3186205ae5c82abc91002b2 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Thu, 3 Nov 2016 14:35:15 -0700 Subject: [PATCH 20/56] Add test for object exists --- sunspear/backends/database/db.py | 5 ++++- tests/test_db.py | 18 +++++++++++++++++- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 40cb39c..5a41a88 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -75,7 +75,10 @@ def clear_all_activities(self): self.engine.execute(schema.tables['activities'].delete()) def obj_exists(self, obj, **kwargs): - return 
self.engine.execute(sql.select([sql.exists().where(schema.tables['objects'].c.id == self.test_obj['id'])])) + obj_id = self._extract_id(obj) + objs_db_table = schema.tables['objects'] + + return self.engine.execute(sql.select([sql.exists().where(objs_db_table.c.id == obj_id)])) def obj_create(self, obj, **kwargs): obj = Object(obj, backend=self) diff --git a/tests/test_db.py b/tests/test_db.py index 9c79166..8228f76 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -33,6 +33,7 @@ def setUpClass(cls): cls._backend = DatabaseBackend(db_connection_string=database_connection_string, verbose=False) cls._backend.drop_tables() cls._engine = cls._backend.engine + cls.now = datetime.datetime.utcnow() @classmethod def tearDownClass(cls): @@ -89,7 +90,7 @@ def _setup_objs(self): 'objectType': u'use\u0403', 'displayName': u'\u019duman S', 'content': u'Foo bar!\u03ee', - 'published': self._datetime_to_string(datetime.datetime.utcnow()), + 'published': self._datetime_to_string(self.now), 'image': { 'url': 'https://www.google.com/cool_image.png', 'displayName': u'Cool \u0268mage', @@ -135,5 +136,20 @@ def test_obj_create(self): ok_(obj_exists) + def test_obj_exists(self): + obj = {'id': 'dsaCDF34V4VvbgzAc', 'objectType': 'user', 'published': self._datetime_to_db_compatibal_str(self.now)} + db_obj = self._backend._obj_dict_to_db_schema(obj) + + objects_table = schema.tables['objects'] + + self._engine.execute(objects_table.insert(), [ + db_obj + ]) + + ok_(self._backend.obj_exists(obj)) + + def _datetime_to_db_compatibal_str(self, datetime_instance): + return datetime_instance.strftime('%Y-%m-%d %H:%M:%S') + def _datetime_to_string(self, datetime_instance): return datetime_instance.strftime('%Y-%m-%dT%H:%M:%S') + "Z" From 9a65e13710ccf702b90d8409dcd9cfcd153ebf44 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Fri, 4 Nov 2016 15:39:03 -0700 Subject: [PATCH 21/56] Start building more of the db api and tests --- sunspear/activitystreams/models.py | 12 +-- 
sunspear/backends/base.py | 6 ++ sunspear/backends/database/db.py | 81 ++++++++++++--- sunspear/backends/database/schema.py | 10 +- sunspear/backends/database/types.py | 1 - tests/test_db.py | 146 ++++++++++++++++++++++++++- 6 files changed, 225 insertions(+), 31 deletions(-) diff --git a/sunspear/activitystreams/models.py b/sunspear/activitystreams/models.py index bdec8fc..39e2c52 100644 --- a/sunspear/activitystreams/models.py +++ b/sunspear/activitystreams/models.py @@ -66,31 +66,31 @@ def validate(self): Object(sub_obj, backend=self._backend).validate() def parse_data(self, data, *args, **kwargs): - #TODO Rename to jsonify_dict + # TODO Rename to jsonify_dict _parsed_data = data.copy() - #parse datetime fields + # parse datetime fields for d in self._datetime_fields: if d in _parsed_data and _parsed_data[d]: _parsed_data[d] = self._parse_date(_parsed_data[d], utc=True, use_system_timezone=False) - #parse object fields + # parse object fields for c in self._object_fields: if c in _parsed_data and _parsed_data[c] and isinstance(_parsed_data[c], Model): _parsed_data[c] = _parsed_data[c].parse_data(_parsed_data[c].get_dict()) - #parse direct and indirect audience targeting + # parse direct and indirect audience targeting for c in self._indirect_audience_targeting_fields + self._direct_audience_targeting_fields: if c in _parsed_data and _parsed_data[c]: _parsed_data[c] = [obj.parse_data(obj.get_dict()) if isinstance(obj, Model) else obj\ for obj in _parsed_data[c]] - #parse media fields + # parse media fields for c in self._media_fields: if c in _parsed_data and _parsed_data[c] and isinstance(_parsed_data[c], Model): _parsed_data[c] = _parsed_data[c].parse_data(_parsed_data[c].get_dict()) - #parse anything that is a dictionary for things like datetime fields that are datetime objects + # parse anything that is a dictionary for things like datetime fields that are datetime objects for k, v in list(_parsed_data.items()): if isinstance(v, dict) and k not in 
self._response_fields: _parsed_data[k] = self.parse_data(v) diff --git a/sunspear/backends/base.py b/sunspear/backends/base.py index abf6e4e..369b2fa 100644 --- a/sunspear/backends/base.py +++ b/sunspear/backends/base.py @@ -1,4 +1,10 @@ +<<<<<<< HEAD from __future__ import absolute_import, division, print_function, unicode_literals +======= +from sunspear.activitystreams.models import Activity, ReplyActivity, LikeActivity +from sunspear.exceptions import ( + SunspearDuplicateEntryException, SunspearInvalidActivityException, SunspearInvalidObjectException) +>>>>>>> Start building more of the db api and tests import copy import uuid diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 5a41a88..6e16308 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -29,8 +29,8 @@ from sqlalchemy.pool import QueuePool from sunspear.activitystreams.models import Activity, Model, Object from sunspear.backends.base import SUB_ACTIVITY_MAP, BaseBackend -from sunspear.exceptions import (SunspearOperationNotSupportedException, - SunspearValidationException) +from sunspear.exceptions import ( + SunspearOperationNotSupportedException, SunspearValidationException, SunspearDuplicateEntryException) from . 
import schema @@ -43,6 +43,21 @@ 'image': 'image', } +DB_ACTIVITY_FIELD_MAPPING = { + 'id': 'id', + 'verb': 'verb', + 'actor': 'actor', + 'object': 'object', + 'target': 'target', + 'author': 'author', + 'generator': 'generator', + 'provider': 'provider', + 'content': 'content', + 'published': 'published', + 'updated': 'updated', + 'icon': 'icon', +} + class DatabaseBackend(BaseBackend): @@ -55,6 +70,14 @@ def __init__(self, db_connection_string=None, verbose=False, poolsize=10, def engine(self): return self._engine + @property + def activities_table(self): + return schema.tables['activities'] + + @property + def objects_table(self): + return schema.tables['objects'] + def _get_connection(self): return self.engine.connect() @@ -72,14 +95,20 @@ def clear_all_objects(self): raise SunspearOperationNotSupportedException() def clear_all_activities(self): - self.engine.execute(schema.tables['activities'].delete()) + self.engine.execute(self.activities_table.delete()) def obj_exists(self, obj, **kwargs): obj_id = self._extract_id(obj) - objs_db_table = schema.tables['objects'] + objs_db_table = self.objects_table return self.engine.execute(sql.select([sql.exists().where(objs_db_table.c.id == obj_id)])) + def activity_exists(self, activity, **kwargs): + activity_id = self._extract_id(activity) + activities_db_table = self.activities_table + + return self.engine.execute(sql.select([sql.exists().where(activities_db_table.c.id == activity_id)])) + def obj_create(self, obj, **kwargs): obj = Object(obj, backend=self) @@ -88,17 +117,40 @@ def obj_create(self, obj, **kwargs): obj_db_schema_dict = self._obj_dict_to_db_schema(obj_dict) - self.engine.execute(schema.tables['objects'].insert(), [obj_db_schema_dict]) + self.engine.execute(self.objects_table.insert(), [obj_db_schema_dict]) return obj_dict - def _obj_dict_to_db_schema(self, obj): + def activity_create(self, activity, **kwargs): + """ + Creates an activity. 
This assumes the activity is already dehydrated (ie has refrences + to the objects and not the actual objects itself) + """ + activity = Activity(activity, backend=self) + + activity.validate() + activity_dict = activity.get_parsed_dict() + + activity_db_schema_dict = self._activity_dict_to_db_schema(activity_dict) + + self.engine.execute(self.activities_table.insert(), [activity_db_schema_dict]) + + def get_new_id(self): + """ + Generates a new unique ID. The default implementation uses uuid1 to + generate a unique ID. + + :return: a new id + """ + return uuid.uuid1().hex + + def _convert_to_db_schema(self, obj, field_mapping): # we make a copy because we will be mutating the dict. # we will map official fields to db fields, and put the rest in `other_data` obj_copy = copy.deepcopy(obj) schema_dict = {} - for obj_field, db_schema_field in DB_OBJ_FIELD_MAPPING.items(): + for obj_field, db_schema_field in field_mapping.items(): if obj_field in obj_copy: data = obj_copy.pop(obj_field) @@ -114,6 +166,12 @@ def _obj_dict_to_db_schema(self, obj): return schema_dict + def _obj_dict_to_db_schema(self, obj): + return self._convert_to_db_schema(obj, DB_OBJ_FIELD_MAPPING) + + def _activity_dict_to_db_schema(self, activity): + return self._convert_to_db_schema(activity, DB_ACTIVITY_FIELD_MAPPING) + def _get_datetime_obj(self, datetime_instance): if isinstance(datetime_instance, basestring): datetime_instance = parse(datetime_instance) @@ -132,12 +190,3 @@ def _get_db_compatiable_date_string(self, datetime_instance): datetime_instance = self._get_datetime_obj(datetime_instance) return datetime_instance.strftime('%Y-%m-%d %H:%M:%S') - - def get_new_id(self): - """ - Generates a new unique ID. The default implementation uses uuid1 to - generate a unique ID. 
- - :return: a new id - """ - return uuid.uuid1().hex diff --git a/sunspear/backends/database/schema.py b/sunspear/backends/database/schema.py index 1d5a86c..43a7ba2 100644 --- a/sunspear/backends/database/schema.py +++ b/sunspear/backends/database/schema.py @@ -18,11 +18,11 @@ Column('id', String(32), primary_key=True), Column('verb', String(256, convert_unicode=True), nullable=False), Column('actor', ForeignKey('objects.id', ondelete='CASCADE'), nullable=False), - Column('object', ForeignKey('objects.id', ondelete='CASCADE')), - Column('target', ForeignKey('objects.id', ondelete='CASCADE')), - Column('author', ForeignKey('objects.id', ondelete='CASCADE')), - Column('generator', String(1024, convert_unicode=True)), - Column('provider', String(1024, convert_unicode=True)), + Column('object', ForeignKey('objects.id', ondelete='SET NULL')), + Column('target', ForeignKey('objects.id', ondelete='SET NULL')), + Column('author', ForeignKey('objects.id', ondelete='SET NULL')), + Column('generator', ForeignKey('objects.id', ondelete='SET NULL')), + Column('provider', ForeignKey('objects.id', ondelete='SET NULL')), Column('content', Text), Column('published', DateTime(timezone=True), nullable=False), Column('updated', DateTime(timezone=True)), diff --git a/sunspear/backends/database/types.py b/sunspear/backends/database/types.py index 5dd2ed1..a2b5347 100644 --- a/sunspear/backends/database/types.py +++ b/sunspear/backends/database/types.py @@ -2,7 +2,6 @@ from sqlalchemy import String from sqlalchemy.types import TypeDecorator, CHAR, VARCHAR, TEXT from sqlalchemy.dialects.postgresql import UUID -from sqlalchemy.ext.mutable import MutableDict import json import uuid diff --git a/tests/test_db.py b/tests/test_db.py index 8228f76..3da3080 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -80,6 +80,7 @@ def _setup_db(cls, db_name): def setUp(self): self._backend.create_tables() self._setup_objs() + self._setup_activities() def tearDown(self): self._backend.drop_tables() 
@@ -104,6 +105,121 @@ def _setup_objs(self): self.test_obj = self.test_objs[0] + def _setup_activities(self): + self.test_activities = [{ + 'id': 'WvgYP43bfg64fsdDHt3', + 'verb': 'join', + 'actor': 'user:1', + 'object': 'recognition:1', + 'target': 'badge:2', + 'author': 'user:435', + 'generator': 'mobile:phone:android', + 'provider': 'mobile:phone:android', + 'content': 'foo baz', + 'published': self.now, + 'updated': self.now, + 'icon': { + 'url': 'https://www.google.com/cool_image.png', + 'displayName': u'Cool \u0268mage', + 'width': '500px', + 'height': '500px' + }, + 'foo': 'bar', + 'baz': u'go\u0298', + 'zoo': {'zee': 12, 'tim': {'zde': u'\u0268\u0298'}}, + }] + + self.test_objs_for_activities = [{ + 'id': 'user:1', + 'objectType': u'use\u0403', + 'displayName': u'\u019duman S1', + 'content': u'Foo bar!\u03ee', + 'published': self._datetime_to_string(self.now), + 'image': { + 'url': 'https://www.google.com/cool_image.png', + 'displayName': u'Cool \u0268mage', + 'width': '500px', + 'height': '500px' + }, + 'foo': 'bar', + 'baz': u'go\u0298', + 'zoo': {'zee': 12, 'tim': {'zde': u'\u0268\u0298'}} + }, { + 'id': 'recognition:1', + 'objectType': u'use\u0403', + 'displayName': u'\u019dRecognitionBadge', + 'content': u'Good Work on everything\u03ee', + 'published': self._datetime_to_string(self.now), + 'image': { + 'url': 'https://www.google.com/cool_image.png', + 'displayName': u'Cool \u0268mage', + 'width': '500px', + 'height': '500px' + }, + 'foo': 'bar', + 'baz': u'go\u0298', + 'zoo': {'zee': 12, 'tim': {'zde': u'\u0268\u0298'}} + }, { + 'id': 'badge:2', + 'objectType': u'use\u0403', + 'displayName': u'\u019dAwesomeness', + 'content': u'Just for being awesome\u03ee', + 'published': self._datetime_to_string(self.now), + 'image': { + 'url': 'https://www.google.com/cool_image.png', + 'displayName': u'Cool \u0268mage', + 'width': '500px', + 'height': '500px' + }, + 'foo': 'bar', + 'baz': u'go\u0298', + 'zoo': {'zee': 12, 'tim': {'zde': u'\u0268\u0298'}} + }, { + 
'id': 'user:435', + 'objectType': u'use\u0403', + 'displayName': u'\u019duman S435', + 'content': u'Foo bar!\u03ee', + 'published': self._datetime_to_string(self.now), + 'image': { + 'url': 'https://www.google.com/cool_image.png', + 'displayName': u'Cool \u0268mage', + 'width': '500px', + 'height': '500px' + }, + 'foo': 'bar', + 'baz': u'go\u0298', + 'zoo': {'zee': 12, 'tim': {'zde': u'\u0268\u0298'}} + }, { + 'id': 'mobile:phone:android', + 'objectType': u'androidmobilephone\u0403', + 'displayName': u'\u019dobile Phone Android', + 'content': u'Foo bar!\u03ee', + 'published': self._datetime_to_string(self.now), + 'image': { + 'url': 'https://www.google.com/cool_image.png', + 'displayName': u'Cool \u0268mage', + 'width': '500px', + 'height': '500px' + }, + 'foo': 'bar', + 'baz': u'go\u0298', + 'zoo': {'zee': 12, 'tim': {'zde': u'\u0268\u0298'}} + }] + + self.test_activity = self.test_activities[0] + + self.hydrated_test_activity = self._build_hydrated_activity(self.test_activity, self.test_objs_for_activities) + + def _build_hydrated_activity(self, dehydrated_activity, objs): + hydrated_activity = copy.deepcopy(dehydrated_activity) + for obj_field in Model._object_fields: + if obj_field in hydrated_activity: + obj_id = hydrated_activity[obj_field] + obj = [obj for obj in objs if obj['id'] == obj_id][0] + hydrated_activity[obj_field] = obj + + return hydrated_activity + @raises(SunspearOperationNotSupportedException) def test_sample_test(self): self._backend.clear_all_objects() @@ -137,8 +253,7 @@ def test_obj_create(self): ok_(obj_exists) def test_obj_exists(self): - obj = {'id': 'dsaCDF34V4VvbgzAc', 'objectType': 'user', 'published': self._datetime_to_db_compatibal_str(self.now)} - db_obj = self._backend._obj_dict_to_db_schema(obj) + db_obj = self._backend._obj_dict_to_db_schema(self.test_obj) objects_table = schema.tables['objects'] @@ -146,7 +261,32 @@ def test_obj_exists(self): db_obj ]) - ok_(self._backend.obj_exists(obj)) + 
ok_(self._backend.obj_exists(self.test_obj)) + + def test_activity_exists(self): + db_activity = self._backend._activity_dict_to_db_schema(self.test_activity) + db_objs = map(self._backend._obj_dict_to_db_schema, self.test_objs_for_activities) + + activities_table = schema.tables['activities'] + objects_table = schema.tables['objects'] + + self._engine.execute(objects_table.insert(), db_objs) + + self._engine.execute(activities_table.insert(), [ + db_activity + ]) + + ok_(self._backend.activity_exists(self.test_activity)) + + def test_activity_create(self): + db_objs = map(self._backend._obj_dict_to_db_schema, self.test_objs_for_activities) + + objects_table = schema.tables['objects'] + self._engine.execute(objects_table.insert(), db_objs) + + self._backend.activity_create(self.test_activity) + + ok_(self._backend.activity_exists(self.test_activity)) def _datetime_to_db_compatibal_str(self, datetime_instance): return datetime_instance.strftime('%Y-%m-%d %H:%M:%S') From accb4349eb016b10ade167c1f94c17fe672ed94b Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Sat, 12 Nov 2016 17:59:56 -0800 Subject: [PATCH 22/56] Add converter from obj dict to activity stream schema for rehydration --- sunspear/activitystreams/models.py | 3 +- sunspear/backends/base.py | 17 ++-- sunspear/backends/database/db.py | 137 ++++++++++++++++++++++++++--- tests/test_db.py | 91 +++++++++++++++++-- 4 files changed, 221 insertions(+), 27 deletions(-) diff --git a/sunspear/activitystreams/models.py b/sunspear/activitystreams/models.py index 39e2c52..0297d94 100644 --- a/sunspear/activitystreams/models.py +++ b/sunspear/activitystreams/models.py @@ -98,8 +98,7 @@ def parse_data(self, data, *args, **kwargs): return _parsed_data def get_parsed_dict(self, *args, **kwargs): - - #we are suppose to maintain our own published and updated fields + # we are suppose to maintain our own published and updated fields if not self._dict.get('published', None): self._dict['published'] = 
datetime.datetime.utcnow() elif 'updated' in self._reserved_fields: diff --git a/sunspear/backends/base.py b/sunspear/backends/base.py index 369b2fa..9047513 100644 --- a/sunspear/backends/base.py +++ b/sunspear/backends/base.py @@ -56,6 +56,15 @@ def activity_exists(self, activity, **kwargs): """ raise NotImplementedError() + def _resolve_activity_id(self, activity, **kwargs): + activity_id = self._extract_id(activity) + if activity_id and self.activity_exists(activity, **kwargs): + raise SunspearDuplicateEntryException() + else: + activity_id = self.get_new_id() + + return activity_id + #TODO: Tests def create_activity(self, activity, **kwargs): """ @@ -72,12 +81,8 @@ def create_activity(self, activity, **kwargs): :raises: ``SunspearDuplicateEntryException`` if the record already exists in the database. :return: dict representing the new activity. """ - activity_id = self._extract_id(activity) - if activity_id: - if self.activity_exists(activity, **kwargs): - raise SunspearDuplicateEntryException() - else: - activity['id'] = self.get_new_id() + activity_id = self._resolve_activity_id(activity, **kwargs) + activity['id'] = activity_id activity_copy = copy.copy(activity) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 6e16308..2add9fd 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -97,29 +97,52 @@ def clear_all_objects(self): def clear_all_activities(self): self.engine.execute(self.activities_table.delete()) + def obj_create(self, obj, **kwargs): + obj_dict = self._get_parsed_and_validated_obj_dict(obj) + obj_db_schema_dict = self._obj_dict_to_db_schema(obj_dict) + + self.engine.execute(self.objects_table.insert(), [obj_db_schema_dict]) + + return obj_dict + def obj_exists(self, obj, **kwargs): obj_id = self._extract_id(obj) objs_db_table = self.objects_table - return self.engine.execute(sql.select([sql.exists().where(objs_db_table.c.id == obj_id)])) + return 
self.engine.execute(sql.select([sql.exists().where(objs_db_table.c.id == obj_id)])).scalar() - def activity_exists(self, activity, **kwargs): - activity_id = self._extract_id(activity) - activities_db_table = self.activities_table + def obj_update(self, obj, **kwargs): + obj_dict = self._get_parsed_and_validated_obj_dict(obj) + obj_id = self._extract_id(obj_dict) + obj_db_schema_dict = self._obj_dict_to_db_schema(obj_dict) - return self.engine.execute(sql.select([sql.exists().where(activities_db_table.c.id == activity_id)])) + self.engine.execute( + self.objects_table.update().where(self.objects_table.c.id == obj_id).values(**obj_db_schema_dict)) - def obj_create(self, obj, **kwargs): - obj = Object(obj, backend=self) + def obj_get(self, obj, **kwargs): + """ + Given a list of object ids, returns a list of objects + """ + if not obj: + return obj - obj.validate() - obj_dict = obj.get_parsed_dict() + obj_ids = [self._extract_id(o) for o in obj] - obj_db_schema_dict = self._obj_dict_to_db_schema(obj_dict) + s = self._get_select_multiple_objects_query(obj_ids) + results = self.engine.execute(s).fetchall() + results = map(self._db_schema_to_obj_dict, results) - self.engine.execute(self.objects_table.insert(), [obj_db_schema_dict]) + return results - return obj_dict + def obj_delete(self, obj, **kwargs): + obj_id = self._extract_id(obj) + self._objects.new(key=obj_id).delete() + + def activity_exists(self, activity, **kwargs): + activity_id = self._extract_id(activity) + activities_db_table = self.activities_table + + return self.engine.execute(sql.select([sql.exists().where(activities_db_table.c.id == activity_id)])).scalar() def activity_create(self, activity, **kwargs): """ @@ -135,6 +158,56 @@ def activity_create(self, activity, **kwargs): self.engine.execute(self.activities_table.insert(), [activity_db_schema_dict]) + def create_activity(self, activity, **kwargs): + activity_id = self._resolve_activity_id(activity, **kwargs) + activity['id'] = activity_id + + 
activity_copy = copy.copy(activity) + + activity_objs = {} + ids_of_objs_with_no_dict = [] + + for key, value in activity_copy.items(): + if key in Activity._object_fields: + if isinstance(value, dict): + activity_obj_id = self._extract_id(value) + activity_objs[activity_obj_id] = value + + activity[key] = activity_obj_id + else: + ids_of_objs_with_no_dict.append(value) + + # For all of the objects in the activity, find out which ones actually already have existing + # objects in the database + obj_ids = self._flatten([ids_of_objs_with_no_dict, activity_objs.keys()]) + + s = self._get_select_multiple_objects_query(obj_ids) + results = self.engine.execute(s).fetchall() + results = self._flatten(results) + + objs_need_to_be_inserted = [] + objs_need_to_be_updated = [] + + for obj_id, obj in activity_objs.items(): + parsed_validated_schema_dict = self._get_parsed_and_validated_obj_dict(obj) + parsed_validated_schema_dict = self._obj_dict_to_db_schema(parsed_validated_schema_dict) + if obj_id not in results: + objs_need_to_be_inserted.append(parsed_validated_schema_dict) + else: + objs_need_to_be_updated.append(parsed_validated_schema_dict) + + # Upsert all objects for the activity + with self.engine.begin() as connection: + if objs_need_to_be_inserted: + connection.execute(self.objects_table.insert(), objs_need_to_be_inserted) + for obj in objs_need_to_be_updated: + connection.execute( + self.objects_table.update().where(self.objects_table.c.id == self._extract_id(obj)).values(**obj)) + + return_val = self.activity_create(activity, **kwargs) + + return return_val + def get_new_id(self): """ Generates a new unique ID. The default implementation uses uuid1 to @@ -166,12 +239,37 @@ def _convert_to_db_schema(self, obj, field_mapping): return schema_dict + def _convert_to_activity_stream_schema(self, schema_dict, field_mapping): + # we make a copy because we will be mutating the dict. 
+ # we will map official fields to db fields, and put the rest in `other_data` + obj_dict = {} + + for obj_field, db_schema_field in field_mapping.items(): + if db_schema_field in schema_dict: + data = schema_dict[db_schema_field] + + # SQLAlchemy requires datetime fields to be datetime instances + if obj_field in Model._datetime_fields: + data = self._get_datetime_obj(data) + obj_dict[obj_field] = data + + if 'other_data' in schema_dict: + obj_dict.update(schema_dict['other_data']) + + return obj_dict + def _obj_dict_to_db_schema(self, obj): return self._convert_to_db_schema(obj, DB_OBJ_FIELD_MAPPING) def _activity_dict_to_db_schema(self, activity): return self._convert_to_db_schema(activity, DB_ACTIVITY_FIELD_MAPPING) + def _db_schema_to_obj_dict(self, obj): + return self._convert_to_activity_stream_schema(obj, DB_OBJ_FIELD_MAPPING) + + def _db_schema_to_activity_dict(self, activity): + return self._convert_to_activity_stream_schema(activity, DB_ACTIVITY_FIELD_MAPPING) + def _get_datetime_obj(self, datetime_instance): if isinstance(datetime_instance, basestring): datetime_instance = parse(datetime_instance) @@ -190,3 +288,18 @@ def _get_db_compatiable_date_string(self, datetime_instance): datetime_instance = self._get_datetime_obj(datetime_instance) return datetime_instance.strftime('%Y-%m-%d %H:%M:%S') + + def _flatten(self, list_of_lists): + return [item for sublist in list_of_lists for item in sublist] + + def _get_parsed_and_validated_obj_dict(self, obj): + obj = Object(obj, backend=self) + + obj.validate() + obj_dict = obj.get_parsed_dict() + + return obj_dict + + def _get_select_multiple_objects_query(self, obj_ids): + s = sql.select([self.objects_table.c.id]).where(self.objects_table.c.id.in_(obj_ids)) + return s diff --git a/tests/test_db.py b/tests/test_db.py index 3da3080..7c5253f 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -1,11 +1,11 @@ from __future__ import absolute_import -from nose.tools import ok_, eq_, raises +from nose.tools import 
assert_raises, ok_, eq_, raises from sqlalchemy import create_engine, sql +from sqlalchemy.exc import IntegrityError from sunspear.backends.database.db import * from sunspear.exceptions import SunspearOperationNotSupportedException -from sunspear.backends.database import schema from sunspear.activitystreams.models import Model import copy @@ -81,10 +81,33 @@ def setUp(self): self._backend.create_tables() self._setup_objs() self._setup_activities() + self._setup_db_schema_dicts() def tearDown(self): self._backend.drop_tables() + def _setup_db_schema_dicts(self): + self.test_db_schema_dicts = [{ + 'id': 'AxsdSG244BfduiIZ', + 'object_type': u'use\u0403', + 'display_name': u'\u019duman S', + 'content': u'Foo bar!\u03ee', + 'published': self._datetime_to_string(self.now), + 'image': { + 'url': 'https://www.google.com/cool_image.png', + 'displayName': u'Cool \u0268mage', + 'width': '500px', + 'height': '500px' + }, + 'other_data': { + 'foo': 'bar', + 'baz': u'go\u0298', + 'zoo': {'zee': 12, 'tim': {'zde': u'\u0268\u0298'}} + } + }] + + self.test_db_schema_dict = self.test_db_schema_dicts[0] + def _setup_objs(self): self.test_objs = [{ 'id': 'AxsdSG244BfduiIZ', @@ -245,30 +268,51 @@ def test__obj_dict_to_db_schema(self): # Everything was placed in other_data eq_(obj_dict_copy, db_schema_dict['other_data']) + def test_db_schema_to_obj_dict(self): + db_schema_dict = self.test_db_schema_dict + db_schema_dict_copy = copy.deepcopy(db_schema_dict) + + obj_dict = self._backend._db_schema_to_obj_dict(db_schema_dict) + + # Confirm the original dict was not modified + eq_(db_schema_dict, db_schema_dict_copy) + + for obj_field, db_schema_field in DB_OBJ_FIELD_MAPPING.items(): + data = db_schema_dict[db_schema_field] + if obj_field in Model._datetime_fields: + data = self._backend._get_datetime_obj(data) + + eq_(data, obj_dict[obj_field]) + + for key, value in db_schema_dict['other_data'].items(): + eq_(obj_dict[key], value) + def test_obj_create(self): 
self._backend.obj_create(self.test_obj) - obj_exists = self._engine.execute(sql.select([sql.exists().where(schema.tables['objects'].c.id == self.test_obj['id'])])) + obj_exists = self._engine.execute( + sql.select([sql.exists().where(self._backend.objects_table.c.id == self.test_obj['id'])])) ok_(obj_exists) def test_obj_exists(self): db_obj = self._backend._obj_dict_to_db_schema(self.test_obj) - objects_table = schema.tables['objects'] + objects_table = self._backend.objects_table self._engine.execute(objects_table.insert(), [ db_obj ]) ok_(self._backend.obj_exists(self.test_obj)) + ok_(not self._backend.obj_exists('someunknownid')) def test_activity_exists(self): db_activity = self._backend._activity_dict_to_db_schema(self.test_activity) db_objs = map(self._backend._obj_dict_to_db_schema, self.test_objs_for_activities) - activities_table = schema.tables['activities'] - objects_table = schema.tables['objects'] + activities_table = self._backend.activities_table + objects_table = self._backend.objects_table self._engine.execute(objects_table.insert(), db_objs) @@ -277,17 +321,50 @@ def test_activity_exists(self): ]) ok_(self._backend.activity_exists(self.test_activity)) + ok_(not self._backend.activity_exists('someunknownid')) def test_activity_create(self): db_objs = map(self._backend._obj_dict_to_db_schema, self.test_objs_for_activities) - objects_table = schema.tables['objects'] + objects_table = self._backend.objects_table self._engine.execute(objects_table.insert(), db_objs) self._backend.activity_create(self.test_activity) ok_(self._backend.activity_exists(self.test_activity)) + def test_create_activity(self): + self._backend.create_activity(self.hydrated_test_activity) + ok_(self._backend.activity_exists(self.hydrated_test_activity)) + + def test_create_activity_with_already_existing_objs(self): + db_objs = map(self._backend._obj_dict_to_db_schema, self.test_objs_for_activities) + objects_table = self._backend.objects_table + 
self._engine.execute(objects_table.insert(), db_objs) + + self._backend.create_activity(self.hydrated_test_activity) + ok_(self._backend.activity_exists(self.hydrated_test_activity)) + + def test_create_activity_with_some_already_existing_objs(self): + db_objs = map(self._backend._obj_dict_to_db_schema, self.test_objs_for_activities) + objects_table = self._backend.objects_table + self._engine.execute(objects_table.insert(), db_objs[1:]) + + self._backend.create_activity(self.hydrated_test_activity) + ok_(self._backend.activity_exists(self.hydrated_test_activity)) + + def test_create_activity_with_only_ids_for_objs(self): + db_objs = map(self._backend._obj_dict_to_db_schema, self.test_objs_for_activities) + objects_table = self._backend.objects_table + self._engine.execute(objects_table.insert(), db_objs) + + self._backend.create_activity(self.test_activity) + ok_(self._backend.activity_exists(self.test_activity)) + + def test_create_activity_with_non_existing_objects_doesnt_work(self): + assert_raises(IntegrityError, self._backend.create_activity, self.test_activity) + ok_(not self._backend.activity_exists(self.test_activity)) + def _datetime_to_db_compatibal_str(self, datetime_instance): return datetime_instance.strftime('%Y-%m-%d %H:%M:%S') From 163ae8f61264b335298297c73517dc171087ff9b Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Thu, 18 May 2017 12:51:25 -0700 Subject: [PATCH 23/56] Fix issue with obj_get. 
Finish implementing activity_create --- setup.py | 1 + sunspear/activitystreams/models.py | 2 +- sunspear/backends/database/db.py | 44 ++++++++++++++++++++---- sunspear/backends/database/schema.py | 11 ++---- sunspear/backends/riak.py | 7 ---- tests/test_db.py | 51 ++++++++++++++++++++-------- 6 files changed, 79 insertions(+), 37 deletions(-) diff --git a/setup.py b/setup.py index 1a1e722..2d1d363 100644 --- a/setup.py +++ b/setup.py @@ -28,6 +28,7 @@ 'riak==2.5.4', 'protobuf==2.6.1', 'sqlalchemy==1.1.3', + 'six' ], options={'easy_install': {'allow_hosts': 'pypi.python.org'}}, tests_require=tests_require, diff --git a/sunspear/activitystreams/models.py b/sunspear/activitystreams/models.py index 0297d94..5e5c4a1 100644 --- a/sunspear/activitystreams/models.py +++ b/sunspear/activitystreams/models.py @@ -48,7 +48,7 @@ def validate(self): for field in self._reserved_fields: if self._dict.get(field, None) is not None\ and field not in ['updated', 'published']: - #updated and publised are special eceptions because if they are in reserved fields, the'll be overridden + # updated and publised are special eceptions because if they are in reserved fields, the'll be overridden raise SunspearValidationException("Reserved field name used: %s" % field) for field in self._media_fields: diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 2add9fd..567db2b 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -15,12 +15,14 @@ specific language governing permissions and limitations under the License. 
""" -from __future__ import absolute_import +from __future__ import absolute_import, unicode_literals import calendar import copy import datetime import uuid +import six +import json from dateutil.parser import parse from dateutil import tz @@ -58,13 +60,15 @@ 'icon': 'icon', } +DICT_FIELDS = ['image', 'other_data', 'icon', ] + class DatabaseBackend(BaseBackend): def __init__(self, db_connection_string=None, verbose=False, poolsize=10, max_overflow=5, **kwargs): self._engine = create_engine(db_connection_string, echo=verbose, poolclass=QueuePool, - pool_size=poolsize, max_overflow=max_overflow) + pool_size=poolsize, max_overflow=max_overflow, convert_unicode=True) @property def engine(self): @@ -127,8 +131,8 @@ def obj_get(self, obj, **kwargs): return obj obj_ids = [self._extract_id(o) for o in obj] - s = self._get_select_multiple_objects_query(obj_ids) + results = self.engine.execute(s).fetchall() results = map(self._db_schema_to_obj_dict, results) @@ -136,7 +140,9 @@ def obj_get(self, obj, **kwargs): def obj_delete(self, obj, **kwargs): obj_id = self._extract_id(obj) - self._objects.new(key=obj_id).delete() + + stmt = self.objects_table.delete().where(self.objects_table.c.id == obj_id) + self.engine.execute(stmt) def activity_exists(self, activity, **kwargs): activity_id = self._extract_id(activity) @@ -208,6 +214,13 @@ def create_activity(self, activity, **kwargs): return return_val + def activity_get(self, activity_ids, **kwargs): + activity_ids = map(self._extract_id, activity_ids) + if not activity_ids: + return [] + + activities = None + def get_new_id(self): """ Generates a new unique ID. 
The default implementation uses uuid1 to @@ -227,8 +240,9 @@ def _convert_to_db_schema(self, obj, field_mapping): if obj_field in obj_copy: data = obj_copy.pop(obj_field) - # SQLAlchemy requires datetime fields to be datetime instances + # SQLAlchemy requires datetime fields to be datetime strings if obj_field in Model._datetime_fields: + data = self._get_datetime_obj(data) data = self._get_db_compatiable_date_string(data) schema_dict[db_schema_field] = data @@ -239,6 +253,11 @@ def _convert_to_db_schema(self, obj, field_mapping): return schema_dict + def _need_to_parse_json(self, schema_field_name, data): + if schema_field_name in DICT_FIELDS and isinstance(data, six.string_types) and data: + return True + return False + def _convert_to_activity_stream_schema(self, schema_dict, field_mapping): # we make a copy because we will be mutating the dict. # we will map official fields to db fields, and put the rest in `other_data` @@ -247,14 +266,21 @@ def _convert_to_activity_stream_schema(self, schema_dict, field_mapping): for obj_field, db_schema_field in field_mapping.items(): if db_schema_field in schema_dict: data = schema_dict[db_schema_field] + if self._need_to_parse_json(db_schema_field, data): + data = json.loads(data) # SQLAlchemy requires datetime fields to be datetime instances if obj_field in Model._datetime_fields: data = self._get_datetime_obj(data) + data = '{}Z'.format(data.isoformat()) + obj_dict[obj_field] = data if 'other_data' in schema_dict: - obj_dict.update(schema_dict['other_data']) + other_data = schema_dict['other_data'] + if self._need_to_parse_json('other_data', other_data): + other_data = json.loads(other_data) + obj_dict.update(other_data) return obj_dict @@ -301,5 +327,9 @@ def _get_parsed_and_validated_obj_dict(self, obj): return obj_dict def _get_select_multiple_objects_query(self, obj_ids): - s = sql.select([self.objects_table.c.id]).where(self.objects_table.c.id.in_(obj_ids)) + s = 
sql.select(['*']).where(self.objects_table.c.id.in_(obj_ids)) + return s + + def _get_select_multiple_activities_query(self, activity_ids): + s = sql.select(['*']).where(self.activities_table.c.id.in_(activity_ids)) return s diff --git a/sunspear/backends/database/schema.py b/sunspear/backends/database/schema.py index 43a7ba2..12ea5cc 100644 --- a/sunspear/backends/database/schema.py +++ b/sunspear/backends/database/schema.py @@ -6,8 +6,8 @@ objects_table = Table('objects', metadata, Column('id', String(32), primary_key=True), - Column('object_type', String(256, convert_unicode=True), nullable=False), - Column('display_name', String(256, convert_unicode=True)), + Column('object_type', String(256), nullable=False), + Column('display_name', String(256)), Column('content', Text), Column('published', DateTime(timezone=True), nullable=False), Column('updated', DateTime(timezone=True)), @@ -16,7 +16,7 @@ activities_table = Table('activities', metadata, Column('id', String(32), primary_key=True), - Column('verb', String(256, convert_unicode=True), nullable=False), + Column('verb', String(256), nullable=False), Column('actor', ForeignKey('objects.id', ondelete='CASCADE'), nullable=False), Column('object', ForeignKey('objects.id', ondelete='SET NULL')), Column('target', ForeignKey('objects.id', ondelete='SET NULL')), @@ -47,11 +47,6 @@ Column('other_data', custom_types.JSONDict()), UniqueConstraint('actor', 'in_reply_to')) -shared_with_fields = (Column('id', Integer, primary_key=True), - Column('object', ForeignKey('objects.id', ondelete='CASCADE')), - Column('activity', ForeignKey('activities.id', ondelete='CASCADE')), - UniqueConstraint('object', 'activity')) - to_table = Table('to', metadata, Column('id', Integer, primary_key=True), Column('object', ForeignKey('objects.id', ondelete='CASCADE')), diff --git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index 01426c0..6211ec1 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -670,7 +670,6 
@@ def _get_many_activities(self, activity_ids=[], raw_filter="", filters=None, inc return reordered_results -<<<<<<< HEAD def _extract_id(self, activity_or_id): """ Helper that returns an id if the activity has one. @@ -691,8 +690,6 @@ def _extract_id(self, activity_or_id): pass return this_id -======= ->>>>>>> Started to add initial scaffolding for testing def _get_timestamp(self): """ returns a unix timestamp representing the ``datetime`` object @@ -708,7 +705,3 @@ def get_new_id(self): :return: a new id """ return uuid.uuid1().hex -<<<<<<< HEAD -======= - ->>>>>>> Started to add initial scaffolding for testing diff --git a/tests/test_db.py b/tests/test_db.py index 7c5253f..437c05c 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -1,17 +1,16 @@ from __future__ import absolute_import -from nose.tools import assert_raises, ok_, eq_, raises +import copy +import datetime +import os + from sqlalchemy import create_engine, sql from sqlalchemy.exc import IntegrityError - +from sunspear.activitystreams.models import Model from sunspear.backends.database.db import * from sunspear.exceptions import SunspearOperationNotSupportedException -from sunspear.activitystreams.models import Model - -import copy -import os -import datetime +from nose.tools import assert_raises, eq_, ok_, raises DB_CONNECTION_STRING = os.environ.get('DB_CONNECTION_STRING', 'mysql://root:@localhost') DB_TYPE = os.environ.get('DB_TYPE', 'mysql') @@ -46,7 +45,7 @@ def get_connection_string(cls): @classmethod def get_connection_string_with_database(cls, database_name): - return '{0}/{1}'.format(cls.get_connection_string(), database_name) + return '{0}/{1}?charset=utf8'.format(cls.get_connection_string(), database_name) @classmethod def _cleanup_db(cls, db_name): @@ -243,6 +242,15 @@ def _build_hydrated_activity(self, dehydrated_activity, objs): return hydrated_activity + def _insert_obj(self, obj): + db_obj = self._backend._obj_dict_to_db_schema(obj) + + objects_table = self._backend.objects_table 
+ + self._engine.execute(objects_table.insert(), [ + db_obj + ]) + @raises(SunspearOperationNotSupportedException) def test_sample_test(self): self._backend.clear_all_objects() @@ -281,6 +289,7 @@ def test_db_schema_to_obj_dict(self): data = db_schema_dict[db_schema_field] if obj_field in Model._datetime_fields: data = self._backend._get_datetime_obj(data) + data = '{}Z'.format(data.isoformat()) eq_(data, obj_dict[obj_field]) @@ -296,16 +305,30 @@ def test_obj_create(self): ok_(obj_exists) def test_obj_exists(self): - db_obj = self._backend._obj_dict_to_db_schema(self.test_obj) + self._insert_obj(self.test_obj) + + ok_(self._backend.obj_exists(self.test_obj)) + ok_(not self._backend.obj_exists('someunknownid')) + def test_obj_delete(self): objects_table = self._backend.objects_table + obj_id = self.test_obj['id'] - self._engine.execute(objects_table.insert(), [ - db_obj - ]) + self._insert_obj(self.test_obj) - ok_(self._backend.obj_exists(self.test_obj)) - ok_(not self._backend.obj_exists('someunknownid')) + self._backend.obj_delete(obj_id) + + exists = self._engine.execute(sql.select([sql.exists().where(objects_table.c.id == obj_id)])).scalar() + ok_(not exists) + + def test_obj_get(self): + obj_id = self.test_obj['id'] + + self._insert_obj(self.test_obj) + print self.test_obj + + objs = self._backend.obj_get([obj_id]) + eq_(objs[0], self.test_obj) def test_activity_exists(self): db_activity = self._backend._activity_dict_to_db_schema(self.test_activity) From 5f759dbbcf46f1f6aad1b301ba153fdb74bdce35 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Tue, 23 May 2017 12:26:54 -0700 Subject: [PATCH 24/56] Add the ability to retrieve an activity --- sunspear/backends/base.py | 76 +++++++++++++++++++++++++++----- sunspear/backends/database/db.py | 48 ++++++++++++++++---- sunspear/backends/riak.py | 26 +++++------ tests/test_db.py | 34 ++++++++++---- 4 files changed, 142 insertions(+), 42 deletions(-) diff --git a/sunspear/backends/base.py b/sunspear/backends/base.py 
index 9047513..18734aa 100644 --- a/sunspear/backends/base.py +++ b/sunspear/backends/base.py @@ -1,17 +1,13 @@ -<<<<<<< HEAD from __future__ import absolute_import, division, print_function, unicode_literals -======= -from sunspear.activitystreams.models import Activity, ReplyActivity, LikeActivity -from sunspear.exceptions import ( - SunspearDuplicateEntryException, SunspearInvalidActivityException, SunspearInvalidObjectException) ->>>>>>> Start building more of the db api and tests import copy import uuid -from sunspear.activitystreams.models import Activity, LikeActivity, ReplyActivity -from sunspear.exceptions import ( - SunspearDuplicateEntryException, SunspearInvalidActivityException, SunspearInvalidObjectException) +from sunspear.activitystreams.models import (Activity, LikeActivity, Model, + ReplyActivity) +from sunspear.exceptions import (SunspearDuplicateEntryException, + SunspearInvalidActivityException, + SunspearInvalidObjectException) __all__ = ('BaseBackend', 'SUB_ACTIVITY_MAP') @@ -58,14 +54,17 @@ def activity_exists(self, activity, **kwargs): def _resolve_activity_id(self, activity, **kwargs): activity_id = self._extract_id(activity) - if activity_id and self.activity_exists(activity, **kwargs): + if activity_id: + if self.activity_exists(activity, **kwargs): raise SunspearDuplicateEntryException() + else: + activity_id = activity_id else: activity_id = self.get_new_id() return activity_id - #TODO: Tests + # TODO: Tests def create_activity(self, activity, **kwargs): """ Stores a new ``activity`` in the backend. 
If an object with the same id already exists in @@ -406,6 +405,33 @@ def get_new_id(self): """ return uuid.uuid1().hex + def _extract_object_keys(self, activity, skip_sub_activities=False): + keys = [] + for object_key in Model._object_fields + Activity._direct_audience_targeting_fields \ + + Activity._indirect_audience_targeting_fields: + if object_key not in activity: + continue + objects = activity.get(object_key) + if isinstance(objects, dict): + if objects.get('objectType', None) == 'activity': + keys = keys + self._extract_object_keys(objects) + if objects.get('inReplyTo', None): + [keys.extend(self._extract_object_keys(in_reply_to_obj, skip_sub_activities=skip_sub_activities)) \ + for in_reply_to_obj in objects['inReplyTo']] + if isinstance(objects, list): + for item in objects: + if isinstance(item, basestring): + keys.append(item) + if isinstance(objects, basestring): + keys.append(objects) + + if not skip_sub_activities: + for collection in Activity._response_fields: + if collection in activity and activity[collection]['items']: + for item in activity[collection]['items']: + keys.extend(self._extract_object_keys(item)) + return keys + def _extract_id(self, activity_or_id): """ Helper that returns an id if the activity has one. 
@@ -425,3 +451,31 @@ def _extract_id(self, activity_or_id): except: pass return this_id + + def _dehydrate_object_keys(self, activity, objects_dict, skip_sub_activities=False): + for object_key in Model._object_fields + Activity._direct_audience_targeting_fields \ + + Activity._indirect_audience_targeting_fields: + if object_key not in activity: + continue + activity_objects = activity.get(object_key) + if isinstance(activity_objects, dict): + if activity_objects.get('objectType', None) == 'activity': + activity[object_key] = self._dehydrate_object_keys(activity_objects, objects_dict, skip_sub_activities=skip_sub_activities) + if activity_objects.get('inReplyTo', None): + for i, in_reply_to_obj in enumerate(activity_objects['inReplyTo']): + activity_objects['inReplyTo'][i] = \ + self._dehydrate_object_keys(activity_objects['inReplyTo'][i], \ + objects_dict, skip_sub_activities=skip_sub_activities) + if isinstance(activity_objects, list): + for i, obj_id in enumerate(activity_objects): + if isinstance(activity[object_key][i], basestring): + activity[object_key][i] = objects_dict.get(obj_id, {}) + if isinstance(activity_objects, basestring): + activity[object_key] = objects_dict.get(activity_objects, {}) + + if not skip_sub_activities: + for collection in Activity._response_fields: + if collection in activity and activity[collection]['items']: + for i, item in enumerate(activity[collection]['items']): + activity[collection]['items'][i] = self._dehydrate_object_keys(item, objects_dict) + return activity diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 567db2b..964352f 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -20,19 +20,19 @@ import calendar import copy import datetime -import uuid -import six import json +import uuid -from dateutil.parser import parse +import six from dateutil import tz - +from dateutil.parser import parse from sqlalchemy import create_engine, sql from sqlalchemy.pool 
import QueuePool from sunspear.activitystreams.models import Activity, Model, Object from sunspear.backends.base import SUB_ACTIVITY_MAP, BaseBackend -from sunspear.exceptions import ( - SunspearOperationNotSupportedException, SunspearValidationException, SunspearDuplicateEntryException) +from sunspear.exceptions import (SunspearDuplicateEntryException, + SunspearOperationNotSupportedException, + SunspearValidationException) from . import schema @@ -60,7 +60,7 @@ 'icon': 'icon', } -DICT_FIELDS = ['image', 'other_data', 'icon', ] +DICT_FIELDS = Activity._media_fields + Object._media_fields + Activity._object_fields + ['other_data',] class DatabaseBackend(BaseBackend): @@ -216,10 +216,38 @@ def create_activity(self, activity, **kwargs): def activity_get(self, activity_ids, **kwargs): activity_ids = map(self._extract_id, activity_ids) + object_ids = set() if not activity_ids: return [] - activities = None + s = self._get_select_multiple_activities_query(activity_ids) + activities = self.engine.execute(s).fetchall() + activities = [self._db_schema_to_activity_dict(activity) for activity in activities] + activities = self.dehydrate_activities(activities) + + return activities + + def dehydrate_activities(self, activities): + """ + Takes a raw list of activities returned from riak and replace keys with contain ids for riak objects with actual riak object + TODO: This can probably be refactored out of the riak backend once everything like + sub activities and shared with fields are implemented + """ + # collect a list of unique object ids. We only iterate through the fields that we know + # for sure are objects. User is responsible for hydrating all other fields. 
+ object_ids = set() + for activity in activities: + object_ids.update(self._extract_object_keys(activity)) + + # Get the objects for the ids we have collected + objects = self.get_obj(object_ids) + objects_dict = dict(((obj["id"], obj,) for obj in objects)) + + # replace the object ids with the hydrated objects + for activity in activities: + activity = self._dehydrate_object_keys(activity, objects_dict) + + return activities def get_new_id(self): """ @@ -255,7 +283,9 @@ def _convert_to_db_schema(self, obj, field_mapping): def _need_to_parse_json(self, schema_field_name, data): if schema_field_name in DICT_FIELDS and isinstance(data, six.string_types) and data: - return True + # TODO: This seems hacky. Is there a better way to do this? + if '{' in data or '[' in data: + return True return False def _convert_to_activity_stream_schema(self, schema_dict, field_mapping): diff --git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index 6211ec1..0f98f25 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -455,39 +455,39 @@ def dehydrate_activities(self, activities): """ activities = self._extract_sub_activities(activities) - #collect a list of unique object ids. We only iterate through the fields that we know - #for sure are objects. User is responsible for hydrating all other fields. + # collect a list of unique object ids. We only iterate through the fields that we know + # for sure are objects. User is responsible for hydrating all other fields. object_ids = set() for activity in activities: object_ids.update(self._extract_object_keys(activity)) - #Get the objects for the ids we have collected + # Get the objects for the ids we have collected objects = self.get_obj(object_ids) objects_dict = dict(((obj["id"], obj,) for obj in objects)) - #We also need to extract any activities that were diguised as objects. IE activities with - #objectType=activity + # We also need to extract any activities that were diguised as objects. 
IE activities with + # objectType=activity activities_in_objects_ids = set() - #replace the object ids with the hydrated objects + # replace the object ids with the hydrated objects for activity in activities: activity = self._dehydrate_object_keys(activity, objects_dict) - #Extract keys of any activities that were objects + # Extract keys of any activities that were objects activities_in_objects_ids.update(self._extract_activity_keys(activity, skip_sub_activities=True)) - #If we did have activities that were objects, we need to hydrate those activities and - #the objects for those activities + # If we did have activities that were objects, we need to hydrate those activities and + # the objects for those activities if activities_in_objects_ids: sub_activities = self._get_many_activities(activities_in_objects_ids) activities_in_objects_dict = dict(((sub_activity["id"], sub_activity,) for sub_activity in sub_activities)) for activity in activities: activity = self._dehydrate_sub_activity(activity, activities_in_objects_dict, skip_sub_activities=True) - #we have to do one more round of object dehydration for our new sub-activities + # we have to do one more round of object dehydration for our new sub-activities object_ids.update(self._extract_object_keys(activity)) - #now get all the objects we don't already have and for sub-activities and and hydrate them into - #our list of activities + # now get all the objects we don't already have and for sub-activities and and hydrate them into + # our list of activities object_ids -= set(objects_dict.keys()) objects = self.get_obj(object_ids) for obj in objects: @@ -517,7 +517,7 @@ def _extract_sub_activities(self, activities): for sub_activity in sub_activities: activities_dict[sub_activity["id"]] = sub_activity - #Dehydrate out any subactivities we may have + # Dehydrate out any subactivities we may have for activity in activities: activity = self._dehydrate_sub_activity(activity, activities_dict) diff --git 
a/tests/test_db.py b/tests/test_db.py index 437c05c..8f7b284 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -138,13 +138,14 @@ def _setup_activities(self): 'generator': 'mobile:phone:android', 'provider': 'mobile:phone:android', 'content': 'foo baz', - 'published': self.now, - 'updated': self.now, + 'published': self._datetime_to_string(self.now), + 'updated': self._datetime_to_string(self.now), 'icon': { 'url': 'https://www.google.com/cool_image.png', 'displayName': u'Cool \u0268mage', 'width': '500px', - 'height': '500px' + 'height': '500px', + 'id': 'icon:1' }, 'foo': 'bar', 'baz': u'go\u0298', @@ -161,7 +162,8 @@ def _setup_activities(self): 'url': 'https://www.google.com/cool_image.png', 'displayName': u'Cool \u0268mage', 'width': '500px', - 'height': '500px' + 'height': '500px', + 'id': 'img:1', }, 'foo': 'bar', 'baz': u'go\u0298', @@ -176,7 +178,8 @@ def _setup_activities(self): 'url': 'https://www.google.com/cool_image.png', 'displayName': u'Cool \u0268mage', 'width': '500px', - 'height': '500px' + 'height': '500px', + 'id': 'img:2', }, 'foo': 'bar', 'baz': u'go\u0298', @@ -191,7 +194,8 @@ def _setup_activities(self): 'url': 'https://www.google.com/cool_image.png', 'displayName': u'Cool \u0268mage', 'width': '500px', - 'height': '500px' + 'height': '500px', + 'id': 'img:3', }, 'foo': 'bar', 'baz': u'go\u0298', @@ -206,7 +210,8 @@ def _setup_activities(self): 'url': 'https://www.google.com/cool_image.png', 'displayName': u'Cool \u0268mage', 'width': '500px', - 'height': '500px' + 'height': '500px', + 'id': 'img:4', }, 'foo': 'bar', 'baz': u'go\u0298', @@ -221,7 +226,8 @@ def _setup_activities(self): 'url': 'https://www.google.com/cool_image.png', 'displayName': u'Cool \u0268mage', 'width': '500px', - 'height': '500px' + 'height': '500px', + 'id': 'img:5', }, 'foo': 'bar', 'baz': u'go\u0298', @@ -325,7 +331,6 @@ def test_obj_get(self): obj_id = self.test_obj['id'] self._insert_obj(self.test_obj) - print self.test_obj objs = 
self._backend.obj_get([obj_id]) eq_(objs[0], self.test_obj) @@ -388,6 +393,17 @@ def test_create_activity_with_non_existing_objects_doesnt_work(self): assert_raises(IntegrityError, self._backend.create_activity, self.test_activity) ok_(not self._backend.activity_exists(self.test_activity)) + def test_get_activities(self): + activity_copy = copy.deepcopy(self.hydrated_test_activity) + + self._backend.create_activity(self.hydrated_test_activity) + activity = self._backend.get_activity(self.hydrated_test_activity['id'])[0] + + # Updated is changed when an activity is saved + activity_copy['updated'] = activity['updated'] + + eq_(activity, activity_copy) + def _datetime_to_db_compatibal_str(self, datetime_instance): return datetime_instance.strftime('%Y-%m-%d %H:%M:%S') From 8c73796d1930f510b8d18dc9f7804277ed9f19e1 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Tue, 23 May 2017 13:28:24 -0700 Subject: [PATCH 25/56] change the name of the method so it makes more sense --- sunspear/backends/database/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 964352f..5d081d7 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -223,11 +223,11 @@ def activity_get(self, activity_ids, **kwargs): s = self._get_select_multiple_activities_query(activity_ids) activities = self.engine.execute(s).fetchall() activities = [self._db_schema_to_activity_dict(activity) for activity in activities] - activities = self.dehydrate_activities(activities) + activities = self.hydrate_activities(activities) return activities - def dehydrate_activities(self, activities): + def hydrate_activities(self, activities): """ Takes a raw list of activities returned from riak and replace keys with contain ids for riak objects with actual riak object TODO: This can probably be refactored out of the riak backend once everything like From 39328deadbad97a5a05a2e82125867e243828081 
Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Tue, 23 May 2017 15:51:11 -0700 Subject: [PATCH 26/56] Fix issue with extracting id when its not found --- sunspear/backends/base.py | 21 ++++++++++----------- tests/test_riak.py | 10 ++++------ 2 files changed, 14 insertions(+), 17 deletions(-) diff --git a/sunspear/backends/base.py b/sunspear/backends/base.py index 18734aa..d75e4c9 100644 --- a/sunspear/backends/base.py +++ b/sunspear/backends/base.py @@ -18,6 +18,7 @@ class BaseBackend(object): + def clear_all_objects(self): """ Clears all objects from the backend. @@ -57,8 +58,6 @@ def _resolve_activity_id(self, activity, **kwargs): if activity_id: if self.activity_exists(activity, **kwargs): raise SunspearDuplicateEntryException() - else: - activity_id = activity_id else: activity_id = self.get_new_id() @@ -436,21 +435,21 @@ def _extract_id(self, activity_or_id): """ Helper that returns an id if the activity has one. """ - this_id = None if isinstance(activity_or_id, basestring): - this_id = activity_or_id + return activity_or_id elif isinstance(activity_or_id, dict): this_id = activity_or_id.get('id', None) + if this_id is None: + return None try: - this_id = str(this_id) - except: - pass + return str(this_id) + except Exception: + return None else: try: - this_id = str(activity_or_id) - except: - pass - return this_id + return str(activity_or_id) + except Exception: + return None def _dehydrate_object_keys(self, activity, objects_dict, skip_sub_activities=False): for object_key in Model._object_fields + Activity._direct_audience_targeting_fields \ diff --git a/tests/test_riak.py b/tests/test_riak.py index 7b33fa2..e285ccd 100644 --- a/tests/test_riak.py +++ b/tests/test_riak.py @@ -1,20 +1,18 @@ from __future__ import absolute_import, division, print_function, unicode_literals import datetime - import six -from mock import ANY, call, MagicMock -from nose.tools import eq_, ok_, raises from mock import MagicMock, call, ANY from sunspear.aggregators.property 
import PropertyAggregator from sunspear.backends.riak import RiakBackend from sunspear.compat import must_be_str from sunspear.exceptions import SunspearValidationException +from nose.tools import eq_, ok_, raises riak_connection_options = { "nodes": [ - {'http_port': 8098, 'host': '127.0.0.1'}], + {'http_port': 8098, 'host': '192.168.99.100'}], 'protocol': 'http', } @@ -1705,10 +1703,10 @@ def test_create_sub_activity_indexes(self): self._backend.create_obj(actor2) - #create the activity + # create the activity self._backend.create_activity({"id": 5, "title": "Stream Item", "verb": "post", "actor": actor, "object": obj}) - #now create a reply for the activity + # now create a reply for the activity like_activity_dict, activity_obj_dict = self._backend.sub_activity_create( 5, actor2_id, "", sub_activity_verb='like') From df2e975357b02d85012c0718af1498ea89a81189 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Tue, 23 May 2017 16:17:37 -0700 Subject: [PATCH 27/56] Fix test riak ip address --- tests/test_riak.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_riak.py b/tests/test_riak.py index e285ccd..aa1c2a2 100644 --- a/tests/test_riak.py +++ b/tests/test_riak.py @@ -12,7 +12,7 @@ riak_connection_options = { "nodes": [ - {'http_port': 8098, 'host': '192.168.99.100'}], + {'http_port': 8098, 'host': '127.0.0.1'}], 'protocol': 'http', } From 9e185f0d31b86d5844a3e7b9c9821f369392028c Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Tue, 23 May 2017 16:30:16 -0700 Subject: [PATCH 28/56] Add mysql service --- .travis.yml | 1 + setup.py | 1 + 2 files changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index 0c912f3..76deffc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,6 +6,7 @@ before_install: - sudo apt-get install -qq protobuf-compiler services: - riak + - mysql install: - pip install -q -e . 
script: python setup.py nosetests diff --git a/setup.py b/setup.py index 2d1d363..58f0aba 100644 --- a/setup.py +++ b/setup.py @@ -28,6 +28,7 @@ 'riak==2.5.4', 'protobuf==2.6.1', 'sqlalchemy==1.1.3', + 'MySQL-python==1.2.5', 'six' ], options={'easy_install': {'allow_hosts': 'pypi.python.org'}}, From 5a09cd3bdd769a0d4d19f480bc5d3b74c1e57ba8 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Tue, 20 Jun 2017 11:23:31 -0700 Subject: [PATCH 29/56] First version of creating sub activities --- sunspear/backends/base.py | 15 ++++++- sunspear/backends/database/db.py | 72 ++++++++++++++++++++++++++++---- sunspear/backends/riak.py | 11 +---- tests/test_db.py | 17 ++++++++ tests/test_riak.py | 4 +- 5 files changed, 99 insertions(+), 20 deletions(-) diff --git a/sunspear/backends/base.py b/sunspear/backends/base.py index d75e4c9..9e90fcf 100644 --- a/sunspear/backends/base.py +++ b/sunspear/backends/base.py @@ -7,7 +7,8 @@ ReplyActivity) from sunspear.exceptions import (SunspearDuplicateEntryException, SunspearInvalidActivityException, - SunspearInvalidObjectException) + SunspearInvalidObjectException, + SunspearOperationNotSupportedException) __all__ = ('BaseBackend', 'SUB_ACTIVITY_MAP') @@ -311,6 +312,15 @@ def get_obj(self, obj_ids=[], **kwargs): def obj_get(self, obj, **kwargs): raise NotImplementedError() + def is_sub_activity_verb_valid(self, sub_activity_verb): + return sub_activity_verb.lower() in SUB_ACTIVITY_MAP + + def get_sub_activity_model(self, sub_activity_verb): + return SUB_ACTIVITY_MAP[sub_activity_verb.lower()][0] + + def get_sub_activity_attribute(self, sub_activity_verb): + return SUB_ACTIVITY_MAP[sub_activity_verb.lower()][1] + def create_sub_activity(self, activity, actor, content, extra={}, sub_activity_verb="", **kwargs): """ Creates a new sub-activity as a child of ``activity``. 
@@ -337,6 +347,9 @@ def create_sub_activity(self, activity, actor, content, extra={}, sub_activity_v if not activity_id: raise SunspearInvalidActivityException() + if not self.is_sub_activity_verb_valid(sub_activity_verb): + raise SunspearOperationNotSupportedException('Verb not supported') + return self.sub_activity_create(activity, actor, content, extra=extra, sub_activity_verb=sub_activity_verb, **kwargs) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 5d081d7..38085d0 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -82,6 +82,14 @@ def activities_table(self): def objects_table(self): return schema.tables['objects'] + @property + def likes_table(self): + return schema.tables['likes'] + + @property + def replies_table(self): + return schema.tables['replies'] + def _get_connection(self): return self.engine.connect() @@ -164,6 +172,8 @@ def activity_create(self, activity, **kwargs): self.engine.execute(self.activities_table.insert(), [activity_db_schema_dict]) + return self.get_activity(activity_dict) + def create_activity(self, activity, **kwargs): activity_id = self._resolve_activity_id(activity, **kwargs) activity['id'] = activity_id @@ -215,18 +225,35 @@ def create_activity(self, activity, **kwargs): return return_val def activity_get(self, activity_ids, **kwargs): - activity_ids = map(self._extract_id, activity_ids) - object_ids = set() - if not activity_ids: - return [] - - s = self._get_select_multiple_activities_query(activity_ids) - activities = self.engine.execute(s).fetchall() - activities = [self._db_schema_to_activity_dict(activity) for activity in activities] + activity_ids = self._listify(activity_ids) + activities = self._get_raw_activities(activity_ids, **kwargs) activities = self.hydrate_activities(activities) return activities + def sub_activity_create(self, activity, actor, content, extra={}, sub_activity_verb="", published=None, **kwargs): + object_type = 
kwargs.get('object_type', sub_activity_verb) + sub_activity_model = self.get_sub_activity_model(sub_activity_verb) + sub_activity_attribute = self.get_sub_activity_attribute(sub_activity_verb) + + activity_id = self._extract_id(activity) + raw_activity = self._get_raw_activities([activity_id])[0] + activity_model = Activity(raw_activity, backend=self) + + sub_activity_table = getattr(self, '{}_table'.format(sub_activity_attribute)) + + sub_activity, original_activity = activity_model\ + .get_parsed_sub_activity_dict( + actor=actor, content=content, verb=sub_activity_verb, + object_type=object_type, collection=sub_activity_attribute, + activity_class=sub_activity_model, published=published, extra=extra) + + sub_activity = self.create_activity(sub_activity)[0] + sub_activity_db_schema = self._convert_sub_activity_to_db_schema(sub_activity, original_activity) + self.engine.execute(sub_activity_table.insert(), [sub_activity_db_schema]) + + return sub_activity, original_activity + def hydrate_activities(self, activities): """ Takes a raw list of activities returned from riak and replace keys with contain ids for riak objects with actual riak object @@ -258,6 +285,33 @@ def get_new_id(self): """ return uuid.uuid1().hex + def _get_raw_activities(self, activity_ids, **kwargs): + activity_ids = map(self._extract_id, activity_ids) + if not activity_ids: + return [] + + s = self._get_select_multiple_activities_query(activity_ids) + activities = self.engine.execute(s).fetchall() + activities = [self._db_schema_to_activity_dict(activity) for activity in activities] + + return activities + + def _convert_sub_activity_to_db_schema(self, sub_activity, activity): + # Find all the fields in the sub activity that aren't part of the standard activity object + converted_subactivity = self._activity_dict_to_db_schema(sub_activity) + other_data = converted_subactivity.get('other_data') + sub_activity = { + 'id': sub_activity['id'], + 'in_reply_to': activity['id'], + 'actor': 
sub_activity['actor']['id'], + 'published': self._get_db_compatiable_date_string(sub_activity['published']), + 'updated': self._get_db_compatiable_date_string(sub_activity['published']), + 'content': sub_activity['object']['content'], + } + if other_data: + sub_activity['other_data'] = other_data + return sub_activity + def _convert_to_db_schema(self, obj, field_mapping): # we make a copy because we will be mutating the dict. # we will map official fields to db fields, and put the rest in `other_data` @@ -306,7 +360,7 @@ def _convert_to_activity_stream_schema(self, schema_dict, field_mapping): obj_dict[obj_field] = data - if 'other_data' in schema_dict: + if 'other_data' in schema_dict and schema_dict['other_data'] is not None: other_data = schema_dict['other_data'] if self._need_to_parse_json('other_data', other_data): other_data = json.loads(other_data) diff --git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index 0f98f25..d479620 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -369,18 +369,11 @@ def activity_get( activities = aggregator.process(activities, original_activities, aggregation_pipeline) return activities - def create_sub_activity(self, activity, actor, content, extra={}, sub_activity_verb="", **kwargs): - if sub_activity_verb.lower() not in SUB_ACTIVITY_MAP: - raise Exception('Verb not supported') - return super(RiakBackend, self).create_sub_activity( - activity, actor, content, extra=extra, - sub_activity_verb=sub_activity_verb, **kwargs) - def sub_activity_create( self, activity, actor, content, extra={}, sub_activity_verb="", published=None, **kwargs): - sub_activity_model = SUB_ACTIVITY_MAP[sub_activity_verb.lower()][0] - sub_activity_attribute = SUB_ACTIVITY_MAP[sub_activity_verb.lower()][1] + sub_activity_model = self.get_sub_activity_model(sub_activity_verb) + sub_activity_attribute = self.get_sub_activity_attribute(sub_activity_verb) object_type = kwargs.get('object_type', sub_activity_verb) activity_id = 
self._extract_id(activity) diff --git a/tests/test_db.py b/tests/test_db.py index 8f7b284..7ee753d 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -404,6 +404,23 @@ def test_get_activities(self): eq_(activity, activity_copy) + def test_create_reply(self): + actor_id = '1234' + published_time = datetime.datetime.utcnow() + + actor = {"objectType": "something", "id": actor_id, "published": published_time} + + # create the activity + self._backend.create_activity(self.hydrated_test_activity) + + # now create a reply for the activity + reply_activity_dict, activity_obj_dict = self._backend.sub_activity_create( + self.hydrated_test_activity, actor, "This is a reply.", + sub_activity_verb='reply') + + sub_activity_exists = self._engine.execute(sql.select([sql.exists().where(self._backend.replies_table.c.id == reply_activity_dict['id'])])).scalar() + ok_(sub_activity_exists) + def _datetime_to_db_compatibal_str(self, datetime_instance): return datetime_instance.strftime('%Y-%m-%d %H:%M:%S') diff --git a/tests/test_riak.py b/tests/test_riak.py index aa1c2a2..f1ec793 100644 --- a/tests/test_riak.py +++ b/tests/test_riak.py @@ -3,6 +3,7 @@ import datetime import six from mock import MagicMock, call, ANY +import os from sunspear.aggregators.property import PropertyAggregator from sunspear.backends.riak import RiakBackend @@ -12,12 +13,13 @@ riak_connection_options = { "nodes": [ - {'http_port': 8098, 'host': '127.0.0.1'}], + {'http_port': 8098, 'host': os.environ.get('RIAK_HOST', '127.0.0.1')}], 'protocol': 'http', } class TestRiakBackend(object): + def setUp(self): backend = RiakBackend(**riak_connection_options) self._backend = backend From 171630735231cf8652238417d1d55dbea2d8d1e8 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Fri, 29 Sep 2017 10:45:37 -0700 Subject: [PATCH 30/56] Upgrade sql alchemy --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 58f0aba..3219f5b 100644 --- a/setup.py +++ b/setup.py @@ 
-27,7 +27,7 @@ 'six', 'riak==2.5.4', 'protobuf==2.6.1', - 'sqlalchemy==1.1.3', + 'sqlalchemy==1.1.14', 'MySQL-python==1.2.5', 'six' ], From 75df5416101281c9faf9d98bfdb6e4eceb7120b0 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Fri, 29 Sep 2017 11:04:22 -0700 Subject: [PATCH 31/56] Force DB table encoding --- tests/test_db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_db.py b/tests/test_db.py index 7ee753d..8cb4be9 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -71,7 +71,7 @@ def _setup_db(cls, db_name): # Create database if not exists if db_name not in existing_databases: - conn.execute("CREATE DATABASE {0}".format(db_name)) + conn.execute("CREATE DATABASE {0} character set UTF8mb4 collate utf8mb4_bin".format(db_name)) print("Created database {0}".format(db_name)) conn.close() From a6ba3ab3199ef9c988f7dbaac3183678c0ac1bbc Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Mon, 2 Oct 2017 10:30:54 -0700 Subject: [PATCH 32/56] Create likes sub activity --- tests/test_db.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/tests/test_db.py b/tests/test_db.py index 8cb4be9..88c9fa1 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -421,6 +421,24 @@ def test_create_reply(self): sub_activity_exists = self._engine.execute(sql.select([sql.exists().where(self._backend.replies_table.c.id == reply_activity_dict['id'])])).scalar() ok_(sub_activity_exists) + def test_create_like(self): + actor_id = '1234' + published_time = datetime.datetime.utcnow() + + actor = {"objectType": "something", "id": actor_id, "published": published_time} + + # create the activity + self._backend.create_activity(self.hydrated_test_activity) + + # now create a reply for the activity + like_activity_dict, activity_obj_dict = self._backend.sub_activity_create( + self.hydrated_test_activity, actor, "This is a like.", + sub_activity_verb='like') + + sub_activity_exists = 
self._engine.execute(sql.select([sql.exists().where(self._backend.likes_table.c.id == like_activity_dict['id'])])).scalar() + ok_(sub_activity_exists) + + def _datetime_to_db_compatibal_str(self, datetime_instance): return datetime_instance.strftime('%Y-%m-%d %H:%M:%S') From 019f9d98d09b155310c5aae03c6ff9f52f20d8f8 Mon Sep 17 00:00:00 2001 From: Numan Sachwani Date: Wed, 4 Oct 2017 12:29:47 -0700 Subject: [PATCH 33/56] add support for audience targeting fields --- sunspear/backends/database/db.py | 33 +++++++++-- tests/test_db.py | 99 +++++++++++++++++++++++++++----- 2 files changed, 112 insertions(+), 20 deletions(-) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 38085d0..d455b71 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -174,6 +174,17 @@ def activity_create(self, activity, **kwargs): return self.get_activity(activity_dict) + def _extract_activity_obj_key(self, obj_or_value): + activity_obj = None + + if isinstance(obj_or_value, dict): + activity_obj_id = self._extract_id(obj_or_value) + activity_obj = obj_or_value + else: + activity_obj_id = obj_or_value + + return activity_obj, activity_obj_id + def create_activity(self, activity, **kwargs): activity_id = self._resolve_activity_id(activity, **kwargs) activity['id'] = activity_id @@ -183,15 +194,27 @@ def create_activity(self, activity, **kwargs): activity_objs = {} ids_of_objs_with_no_dict = [] + audience_targeting_fields = Activity._direct_audience_targeting_fields + Activity._indirect_audience_targeting_fields + for key, value in activity_copy.items(): if key in Activity._object_fields: - if isinstance(value, dict): - activity_obj_id = self._extract_id(value) - activity_objs[activity_obj_id] = value - + activity_obj, activity_obj_id = self._extract_activity_obj_key(value) + if activity_obj: + activity_objs[activity_obj_id] = activity_obj activity[key] = activity_obj_id else: - ids_of_objs_with_no_dict.append(value) + 
ids_of_objs_with_no_dict.append(activity_obj_id) + + if key in audience_targeting_fields and value: + activity_audience_targeting_objs = [] + for activity_obj_or_value in value: + activity_obj, activity_obj_id = self._extract_activity_obj_key(activity_obj_or_value) + if activity_obj: + activity_objs[activity_obj_id] = activity_obj + activity_audience_targeting_objs.append(activity_obj_id) + else: + ids_of_objs_with_no_dict.append(activity_obj_id) + activity[key] = activity_audience_targeting_objs # For all of the objects in the activity, find out which ones actually already have existing # objects in the database diff --git a/tests/test_db.py b/tests/test_db.py index 88c9fa1..7270599 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -108,22 +108,72 @@ def _setup_db_schema_dicts(self): self.test_db_schema_dict = self.test_db_schema_dicts[0] def _setup_objs(self): - self.test_objs = [{ - 'id': 'AxsdSG244BfduiIZ', - 'objectType': u'use\u0403', - 'displayName': u'\u019duman S', - 'content': u'Foo bar!\u03ee', - 'published': self._datetime_to_string(self.now), - 'image': { - 'url': 'https://www.google.com/cool_image.png', - 'displayName': u'Cool \u0268mage', - 'width': '500px', - 'height': '500px' + self.test_objs = [ + { + 'id': 'AxsdSG244BfduiIZ', + 'objectType': u'use\u0403', + 'displayName': u'\u019duman S', + 'content': u'Foo bar!\u03ee', + 'published': self._datetime_to_string(self.now), + 'image': { + 'url': 'https://www.google.com/cool_image.png', + 'displayName': u'Cool \u0268mage', + 'width': '500px', + 'height': '500px' + }, + 'foo': 'bar', + 'baz': u'go\u0298', + 'zoo': {'zee': 12, 'tim': {'zde': u'\u0268\u0298'}} }, - 'foo': 'bar', - 'baz': u'go\u0298', - 'zoo': {'zee': 12, 'tim': {'zde': u'\u0268\u0298'}} - }] + { + 'id': '1000', + 'objectType': u'user', + 'displayName': u'\u019duman S2', + 'content': u'Foo bar!\u03ee', + 'published': self._datetime_to_string(self.now), + 'image': { + 'url': 'https://www.google.com/cool_image.png', + 'displayName': 
u'Cool \u0268mage', + 'width': '500px', + 'height': '500px' + }, + 'foo': 'bar', + 'baz': u'go\u0298', + 'zoo': {'zee': 12, 'tim': {'zde': u'\u0268\u0298'}} + }, + { + 'id': '1001', + 'objectType': u'user', + 'displayName': u'\u019duman S3', + 'content': u'Foo bar!\u03ee', + 'published': self._datetime_to_string(self.now), + 'image': { + 'url': 'https://www.google.com/cool_image.png', + 'displayName': u'Cool \u0268mage', + 'width': '500px', + 'height': '500px' + }, + 'foo': 'bar', + 'baz': u'go\u0298', + 'zoo': {'zee': 12, 'tim': {'zde': u'\u0268\u0298'}} + }, + { + 'id': '1002', + 'objectType': u'user', + 'displayName': u'\u019duman S4', + 'content': u'Foo bar!\u03ee', + 'published': self._datetime_to_string(self.now), + 'image': { + 'url': 'https://www.google.com/cool_image.png', + 'displayName': u'Cool \u0268mage', + 'width': '500px', + 'height': '500px' + }, + 'foo': 'bar', + 'baz': u'go\u0298', + 'zoo': {'zee': 12, 'tim': {'zde': u'\u0268\u0298'}} + } + ] self.test_obj = self.test_objs[0] @@ -365,6 +415,25 @@ def test_create_activity(self): self._backend.create_activity(self.hydrated_test_activity) ok_(self._backend.activity_exists(self.hydrated_test_activity)) + def test_create_activity_with_audience_targeting(self): + db_obj = self._backend._obj_dict_to_db_schema(self.test_objs[3]) + objects_table = self._backend.objects_table + self._engine.execute(objects_table.insert(), db_obj) + + self.hydrated_test_activity['to'] = [self.test_objs[0]] + self.hydrated_test_activity['bto'] = [self.test_objs[1]] + self.hydrated_test_activity['cc'] = [self.test_objs[0], self.test_objs[1]] + self.hydrated_test_activity['bcc'] = [self.test_objs[2], self.test_objs[3]['id']] + + self._backend.create_activity(self.hydrated_test_activity) + + ok_(self._backend.activity_exists(self.hydrated_test_activity)) + + ok_(self._backend.obj_exists(self.test_objs[0])) + ok_(self._backend.obj_exists(self.test_objs[1])) + ok_(self._backend.obj_exists(self.test_objs[2])) + 
ok_(self._backend.obj_exists(self.test_objs[3])) + def test_create_activity_with_already_existing_objs(self): db_objs = map(self._backend._obj_dict_to_db_schema, self.test_objs_for_activities) objects_table = self._backend.objects_table From 5c22b7a708049e619336e7b79de98332ecfb231d Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Tue, 10 Apr 2018 11:51:41 -0700 Subject: [PATCH 34/56] typo --- docs/source/userguide/introduction.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/userguide/introduction.rst b/docs/source/userguide/introduction.rst index 67cb03a..7f47158 100644 --- a/docs/source/userguide/introduction.rst +++ b/docs/source/userguide/introduction.rst @@ -49,11 +49,11 @@ The main takeaway points are: .. note:: For more info, see the specifications for `activity `_ and `object `_. -**Sunspear** also implements parts of some extensions to the specificiations. More specifically, `Audience Targeting `_ and `Responses `_. +**Sunspear** also implements parts of some extensions to the specification. More specifically, `Audience Targeting `_ and `Responses `_. What it isn't -------------- **Sunspear** strictly deals with storage and retrival of JSON activity stream items. It does not include all adquate indexes that allow you to build a fully fledged feed system. -For indexing, you'll probably want to use something like `Sandsnake `_, a sorted index backed by `redis `_. \ No newline at end of file +For indexing, you'll probably want to use something like `Sandsnake `_, a sorted index backed by `redis `_. 
From 2c6fcf7d030d01938334bd184b75da2e7d7514b6 Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Wed, 25 Apr 2018 11:17:53 -0700 Subject: [PATCH 35/56] unused imports --- sunspear/backends/database/db.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index d455b71..1c29217 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -17,7 +17,6 @@ """ from __future__ import absolute_import, unicode_literals -import calendar import copy import datetime import json @@ -29,10 +28,9 @@ from sqlalchemy import create_engine, sql from sqlalchemy.pool import QueuePool from sunspear.activitystreams.models import Activity, Model, Object -from sunspear.backends.base import SUB_ACTIVITY_MAP, BaseBackend -from sunspear.exceptions import (SunspearDuplicateEntryException, - SunspearOperationNotSupportedException, - SunspearValidationException) +from sunspear.backends.base import BaseBackend +from sunspear.exceptions import SunspearOperationNotSupportedException + from . 
import schema From ed43215cc90ea38ae8a95dfd98c8bdba63552299 Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Wed, 25 Apr 2018 13:36:21 -0700 Subject: [PATCH 36/56] style --- sunspear/backends/database/db.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 1c29217..d838949 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -63,10 +63,13 @@ class DatabaseBackend(BaseBackend): - def __init__(self, db_connection_string=None, verbose=False, poolsize=10, - max_overflow=5, **kwargs): - self._engine = create_engine(db_connection_string, echo=verbose, poolclass=QueuePool, - pool_size=poolsize, max_overflow=max_overflow, convert_unicode=True) + def __init__(self, db_connection_string=None, verbose=False, poolsize=10, max_overflow=5, **kwargs): + self._engine = create_engine(db_connection_string, + echo=verbose, + poolclass=QueuePool, + pool_size=poolsize, + max_overflow=max_overflow, + convert_unicode=True) @property def engine(self): From e14237b3cc248925524ef3b2643618b1794b166f Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Thu, 26 Apr 2018 11:16:54 -0700 Subject: [PATCH 37/56] updates objects table name --- sunspear/backends/database/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sunspear/backends/database/schema.py b/sunspear/backends/database/schema.py index 12ea5cc..81d8d5f 100644 --- a/sunspear/backends/database/schema.py +++ b/sunspear/backends/database/schema.py @@ -4,7 +4,7 @@ metadata = MetaData() -objects_table = Table('objects', metadata, +objects_table = Table('sgactivitystream_objects', metadata, Column('id', String(32), primary_key=True), Column('object_type', String(256), nullable=False), Column('display_name', String(256)), From 114348e06e77954604657b406f63e30b281ec87a Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Thu, 26 Apr 2018 15:09:54 -0700 Subject: [PATCH 38/56] Upsert 
in the least safe way FIXME --- sunspear/backends/database/db.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index d838949..aeb5d2d 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -64,12 +64,13 @@ class DatabaseBackend(BaseBackend): def __init__(self, db_connection_string=None, verbose=False, poolsize=10, max_overflow=5, **kwargs): - self._engine = create_engine(db_connection_string, - echo=verbose, - poolclass=QueuePool, - pool_size=poolsize, - max_overflow=max_overflow, - convert_unicode=True) + self._engine = create_engine( + db_connection_string, + echo=verbose, + poolclass=QueuePool, + pool_size=poolsize, + max_overflow=max_overflow, + convert_unicode=True) @property def engine(self): @@ -114,7 +115,10 @@ def obj_create(self, obj, **kwargs): obj_dict = self._get_parsed_and_validated_obj_dict(obj) obj_db_schema_dict = self._obj_dict_to_db_schema(obj_dict) - self.engine.execute(self.objects_table.insert(), [obj_db_schema_dict]) + if self.obj_exists(obj): + self.obj_update(obj) + else: + self.engine.execute(self.objects_table.insert(), [obj_db_schema_dict]) return obj_dict From 130e186fa93550dbdf92645d848942101e86686b Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Thu, 26 Apr 2018 16:36:01 -0700 Subject: [PATCH 39/56] work on activity table a little --- sunspear/backends/base.py | 3 +-- sunspear/backends/database/db.py | 2 ++ sunspear/backends/database/schema.py | 14 +++++++------- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/sunspear/backends/base.py b/sunspear/backends/base.py index 9e90fcf..8a7d580 100644 --- a/sunspear/backends/base.py +++ b/sunspear/backends/base.py @@ -102,7 +102,7 @@ def create_activity(self, activity, **kwargs): new_obj = self.create_obj(value) objs_created.append(new_obj) except Exception: - #there was an error, undo everything we just did + # there was an error, 
undo everything we just did self._rollback(objs_created, objs_modified) raise @@ -230,7 +230,6 @@ def create_obj(self, obj, **kwargs): :type obj: dict :param obj: obj we want to store in the backend - :raises: ``SunspearDuplicateEntryException`` if the record already exists in the database. :return: dict representing the new obj. """ obj_id = self._extract_id(obj) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index aeb5d2d..9e6e477 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -25,8 +25,10 @@ import six from dateutil import tz from dateutil.parser import parse + from sqlalchemy import create_engine, sql from sqlalchemy.pool import QueuePool +from sqlalchemy.sql.expression import insert from sunspear.activitystreams.models import Activity, Model, Object from sunspear.backends.base import BaseBackend from sunspear.exceptions import SunspearOperationNotSupportedException diff --git a/sunspear/backends/database/schema.py b/sunspear/backends/database/schema.py index 81d8d5f..d166b4e 100644 --- a/sunspear/backends/database/schema.py +++ b/sunspear/backends/database/schema.py @@ -14,15 +14,15 @@ Column('image', custom_types.JSONSmallDict(4096)), Column('other_data', custom_types.JSONDict())) -activities_table = Table('activities', metadata, +activities_table = Table('sgactivitystream_activities', metadata, Column('id', String(32), primary_key=True), Column('verb', String(256), nullable=False), - Column('actor', ForeignKey('objects.id', ondelete='CASCADE'), nullable=False), - Column('object', ForeignKey('objects.id', ondelete='SET NULL')), - Column('target', ForeignKey('objects.id', ondelete='SET NULL')), - Column('author', ForeignKey('objects.id', ondelete='SET NULL')), - Column('generator', ForeignKey('objects.id', ondelete='SET NULL')), - Column('provider', ForeignKey('objects.id', ondelete='SET NULL')), + Column('actor', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE'), 
nullable=False), + Column('object', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), + Column('target', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), + Column('author', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), + Column('generator', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), + Column('provider', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), Column('content', Text), Column('published', DateTime(timezone=True), nullable=False), Column('updated', DateTime(timezone=True)), From cc2f732b19693c4e1d06e8780c347893021f2dcf Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Thu, 26 Apr 2018 16:41:25 -0700 Subject: [PATCH 40/56] more sgactivitystream --- sunspear/backends/database/schema.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/sunspear/backends/database/schema.py b/sunspear/backends/database/schema.py index d166b4e..4aa476c 100644 --- a/sunspear/backends/database/schema.py +++ b/sunspear/backends/database/schema.py @@ -31,8 +31,8 @@ replies_table = Table('replies', metadata, Column('id', String(32), primary_key=True), - Column('in_reply_to', ForeignKey('activities.id', ondelete='CASCADE'), nullable=False), - Column('actor', ForeignKey('objects.id', ondelete='CASCADE'), nullable=False), + Column('in_reply_to', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'), nullable=False), + Column('actor', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE'), nullable=False), Column('published', DateTime(timezone=True), nullable=False), Column('updated', DateTime(timezone=True)), Column('content', Text), @@ -40,8 +40,8 @@ likes_table = Table('likes', metadata, Column('id', String(32), primary_key=True), - Column('in_reply_to', 
ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'), nullable=False), + Column('actor', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE'), nullable=False), Column('published', DateTime(timezone=True), nullable=False), Column('content', Text), Column('other_data', custom_types.JSONDict()), @@ -49,23 +49,23 @@ to_table = Table('to', metadata, Column('id', Integer, primary_key=True), - Column('object', ForeignKey('objects.id', ondelete='CASCADE')), - Column('activity', ForeignKey('activities.id', ondelete='CASCADE'))) + Column('object', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), + Column('activity', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) bto_table = Table('bto', metadata, Column('id', Integer, primary_key=True), - Column('object', ForeignKey('objects.id', ondelete='CASCADE')), - Column('activity', ForeignKey('activities.id', ondelete='CASCADE'))) + Column('object', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), + Column('activity', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) cc_table = Table('cc', metadata, Column('id', Integer, primary_key=True), - Column('object', ForeignKey('objects.id', ondelete='CASCADE')), - Column('activity', ForeignKey('activities.id', ondelete='CASCADE'))) + Column('object', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), + Column('activity', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) bcc_table = Table('bcc', metadata, Column('id', Integer, primary_key=True), - Column('object', ForeignKey('objects.id', ondelete='CASCADE')), - Column('activity', ForeignKey('activities.id', ondelete='CASCADE'))) + Column('object', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), + Column('activity', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) tables = { 'objects': objects_table, From f1a779a3cd11a9b2cdbabbed2eedf819376c36e5 Mon Sep 17 00:00:00 2001 From: Nicholas Kobald 
Date: Thu, 26 Apr 2018 18:39:40 -0700 Subject: [PATCH 41/56] Allow for Django to change the name of fields Consider passing in the names to Sunspear on init? --- sunspear/backends/database/schema.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/sunspear/backends/database/schema.py b/sunspear/backends/database/schema.py index 4aa476c..5a9d31b 100644 --- a/sunspear/backends/database/schema.py +++ b/sunspear/backends/database/schema.py @@ -17,12 +17,12 @@ activities_table = Table('sgactivitystream_activities', metadata, Column('id', String(32), primary_key=True), Column('verb', String(256), nullable=False), - Column('actor', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE'), nullable=False), - Column('object', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), - Column('target', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), - Column('author', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), - Column('generator', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), - Column('provider', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), + Column('actor_id', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE'), nullable=False), + Column('object_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), + Column('target_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), + Column('author_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), + Column('generator_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), + Column('provider_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), Column('content', Text), Column('published', DateTime(timezone=True), nullable=False), Column('updated', DateTime(timezone=True)), From f8495fdb3d954b0081d7b60fcdd4d15267f3225b Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Fri, 27 Apr 2018 17:31:58 -0700 Subject: [PATCH 42/56] come 
on --- sunspear/backends/database/db.py | 45 ++++++++++++++-------------- sunspear/backends/database/schema.py | 21 ++++++------- 2 files changed, 33 insertions(+), 33 deletions(-) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 9e6e477..9f45968 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -28,7 +28,6 @@ from sqlalchemy import create_engine, sql from sqlalchemy.pool import QueuePool -from sqlalchemy.sql.expression import insert from sunspear.activitystreams.models import Activity, Model, Object from sunspear.backends.base import BaseBackend from sunspear.exceptions import SunspearOperationNotSupportedException @@ -48,12 +47,12 @@ DB_ACTIVITY_FIELD_MAPPING = { 'id': 'id', 'verb': 'verb', - 'actor': 'actor', - 'object': 'object', - 'target': 'target', - 'author': 'author', - 'generator': 'generator', - 'provider': 'provider', + 'actor': 'actor_id', + 'object': 'object_id', + 'target': 'target_id', + 'author': 'author_id', + 'generator': 'generator_id', + 'provider': 'provider_id', 'content': 'content', 'published': 'published', 'updated': 'updated', @@ -120,7 +119,7 @@ def obj_create(self, obj, **kwargs): if self.obj_exists(obj): self.obj_update(obj) else: - self.engine.execute(self.objects_table.insert(), [obj_db_schema_dict]) + self.engine.execute(self.objects_table.insert(), [obj_db_schema_dict]).close() return obj_dict @@ -167,7 +166,7 @@ def activity_exists(self, activity, **kwargs): def activity_create(self, activity, **kwargs): """ - Creates an activity. This assumes the activity is already dehydrated (ie has refrences + Creates an activity. 
This assumes the activity is already dehydrated (ie has references to the objects and not the actual objects itself) """ activity = Activity(activity, backend=self) @@ -177,7 +176,7 @@ def activity_create(self, activity, **kwargs): activity_db_schema_dict = self._activity_dict_to_db_schema(activity_dict) - self.engine.execute(self.activities_table.insert(), [activity_db_schema_dict]) + self.engine.execute(self.activities_table.insert(), [activity_db_schema_dict]).close() return self.get_activity(activity_dict) @@ -224,7 +223,7 @@ def create_activity(self, activity, **kwargs): activity[key] = activity_audience_targeting_objs # For all of the objects in the activity, find out which ones actually already have existing - # objects in the database + # entries in the database obj_ids = self._flatten([ids_of_objs_with_no_dict, activity_objs.keys()]) s = self._get_select_multiple_objects_query(obj_ids) @@ -235,20 +234,20 @@ def create_activity(self, activity, **kwargs): objs_need_to_be_updated = [] for obj_id, obj in activity_objs.items(): - parsed_validated_schema_dict = self._get_parsed_and_validated_obj_dict(obj) - parsed_validated_schema_dict = self._obj_dict_to_db_schema(parsed_validated_schema_dict) - if obj_id not in results: - objs_need_to_be_inserted.append(parsed_validated_schema_dict) - else: - objs_need_to_be_updated.append(parsed_validated_schema_dict) + # parsed_validated_schema_dict = self._get_parsed_and_validated_obj_dict(obj) + self.obj_create(obj) + # if obj_id not in results: + # objs_need_to_be_inserted.append(parsed_validated_schema_dict) + # else: + # objs_need_to_be_updated.append(parsed_validated_schema_dict) # Upsert all objects for the activity - with self.engine.begin() as connection: - if objs_need_to_be_inserted: - connection.execute(self.objects_table.insert(), objs_need_to_be_inserted) - for obj in objs_need_to_be_updated: - connection.execute( - self.objects_table.update().where(self.objects_table.c.id == 
self._extract_id(obj)).values(**obj)) + # with self.engine.begin() as connection: + # if objs_need_to_be_inserted: + # connection.execute(self.objects_table.insert(), objs_need_to_be_inserted) + # for obj in objs_need_to_be_updated: + # connection.execute( + # self.objects_table.update().where(self.objects_table.c.id == self._extract_id(obj)).values(**obj)) return_val = self.activity_create(activity, **kwargs) diff --git a/sunspear/backends/database/schema.py b/sunspear/backends/database/schema.py index 5a9d31b..ca3702a 100644 --- a/sunspear/backends/database/schema.py +++ b/sunspear/backends/database/schema.py @@ -1,3 +1,4 @@ +from datetime import datetime from sqlalchemy import Table, Column, DateTime, Integer, String, Text, MetaData, ForeignKey, UniqueConstraint import types as custom_types @@ -7,12 +8,12 @@ objects_table = Table('sgactivitystream_objects', metadata, Column('id', String(32), primary_key=True), Column('object_type', String(256), nullable=False), - Column('display_name', String(256)), - Column('content', Text), + Column('display_name', String(256), default=''), + Column('content', Text, default=''), Column('published', DateTime(timezone=True), nullable=False), - Column('updated', DateTime(timezone=True)), - Column('image', custom_types.JSONSmallDict(4096)), - Column('other_data', custom_types.JSONDict())) + Column('updated', DateTime(timezone=True), default=datetime.now(), onupdate=datetime.now()), + Column('image', custom_types.JSONSmallDict(4096), default={}), + Column('other_data', custom_types.JSONDict(), default={})) activities_table = Table('sgactivitystream_activities', metadata, Column('id', String(32), primary_key=True), @@ -23,11 +24,11 @@ Column('author_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), Column('generator_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), Column('provider_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), - Column('content', Text), + 
Column('content', Text, default-''), Column('published', DateTime(timezone=True), nullable=False), - Column('updated', DateTime(timezone=True)), - Column('icon', custom_types.JSONSmallDict(4096)), - Column('other_data', custom_types.JSONDict())) + Column('updated', DateTime(timezone=True), default=datetime.now(), onupdate=datetime.now()), + Column('icon', custom_types.JSONSmallDict(4096), default={}), + Column('other_data', custom_types.JSONDict(), default={})) replies_table = Table('replies', metadata, Column('id', String(32), primary_key=True), @@ -35,7 +36,7 @@ Column('actor', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE'), nullable=False), Column('published', DateTime(timezone=True), nullable=False), Column('updated', DateTime(timezone=True)), - Column('content', Text), + Column('content', Text, nullable=False), Column('other_data', custom_types.JSONDict())) likes_table = Table('likes', metadata, From 8f7933b7ddc03b1264f6b69dc8d3ee0f64f25260 Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Mon, 30 Apr 2018 12:08:10 -0700 Subject: [PATCH 43/56] Implement activity delete --- sunspear/backends/database/db.py | 7 +++++++ sunspear/backends/database/schema.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 9f45968..8b9e867 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -152,6 +152,7 @@ def obj_get(self, obj, **kwargs): return results + def obj_delete(self, obj, **kwargs): obj_id = self._extract_id(obj) @@ -180,6 +181,12 @@ def activity_create(self, activity, **kwargs): return self.get_activity(activity_dict) + def activity_delete(self, activity, **kwargs): + activity_id = activity['id'] + statement = self.activities_table.delete().where( + self.activities_table.c.id == activity_id) + self.engine.execute(statement) + def _extract_activity_obj_key(self, obj_or_value): activity_obj = None diff --git 
a/sunspear/backends/database/schema.py b/sunspear/backends/database/schema.py index ca3702a..0478a1f 100644 --- a/sunspear/backends/database/schema.py +++ b/sunspear/backends/database/schema.py @@ -24,7 +24,7 @@ Column('author_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), Column('generator_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), Column('provider_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), - Column('content', Text, default-''), + Column('content', Text, default=''), Column('published', DateTime(timezone=True), nullable=False), Column('updated', DateTime(timezone=True), default=datetime.now(), onupdate=datetime.now()), Column('icon', custom_types.JSONSmallDict(4096), default={}), From 7ec82b3a08d8821f0f77cf5e11fb5771e0e04f25 Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Mon, 30 Apr 2018 16:44:27 -0700 Subject: [PATCH 44/56] Use inheritance --- sunspear/backends/database/db.py | 12 +----------- sunspear/backends/riak.py | 8 -------- sunspear/clients.py | 7 +++++-- tests/test_rfc3339.py | 4 ++-- 4 files changed, 8 insertions(+), 23 deletions(-) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 8b9e867..1b3719a 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -20,7 +20,6 @@ import copy import datetime import json -import uuid import six from dateutil import tz @@ -308,19 +307,10 @@ def hydrate_activities(self, activities): # replace the object ids with the hydrated objects for activity in activities: - activity = self._dehydrate_object_keys(activity, objects_dict) + self._dehydrate_object_keys(activity, objects_dict) return activities - def get_new_id(self): - """ - Generates a new unique ID. The default implementation uses uuid1 to - generate a unique ID. 
- - :return: a new id - """ - return uuid.uuid1().hex - def _get_raw_activities(self, activity_ids, **kwargs): activity_ids = map(self._extract_id, activity_ids) if not activity_ids: diff --git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index d479620..7aee0c4 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -690,11 +690,3 @@ def _get_timestamp(self): dt_obj = datetime.datetime.utcnow() return int((calendar.timegm(dt_obj.utctimetuple()) * 1000)) + (dt_obj.microsecond // 1000) - def get_new_id(self): - """ - Generates a new unique ID. The default implementation uses uuid1 to - generate a unique ID. - - :return: a new id - """ - return uuid.uuid1().hex diff --git a/sunspear/clients.py b/sunspear/clients.py index 88c56ff..3d7a2cc 100644 --- a/sunspear/clients.py +++ b/sunspear/clients.py @@ -123,7 +123,7 @@ def get_objects(self, object_ids=[]): """ return self._backend.get_obj(object_ids) - def get_activities(self, activity_ids=[], **kwargs): + def get_activities(self, activity_ids, **kwargs): """ Gets a list of activities. Specific backends may support other arguments. Please see reference of the specific backends to see all ``kwargs`` supported. @@ -131,7 +131,10 @@ def get_activities(self, activity_ids=[], **kwargs): :type activity_ids: list :param activity_ids: The list of activities you want to retrieve """ - return self._backend.get_activity(activity_ids=activity_ids, **kwargs) + # if activity_ids is None: + # activity_ids = [] + + return self._backend.get_activity(activity_ids, **kwargs) def get_backend(self): """ diff --git a/tests/test_rfc3339.py b/tests/test_rfc3339.py index e047981..022720d 100644 --- a/tests/test_rfc3339.py +++ b/tests/test_rfc3339.py @@ -9,10 +9,10 @@ class TestRFC3339(object): - ''' + """ Test the use of the timezone saved locally. Since it is hard to test using doctest. 
- ''' + """ def setUp(self): local_utcoffset = _utc_offset(datetime.datetime.now(), True) From 777cb8bbc2430401e48ae72d193a2f320605002b Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Mon, 30 Apr 2018 16:46:09 -0700 Subject: [PATCH 45/56] remove some dead code --- sunspear/backends/database/db.py | 31 ++----------------------------- tests/test_db.py | 1 - 2 files changed, 2 insertions(+), 30 deletions(-) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 1b3719a..77bbb87 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -151,7 +151,6 @@ def obj_get(self, obj, **kwargs): return results - def obj_delete(self, obj, **kwargs): obj_id = self._extract_id(obj) @@ -228,42 +227,16 @@ def create_activity(self, activity, **kwargs): ids_of_objs_with_no_dict.append(activity_obj_id) activity[key] = activity_audience_targeting_objs - # For all of the objects in the activity, find out which ones actually already have existing - # entries in the database - obj_ids = self._flatten([ids_of_objs_with_no_dict, activity_objs.keys()]) - - s = self._get_select_multiple_objects_query(obj_ids) - results = self.engine.execute(s).fetchall() - results = self._flatten(results) - - objs_need_to_be_inserted = [] - objs_need_to_be_updated = [] - for obj_id, obj in activity_objs.items(): - # parsed_validated_schema_dict = self._get_parsed_and_validated_obj_dict(obj) self.obj_create(obj) - # if obj_id not in results: - # objs_need_to_be_inserted.append(parsed_validated_schema_dict) - # else: - # objs_need_to_be_updated.append(parsed_validated_schema_dict) - - # Upsert all objects for the activity - # with self.engine.begin() as connection: - # if objs_need_to_be_inserted: - # connection.execute(self.objects_table.insert(), objs_need_to_be_inserted) - # for obj in objs_need_to_be_updated: - # connection.execute( - # self.objects_table.update().where(self.objects_table.c.id == self._extract_id(obj)).values(**obj)) - 
return_val = self.activity_create(activity, **kwargs) - - return return_val + return self.activity_create(activity, **kwargs) def activity_get(self, activity_ids, **kwargs): activity_ids = self._listify(activity_ids) activities = self._get_raw_activities(activity_ids, **kwargs) activities = self.hydrate_activities(activities) - + # assert len(activities) == 1, "activity_get should return exactly 1 activity" return activities def sub_activity_create(self, activity, actor, content, extra={}, sub_activity_verb="", published=None, **kwargs): diff --git a/tests/test_db.py b/tests/test_db.py index 7270599..8a783dc 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -507,7 +507,6 @@ def test_create_like(self): sub_activity_exists = self._engine.execute(sql.select([sql.exists().where(self._backend.likes_table.c.id == like_activity_dict['id'])])).scalar() ok_(sub_activity_exists) - def _datetime_to_db_compatibal_str(self, datetime_instance): return datetime_instance.strftime('%Y-%m-%d %H:%M:%S') From c9f5a0ac22a425dbade1ae78f3721f86653d06de Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Wed, 2 May 2018 14:36:28 -0700 Subject: [PATCH 46/56] moving in the (right?) 
direction --- sunspear/activitystreams/models.py | 4 +++- sunspear/backends/database/db.py | 21 ++++++++++++++++++--- sunspear/backends/riak.py | 16 +++++++++------- 3 files changed, 30 insertions(+), 11 deletions(-) diff --git a/sunspear/activitystreams/models.py b/sunspear/activitystreams/models.py index 5e5c4a1..3237cc9 100644 --- a/sunspear/activitystreams/models.py +++ b/sunspear/activitystreams/models.py @@ -13,6 +13,7 @@ class Model(object): + _required_fields = [] _media_fields = [] _reserved_fields = [] @@ -139,6 +140,7 @@ def __getitem__(self, key): class Activity(Model): + _required_fields = ['verb', 'actor', 'object'] _media_fields = ['icon'] _reserved_fields = ['updated'] @@ -159,7 +161,7 @@ def _set_defaults(self, model_dict): def get_parsed_sub_activity_dict(self, actor, content="", verb="reply", object_type="reply", \ collection="replies", activity_class=None, extra={}, published=None, **kwargs): - #TODO: Doesn't feel like this should be here Feels like it belongs in the backend. + # TODO: Doesn't feel like this should be here Feels like it belongs in the backend. if published is None: published = datetime.datetime.utcnow() diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 77bbb87..20e1332 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -232,8 +232,23 @@ def create_activity(self, activity, **kwargs): return self.activity_create(activity, **kwargs) - def activity_get(self, activity_ids, **kwargs): - activity_ids = self._listify(activity_ids) + def activity_get(self, + activity_ids, + raw_filters=None, + filters="", + include_public=False, + audience_targeting=None, + aggregation_pipeline=None, + **kwargs): + if filters is None: + filters = {} + if audience_targeting is None: + audience_targeting = [] + if aggregation_pipeline is None: + aggregation_pipeline = [] + activity_ids = self._listify(activity_ids) # TODO: likely don't need to listify here. 
+ + activities = self._get_raw_activities(activity_ids, **kwargs) activities = self.hydrate_activities(activities) # assert len(activities) == 1, "activity_get should return exactly 1 activity" @@ -285,7 +300,7 @@ def hydrate_activities(self, activities): return activities def _get_raw_activities(self, activity_ids, **kwargs): - activity_ids = map(self._extract_id, activity_ids) + activity_ids = map(self._extract_id, activity_ids) # Likely don't need to do this if not activity_ids: return [] diff --git a/sunspear/backends/riak.py b/sunspear/backends/riak.py index 7aee0c4..89603ab 100644 --- a/sunspear/backends/riak.py +++ b/sunspear/backends/riak.py @@ -146,6 +146,7 @@ class RiakBackend(BaseBackend): + def __init__( self, protocol="pbc", nodes=[], objects_bucket_name="objects", activities_bucket_name="activities", r=None, w=None, dw=None, @@ -220,7 +221,7 @@ def obj_create(self, obj, **kwargs): riak_obj.store() - #finally save the data + # finally save the data return obj_dict def set_general_indexes(self, riak_object): @@ -326,7 +327,7 @@ def activity_update(self, activity, **kwargs): return self.activity_create(activity, **kwargs) def activity_get( - self, activity_ids=[], raw_filter="", filters={}, include_public=False, + self, activity_ids, raw_filter="", filters={}, include_public=False, audience_targeting={}, aggregation_pipeline=[], **kwargs): """ Gets a list of activities. You can also group activities by providing a list of attributes to group @@ -339,7 +340,7 @@ def activity_get( Filters do not work for nested dictionaries. :type raw_filter: string :param raw_filter: allows you to specify a javascript function as a string. The function should return ``true`` if the activity should be included in the result set - or ``false`` it shouldn't. If you specify a raw filter, the filters specified in ``filters`` will not run. How ever, the results will still be filtered based on + or ``false`` it shouldn't. 
If you specify a raw filter, the filters specified in ``filters`` will not run. However, the results will still be filtered based on the ``audience_targeting`` parameter. :type include_public: boolean :param include_public: If ``True``, and the ``audience_targeting`` dictionary is defined, activities that are @@ -624,7 +625,7 @@ def _get_many_activities(self, activity_ids=[], raw_filter="", filters=None, inc :param activity_ids: The list of activities you want to retrieve :type raw_filter: string :param raw_filter: allows you to specify a javascript function as a string. The function should return ``true`` if the activity should be included in the result set - or ``false`` it shouldn't. If you specify a raw filter, the filters specified in ``filters`` will not run. How ever, the results will still be filtered based on + or ``false`` it shouldn't. If you specify a raw filter, the filters specified in ``filters`` will not run. However, the results will still be filtered based on the ``audience_targeting`` parameter. :type filters: dict :param filters: filters list of activities by key, value pair. For example, ``{'verb': 'comment'}`` would only return activities where the ``verb`` was ``comment``. @@ -647,9 +648,10 @@ def _get_many_activities(self, activity_ids=[], raw_filter="", filters=None, inc results = results.reduce(JS_REDUCE_FILTER_AUD_TARGETTING, options={'arg': {'public': include_public, 'filters': audience_targeting}}) if filters or raw_filter: - # An empty `filters` dict would cause all activities to be filtered out. If you wanted that effect, you - # wouldn't have to call this function, so let's assume that an empty dict is a typical default value and - # should denote that there are no filters to apply. + # An empty `filters` dict would cause all activities to be filtered out. 
If you + # wanted that effect, you wouldn't have to call this function, so let's assume that + # an empty dict is a typical default value and should denote that there are no + # filters to apply. filters = filters or None results = results.reduce(JS_REDUCE_FILTER_PROP, options={'arg': {'raw_filter': raw_filter, 'filters': filters}}) From ac588c7131037bc78123a0b92055168910419ff6 Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Wed, 9 May 2018 14:08:26 -0700 Subject: [PATCH 47/56] connect to CC, BCC, To, and BTO tables --- sunspear/activitystreams/models.py | 2 +- sunspear/backends/database/db.py | 2 +- sunspear/backends/database/schema.py | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sunspear/activitystreams/models.py b/sunspear/activitystreams/models.py index 3237cc9..90bf7c2 100644 --- a/sunspear/activitystreams/models.py +++ b/sunspear/activitystreams/models.py @@ -216,7 +216,7 @@ def get_parsed_sub_activity_dict(self, actor, content="", verb="reply", object_t return _activity, parent_activity def parse_data(self, data, *args, **kwargs): - #TODO Rename to jsonify_dict + # TODO Rename to jsonify_dict _parsed_data = super(Activity, self).parse_data(data, *args, **kwargs) for response_field in self._response_fields: if response_field in _parsed_data: diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 20e1332..0070f93 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -247,9 +247,9 @@ def activity_get(self, if aggregation_pipeline is None: aggregation_pipeline = [] activity_ids = self._listify(activity_ids) # TODO: likely don't need to listify here. 
+ activities = self._get_raw_activities(activity_ids, **kwargs) - activities = self._get_raw_activities(activity_ids, **kwargs) activities = self.hydrate_activities(activities) # assert len(activities) == 1, "activity_get should return exactly 1 activity" return activities diff --git a/sunspear/backends/database/schema.py b/sunspear/backends/database/schema.py index 0478a1f..f127cf5 100644 --- a/sunspear/backends/database/schema.py +++ b/sunspear/backends/database/schema.py @@ -48,22 +48,22 @@ Column('other_data', custom_types.JSONDict()), UniqueConstraint('actor', 'in_reply_to')) -to_table = Table('to', metadata, +to_table = Table('sgactivitystream_to', metadata, Column('id', Integer, primary_key=True), Column('object', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), Column('activity', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) -bto_table = Table('bto', metadata, +bto_table = Table('sgactivitystream_bto', metadata, Column('id', Integer, primary_key=True), Column('object', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), Column('activity', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) -cc_table = Table('cc', metadata, +cc_table = Table('sgactivitystream_cc', metadata, Column('id', Integer, primary_key=True), Column('object', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), Column('activity', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) -bcc_table = Table('bcc', metadata, +bcc_table = Table('sgactivitystream_bcc', metadata, Column('id', Integer, primary_key=True), Column('object', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), Column('activity', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) From 7c75783813881ef2f11244ff22a4a231cc29783c Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Wed, 9 May 2018 16:23:28 -0700 Subject: [PATCH 48/56] properly create bcc, cc, bto, to tables --- sunspear/backends/database/db.py | 
16 ++++++++++++---- sunspear/backends/database/schema.py | 16 ++++++++-------- 2 files changed, 20 insertions(+), 12 deletions(-) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 0070f93..d31e586 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -163,12 +163,12 @@ def activity_exists(self, activity, **kwargs): return self.engine.execute(sql.select([sql.exists().where(activities_db_table.c.id == activity_id)])).scalar() - def activity_create(self, activity, **kwargs): + def activity_create(self, activity_dict, **kwargs): """ Creates an activity. This assumes the activity is already dehydrated (ie has references to the objects and not the actual objects itself) """ - activity = Activity(activity, backend=self) + activity = Activity(activity_dict, backend=self) activity.validate() activity_dict = activity.get_parsed_dict() @@ -177,6 +177,16 @@ def activity_create(self, activity, **kwargs): self.engine.execute(self.activities_table.insert(), [activity_db_schema_dict]).close() + obj_id = activity_dict['object'] + activity_id = activity_dict['id'] + for audience_field in ['cc', 'bcc', 'to', 'bto']: + if audience_field in activity_dict: + table = schema.tables[audience_field] + self.engine.execute(table.insert(), dict( + obj_id=obj_id, + activity_id=activity_id + )).close() + return self.get_activity(activity_dict) def activity_delete(self, activity, **kwargs): @@ -248,8 +258,6 @@ def activity_get(self, aggregation_pipeline = [] activity_ids = self._listify(activity_ids) # TODO: likely don't need to listify here. 
activities = self._get_raw_activities(activity_ids, **kwargs) - - activities = self.hydrate_activities(activities) # assert len(activities) == 1, "activity_get should return exactly 1 activity" return activities diff --git a/sunspear/backends/database/schema.py b/sunspear/backends/database/schema.py index f127cf5..f96d70d 100644 --- a/sunspear/backends/database/schema.py +++ b/sunspear/backends/database/schema.py @@ -50,23 +50,23 @@ to_table = Table('sgactivitystream_to', metadata, Column('id', Integer, primary_key=True), - Column('object', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), - Column('activity', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) + Column('obj_id', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), + Column('activity_id', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) bto_table = Table('sgactivitystream_bto', metadata, Column('id', Integer, primary_key=True), - Column('object', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), - Column('activity', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) + Column('obj_id', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), + Column('activity_id', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) cc_table = Table('sgactivitystream_cc', metadata, Column('id', Integer, primary_key=True), - Column('object', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), - Column('activity', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) + Column('obj_id', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), + Column('activity_id', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) bcc_table = Table('sgactivitystream_bcc', metadata, Column('id', Integer, primary_key=True), - Column('object', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), - Column('activity', ForeignKey('sgactivitystream_activities.id', 
ondelete='CASCADE'))) + Column('obj_id', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE')), + Column('activity_id', ForeignKey('sgactivitystream_activities.id', ondelete='CASCADE'))) tables = { 'objects': objects_table, From 0bbf5259e2bab64457f68d17b081906b9b3ab78f Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Wed, 9 May 2018 17:09:56 -0700 Subject: [PATCH 49/56] Try to respect audience targetting --- sunspear/backends/database/db.py | 28 ++++++++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index d31e586..77e5029 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -92,6 +92,22 @@ def likes_table(self): def replies_table(self): return schema.tables['replies'] + @property + def cc_table(self): + return schema.tables['cc'] + + @property + def bcc_table(self): + return schema.tables['bcc'] + + @property + def to_table(self): + return schema.tables['to'] + + @property + def bto_table(self): + return schema.tables['bto'] + def _get_connection(self): return self.engine.connect() @@ -253,13 +269,21 @@ def activity_get(self, if filters is None: filters = {} if audience_targeting is None: - audience_targeting = [] + audience_targeting = {} if aggregation_pipeline is None: aggregation_pipeline = [] activity_ids = self._listify(activity_ids) # TODO: likely don't need to listify here. + + assert len(audience_targeting) == 1, "I can't be wrong about this assumption, right?" 
+ + for audience_type, object_id in audience_targeting.items(): + audience_table = schema[audience_type] + + sql.select(['*']).where(audience_table.c.obj_id == object_id) + activities = self._get_raw_activities(activity_ids, **kwargs) activities = self.hydrate_activities(activities) - # assert len(activities) == 1, "activity_get should return exactly 1 activity" + return activities def sub_activity_create(self, activity, actor, content, extra={}, sub_activity_verb="", published=None, **kwargs): From 5f349bf9260e979fc0d106eb5906e22ece3fc8b0 Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Mon, 21 May 2018 14:46:06 -0700 Subject: [PATCH 50/56] i hope --- sunspear/backends/database/db.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index 77e5029..a58fbd7 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -274,13 +274,23 @@ def activity_get(self, aggregation_pipeline = [] activity_ids = self._listify(activity_ids) # TODO: likely don't need to listify here. - assert len(audience_targeting) == 1, "I can't be wrong about this assumption, right?" + assert len(audience_targeting) == 1 or len(audience_targeting) == 0, "I can't be wrong about this. 
I hope" + audience_activity_ids = None for audience_type, object_id in audience_targeting.items(): - audience_table = schema[audience_type] + # audience_table = schema.tables[audience_type] + # audience_query = sql.select([audience_table.c.activity_id]).where( + # audience_table.c.obj_id == object_id) - sql.select(['*']).where(audience_table.c.obj_id == object_id) + audience_activity_ids = self.engine.execute(sql.select([self.activities_table.c.id]).where( + self.activities_table.c.actor_id.in_(audience_targeting[audience_type]) + )).fetchall() + # audience_activities_query = sql.select(['*']).where(self.activities_table.c.id.in_(audience_query)) + # result_proxy = self.engine.execute(audience_activities_query) + + if audience_activity_ids is not None and not include_public: # only filter then?? + activity_ids = list(set(audience_activity_ids).intersection(set(activity_ids))) activities = self._get_raw_activities(activity_ids, **kwargs) activities = self.hydrate_activities(activities) From 91f55efd6309dffda0c365dd8a508ed3cdd42921 Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Wed, 23 May 2018 16:40:56 -0700 Subject: [PATCH 51/56] push --- sunspear/backends/base.py | 2 -- sunspear/backends/database/db.py | 15 +++++++++++++-- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/sunspear/backends/base.py b/sunspear/backends/base.py index 8a7d580..a841062 100644 --- a/sunspear/backends/base.py +++ b/sunspear/backends/base.py @@ -402,8 +402,6 @@ def _listify(self, list_or_string): """ if not isinstance(list_or_string, (list, tuple, set)): list_or_string = [list_or_string] - else: - list_or_string = list_or_string return list_or_string diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index a58fbd7..f51b363 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -266,6 +266,7 @@ def activity_get(self, audience_targeting=None, aggregation_pipeline=None, **kwargs): + if filters is None: filters 
= {} if audience_targeting is None: @@ -276,6 +277,7 @@ def activity_get(self, assert len(audience_targeting) == 1 or len(audience_targeting) == 0, "I can't be wrong about this. I hope" + """ audience_activity_ids = None for audience_type, object_id in audience_targeting.items(): # audience_table = schema.tables[audience_type] @@ -288,14 +290,23 @@ def activity_get(self, # audience_activities_query = sql.select(['*']).where(self.activities_table.c.id.in_(audience_query)) # result_proxy = self.engine.execute(audience_activities_query) + """ + # if audience_activity_ids is not None and not include_public: # only filter then?? + # activity_ids = list(set(audience_activity_ids) & (set(activity_ids))) + self.filter_activities_by_audience(activity_ids, audience_targeting) - if audience_activity_ids is not None and not include_public: # only filter then?? - activity_ids = list(set(audience_activity_ids).intersection(set(activity_ids))) activities = self._get_raw_activities(activity_ids, **kwargs) activities = self.hydrate_activities(activities) return activities + def filter_activities_by_audience(self, activity_ids, audience_targeting): + # s = sql.select(['*']).where(self.objects_table.c.id.in_(obj_ids)) + # TODO: bcc, bto, etc.. 
+ cc_query = sql.select(['*']).where(self.cc_table.c.activity_id.in_(activity_ids)) + res = self.engine.execute(cc_query).fetchall() + return None + def sub_activity_create(self, activity, actor, content, extra={}, sub_activity_verb="", published=None, **kwargs): object_type = kwargs.get('object_type', sub_activity_verb) sub_activity_model = self.get_sub_activity_model(sub_activity_verb) From fd8a19fdc017a76671e96de6135fdd3002792757 Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Wed, 27 Jun 2018 14:44:31 -0700 Subject: [PATCH 52/56] Unpin riak --- setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.py b/setup.py index 3219f5b..394567c 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,6 @@ 'python-dateutil>=1.5, != 2.0', 'riak', 'six', - 'riak==2.5.4', 'protobuf==2.6.1', 'sqlalchemy==1.1.14', 'MySQL-python==1.2.5', From 7dd8ef6c78ca92c9cb45272a3d2f34b1dfc2b702 Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Wed, 27 Jun 2018 15:48:30 -0700 Subject: [PATCH 53/56] try also unpinning sqlalchemy --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 394567c..9213b84 100644 --- a/setup.py +++ b/setup.py @@ -26,7 +26,7 @@ 'riak', 'six', 'protobuf==2.6.1', - 'sqlalchemy==1.1.14', + 'sqlalchemy', 'MySQL-python==1.2.5', 'six' ], From a50697527aca0ae97c8de2de4010ce62141ecda7 Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Fri, 29 Jun 2018 16:31:21 -0700 Subject: [PATCH 54/56] update schema to match updates in the app --- sunspear/backends/database/db.py | 7 ++++++- sunspear/backends/database/schema.py | 16 ++++++++++++++-- 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index f51b363..e8f2d40 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -20,6 +20,7 @@ import copy import datetime import json +import logging import six from dateutil import tz @@ -31,9 +32,11 @@ from 
sunspear.backends.base import BaseBackend from sunspear.exceptions import SunspearOperationNotSupportedException - from . import schema + +logger = logging.getLogger(__name__) + DB_OBJ_FIELD_MAPPING = { 'id': 'id', 'objectType': 'object_type', @@ -64,6 +67,8 @@ class DatabaseBackend(BaseBackend): def __init__(self, db_connection_string=None, verbose=False, poolsize=10, max_overflow=5, **kwargs): + if db_connection_string is None: + logger.info("WARNING: Not given a valid db connection string") self._engine = create_engine( db_connection_string, echo=verbose, diff --git a/sunspear/backends/database/schema.py b/sunspear/backends/database/schema.py index f96d70d..cc83f22 100644 --- a/sunspear/backends/database/schema.py +++ b/sunspear/backends/database/schema.py @@ -5,23 +5,35 @@ metadata = MetaData() -objects_table = Table('sgactivitystream_objects', metadata, +objects_table = Table('sgactivitystream_streamobject', metadata, Column('id', String(32), primary_key=True), Column('object_type', String(256), nullable=False), Column('display_name', String(256), default=''), + Column('sunspear_id', String(256)), + Column('badge_id', ForeignKey('sgrecognition_badge.id', ondelete='SET NULL')), + Column('badgerecipient_id', ForeignKey('sgrecognition_badgerecipient.id', ondelete='SET NULL')), + Column('checkin_id', ForeignKey('sgcheckin_checkin.id', ondelete='SET NULL')), + Column('goal_id', ForeignKey('sggoals_goal.id', ondelete='SET NULL')), + Column('keyresult_id', ForeignKey('sggoals_keyresult.id', ondelete='SET NULL')), + Column('oneonone_id', ForeignKey('sgoneonone_oneonone.id', ondelete='SET NULL')), + Column('sgnetwork_id', ForeignKey('sgnetworks_sgnetwork.container_ptr_id', ondelete='SET NULL')), + Column('team_id', ForeignKey('sgteam_team.id', ondelete='SET NULL')), + Column('userprofile_id', ForeignKey('core_userprofile.id', ondelete='SET NULL')), Column('content', Text, default=''), Column('published', DateTime(timezone=True), nullable=False), Column('updated', 
DateTime(timezone=True), default=datetime.now(), onupdate=datetime.now()), Column('image', custom_types.JSONSmallDict(4096), default={}), Column('other_data', custom_types.JSONDict(), default={})) -activities_table = Table('sgactivitystream_activities', metadata, +activities_table = Table('sgactivitystream_streamactivity', metadata, Column('id', String(32), primary_key=True), Column('verb', String(256), nullable=False), + Column('unique_verb', String(256), nullable=False), Column('actor_id', ForeignKey('sgactivitystream_objects.id', ondelete='CASCADE'), nullable=False), Column('object_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), Column('target_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), Column('author_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), + Column('sgnetwork_id', ForeignKey('sgnetworks_sgnetwork.container_ptr_id', ondelete='CASCADE'), nullable=True), Column('generator_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), Column('provider_id', ForeignKey('sgactivitystream_objects.id', ondelete='SET NULL')), Column('content', Text, default=''), From ca79fcc711c9c5bdb536b7af14f86b1abe15f06a Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Fri, 29 Jun 2018 16:32:33 -0700 Subject: [PATCH 55/56] MySQL-python -> mysqlclient in setup.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 9213b84..037a2ab 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ 'six', 'protobuf==2.6.1', 'sqlalchemy', - 'MySQL-python==1.2.5', + 'mysqlclient', 'six' ], options={'easy_install': {'allow_hosts': 'pypi.python.org'}}, From a95a1297185968617d05b03d0ec1214b66a675f6 Mon Sep 17 00:00:00 2001 From: Nicholas Kobald Date: Wed, 29 Aug 2018 14:43:27 -0700 Subject: [PATCH 56/56] This stuff was staged, so I better commit it --- sunspear/activitystreams/models.py | 22 ++++++++++++++++++---- sunspear/backends/database/db.py | 3 ++- 
sunspear/backends/database/schema.py | 6 +++--- 3 files changed, 23 insertions(+), 8 deletions(-) diff --git a/sunspear/activitystreams/models.py b/sunspear/activitystreams/models.py index 90bf7c2..8c733cc 100644 --- a/sunspear/activitystreams/models.py +++ b/sunspear/activitystreams/models.py @@ -83,7 +83,7 @@ def parse_data(self, data, *args, **kwargs): # parse direct and indirect audience targeting for c in self._indirect_audience_targeting_fields + self._direct_audience_targeting_fields: if c in _parsed_data and _parsed_data[c]: - _parsed_data[c] = [obj.parse_data(obj.get_dict()) if isinstance(obj, Model) else obj\ + _parsed_data[c] = [obj.parse_data(obj.get_dict()) if isinstance(obj, Model) else obj for obj in _parsed_data[c]] # parse media fields @@ -96,6 +96,12 @@ def parse_data(self, data, *args, **kwargs): if isinstance(v, dict) and k not in self._response_fields: _parsed_data[k] = self.parse_data(v) + if 'id' in _parsed_data: + # we need to let the database take care of generating ids + # since there are size constraints that the sunspear convention violates, + # however to be safe we'll store the sunspear id as well. + _parsed_data['sunspear_id'] = _parsed_data['id'] + del _parsed_data['id'] return _parsed_data def get_parsed_dict(self, *args, **kwargs): @@ -159,8 +165,16 @@ def _set_defaults(self, model_dict): return model_dict - def get_parsed_sub_activity_dict(self, actor, content="", verb="reply", object_type="reply", \ - collection="replies", activity_class=None, extra={}, published=None, **kwargs): + def get_parsed_sub_activity_dict(self, + actor, + content="", + verb="reply", + object_type="reply", + collection="replies", + activity_class=None, + extra={}, + published=None, + **kwargs): # TODO: Doesn't feel like this should be here Feels like it belongs in the backend. 
if published is None: @@ -208,7 +222,7 @@ def get_parsed_sub_activity_dict(self, actor, content="", verb="reply", object_t } self._dict[collection]['totalItems'] += 1 - #insert the newest comment at the top of the list + # insert the newest comment at the top of the list self._dict[collection]['items'].insert(0, _sub_dict) parent_activity = self.parse_data(self._dict, **kwargs) diff --git a/sunspear/backends/database/db.py b/sunspear/backends/database/db.py index e8f2d40..d3da99c 100644 --- a/sunspear/backends/database/db.py +++ b/sunspear/backends/database/db.py @@ -146,7 +146,8 @@ def obj_create(self, obj, **kwargs): def obj_exists(self, obj, **kwargs): obj_id = self._extract_id(obj) objs_db_table = self.objects_table - + # this probably needs to use sunspear id now? i think... + # mmm.. return self.engine.execute(sql.select([sql.exists().where(objs_db_table.c.id == obj_id)])).scalar() def obj_update(self, obj, **kwargs): diff --git a/sunspear/backends/database/schema.py b/sunspear/backends/database/schema.py index cc83f22..8b11f06 100644 --- a/sunspear/backends/database/schema.py +++ b/sunspear/backends/database/schema.py @@ -6,15 +6,15 @@ metadata = MetaData() objects_table = Table('sgactivitystream_streamobject', metadata, - Column('id', String(32), primary_key=True), + Column('id', String(32), primary_key=True, nullable=True), Column('object_type', String(256), nullable=False), Column('display_name', String(256), default=''), - Column('sunspear_id', String(256)), + Column('sunspear_id', String(256), nullable=True), Column('badge_id', ForeignKey('sgrecognition_badge.id', ondelete='SET NULL')), Column('badgerecipient_id', ForeignKey('sgrecognition_badgerecipient.id', ondelete='SET NULL')), Column('checkin_id', ForeignKey('sgcheckin_checkin.id', ondelete='SET NULL')), Column('goal_id', ForeignKey('sggoals_goal.id', ondelete='SET NULL')), - Column('keyresult_id', ForeignKey('sggoals_keyresult.id', ondelete='SET NULL')), + Column('keyresult_id', 
ForeignKey('sggoals_kyresult.id', ondelete='SET NULL')), Column('oneonone_id', ForeignKey('sgoneonone_oneonone.id', ondelete='SET NULL')), Column('sgnetwork_id', ForeignKey('sgnetworks_sgnetwork.container_ptr_id', ondelete='SET NULL')), Column('team_id', ForeignKey('sgteam_team.id', ondelete='SET NULL')),