diff --git a/assembl/alembic/versions/116f128b0000_locale_as_str.py b/assembl/alembic/versions/116f128b0000_locale_as_str.py index b1eabde6ed..b8d3000fea 100644 --- a/assembl/alembic/versions/116f128b0000_locale_as_str.py +++ b/assembl/alembic/versions/116f128b0000_locale_as_str.py @@ -172,9 +172,16 @@ def downgrade(pyramid_env): 'locale.id', onupdate='CASCADE', ondelete='CASCADE'))) - op.execute('INSERT INTO locale (code, rtl) values ' - + ','.join(["('%s', %s)" % (loc, str(is_rtl(loc)).lower()) - for loc in locales])) + op.execute( + ( + 'INSERT INTO locale (code, rtl) values ' + + ','.join( + "('%s', %s)" % (loc, str(is_rtl(loc)).lower()) + for loc in locales + ) + ) + ) + op.execute('''UPDATE locale_label SET named_locale_id = (SELECT id FROM locale diff --git a/assembl/alembic/versions/1593228f01ab_hopefully_fix_duplicate_imported_.py b/assembl/alembic/versions/1593228f01ab_hopefully_fix_duplicate_imported_.py index 1935d882ba..ead6c8f7b4 100644 --- a/assembl/alembic/versions/1593228f01ab_hopefully_fix_duplicate_imported_.py +++ b/assembl/alembic/versions/1593228f01ab_hopefully_fix_duplicate_imported_.py @@ -66,8 +66,6 @@ def upgrade(pyramid_env): ) """) op.create_unique_constraint(config.get('db_schema')+"_"+config.get('db_user')+"_imported_post_UNQC_source_post_id_source_id", "imported_post", ["source_post_id","source_id"]) - pass - # Do stuff with the app's models here. from assembl import models as m db = m.get_session_maker()() diff --git a/assembl/alembic/versions/1931a5603650_store_mime_type_of_post_body.py b/assembl/alembic/versions/1931a5603650_store_mime_type_of_post_body.py index 8ff661197c..3636e61738 100644 --- a/assembl/alembic/versions/1931a5603650_store_mime_type_of_post_body.py +++ b/assembl/alembic/versions/1931a5603650_store_mime_type_of_post_body.py @@ -25,8 +25,6 @@ def upgrade(pyramid_env): UPDATE imported_post SET body_mime_type = 'text/plain' """) - pass - # Do stuff with the app's models here. 
from assembl import models as m db = m.get_session_maker()() @@ -38,4 +36,3 @@ def downgrade(pyramid_env): with context.begin_transaction(): #ALTER TABLE assembl..imported_post DROP COLUMN body_mime_type op.drop_column('imported_post', 'body_mime_type') - pass diff --git a/assembl/alembic/versions/27f58fce7b77_override_social_autologin.py b/assembl/alembic/versions/27f58fce7b77_override_social_autologin.py index fd17e7fbb9..f3ea942ac6 100644 --- a/assembl/alembic/versions/27f58fce7b77_override_social_autologin.py +++ b/assembl/alembic/versions/27f58fce7b77_override_social_autologin.py @@ -40,11 +40,13 @@ def upgrade(pyramid_env): if 'default_permissions' in values: found = False for role, permissions in list(values['default_permissions'].items()): - if role not in base_roles: - if P_OVERRIDE_SOCIAL_AUTOLOGIN not in permissions: - permissions.append(P_OVERRIDE_SOCIAL_AUTOLOGIN) - values['default_permissions'][role] = permissions - found = True + if ( + role not in base_roles + and P_OVERRIDE_SOCIAL_AUTOLOGIN not in permissions + ): + permissions.append(P_OVERRIDE_SOCIAL_AUTOLOGIN) + values['default_permissions'][role] = permissions + found = True if found: changes.append({'id': id, 'pref_json': dumps(values)}) if changes: diff --git a/assembl/alembic/versions/30365b67b36d_idea_link_creation.py b/assembl/alembic/versions/30365b67b36d_idea_link_creation.py index 3c53fc1ee0..d199f809cd 100644 --- a/assembl/alembic/versions/30365b67b36d_idea_link_creation.py +++ b/assembl/alembic/versions/30365b67b36d_idea_link_creation.py @@ -52,11 +52,15 @@ def upgrade(pyramid_env): m.IdeaLink.tombstone_date == None).first()[0] <= 1 bases.sort() first = bases[0] - db.execute("""UPDATE idea_idea_link + db.execute( + """UPDATE idea_idea_link SET base_id = %d, creation_date = (SELECT creation_date FROM idea_idea_link AS il2 WHERE base_id=%d LIMIT 1) WHERE base_id IN (%s) - """ % (first, first, ','.join([str(id) for id in bases[1:]]))) + """ + % (first, first, ','.join(str(id) for id in bases[1:])) + ) + mark_changed() diff --git a/assembl/alembic/versions/33c20d131cb6_fix_notification_constraints.py b/assembl/alembic/versions/33c20d131cb6_fix_notification_constraints.py index 4ce8185cbb..fb16eebbf7 100644 --- a/assembl/alembic/versions/33c20d131cb6_fix_notification_constraints.py +++ b/assembl/alembic/versions/33c20d131cb6_fix_notification_constraints.py @@ -23,41 +23,13 @@ def upgrade(pyramid_env): NotificationSubscriptionStatus, NotificationSubscriptionStatus) schema = config.get('db_schema')+"."+config.get('db_user') with context.begin_transaction(): - #No clean way to address constraints, and I didn't find a way to add JUST the constraint from sqlalchemy data structures - constraintNameOld = "ck_"+config.get('db_schema')+"_"+config.get('db_user')+"_notification_subscription_notification_status" - op.execute("""ALTER TABLE notification_subscription DROP CONSTRAINT """+constraintNameOld) - constraintNameNew = "ck_"+config.get('db_schema')+"_"+config.get('db_user')+"_notification_subscription_notification_subscription_status" - op.execute("""ALTER TABLE notification_subscription ADD CONSTRAINT """+constraintNameNew+""" + #No clean way to address constraints, and I didn't find a way to add JUST the constraint from sqlalchemy data structures + constraintNameOld = "ck_"+config.get('db_schema')+"_"+config.get('db_user')+"_notification_subscription_notification_status" + op.execute("""ALTER TABLE notification_subscription DROP CONSTRAINT """+constraintNameOld) + constraintNameNew = 
"ck_"+config.get('db_schema')+"_"+config.get('db_user')+"_notification_subscription_notification_subscription_status" + op.execute("""ALTER TABLE notification_subscription ADD CONSTRAINT """+constraintNameNew+""" CHECK (status IN ('ACTIVE', 'INACTIVE_DFT', 'UNSUBSCRIBED'))""") - - if False: - """ WOW, no column rename in virtuoso! - For the record, even the following doesn't work (chicken and the egg on null values)! Darn virtuoso""" - op.add_column('notification_subscription', - sa.Column('status_temp', - NotificationSubscriptionStatus.db_type(), - nullable=True, - index = True, - default = NotificationSubscriptionStatus.ACTIVE), - schema=schema) - - op.execute("""UPDATE notification_subscription SET - status_temp = status""") - op.drop_column('notification_subscription', 'status') - op.add_column('notification_subscription', - sa.Column('status', - NotificationSubscriptionStatus.db_type(), - nullable=True, - index = True, - default = NotificationSubscriptionStatus.ACTIVE), - schema=schema) - op.execute("""UPDATE notification_subscription SET - status = status_temp""") - op.execute("""ALTER TABLE notification_subscription - MODIFY status varchar(256) NOT NULL""") - op.drop_column('notification_subscription', 'status_temp') - def downgrade(pyramid_env): with context.begin_transaction(): pass diff --git a/assembl/alembic/versions/368a596ab4b5_tombstone_date.py b/assembl/alembic/versions/368a596ab4b5_tombstone_date.py index 2d193713ea..7c6b422a39 100644 --- a/assembl/alembic/versions/368a596ab4b5_tombstone_date.py +++ b/assembl/alembic/versions/368a596ab4b5_tombstone_date.py @@ -90,7 +90,7 @@ def reconstruct_idea_history(db): if len(live_id): live_id = live_id[0] if live_id else l[-1] non_synth = [id for id in l if id not in synthesis_dates] - if not non_synth[-1] == live_id: + if non_synth[-1] != live_id: import pdb; pdb.set_trace() else: live_id = l[-1] @@ -135,7 +135,7 @@ def reconstruct_vote_history(db): similar_votes.sort(key=lambda v: v.id) previous = None assert not similar_votes[-1].is_tombstone - assert all([vote.is_tombstone for vote in similar_votes[:-1]]) + assert all(vote.is_tombstone for vote in similar_votes[:-1]) live_vote_id = similar_votes[-1].id for vote in similar_votes: vote.base_id = live_vote_id diff --git a/assembl/alembic/versions/4ea8eee4b157_refactor_subject_body_to_content.py b/assembl/alembic/versions/4ea8eee4b157_refactor_subject_body_to_content.py index 1a2d71302b..7fa7888ada 100644 --- a/assembl/alembic/versions/4ea8eee4b157_refactor_subject_body_to_content.py +++ b/assembl/alembic/versions/4ea8eee4b157_refactor_subject_body_to_content.py @@ -44,8 +44,11 @@ def upgrade(pyramid_env): def downgrade(pyramid_env): from assembl.semantic.virtuoso_mapping import get_session from assembl.models import Content - assert not ('body' in Content.__table__.c or 'subject' in Content.__table__.c), \ - "Comment out the body and subject from Content to run the back migration" + assert ( + 'body' not in Content.__table__.c + and 'subject' not in Content.__table__.c + ), "Comment out the body and subject from Content to run the back migration" + dbsession = get_session() try: dbsession.execute("SPARQL drop quad map quadnames:col_pattern_Content_subject") diff --git a/assembl/alembic/versions/525b7451056e_migrate_to_social_auth_account.py b/assembl/alembic/versions/525b7451056e_migrate_to_social_auth_account.py index 0480695759..9ce056dd12 100644 --- a/assembl/alembic/versions/525b7451056e_migrate_to_social_auth_account.py +++ 
b/assembl/alembic/versions/525b7451056e_migrate_to_social_auth_account.py @@ -109,9 +109,11 @@ def downgrade(pyramid_env): prov_to_dom = { id: old_domains[prov] for (id, prov) in providers.items()} - case_clause = "CASE provider_id %s END" % "\n ".join([ + case_clause = "CASE provider_id %s END" % "\n ".join( "WHEN %d THEN '%s'" % (id, dom) - for (id, dom) in prov_to_dom.items()]) + for (id, dom) in prov_to_dom.items() + ) + db.execute("""INSERT INTO idprovider_agent_account (id, provider_id, username, userid, profile_info, picture_url, domain) diff --git a/assembl/alembic/versions/aad68410c38b_langstring_on_announcements.py b/assembl/alembic/versions/aad68410c38b_langstring_on_announcements.py index 93ecebb712..ad95ef691a 100644 --- a/assembl/alembic/versions/aad68410c38b_langstring_on_announcements.py +++ b/assembl/alembic/versions/aad68410c38b_langstring_on_announcements.py @@ -68,8 +68,7 @@ def upgrade(pyramid_env): lang = candidate_langs[0] def as_lang_string(text): - ls = m.LangString.create(text, lang) - return ls + return m.LangString.create(text, lang) if title: announcement.title = as_lang_string(title) diff --git a/assembl/alembic/versions/ca1c445a2e24_langstring_on_synthesis.py b/assembl/alembic/versions/ca1c445a2e24_langstring_on_synthesis.py index 655a6b1c00..cb03d09efa 100644 --- a/assembl/alembic/versions/ca1c445a2e24_langstring_on_synthesis.py +++ b/assembl/alembic/versions/ca1c445a2e24_langstring_on_synthesis.py @@ -72,8 +72,7 @@ def upgrade(pyramid_env): lang = candidate_langs[0] def as_lang_string(text): - ls = m.LangString.create(text, lang) - return ls + return m.LangString.create(text, lang) if subject: synthesis.subject = as_lang_string(subject) diff --git a/assembl/alembic/versions/f7d61062eccf_multi_extracts.py b/assembl/alembic/versions/f7d61062eccf_multi_extracts.py index 928435997a..afd7bdbfc4 100644 --- a/assembl/alembic/versions/f7d61062eccf_multi_extracts.py +++ b/assembl/alembic/versions/f7d61062eccf_multi_extracts.py @@ -84,7 +84,7 @@ def downgrade(pyramid_env): FROM extract LEFT OUTER JOIN idea_content_link as icl ON (icl.extract_id = extract.id) WHERE icl.id IS NULL - """) + """) connected_extracts = list(db.execute( """SELECT id, extract_id from idea_content_link WHERE extract_id IS NOT NULL""")) @@ -95,16 +95,18 @@ def downgrade(pyramid_env): duplicates = set() for e, icls in by_extract.items(): if len(icls) > 1: - if e in icls: - keep = e - else: - keep = min(*icls) + keep = e if e in icls else min(*icls) dups = set(icls) dups.remove(keep) duplicates.update(dups) if duplicates: - db.execute("DELETE FROM idea_content_link WHERE id IN (%s)" % ( - ",".join([str(x) for x in duplicates]))) + db.execute( + ( + "DELETE FROM idea_content_link WHERE id IN (%s)" + % ",".join(str(x) for x in duplicates) + ) + ) + connected_extracts = [(icl, e) for (icl, e) in connected_extracts if icl not in duplicates] mismatched = [(icl, e) for (icl, e) in connected_extracts if icl != e] diff --git a/assembl/auth/util.py b/assembl/auth/util.py index 9ca5111285..2ad3b941a9 100644 --- a/assembl/auth/util.py +++ b/assembl/auth/util.py @@ -37,10 +37,7 @@ def get_user(request): user = 0 if user is 0: logged_in = request.unauthenticated_userid - if logged_in: - request._user = User.get(logged_in) - else: - request._user = None + request._user = User.get(logged_in) if logged_in else None return request._user @@ -181,10 +178,9 @@ def discussion_id_from_request(request): """Obtain the discussion_id from the request, possibly without fetching the discussion""" from 
assembl.views.traversal import BaseContext - if request.matchdict: - if 'discussion_id' in request.matchdict: - discussion_id = int(request.matchdict['discussion_id']) - return discussion_id + if request.matchdict and 'discussion_id' in request.matchdict: + discussion_id = int(request.matchdict['discussion_id']) + return discussion_id if getattr(request, "context", None) and isinstance( request.context, BaseContext): discussion_id = request.context.get_discussion_id() @@ -647,7 +643,7 @@ def add_multiple_users_csv( name, email, None, None, True, localrole=with_role, discussion=discussion_id, change_old_password=False) status_in_discussion = None - if send_password_change and not (created_user or created_localrole): + if send_password_change and not created_user and not created_localrole: status_in_discussion = user.get_status_in_discussion(discussion_id) if send_password_change and ( created_user or created_localrole or ( diff --git a/assembl/fabfile.py b/assembl/fabfile.py index 0da8e7c1b8..a5b510f872 100644 --- a/assembl/fabfile.py +++ b/assembl/fabfile.py @@ -685,13 +685,13 @@ def build_virtualenv(): # and that sometimes precludes building python modules. bcfile = "/usr/local/Frameworks/Python.framework/Versions/3.6/lib/python3.6/distutils/distutils.cfg" vefile = env.venvpath + "/lib/python3.6/distutils/distutils.cfg" - sec = "build_ext" if exists(bcfile): brew_config = SafeConfigParser() brew_config.read(bcfile) venv_config = SafeConfigParser() if exists(vefile): venv_config.read(vefile) + sec = "build_ext" if (brew_config.has_section(sec) and not venv_config.has_section(sec)): venv_config.add_section(sec) @@ -955,7 +955,6 @@ def update_node(force_reinstall=False): Install node and npm to a known-good version """ node_version = '12.18.2' - npm_version = '6.14.5' with settings(warn_only=True), hide('running', 'stdout'): node_version_cmd_result = venvcmd("node --version") if force_reinstall or "v"+node_version not in node_version_cmd_result.split(): @@ -963,6 +962,7 @@ def update_node(force_reinstall=False): #Because otherwise node may be busy circus_process_stop('dev:webpack') run("rm -rf "+join(env.venvpath, "lib/node_modules")) + npm_version = '6.14.5' venvcmd(f"nodeenv --node={node_version} --npm={npm_version} --python-virtualenv assembl/static") with cd(get_node_base_path()): venvcmd("npm install --no-save reinstall -g", chdir=False) diff --git a/assembl/lib/sqla.py b/assembl/lib/sqla.py index 56025cfea6..65b9882dda 100644 --- a/assembl/lib/sqla.py +++ b/assembl/lib/sqla.py @@ -107,7 +107,7 @@ def dispose_sqlengines(): db_schema = None _metadata = None Base = None -class_registry = dict() +class_registry = {} aliased_class_registry = None @@ -1583,10 +1583,12 @@ def apply_side_effects_without_json(self, context=None, request=None): collection = sub_i_ctx.__parent__.collection parent_instance = sub_i_ctx.__parent__.parent_instance attr = collection.get_attribute(parent_instance) - if isinstance(attr, list): - if sub_instance not in attr: - collection.on_new_instance(self, sub_instance) - elif attr != sub_instance: + if ( + isinstance(attr, list) + and sub_instance not in attr + or not isinstance(attr, list) + and attr != sub_instance + ): collection.on_new_instance(self, sub_instance) self.db.add(sub_instance) @@ -1651,10 +1653,6 @@ def _do_local_update_from_json( self.__class__.__name__)) setter(self, value) continue - elif parse_instruction[0] == "'": - if value != parse_instruction[1:]: - raise HTTPBadRequest("%s should be %s'" % ( - key, parse_instruction)) else: key = 
parse_instruction accessor = None diff --git a/assembl/lib/text_search.py b/assembl/lib/text_search.py index 1126458371..dfeaf3e0ba 100644 --- a/assembl/lib/text_search.py +++ b/assembl/lib/text_search.py @@ -131,10 +131,7 @@ def add_simple_text_search(query, text_columns, keywords, include_rank=True): filters = [func.to_tsvector(fts_config, column).match( keywords_j, postgresql_regconfig='simple') for column in text_columns] - if len(filters) > 1: - filter = or_(*filters) - else: - filter = filters[0] + filter = or_(*filters) if len(filters) > 1 else filters[0] query = query.filter(filter) if include_rank: ranks = [func.ts_rank( diff --git a/assembl/models/auth.py b/assembl/models/auth.py index 6366c76a7e..b8bae518e4 100644 --- a/assembl/models/auth.py +++ b/assembl/models/auth.py @@ -191,10 +191,10 @@ def merge(self, other_profile): other_account.merge(my_account) other_account.profile = self session.delete(my_account) - elif (isinstance(other_account, EmailAccount) and - other_account.email.lower() in my_social_emails): - pass - else: + elif ( + not isinstance(other_account, EmailAccount) + or other_account.email.lower() not in my_social_emails + ): other_account.profile = self if other_profile.name and not self.name: self.name = other_profile.name diff --git a/assembl/models/discussion.py b/assembl/models/discussion.py index 2be1fbfdd2..055799bf0a 100644 --- a/assembl/models/discussion.py +++ b/assembl/models/discussion.py @@ -773,12 +773,6 @@ def add_facebook_source_id(inst_ctx, ctx): # post_id = data.get('post_id', None) # fb_post_id = data.get('facebook_post_id', None) raise NotImplementedError("TODO") - post_id = source.sink_post_id - cs = ContentSourceIDs(source=source, - post_id=post_id, - message_id_in_source=fb_post_id) - yield InstanceContext( - inst_ctx['pushed_messages'], cs) return (AllUsersCollection(cls), AllPubFlowsCollection(cls), diff --git a/assembl/models/facebook_integration.py b/assembl/models/facebook_integration.py index 61246b4542..4430af6a67 100644 --- a/assembl/models/facebook_integration.py +++ b/assembl/models/facebook_integration.py @@ -670,7 +670,7 @@ def _create_attachments(self, post, assembl_post, create a Facebook attachment") try: - if not raw_attach or attachment.get('url') == None: + if not raw_attach or attachment.get('url') is None: return old_attachments_on_post = assembl_post.attachments @@ -772,42 +772,39 @@ def _manage_post(self, post, obj_id, posts_db, users_db, upper = self.upper_bound if not lower: lower = self.lower_bound - if upper: - if post_created_time > upper: - cont = False - if lower: - if post_created_time < lower: - cont = False + if upper and post_created_time > upper: + cont = False + if lower and post_created_time < lower: + cont = False - if cont: - post_id = post.get('id') - creator = self.parser.get_user_post_creator(post) - self._manage_user(creator, users_db, reimport) + if not cont: + return None, cont - # Get all of the tagged users instead? - for user in self.parser.get_users_post_to_sans_self(post, obj_id): - self._manage_user(user, users_db, reimport) + post_id = post.get('id') + creator = self.parser.get_user_post_creator(post) + self._manage_user(creator, users_db, reimport) - creator_id = creator.get('id', None) - creator_agent = users_db.get(creator_id) - result = self._create_or_update_post(post, creator_agent, - posts_db, reimport) + # Get all of the tagged users instead? 
+ for user in self.parser.get_users_post_to_sans_self(post, obj_id): + self._manage_user(user, users_db, reimport) - if not result: - return + creator_id = creator.get('id', None) + creator_agent = users_db.get(creator_id) + result = self._create_or_update_post(post, creator_agent, + posts_db, reimport) - assembl_post = posts_db.get(post_id) - self._create_or_update_attachment(post, assembl_post, reimport, - self.parser.get_post_attachments) - # self._create_attachments(post, assembl_post, reimport, - # self.parser.get_post_attachments) - self.db.commit() - # Refresh the instance - self.db.query(self.__class__).populate_existing().get(self.id) - return assembl_post, cont + if not result: + return - else: - return None, cont + assembl_post = posts_db.get(post_id) + self._create_or_update_attachment(post, assembl_post, reimport, + self.parser.get_post_attachments) + # self._create_attachments(post, assembl_post, reimport, + # self.parser.get_post_attachments) + self.db.commit() + # Refresh the instance + self.db.query(self.__class__).populate_existing().get(self.id) + return assembl_post, cont def _manage_comment(self, comment, parent_post, posts_db, users_db, reimport=False): diff --git a/assembl/models/idea.py b/assembl/models/idea.py index ad5c8aafa6..2130423d93 100644 --- a/assembl/models/idea.py +++ b/assembl/models/idea.py @@ -421,10 +421,7 @@ def is_owner(self, user_id): @classmethod def restrict_to_owners_condition(cls, query, user_id, alias=None, alias_maker=None): if not alias: - if alias_maker: - alias = alias_maker.alias_from_class(cls) - else: - alias = cls + alias = alias_maker.alias_from_class(cls) if alias_maker else cls return (query, alias.creator_id == user_id) @classmethod @@ -804,10 +801,7 @@ def _visit_ideas_breadth_first( return idea_visitor.end_visit(self, level, prev_result, child_results) def most_common_words(self, lang=None, num=8): - if lang: - langs = (lang,) - else: - langs = self.discussion.discussion_locales + langs = (lang, ) if lang else self.discussion.discussion_locales word_counter = WordCountVisitor(langs) self.visit_ideas_depth_first(word_counter) return word_counter.best(num) @@ -1304,8 +1298,7 @@ def add_related_post_link(inst_ctx, ctx): inst_ctx=WidgetPost, ctx='Idea.linkedposts') def add_youtube_attachment(inst_ctx, ctx): from .attachment import Document, PostAttachment - for subctx in add_related_post_link(inst_ctx, ctx): - yield subctx + yield from add_related_post_link(inst_ctx, ctx) post = inst_ctx._instance insp_url = post.metadata_json.get('inspiration_url', '') if insp_url.startswith("https://www.youtube.com/"): diff --git a/assembl/models/idea_graph_view.py b/assembl/models/idea_graph_view.py index 1cbd50e85b..b7254cb7a0 100644 --- a/assembl/models/idea_graph_view.py +++ b/assembl/models/idea_graph_view.py @@ -259,9 +259,8 @@ class ExplicitSubGraphView(IdeaGraphView): } def copy(self, db=None): - retval = IdeaGraphView.copy(self, db=db) # retval.ideas = self.ideas - return retval + return IdeaGraphView.copy(self, db=db) def get_idea_links(self): # more efficient than the association_proxy diff --git a/assembl/models/idea_source.py b/assembl/models/idea_source.py index 3cf47bb9b0..0ae60b1ef6 100644 --- a/assembl/models/idea_source.py +++ b/assembl/models/idea_source.py @@ -200,15 +200,13 @@ def read_json(self, data, admin_user_id, apply_filter=False): def find_objects(j): if isinstance(j, list): for x in j: - for obj in find_objects(x): - yield obj + yield from find_objects(x) elif isinstance(j, dict): jid = j.get('@id', None) if jid: yield 
j for x in j.values(): - for obj in find_objects(x): - yield obj + yield from find_objects(x) self.read_data_gen(find_objects(data), admin_user_id, apply_filter) @@ -383,15 +381,13 @@ def read_data(self, jsonld, admin_user_id, base=None): def find_objects(j): if isinstance(j, list): for x in j: - for obj in find_objects(x): - yield obj + yield from find_objects(x) elif isinstance(j, dict): jid = j.get('@id', None) if jid: yield j for x in j.values(): - for obj in find_objects(x): - yield obj + yield from find_objects(x) self.read_data_gen(find_objects(jsonld), admin_user_id) self.db.flush() diff --git a/assembl/models/mail.py b/assembl/models/mail.py index ae08eb9f8e..c20c63600f 100644 --- a/assembl/models/mail.py +++ b/assembl/models/mail.py @@ -391,9 +391,8 @@ def extract_text(part): if len(parts) == 1: return (parts[0], parts_type) if parts_type == "text/html": - return ("\n".join([ - "
<p>%s</p>" % p for p in parts]), parts_type) - if parts_type == "text/plain": + return "\n".join("<p>%s</p>
" % p for p in parts), parts_type + elif parts_type == "text/plain": return ("\n".join(parts), parts_type) elif part.get_content_disposition(): # TODO: Inline attachments @@ -418,11 +417,14 @@ def extract_text(part): def email_header_to_unicode(header_string, join_crlf=True): text = u''.join( - [ - txt.decode(enc) if enc else txt.decode('iso-8859-1') if isinstance(txt, bytes) else txt - for (txt, enc) in decode_email_header(header_string) - ] + txt.decode(enc) + if enc + else txt.decode('iso-8859-1') + if isinstance(txt, bytes) + else txt + for (txt, enc) in decode_email_header(header_string) ) + if join_crlf: text = u''.join(text.split(u'\r\n')) diff --git a/assembl/models/path_utils.py b/assembl/models/path_utils.py index b7ad4a44ee..06a3e650ba 100644 --- a/assembl/models/path_utils.py +++ b/assembl/models/path_utils.py @@ -239,10 +239,7 @@ def base_query(labeled=False): content = with_polymorphic( Content, [], Content.__table__, aliased=False, flat=True) - if labeled: - query = db.query(post.id.label("post_id")) - else: - query = db.query(post.id) + query = db.query(post.id.label("post_id")) if labeled else db.query(post.id) query = query.join(content, (content.id == post.id) & (content.discussion_id==discussion_id)) if include_deleted is not None: diff --git a/assembl/models/votes.py b/assembl/models/votes.py index 0f3deb1f28..9cc877006d 100644 --- a/assembl/models/votes.py +++ b/assembl/models/votes.py @@ -389,9 +389,7 @@ def is_valid_vote(self, vote): TokenIdeaVote.voter_id == vote.voter_id, TokenIdeaVote.tombstone_date == None ).first() - if total > self.total_number: - return False - return True + return total <= self.total_number @classmethod def get_discussion_conditions(cls, discussion_id, alias_maker=None): @@ -477,7 +475,7 @@ def joint_histogram( for spec in group_specs } group_spec_ids = {x.id for x in group_specs} - group_signature = ",".join([spec.uri() for spec in group_specs]) + group_signature = ",".join(spec.uri() for spec in group_specs) joint_histograms[group_signature] = histograms_by_idea = {} sums = [0] * len(group_specs) sum_squares = [0] * len(group_specs) @@ -583,7 +581,7 @@ class ResourceVoteSpecification(AbstractVoteSpecification): def results_for(self, voting_results, histogram_size=None): base = super(ResourceVoteSpecification, self).results_for(voting_results) - base['total'] = sum([v.vote_value for v in voting_results]) + base['total'] = sum(v.vote_value for v in voting_results) return base def vote_range(self): @@ -831,10 +829,7 @@ def is_owner(self, user_id): @classmethod def restrict_to_owners_condition(cls, query, user_id, alias=None, alias_maker=None): if not alias: - if alias_maker: - alias = alias_maker.alias_from_class(cls) - else: - alias = cls + alias = alias_maker.alias_from_class(cls) if alias_maker else cls return (query, alias.voter_id == user_id) # Do we still need this? 
Can access through vote_spec diff --git a/assembl/models/widgets.py b/assembl/models/widgets.py index a3b6431ea0..3cb8e96fb4 100644 --- a/assembl/models/widgets.py +++ b/assembl/models/widgets.py @@ -648,16 +648,14 @@ def add_proposal_post_link(inst_ctx, ctx): def hide_proposal_idea(inst_ctx, ctx): obj = inst_ctx._instance obj.hidden = True - for subctx in add_proposal_post(inst_ctx, ctx): - yield subctx + yield from add_proposal_post(inst_ctx, ctx) @collection_creation_side_effects.register( inst_ctx=IdeaProposalPost, ctx='BaseIdeaWidget.base_idea_hiding') def hide_proposal_post(inst_ctx, ctx): obj = inst_ctx._instance obj.hidden = True - for subctx in add_proposal_post_link(inst_ctx, ctx): - yield subctx + yield from add_proposal_post_link(inst_ctx, ctx) return (BaseIdeaCollectionC(), BaseIdeaHidingCollection('base_idea_hiding'), diff --git a/assembl/nlp/__init__.py b/assembl/nlp/__init__.py index 7af5872362..8a5d04a54a 100644 --- a/assembl/nlp/__init__.py +++ b/assembl/nlp/__init__.py @@ -130,10 +130,7 @@ def stemText(self, text): return text def stemWords(self, words): - result = [] - for word in words: - result.append(self.stemWord(word)) - return result + return [self.stemWord(word) for word in words] _stemmers = { diff --git a/assembl/nlp/clusters.py b/assembl/nlp/clusters.py index 2e5416ae7c..e7d992fd66 100644 --- a/assembl/nlp/clusters.py +++ b/assembl/nlp/clusters.py @@ -603,9 +603,10 @@ def parse_topic(self, topic, trans=identity): return {} words = topic.split(' + ') words = (word.split('*') for word in words) - return dict(((' '.join(( - trans(w) for w in k.strip('"').split('_') if w)), float(v)) - for (v, k) in words)) + return { + ' '.join((trans(w) for w in k.strip('"').split('_') if w)): float(v) + for (v, k) in words + } def calc_features(self, post_clusters): corpus = self.corpus @@ -836,10 +837,9 @@ def get_all_results(self): results = {id: self.get_cluster_info(id) for (id,) in idea_ids} results[None] = self.get_cluster_info() - posres = {id: r for (id, r) in results.items() if r is not None} # for id, (silhouette_score, compare_with_ideas, clusters, post_info) in posres.iteritems(): # log.debug(" ".join((id, silhouette_score, repr([len(x['cluster']) for x in clusters])))) - return posres + return {id: r for (id, r) in results.items() if r is not None} def as_html(self, f=None, jinja_env=None): discussion = self.discussion @@ -1254,13 +1254,12 @@ def get_posts_in_cluster(self, post_ids, parent): post_idss = set(post_ids) if parent: return [p for p in parent.children if p.id in post_idss] - else: - ancestry = self.post_ancestry - post_ids = [pid for pid in post_ids if not set( - ancestry[pid][:-1]).intersection(post_idss)] - post_ids.sort() - posts = self.posts - return [posts[id] for id in post_ids] + ancestry = self.post_ancestry + post_ids = [pid for pid in post_ids if not set( + ancestry[pid][:-1]).intersection(post_idss)] + post_ids.sort() + posts = self.posts + return [posts[id] for id in post_ids] def show_clusters(clusters): @@ -1791,8 +1790,7 @@ def _intra_cluster_distance(distances_row, labels, i): if not np.any(mask): # cluster of size 1 return 0 - a = np.mean(distances_row[mask]) - return a + return np.mean(distances_row[mask]) def _nearest_cluster_distance(distances_row, labels, i): @@ -1816,6 +1814,10 @@ def _nearest_cluster_distance(distances_row, labels, i): Mean nearest-cluster distance for sample i """ label = labels[i] - b = np.min([np.mean(distances_row[labels == cur_label]) - for cur_label in set(labels) if not cur_label == label]) - return b + 
return np.min( + [ + np.mean(distances_row[labels == cur_label]) + for cur_label in set(labels) + if cur_label != label + ] + ) diff --git a/assembl/nlp/optics.py b/assembl/nlp/optics.py index ca334a72be..dc0fe103d6 100644 --- a/assembl/nlp/optics.py +++ b/assembl/nlp/optics.py @@ -347,13 +347,9 @@ def is_valid_cluster(self, cluster, down_area=None, up_area=None): max_val = N.amax(RD[start+1:end]) if max_val > cluster_edge: return False - if down_area: - if not (down_area.start <= start <= down_area.end): - return False - if up_area: - if not (up_area.start <= end <= up_area.end): - return False - return True + if down_area and not (down_area.start <= start <= down_area.end): + return False + return not up_area or up_area.start <= end <= up_area.end def as_cluster(self, down_area, up_area): cluster = self.cluster_boundary(down_area, up_area) @@ -396,8 +392,8 @@ def extract_clusters(self, x=None, eps=0.05, D=None): continue steep_down_areas[a] = max(steep_down_areas[a], RD[index]) cutoff = RD[ivl.end+1] * (1-eps) - for a in steep_down_areas: - if steep_down_areas[a] <= cutoff: + for a, value in steep_down_areas.items(): + if value <= cutoff: # print 'trying', a, ivl cluster = self.as_cluster(a, ivl) if cluster: @@ -424,7 +420,7 @@ def as_dendrogram(self, clusters): check = check.parent else: assert False - return base + return last def cluster_as_ids(self, cluster): return self.order[cluster.as_slice()] diff --git a/assembl/nlp/translation_service.py b/assembl/nlp/translation_service.py index 36509711bb..a4918dca54 100644 --- a/assembl/nlp/translation_service.py +++ b/assembl/nlp/translation_service.py @@ -247,9 +247,8 @@ def translate_lse( # This is only stored if both identification and translation # failed to identify a language. target_lse = source_lse.langstring.entries_as_dict.get(target, None) - if target_lse and not retranslate: - if self.has_fatal_error(target_lse): - return target_lse + if target_lse and not retranslate and self.has_fatal_error(target_lse): + return target_lse if target_lse is None: target_lse = LangStringEntry( langstring_id=source_lse.langstring_id, @@ -269,16 +268,24 @@ def translate_lse( lang = self.asPosixLocale(lang) # What if detected language is not a discussion language? if source_locale == LocaleLabel.UNDEFINED: - if constrain_locale_threshold and ( - self.strlen_nourl(source_lse.value) < - constrain_locale_threshold): - if (not lang) or not any_locale_compatible( - lang, self.discussion.discussion_locales): - self.set_error( - source_lse, - LangStringStatus.IDENTIFIED_TO_UNKNOWN, - "Identified to "+lang) - return source_lse + if ( + constrain_locale_threshold + and ( + self.strlen_nourl(source_lse.value) + < constrain_locale_threshold + ) + and ( + (not lang) + or not any_locale_compatible( + lang, self.discussion.discussion_locales + ) + ) + ): + self.set_error( + source_lse, + LangStringStatus.IDENTIFIED_TO_UNKNOWN, + "Identified to "+lang) + return source_lse source_lse.identify_locale(lang, dict( service=self.__class__.__name__)) # This should never actually happen, because @@ -307,11 +314,13 @@ def translate_lse( target_lse.value = None else: # Note: when retranslating, we may lose a valid translation. - if source_locale == LocaleLabel.UNDEFINED: - if not self.distinct_identify_step: - # At least do this much. - self.confirm_locale(source_lse) - source_locale = source_lse.locale + if ( + source_locale == LocaleLabel.UNDEFINED + and not self.distinct_identify_step + ): + # At least do this much. 
+ self.confirm_locale(source_lse) + source_locale = source_lse.locale self.set_error( target_lse, LangStringStatus.CANNOT_TRANSLATE, "cannot translate") @@ -654,13 +663,12 @@ def translate(self, text, target, is_html=False, source=None, db=None): def decode_exception(self, exception, identify_phase=False): if isinstance(exception, requests.Timeout): return LangStringStatus.SERVICE_DOWN, str(exception) - if isinstance(exception, RuntimeError): - if exception.args[0] == "status": - status = exception.args[1] - if status in (456,): - return LangStringStatus.QUOTA_ERROR, "" - if status in (400, 403, 404): - return LangStringStatus.PERMANENT_TRANSLATION_FAILURE, "" - elif status in (503, 429): - return LangStringStatus.SERVICE_DOWN, "" + if isinstance(exception, RuntimeError) and exception.args[0] == "status": + status = exception.args[1] + if status in (456,): + return LangStringStatus.QUOTA_ERROR, "" + if status in (400, 403, 404): + return LangStringStatus.PERMANENT_TRANSLATION_FAILURE, "" + elif status in (503, 429): + return LangStringStatus.SERVICE_DOWN, "" return LangStringStatus.UNKNOWN_ERROR, "" diff --git a/assembl/scripts/__init__.py b/assembl/scripts/__init__.py index 59db518a8e..f392da22a2 100644 --- a/assembl/scripts/__init__.py +++ b/assembl/scripts/__init__.py @@ -18,5 +18,4 @@ def boostrap_configuration(config): configure_zmq(settings['changes_socket'], False) configure_model_watcher(env['registry'], 'idealoom') configure_engine(settings, True) - session = get_session_maker()() - return session + return get_session_maker()() diff --git a/assembl/scripts/clone_discussion.py b/assembl/scripts/clone_discussion.py index 9c5d108bd0..021d21475a 100755 --- a/assembl/scripts/clone_discussion.py +++ b/assembl/scripts/clone_discussion.py @@ -245,7 +245,7 @@ def get_mapper_info(mapper): from assembl.lib.history_mixin import TombstonableMixin from assembl.models import LangStringEntry if mapper not in class_info: - pk_keys_cols = set([c for c in mapper.primary_key]) + pk_keys_cols = set(mapper.primary_key) direct_reln = {r for r in mapper.relationships if r.direction.name == 'MANYTOONE' and r.viewonly == False @@ -260,8 +260,11 @@ def get_mapper_info(mapper): # It might have been excluded by a relation. 
copy_col_props.add(mapper._props['tombstone_date']) non_nullable_reln = { - r for r in direct_reln - if any([not c.nullable for c in r.local_columns])} + r + for r in direct_reln + if any(not c.nullable for c in r.local_columns) + } + treat_as_non_nullable = [] for cls in mapper.class_.mro(): relns = TREAT_AS_NON_NULLABLE.get(cls.__name__, ()) @@ -294,11 +297,10 @@ def assign_dict(values, r, subob): def assign_ob(ob, r, subob): - if r.direction.name != 'MANYTOONE': - if r.mapper != ob.__class__.__mapper__: - "DISCARDING", r - # Handled by the reverse connection - return + if r.direction.name != 'MANYTOONE' and r.mapper != ob.__class__.__mapper__: + "DISCARDING", r + # Handled by the reverse connection + return for col in r.local_columns: if col.foreign_keys: fkcol = next(iter(col.foreign_keys)).column @@ -323,10 +325,7 @@ def is_known_class(self, cls): if cls in self.classes: return True for other_cls in self.classes: - if issubclass(cls, other_cls): - self.classes.add(cls) - return True - elif issubclass(other_cls, cls): + if issubclass(cls, other_cls) or issubclass(other_cls, cls): self.classes.add(cls) return True @@ -370,10 +369,12 @@ def process_column(self, column): def final_query(self): while len(self.missing): - missing = [] - for column in self.missing: - if not self.process_column(column): - missing.append(column) + missing = [ + column + for column in self.missing + if not self.process_column(column) + ] + if len(missing) == len(self.missing): break self.missing = missing @@ -526,12 +527,10 @@ def recursive_clone(ob, path): for r in nullable_relns: subob = getattr(ob, r.key, None) if subob is not None: - if isinstance(subob, list): + if isinstance(subob, list) or subob not in copies_of: local_promises[r] = subob - elif subob in copies_of: - assign_dict(values, r, copies_of[subob]) else: - local_promises[r] = subob + assign_dict(values, r, copies_of[subob]) values.update(changes[ob]) if isinstance(ob, Discussion): values['table_of_contents'] = None diff --git a/assembl/tasks/__init__.py b/assembl/tasks/__init__.py index 561eaefe1b..6f70876ade 100644 --- a/assembl/tasks/__init__.py +++ b/assembl/tasks/__init__.py @@ -104,12 +104,9 @@ def init_from_celery(self): settings_file = join(rootdir, 'local.ini') if not exists(settings_file): settings_file = join(rootdir, 'production.ini') - if not exists(settings_file): rootdir = dirname(dirname(dirname(realpath(__file__)))) settings_file = join(rootdir, 'local.ini') - if not exists(settings_file): settings_file = join(rootdir, 'production.ini') - if not exists(settings_file): raise RuntimeError("Missing settings file") _settings = settings = get_appsettings(settings_file, 'idealoom') configure_zmq(settings['changes_socket'], False) diff --git a/assembl/tasks/changes_router.py b/assembl/tasks/changes_router.py index 00b463cf88..5a801a4c2e 100644 --- a/assembl/tasks/changes_router.py +++ b/assembl/tasks/changes_router.py @@ -71,7 +71,6 @@ async def websocket_handler(request): def setup_app(path): - app = web.Application() # cors = aiohttp_cors.setup(app) # resource = cors.add(app.router.add_resource(path)) # route = cors.add( @@ -82,7 +81,7 @@ def setup_app(path): # allow_headers="*", # max_age=3600, # )}) - return app + return web.Application() class Dispatcher(object): @@ -107,7 +106,7 @@ def __init__(self, app, zmq_context, token_secret, self.token_secret = token_secret self.server_url = server_url self.out_socket_name = out_socket_name - self.active_sockets = dict() + self.active_sockets = {} self.token = None self.discussion = None 
self.userId = None @@ -243,7 +242,6 @@ async def on_message(self, msg): log.info('userId: %s', self.userId) except TokenInvalid: print("TokenInvalid") - pass if self.token and self.discussion: # Check if token authorizes discussion async with self.http_client.get( @@ -270,12 +268,12 @@ async def on_message(self, msg): loop = asyncio.get_event_loop() self.task = loop.create_task(self.connect()) self.session.send('[{"@type":"Connection"}]') - if self.token and self.raw_token and self.discussion and self.userId != Everyone: - async with self.http_client.post( - '%s/data/Discussion/%s/all_users/%d/connecting' % ( - self.server_url, self.discussion, self.token['userId'] - ), data={'token': self.raw_token}) as resp: - await resp.text() + if self.token and self.raw_token and self.discussion and self.userId != Everyone: + async with self.http_client.post( + '%s/data/Discussion/%s/all_users/%d/connecting' % ( + self.server_url, self.discussion, self.token['userId'] + ), data={'token': self.raw_token}) as resp: + await resp.text() except Exception as e: log.error(e) capture_exception() @@ -371,7 +369,7 @@ async def check_sockets(app): for socket_name in (in_socket, out_socket): if socket_name.startswith('ipc://'): socket_name = socket_name[6:] - for i in range(15): + for _ in range(15): if exists(socket_name): break sleep(0.1) diff --git a/assembl/tasks/piwik.py b/assembl/tasks/piwik.py index aa959d5fd1..d472c3e3d4 100644 --- a/assembl/tasks/piwik.py +++ b/assembl/tasks/piwik.py @@ -203,10 +203,11 @@ def piwik_SitesManager_getSitesIdFromSiteUrl(piwik_url, piwik_api_token, url): params = { "module": "API", "format": "JSON", - "token_auth": piwik_api_token + "token_auth": piwik_api_token, + "method": "SitesManager.getSitesIdFromSiteUrl", + "url": url, } - params["method"] = "SitesManager.getSitesIdFromSiteUrl" - params["url"] = url + result = requests.get(piwik_url, params=params, timeout=15) if result.status_code != 200: @@ -225,11 +226,12 @@ def piwik_SitesManager_addSite(piwik_url, piwik_api_token, siteName, urls, ecomm params = { "module": "API", "format": "JSON", - "token_auth": piwik_api_token + "token_auth": piwik_api_token, + "method": "SitesManager.addSite", + "siteName": siteName, + "urls": urls, } - params["method"] = "SitesManager.addSite" - params["siteName"] = siteName - params["urls"] = urls + if ( ecommerce ): params["ecommerce"] = ecommerce if ( siteSearch ): @@ -281,12 +283,13 @@ def piwik_UsersManager_setUserAccess(piwik_url, piwik_api_token, userLogin, acce params = { "module": "API", "format": "JSON", - "token_auth": piwik_api_token + "token_auth": piwik_api_token, + "method": "UsersManager.setUserAccess", + "userLogin": userLogin, + "access": access, + "idSites": idSites, } - params["method"] = "UsersManager.setUserAccess" - params["userLogin"] = userLogin - params["access"] = access - params["idSites"] = idSites + result = requests.get(piwik_url, params=params, timeout=15) if result.status_code != 200: @@ -297,17 +300,16 @@ def piwik_UsersManager_setUserAccess(piwik_url, piwik_api_token, userLogin, acce if not content: raise requests.ConnectionError() - user_access_is_set = ("result" in content and content["result"] == "success") - return user_access_is_set + return ("result" in content and content["result"] == "success") def piwik_UsersManager_hasSuperUserAccess(piwik_url, piwik_api_token, userLogin): params = { "module": "API", "format": "JSON", - "token_auth": piwik_api_token + "token_auth": piwik_api_token, + "method": "UsersManager.hasSuperUserAccess", + "userLogin": 
userLogin, } - params["method"] = "UsersManager.hasSuperUserAccess" - params["userLogin"] = userLogin result = requests.get(piwik_url, params=params, timeout=15) diff --git a/setup.py b/setup.py index 3542134c8b..c46e253ff5 100644 --- a/setup.py +++ b/setup.py @@ -48,13 +48,15 @@ def compose_version(): def widget_components(): - paths = [] exclusions = { 'browserify', 'jasmine', 'jsdoc', 'karma', 'mocha', 'serve', 'src', '.sass-cache'} - for (path, directories, filenames) in os.walk('assembl/static/widget'): - if not set(path.split('/')).intersection(exclusions): - paths.append(path) + paths = [ + path + for (path, directories, filenames) in os.walk('assembl/static/widget') + if not set(path.split('/')).intersection(exclusions) + ] + return [path[8:] + "/*" for path in paths]
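
The hunks above are mostly mechanical applications of a few recurring Python idioms (generator expressions passed straight to `join`/`any`/`all`/`sum`, conditional expressions in place of small if/else assignments, `yield from` instead of re-yield loops). As a rough, hedged illustration of those idioms — not part of the patch, and with hypothetical names that do not refer to assembl/idealoom code — the before/after shape looks like this:

```python
# Illustrative sketch of the idioms applied throughout the patch; all names
# here are hypothetical examples, not functions from the assembl codebase.

def join_ids(ids):
    # Generator expression fed straight to str.join, instead of building a list first.
    return ",".join(str(i) for i in ids)

def all_tombstoned(votes):
    # any()/all()/sum() accept generators directly; no intermediate list is needed.
    return all(v.get("is_tombstone") for v in votes)

def pick_alias(alias_maker, cls):
    # Conditional expression replaces a four-line if/else assignment.
    return alias_maker.alias_from_class(cls) if alias_maker else cls

def walk(tree):
    # 'yield from' replaces a loop that only re-yields a sub-generator's items.
    if isinstance(tree, list):
        for node in tree:
            yield from walk(node)
    else:
        yield tree

if __name__ == "__main__":
    print(join_ids([1, 2, 3]))                       # "1,2,3"
    print(all_tombstoned([{"is_tombstone": True}]))  # True
    print(list(walk([1, [2, 3], [[4]]])))            # [1, 2, 3, 4]
```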