def validate_message_edit_payload(
    message: Message,
    stream_id: Optional[int],
    topic_name: Optional[str],
    propagate_mode: Optional[str],
    content: Optional[str],
) -> None:
    """
    Checks that the data sent is well-formed. Does not handle editability,
    permissions etc.
    """
    if topic_name is None and content is None and stream_id is None:
        raise JsonableError(_("Nothing to change"))

    if not message.is_stream_message():
        if stream_id is not None:
            raise JsonableError(_("Direct messages cannot be moved to channels."))
        if topic_name is not None:
            raise JsonableError(_("Direct messages cannot have topics."))

    if propagate_mode != "change_one" and topic_name is None and stream_id is None:
        raise JsonableError(_("Invalid propagate_mode without topic edit"))

    if topic_name is not None:
        check_stream_topic(topic_name)

    if stream_id is not None and content is not None:
        raise JsonableError(_("Cannot change message content while changing channel"))

    # Right now, we prevent users from editing widgets.
    if content is not None and is_widget_message(message):
        raise JsonableError(_("Widgets cannot be edited."))
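
To make the mutual-exclusion rules above easy to see at a glance, here is a minimal, self-contained sketch of the same checks, with a plain ValueError standing in for JsonableError and a boolean standing in for message.is_stream_message(); the names here are illustrative, not Zulip APIs.

from typing import Optional

def sketch_validate_edit(
    is_stream_message: bool,
    stream_id: Optional[int],
    topic_name: Optional[str],
    propagate_mode: Optional[str],
    content: Optional[str],
) -> None:
    # At least one of topic, content, or stream must change.
    if topic_name is None and content is None and stream_id is None:
        raise ValueError("Nothing to change")
    # Direct messages have no stream or topic to move.
    if not is_stream_message and (stream_id is not None or topic_name is not None):
        raise ValueError("Direct messages cannot be moved")
    # Propagation only makes sense for topic/stream edits.
    if propagate_mode != "change_one" and topic_name is None and stream_id is None:
        raise ValueError("Invalid propagate_mode without topic edit")
    # Content edits and stream moves are mutually exclusive.
    if stream_id is not None and content is not None:
        raise ValueError("Cannot change content while changing channel")

# A content edit combined with a stream move is rejected:
try:
    sketch_validate_edit(True, stream_id=7, topic_name=None, propagate_mode="change_one", content="hi")
except ValueError as e:
    print(e)  # Cannot change content while changing channel
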
def validate_user_can_edit_message(
    user_profile: UserProfile, message: Message, edit_limit_buffer: int
) -> None:
    """
    Checks if the user has the permission to edit the message.
    """
    if not user_profile.realm.allow_message_editing:
        raise JsonableError(_("Your organization has turned off message editing"))

    # You cannot edit the content of a message sent by someone else.
    if message.sender_id != user_profile.id:
        raise JsonableError(_("You don't have permission to edit this message"))

    if user_profile.realm.message_content_edit_limit_seconds is not None:
        deadline_seconds = (
            user_profile.realm.message_content_edit_limit_seconds + edit_limit_buffer
        )
        if (timezone_now() - message.date_sent) > timedelta(seconds=deadline_seconds):
            raise JsonableError(_("The time limit for editing this message has passed"))
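
The deadline arithmetic here is simple: the realm's limit plus a grace buffer is compared against the message's age. A minimal sketch with plain datetimes (the 600-second limit is an example value, not a Zulip default):

from datetime import datetime, timedelta, timezone

def edit_window_open(date_sent: datetime, limit_seconds: int, buffer_seconds: int = 20) -> bool:
    # The effective window is the configured limit plus a small grace buffer.
    deadline = timedelta(seconds=limit_seconds + buffer_seconds)
    return datetime.now(timezone.utc) - date_sent <= deadline

sent = datetime.now(timezone.utc) - timedelta(seconds=500)
print(edit_window_open(sent, limit_seconds=600))  # True: 500s is within 600s + 20s grace
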
def maybe_send_resolve_topic_notifications(
    *,
    user_profile: UserProfile,
    stream: Stream,
    old_topic_name: str,
    new_topic_name: str,
    changed_messages: QuerySet[Message],
) -> Optional[int]:
    """Returns resolved_topic_message_id if resolve topic notifications
    were in fact sent."""
    # Note that topics will have already been stripped in check_update_message.
    #
    # This logic is designed to treat removing a weird "✔ ✔✔ "
    # prefix as unresolving the topic.
    topic_resolved: bool = new_topic_name.startswith(
        RESOLVED_TOPIC_PREFIX
    ) and not old_topic_name.startswith(RESOLVED_TOPIC_PREFIX)
    topic_unresolved: bool = old_topic_name.startswith(
        RESOLVED_TOPIC_PREFIX
    ) and not new_topic_name.startswith(RESOLVED_TOPIC_PREFIX)

    if not topic_resolved and not topic_unresolved:
        # If there's some other weird topic that does not toggle the
        # state of "topic starts with RESOLVED_TOPIC_PREFIX", we do
        # nothing. Any other logic could result in cases where we send
        # these notifications in a non-alternating fashion.
        #
        # Note that it is still possible for an individual topic to
        # have multiple "This topic was marked as resolved"
        # notifications in a row: one can send new messages to the
        # pre-resolve topic and then resolve the topic created that
        # way to get multiple in the resolved topic. And then an
        # administrator can delete the messages in between. We consider
        # this to be a fundamental risk of irresponsible message
        # deletion, not a bug with the "resolve topics" feature.
        return None

    # Compute the users who either sent or reacted to messages that
    # were moved via the "resolve topic" action. Only those users
    # should be eligible for this message being managed as unread.
    affected_participant_ids = set(
        changed_messages.values_list("sender_id", flat=True).union(
            Reaction.objects.filter(message__in=changed_messages).values_list(
                "user_profile_id", flat=True
            )
        )
    )
    sender = get_system_bot(settings.NOTIFICATION_BOT, user_profile.realm_id)
    user_mention = silent_mention_syntax_for_user(user_profile)
    with override_language(stream.realm.default_language):
        if topic_resolved:
            notification_string = _("{user} has marked this topic as resolved.")
        elif topic_unresolved:
            notification_string = _("{user} has marked this topic as unresolved.")

        resolved_topic_message_id = internal_send_stream_message(
            sender,
            stream,
            new_topic_name,
            notification_string.format(
                user=user_mention,
            ),
            limit_unread_user_ids=affected_participant_ids,
        )

    return resolved_topic_message_id
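
The resolve/unresolve detection reduces to an exclusive-or on whether the old and new topic names carry the resolved prefix. A standalone sketch (the "✔ " literal matches RESOLVED_TOPIC_PREFIX in Zulip's source):

from typing import Optional

RESOLVED_TOPIC_PREFIX = "✔ "

def resolve_transition(old_topic_name: str, new_topic_name: str) -> Optional[str]:
    # Only a change in "starts with the prefix" counts as a transition;
    # renames that keep the prefix state unchanged send no notification.
    old_resolved = old_topic_name.startswith(RESOLVED_TOPIC_PREFIX)
    new_resolved = new_topic_name.startswith(RESOLVED_TOPIC_PREFIX)
    if new_resolved and not old_resolved:
        return "resolved"
    if old_resolved and not new_resolved:
        return "unresolved"
    return None

print(resolve_transition("bug report", "✔ bug report"))      # resolved
print(resolve_transition("✔ bug report", "bug report"))      # unresolved
print(resolve_transition("bug report", "bug report (old)"))  # None
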
def do_update_message(
    user_profile: UserProfile,
    target_message: Message,
    new_stream: Optional[Stream],
    topic_name: Optional[str],
    propagate_mode: Optional[str],
    send_notification_to_old_thread: bool,
    send_notification_to_new_thread: bool,
    content: Optional[str],
    rendering_result: Optional[MessageRenderingResult],
    prior_mention_user_ids: Set[int],
    mention_data: Optional[MentionData] = None,
) -> int:
    """
    The main function for message editing. A message edit event can modify:
    * the message's content (in which case the caller will have set
      both content and rendered_content),
    * the topic, in which case the caller will have set topic_name,
    * or both the message's content and the topic,
    * or stream and/or topic, in which case the caller will have set
      new_stream and/or topic_name.

    With topic edits, propagate_mode determines whether other messages
    also have their topics edited.
    """
    timestamp = timezone_now()
    target_message.last_edit_time = timestamp

    event: Dict[str, Any] = {
        "type": "update_message",
        "user_id": user_profile.id,
        "edit_timestamp": datetime_to_timestamp(timestamp),
        "message_id": target_message.id,
        "rendering_only": False,
    }

    edit_history_event: EditHistoryEvent = {
        "user_id": user_profile.id,
        "timestamp": event["edit_timestamp"],
    }

    realm = user_profile.realm

    stream_being_edited = None
    if target_message.is_stream_message():
        stream_id = target_message.recipient.type_id
        stream_being_edited = get_stream_by_id_in_realm(stream_id, realm)
        event["stream_name"] = stream_being_edited.name
        event["stream_id"] = stream_being_edited.id

    ums = UserMessage.objects.filter(message=target_message.id)

    if content is not None:
        assert rendering_result is not None

        # mention_data is required if there's a content edit.
        assert mention_data is not None

        # add data from group mentions to mentions_user_ids.
        for group_id in rendering_result.mentions_user_group_ids:
            members = mention_data.get_group_members(group_id)
            rendering_result.mentions_user_ids.update(members)

        # One could imagine checking realm.allow_edit_history here and
        # modifying the events based on that setting, but doing so
        # doesn't really make sense. We need to send the edit event
        # to clients regardless, and a client already had access to
        # the original/pre-edit content of the message anyway. That
        # setting must be enforced on the client side, and making a
        # change here simply complicates the logic for clients parsing
        # edit history events.
        event["orig_content"] = target_message.content
        event["orig_rendered_content"] = target_message.rendered_content
        edit_history_event["prev_content"] = target_message.content
        edit_history_event["prev_rendered_content"] = target_message.rendered_content
        edit_history_event["prev_rendered_content_version"] = (
            target_message.rendered_content_version
        )
        target_message.content = content
        target_message.rendered_content = rendering_result.rendered_content
        target_message.rendered_content_version = markdown_version
        event["content"] = content
        event["rendered_content"] = rendering_result.rendered_content
        event["prev_rendered_content_version"] = target_message.rendered_content_version
        event["is_me_message"] = Message.is_status_message(
            content, rendering_result.rendered_content
        )

        # target_message.has_image and target_message.has_link will have been
        # already updated by Markdown rendering in the caller.
        target_message.has_attachment = check_attachment_reference_change(
            target_message, rendering_result
        )

        if target_message.is_stream_message():
            if topic_name is not None:
                new_topic_name = topic_name
            else:
                new_topic_name = target_message.topic_name()

            stream_topic: Optional[StreamTopicTarget] = StreamTopicTarget(
                stream_id=stream_id,
                topic_name=new_topic_name,
            )
        else:
            stream_topic = None

        info = get_recipient_info(
            realm_id=realm.id,
            recipient=target_message.recipient,
            sender_id=target_message.sender_id,
            stream_topic=stream_topic,
            possible_topic_wildcard_mention=mention_data.message_has_topic_wildcards(),
            possible_stream_wildcard_mention=mention_data.message_has_stream_wildcards(),
        )

        event["online_push_user_ids"] = list(info.online_push_user_ids)
        event["dm_mention_push_disabled_user_ids"] = list(info.dm_mention_push_disabled_user_ids)
        event["dm_mention_email_disabled_user_ids"] = list(info.dm_mention_email_disabled_user_ids)
        event["stream_push_user_ids"] = list(info.stream_push_user_ids)
        event["stream_email_user_ids"] = list(info.stream_email_user_ids)
        event["followed_topic_push_user_ids"] = list(info.followed_topic_push_user_ids)
        event["followed_topic_email_user_ids"] = list(info.followed_topic_email_user_ids)
        event["muted_sender_user_ids"] = list(info.muted_sender_user_ids)
        event["prior_mention_user_ids"] = list(prior_mention_user_ids)
        event["presence_idle_user_ids"] = filter_presence_idle_user_ids(info.active_user_ids)
        event["all_bot_user_ids"] = list(info.all_bot_user_ids)
        if rendering_result.mentions_stream_wildcard:
            event["stream_wildcard_mention_user_ids"] = list(info.stream_wildcard_mention_user_ids)
            event["stream_wildcard_mention_in_followed_topic_user_ids"] = list(
                info.stream_wildcard_mention_in_followed_topic_user_ids
            )
        else:
            event["stream_wildcard_mention_user_ids"] = []
            event["stream_wildcard_mention_in_followed_topic_user_ids"] = []

        if rendering_result.mentions_topic_wildcard:
            event["topic_wildcard_mention_user_ids"] = list(info.topic_wildcard_mention_user_ids)
            event["topic_wildcard_mention_in_followed_topic_user_ids"] = list(
                info.topic_wildcard_mention_in_followed_topic_user_ids
            )
            topic_participant_user_ids = info.topic_participant_user_ids
        else:
            event["topic_wildcard_mention_user_ids"] = []
            event["topic_wildcard_mention_in_followed_topic_user_ids"] = []
            topic_participant_user_ids = set()

        update_user_message_flags(rendering_result, ums, topic_participant_user_ids)

        do_update_mobile_push_notification(
            target_message,
            prior_mention_user_ids,
            rendering_result.mentions_user_ids,
            info.stream_push_user_ids,
        )

    if topic_name is not None or new_stream is not None:
        assert propagate_mode is not None
        orig_topic_name = target_message.topic_name()
        event["propagate_mode"] = propagate_mode

    users_losing_access = UserProfile.objects.none()
    user_ids_gaining_usermessages: List[int] = []
    if new_stream is not None:
        assert content is None
        assert target_message.is_stream_message()
        assert stream_being_edited is not None

        edit_history_event["prev_stream"] = stream_being_edited.id
        edit_history_event["stream"] = new_stream.id
        event[ORIG_TOPIC] = orig_topic_name
        assert new_stream.recipient_id is not None
        target_message.recipient_id = new_stream.recipient_id

        event["new_stream_id"] = new_stream.id
        event["propagate_mode"] = propagate_mode

        # When messages are moved from one stream to another, some
        # users may lose access to those messages, including guest
        # users and users not subscribed to the new stream (if it is a
        # private stream).
        # For those users, their experience is as
        # though the messages were deleted, and we should send a
        # delete_message event to them instead.

        # We select _all_ current subscriptions, not just active ones,
        # for the current stream, since there may be users who were
        # previously subscribed when the message was sent, but are no
        # longer, who should also lose their UserMessage rows.
        old_stream_all_users = UserProfile.objects.filter(
            id__in=Subscription.objects.filter(
                recipient__type=Recipient.STREAM,
                recipient__type_id=stream_id,
            ).values_list("user_profile_id")
        ).only("id")
        new_stream_current_users = UserProfile.objects.filter(
            id__in=get_active_subscriptions_for_stream_id(
                new_stream.id, include_deactivated_users=True
            ).values_list("user_profile_id")
        ).only("id")

        users_losing_usermessages = old_stream_all_users.difference(new_stream_current_users)
        if new_stream.is_public():
            # Only guest users are losing access, if it's moving to a public stream.
            users_losing_access = old_stream_all_users.filter(
                role=UserProfile.ROLE_GUEST
            ).difference(new_stream_current_users)
        else:
            # If it's moving to a private stream, all non-subscribed users are losing access.
            users_losing_access = users_losing_usermessages

        unmodified_user_messages = ums.exclude(user_profile__in=users_losing_usermessages)

        if not new_stream.is_history_public_to_subscribers():
            # We need to guarantee that every currently-subscribed
            # user of the new stream has a UserMessage row, since
            # being a member when the message is moved is always
            # enough to have access. We cannot reduce that set by
            # removing either active or all subscribers from the old
            # stream, since neither set guarantees that the user was
            # subscribed when these messages were sent -- in fact, it
            # may not be consistent across the messages.
            #
            # There may be current users of the new stream who already
            # have a usermessage row -- we handle this via `ON
            # CONFLICT DO NOTHING` during insert.
            user_ids_gaining_usermessages = list(
                new_stream_current_users.values_list("id", flat=True)
            )
    else:
        # If we're not moving the topic to another stream, we don't
        # modify the original set of UserMessage objects queried.
        unmodified_user_messages = ums

    # We save the full topic name so that checks that require comparison
    # between the original topic and the topic name passed into this function
    # will not be affected by the potential truncation of topic_name below.
    pre_truncation_topic_name = topic_name
    if topic_name is not None:
        topic_name = truncate_topic(topic_name)
        target_message.set_topic_name(topic_name)

        # These fields have legacy field names.
        event[ORIG_TOPIC] = orig_topic_name
        event[TOPIC_NAME] = topic_name
        event[TOPIC_LINKS] = topic_links(target_message.realm_id, topic_name)
        edit_history_event["prev_topic"] = orig_topic_name
        edit_history_event["topic"] = topic_name

    update_edit_history(target_message, timestamp, edit_history_event)

    # 'target_topic_has_messages', 'target_stream', and 'target_topic'
    # will be used while migrating user_topic records later in this function.
    #
    # We need to calculate 'target_topic_has_messages' here,
    # as we are moving the messages in the next step.
    if topic_name is not None or new_stream is not None:
        assert stream_being_edited is not None
        assert orig_topic_name is not None
        target_stream: Stream = new_stream if new_stream is not None else stream_being_edited
        target_topic_name: str = topic_name if topic_name is not None else orig_topic_name

        assert target_stream.recipient_id is not None
        target_topic_has_messages = messages_for_topic(
            realm.id, target_stream.recipient_id, target_topic_name
        ).exists()

    changed_messages = Message.objects.filter(id=target_message.id)
    changed_message_ids = [target_message.id]
    changed_messages_count = 1
    save_changes_for_propagation_mode = lambda: Message.objects.filter(
        id=target_message.id
    ).select_related(*Message.DEFAULT_SELECT_RELATED)
    if propagate_mode in ["change_later", "change_all"]:
        assert topic_name is not None or new_stream is not None
        assert stream_being_edited is not None

        # Other messages should only get topic/stream fields in their edit history.
        topic_only_edit_history_event: EditHistoryEvent = {
            "user_id": edit_history_event["user_id"],
            "timestamp": edit_history_event["timestamp"],
        }
        if topic_name is not None:
            topic_only_edit_history_event["prev_topic"] = edit_history_event["prev_topic"]
            topic_only_edit_history_event["topic"] = edit_history_event["topic"]
        if new_stream is not None:
            topic_only_edit_history_event["prev_stream"] = edit_history_event["prev_stream"]
            topic_only_edit_history_event["stream"] = edit_history_event["stream"]

        later_messages, save_changes_for_propagation_mode = update_messages_for_topic_edit(
            acting_user=user_profile,
            edited_message=target_message,
            propagate_mode=propagate_mode,
            orig_topic_name=orig_topic_name,
            topic_name=topic_name,
            new_stream=new_stream,
            old_stream=stream_being_edited,
            edit_history_event=topic_only_edit_history_event,
            last_edit_time=timestamp,
        )
        changed_messages |= later_messages
        changed_message_ids = list(changed_messages.values_list("id", flat=True))
        changed_messages_count = len(changed_message_ids)

    if new_stream is not None:
        assert stream_being_edited is not None

        # The fact that the user didn't have a UserMessage
        # originally means we can infer that the user was not
        # mentioned in the original message (even if mention
        # syntax was present, it would not take effect for a user
        # who was not subscribed). If we were editing the
        # message's content, we would rerender the message and
        # then use the new stream's data to determine whether this
        # is a mention of a subscriber; but as we are not doing
        # so, we choose to preserve the "was this mention syntax
        # an actual mention" decision made during the original
        # rendering for implementation simplicity. As a result,
        # the only flag to consider applying here is read.
        bulk_insert_all_ums(
            user_ids_gaining_usermessages, changed_message_ids, UserMessage.flags.read
        )

        # Delete UserMessage objects for users who will no
        # longer have access to these messages. Note: This could be
        # very expensive, since it's N guest users x M messages.
        UserMessage.objects.filter(
            user_profile__in=users_losing_usermessages,
            message__in=changed_messages,
        ).delete()

        delete_event: DeleteMessagesEvent = {
            "type": "delete_message",
            "message_ids": changed_message_ids,
            "message_type": "stream",
            "stream_id": stream_being_edited.id,
            "topic": orig_topic_name,
        }
        send_event(user_profile.realm, delete_event, [user.id for user in users_losing_access])

        # Reset the Attachment.is_*_public caches for all messages
        # moved to another stream with different access permissions.
        if new_stream.invite_only != stream_being_edited.invite_only:
            Attachment.objects.filter(messages__in=changed_messages.values("id")).update(
                is_realm_public=None,
            )
            ArchivedAttachment.objects.filter(
                messages__in=changed_messages.values("id")
            ).update(
                is_realm_public=None,
            )

        if new_stream.is_web_public != stream_being_edited.is_web_public:
            Attachment.objects.filter(messages__in=changed_messages.values("id")).update(
                is_web_public=None,
            )
            ArchivedAttachment.objects.filter(
                messages__in=changed_messages.values("id")
            ).update(
                is_web_public=None,
            )

    # This does message.save(update_fields=[...])
    save_message_for_edit_use_case(message=target_message)

    # This updates any later messages, if any. It returns the
    # freshly-fetched-from-the-database changed messages.
    changed_messages = save_changes_for_propagation_mode()

    realm_id: Optional[int] = None
    if stream_being_edited is not None:
        realm_id = stream_being_edited.realm_id

    event["message_ids"] = update_message_cache(changed_messages, realm_id)

    def user_info(um: UserMessage) -> Dict[str, Any]:
        return {
            "id": um.user_profile_id,
            "flags": um.flags_list(),
        }

    # The following block arranges that users who are subscribed to a
    # stream and can see history from before they subscribed get
    # live-update when old messages are edited (e.g. if the user does
    # a topic edit themself).
    #
    # We still don't send an update event to users who are not
    # subscribed to this stream and don't have a UserMessage row. This
    # means if a non-subscriber is viewing the narrow, they won't get
    # real-time updates. This is a balance between sending
    # message-edit notifications for every public stream to every user
    # in the organization (too expensive, and also not what we do for
    # newly sent messages anyway) and having magical live-updates
    # where possible.
    users_to_be_notified = list(map(user_info, unmodified_user_messages))
    if stream_being_edited is not None and stream_being_edited.is_history_public_to_subscribers():
        subscriptions = get_active_subscriptions_for_stream_id(
            stream_id, include_deactivated_users=False
        )
        # We exclude long-term idle users, since they by
        # definition have no active clients.
        subscriptions = subscriptions.exclude(user_profile__long_term_idle=True)
        # Remove duplicates by excluding the id of users already
        # in the users_to_be_notified list. This is the case where a
        # user both has a UserMessage row and is a current
        # subscriber.
        subscriptions = subscriptions.exclude(
            user_profile_id__in=[um.user_profile_id for um in unmodified_user_messages]
        )

        if new_stream is not None:
            subscriptions = subscriptions.exclude(user_profile__in=users_losing_access)

            # TODO: Guest users don't see the new moved topic
            # unless breadcrumb message for new stream is
            # enabled. Excluding these users from receiving this
            # event helps us avoid an error traceback for our
            # clients. We should figure out a way to inform the
            # guest users of this new topic if sending a 'message'
            # event for these messages is not an option.
            #
            # Don't send this event to guest subscribers who are not
            # subscribers of the old stream but are subscribed to
            # the new stream; clients will be confused.
            old_stream_current_users = UserProfile.objects.filter(
                id__in=get_active_subscriptions_for_stream_id(
                    stream_being_edited.id, include_deactivated_users=True
                ).values_list("user_profile_id", flat=True)
            ).only("id")
            subscriptions = subscriptions.exclude(
                user_profile__in=new_stream_current_users.filter(
                    role=UserProfile.ROLE_GUEST
                ).difference(old_stream_current_users)
            )

        subscriber_ids = set(subscriptions.values_list("user_profile_id", flat=True))
        users_to_be_notified += map(subscriber_info, sorted(subscriber_ids))

    # UserTopic updates and the content of notifications depend on
    # whether we've moved the entire topic, or just part of it. We
    # make that determination here.
    moved_all_visible_messages = False
    if topic_name is not None or new_stream is not None:
        assert stream_being_edited is not None

        if propagate_mode == "change_all":
            moved_all_visible_messages = True
        else:
            # With other propagate modes, if the user in fact moved
            # all messages in the stream, we want to explain it was a
            # full-topic move.
            #
            # For security model reasons, we don't want to allow a
            # user to take any action (e.g. post a message about
            # having not moved the whole topic) that would leak
            # information about older messages they cannot access
            # (e.g. there were earlier inaccessible messages in the
            # topic, in a stream without shared history). The
            # bulk_access_stream_messages_query call below addresses
            # that concern.
            assert stream_being_edited.recipient_id is not None
            unmoved_messages = messages_for_topic(
                realm.id,
                stream_being_edited.recipient_id,
                orig_topic_name,
            )
            visible_unmoved_messages = bulk_access_stream_messages_query(
                user_profile, unmoved_messages, stream_being_edited
            )
            moved_all_visible_messages = not visible_unmoved_messages.exists()

    # Migrate 'topic with visibility_policy' configuration in the following
    # circumstances:
    #
    # * If propagate_mode is change_all, do so unconditionally.
    #
    # * If propagate_mode is change_later or change_one, do so when
    #   the acting user has moved the entire topic (as visible to them).
    #
    # This rule corresponds to checking moved_all_visible_messages.
    if moved_all_visible_messages:
        assert stream_being_edited is not None
        assert target_stream is not None
        assert target_topic_name is not None

        stream_inaccessible_to_user_profiles: List[UserProfile] = []
        orig_topic_user_profile_to_visibility_policy: Dict[UserProfile, int] = {}
        target_topic_user_profile_to_visibility_policy: Dict[UserProfile, int] = {}
        user_ids_losing_access = {user.id for user in users_losing_access}
        for user_topic in get_users_with_user_topic_visibility_policy(
            stream_being_edited.id, orig_topic_name
        ):
            if new_stream is not None and user_topic.user_profile_id in user_ids_losing_access:
                stream_inaccessible_to_user_profiles.append(user_topic.user_profile)
            else:
                orig_topic_user_profile_to_visibility_policy[user_topic.user_profile] = (
                    user_topic.visibility_policy
                )

        for user_topic in get_users_with_user_topic_visibility_policy(
            target_stream.id, target_topic_name
        ):
            target_topic_user_profile_to_visibility_policy[user_topic.user_profile] = (
                user_topic.visibility_policy
            )

        # User profiles having any of the visibility policies set for either
        # the original or target topic.
        user_profiles_having_visibility_policy: Set[UserProfile] = set(
            itertools.chain(
                orig_topic_user_profile_to_visibility_policy.keys(),
                target_topic_user_profile_to_visibility_policy.keys(),
            )
        )

        user_profiles_for_visibility_policy_pair: Dict[Tuple[int, int], List[UserProfile]] = (
            defaultdict(list)
        )
        for user_profile_with_policy in user_profiles_having_visibility_policy:
            if user_profile_with_policy not in target_topic_user_profile_to_visibility_policy:
                target_topic_user_profile_to_visibility_policy[user_profile_with_policy] = (
                    UserTopic.VisibilityPolicy.INHERIT
                )
            elif user_profile_with_policy not in orig_topic_user_profile_to_visibility_policy:
                orig_topic_user_profile_to_visibility_policy[user_profile_with_policy] = (
                    UserTopic.VisibilityPolicy.INHERIT
                )

            orig_topic_visibility_policy = orig_topic_user_profile_to_visibility_policy[
                user_profile_with_policy
            ]
            target_topic_visibility_policy = target_topic_user_profile_to_visibility_policy[
                user_profile_with_policy
            ]
            user_profiles_for_visibility_policy_pair[
                (orig_topic_visibility_policy, target_topic_visibility_policy)
            ].append(user_profile_with_policy)

        # If the messages are being moved to a stream the user
        # cannot access, then we treat this as the
        # messages/topic being deleted for this user. This is
        # important for security reasons; we don't want to
        # give users a UserTopic row in a stream they cannot
        # access. Remove the user topic rows for such users.
        bulk_do_set_user_topic_visibility_policy(
            stream_inaccessible_to_user_profiles,
            stream_being_edited,
            orig_topic_name,
            visibility_policy=UserTopic.VisibilityPolicy.INHERIT,
        )

        # If the messages are being moved to a stream the user _can_
        # access, we move the user topic records, by removing the old
        # topic visibility_policy and creating a new one.
        #
        # Algorithm used for the 'merge userTopic states' case:
        # Using the 'user_profiles_for_visibility_policy_pair' dictionary,
        # we have 'orig_topic_visibility_policy', 'target_topic_visibility_policy',
        # and a list of 'user_profiles' having the mentioned visibility policies.
        #
        # For every 'orig_topic_visibility_policy and target_topic_visibility_policy'
        # pair, we determine the final visibility_policy that should apply after
        # the merge, and then update the visibility_policy for the concerned set
        # of user_profiles.
        for (
            visibility_policy_pair,
            user_profiles,
        ) in user_profiles_for_visibility_policy_pair.items():
            orig_topic_visibility_policy, target_topic_visibility_policy = (
                visibility_policy_pair
            )

            if orig_topic_visibility_policy != UserTopic.VisibilityPolicy.INHERIT:
                bulk_do_set_user_topic_visibility_policy(
                    user_profiles,
                    stream_being_edited,
                    orig_topic_name,
                    visibility_policy=UserTopic.VisibilityPolicy.INHERIT,
                    # bulk_do_set_user_topic_visibility_policy with visibility_policy
                    # set to 'new_visibility_policy' will send an updated muted topic
                    # event, which contains the full set of muted
                    # topics, just after this.
                    skip_muted_topics_event=True,
                )

            new_visibility_policy = orig_topic_visibility_policy

            if target_topic_has_messages:
                # Here, we handle the complex case when target_topic already has
                # some messages. We determine the resultant visibility_policy
                # based on the visibility_policy of the orig_topic + target_topic.
                # Finally, bulk_update the user_topic rows with the new
                # visibility_policy.
                new_visibility_policy = get_visibility_policy_after_merge(
                    orig_topic_visibility_policy, target_topic_visibility_policy
                )
                if new_visibility_policy == target_topic_visibility_policy:
                    continue
                bulk_do_set_user_topic_visibility_policy(
                    user_profiles,
                    target_stream,
                    target_topic_name,
                    visibility_policy=new_visibility_policy,
                )
            else:
                # This corresponds to the case when messages are moved
                # to a stream-topic pair that didn't exist. There can
                # still be UserTopic rows for the stream-topic pair
                # that didn't exist if the messages in that topic had
                # been deleted.
                if new_visibility_policy == target_topic_visibility_policy:
                    # This avoids unnecessary db operations and INFO logs.
                    continue
                bulk_do_set_user_topic_visibility_policy(
                    user_profiles,
                    target_stream,
                    target_topic_name,
                    visibility_policy=new_visibility_policy,
                )

    send_event(user_profile.realm, event, users_to_be_notified)

    resolved_topic_message_id = None
    if topic_name is not None and content is None:
        # When the stream is changed and the topic is marked as resolved
        # or unresolved in the same API request, the resolved or
        # unresolved notification should be sent to "new_stream".
        # In general, it is sent to "stream_being_edited".
        stream_to_send_resolve_topic_notification = stream_being_edited
        if new_stream is not None:
            stream_to_send_resolve_topic_notification = new_stream

        assert stream_to_send_resolve_topic_notification is not None
        resolved_topic_message_id = maybe_send_resolve_topic_notifications(
            user_profile=user_profile,
            stream=stream_to_send_resolve_topic_notification,
            old_topic_name=orig_topic_name,
            new_topic_name=topic_name,
            changed_messages=changed_messages,
        )

    if (new_stream is not None or topic_name is not None) and stream_being_edited is not None:
        # Notify users that the topic was moved.
        old_thread_notification_string = None
        if send_notification_to_old_thread:
            if moved_all_visible_messages:
                old_thread_notification_string = gettext_lazy(
                    "This topic was moved to {new_location} by {user}."
                )
            elif changed_messages_count == 1:
                old_thread_notification_string = gettext_lazy(
                    "A message was moved from this topic to {new_location} by {user}."
                )
            else:
                old_thread_notification_string = gettext_lazy(
                    "{changed_messages_count} messages were moved from this topic to {new_location} by {user}."
                )

        # The new thread notification code path is a bit subtle. We
        # don't want every resolve-topic action to also annoyingly
        # send an extra notification that the topic was moved!
        #
        # Since one can resolve/unresolve a topic at the same time
        # you're moving it, we need to carefully treat the resolve
        # topic notification as satisfying our obligation to send a
        # notification to the new topic only if the only thing this
        # request did is mark the topic as resolved.
        new_thread_notification_string = None
        if send_notification_to_new_thread and (
            new_stream is not None
            or not resolved_topic_message_id
            or (
                pre_truncation_topic_name is not None
                and orig_topic_name.lstrip(RESOLVED_TOPIC_PREFIX)
                != pre_truncation_topic_name.lstrip(RESOLVED_TOPIC_PREFIX)
            )
        ):
            stream_for_new_topic = new_stream if new_stream is not None else stream_being_edited
            assert stream_for_new_topic.recipient_id is not None

            new_topic_name = topic_name if topic_name is not None else orig_topic_name

            # We calculate whether the user moved the entire topic
            # using that user's own permissions, which is important to
            # avoid leaking information about whether there are
            # messages in the destination topic's deeper history that
            # the acting user does not have permission to access.
            preexisting_topic_messages = messages_for_topic(
                realm.id, stream_for_new_topic.recipient_id, new_topic_name
            ).exclude(id__in=[*changed_message_ids, resolved_topic_message_id])

            visible_preexisting_messages = bulk_access_stream_messages_query(
                user_profile, preexisting_topic_messages, stream_for_new_topic
            )

            no_visible_preexisting_messages = not visible_preexisting_messages.exists()

            if no_visible_preexisting_messages and moved_all_visible_messages:
                new_thread_notification_string = gettext_lazy(
                    "This topic was moved here from {old_location} by {user}."
                )
            else:
                if changed_messages_count == 1:
                    new_thread_notification_string = gettext_lazy(
                        "[A message]({message_link}) was moved here from {old_location} by {user}."
                    )
                else:
                    new_thread_notification_string = gettext_lazy(
                        "{changed_messages_count} messages were moved here from {old_location} by {user}."
                    )

        send_message_moved_breadcrumbs(
            target_message,
            user_profile,
            stream_being_edited,
            orig_topic_name,
            old_thread_notification_string,
            new_stream if new_stream is not None else stream_being_edited,
            topic_name,
            new_thread_notification_string,
            changed_messages_count,
        )

    return changed_messages_count
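
do_update_message starts from the single edited message and widens the affected set according to propagate_mode. The semantics are easiest to see on plain data; this sketch models change_one / change_later / change_all over an in-memory message list (illustrative only; the real code works on QuerySets via update_messages_for_topic_edit):

from typing import List, Tuple

# (message_id, topic) pairs, ordered by id.
messages: List[Tuple[int, str]] = [(1, "plans"), (2, "plans"), (3, "plans"), (4, "other")]

def ids_to_move(edited_id: int, topic: str, propagate_mode: str) -> List[int]:
    if propagate_mode == "change_one":
        return [edited_id]
    if propagate_mode == "change_later":
        # The edited message and everything after it in the same topic.
        return [mid for mid, t in messages if t == topic and mid >= edited_id]
    if propagate_mode == "change_all":
        return [mid for mid, t in messages if t == topic]
    raise ValueError(f"unknown propagate_mode: {propagate_mode}")

print(ids_to_move(2, "plans", "change_one"))    # [2]
print(ids_to_move(2, "plans", "change_later"))  # [2, 3]
print(ids_to_move(2, "plans", "change_all"))    # [1, 2, 3]
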
def check_update_message(
    user_profile: UserProfile,
    message_id: int,
    stream_id: Optional[int] = None,
    topic_name: Optional[str] = None,
    propagate_mode: str = "change_one",
    send_notification_to_old_thread: bool = True,
    send_notification_to_new_thread: bool = True,
    content: Optional[str] = None,
) -> int:
    """This will update a message given the message id and user profile.
    It checks whether the user profile has the permission to edit the
    message, and raises a JsonableError otherwise.

    It returns the number of messages changed.
    """
    message = access_message(user_profile, message_id, lock_message=True)

    # If there is a change to the content, check that it hasn't been too
    # long since the message was sent. Allow an extra 20 seconds since we
    # potentially allow editing 15 seconds past the limit, and in case
    # there are network issues, etc. The 15 comes from
    # (min_seconds_to_edit + seconds_left_buffer) in message_edit.js; if
    # you change this value also change those two parameters in message_edit.js.
    edit_limit_buffer = 20
    if content is not None:
        validate_user_can_edit_message(user_profile, message, edit_limit_buffer)

    # The zerver/views/message_edit.py call point already strips this
    # via REQ_topic; so we can delete this line if we arrange a
    # contract where future callers in the embedded bots system use
    # REQ_topic as well (or otherwise are guaranteed to strip input).
    if topic_name is not None:
        topic_name = topic_name.strip()
        if topic_name == message.topic_name():
            topic_name = None

    validate_message_edit_payload(message, stream_id, topic_name, propagate_mode, content)

    if topic_name is not None and not user_profile.can_move_messages_to_another_topic():
        raise JsonableError(_("You don't have permission to edit this message"))

    # If there is a change to the topic, check that the user is allowed to
    # edit it and that it has not been too long. If the user is not an
    # admin or moderator, and the time limit for editing topics has
    # passed, raise an error.
    if (
        topic_name is not None
        and user_profile.realm.move_messages_within_stream_limit_seconds is not None
        and not user_profile.is_realm_admin
        and not user_profile.is_moderator
    ):
        deadline_seconds = (
            user_profile.realm.move_messages_within_stream_limit_seconds + edit_limit_buffer
        )
        if (timezone_now() - message.date_sent) > timedelta(seconds=deadline_seconds):
            raise JsonableError(_("The time limit for editing this message's topic has passed."))

    rendering_result = None
    links_for_embed: Set[str] = set()
    prior_mention_user_ids: Set[int] = set()
    mention_data: Optional[MentionData] = None
    if content is not None:
        if content.rstrip() == "":
            content = "(deleted)"
        content = normalize_body(content)

        mention_backend = MentionBackend(user_profile.realm_id)
        mention_data = MentionData(
            mention_backend=mention_backend,
            content=content,
            message_sender=message.sender,
        )
        prior_mention_user_ids = get_mentions_for_message_updates(message.id)

        # We render the message using the current user's realm; since
        # the cross-realm bots never edit messages, this should be
        # always correct.
        # Note: If rendering fails, the called code will raise a JsonableError.
        rendering_result = render_incoming_message(
            message,
            content,
            user_profile.realm,
            mention_data=mention_data,
        )
        links_for_embed |= rendering_result.links_for_preview

        if message.is_stream_message() and rendering_result.mentions_stream_wildcard:
            stream = access_stream_by_id(user_profile, message.recipient.type_id)[0]
            if not stream_wildcard_mention_allowed(message.sender, stream, message.realm):
                raise StreamWildcardMentionNotAllowedError

        if message.is_stream_message() and rendering_result.mentions_topic_wildcard:
            topic_participant_count = len(
                participants_for_topic(
                    message.realm.id, message.recipient.id, message.topic_name()
                )
            )
            if not topic_wildcard_mention_allowed(
                message.sender, topic_participant_count, message.realm
            ):
                raise TopicWildcardMentionNotAllowedError

        if rendering_result.mentions_user_group_ids:
            mentioned_group_ids = list(rendering_result.mentions_user_group_ids)
            check_user_group_mention_allowed(user_profile, mentioned_group_ids)

    new_stream = None
    number_changed = 0

    if stream_id is not None:
        assert message.is_stream_message()
        if not user_profile.can_move_messages_between_streams():
            raise JsonableError(_("You don't have permission to move this message"))

        new_stream = access_stream_by_id(user_profile, stream_id, require_active=True)[0]
        check_stream_access_based_on_stream_post_policy(user_profile, new_stream)

        if (
            user_profile.realm.move_messages_between_streams_limit_seconds is not None
            and not user_profile.is_realm_admin
            and not user_profile.is_moderator
        ):
            deadline_seconds = (
                user_profile.realm.move_messages_between_streams_limit_seconds
                + edit_limit_buffer
            )
            if (timezone_now() - message.date_sent) > timedelta(seconds=deadline_seconds):
                raise JsonableError(
                    _("The time limit for editing this message's channel has passed")
                )

    if (
        propagate_mode == "change_all"
        and not user_profile.is_realm_admin
        and not user_profile.is_moderator
        and (topic_name is not None or stream_id is not None)
    ):
        check_time_limit_for_change_all_propagate_mode(
            message, user_profile, topic_name, stream_id
        )

    number_changed = do_update_message(
        user_profile,
        message,
        new_stream,
        topic_name,
        propagate_mode,
        send_notification_to_old_thread,
        send_notification_to_new_thread,
        content,
        rendering_result,
        prior_mention_user_ids,
        mention_data,
    )

    if links_for_embed:
        event_data = {
            "message_id": message.id,
            "message_content": message.content,
            # The choice of `user_profile.realm_id` rather than
            # `sender.realm_id` must match the decision made in the
            # `render_incoming_message` call earlier in this function.
            "message_realm_id": user_profile.realm_id,
            "urls": list(links_for_embed),
        }
        queue_json_publish("embed_links", event_data)

    return number_changed
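
One small behavior worth calling out from the function above: editing a message's content to be empty (or whitespace-only) is rewritten to the literal string "(deleted)" before rendering. A sketch of just that normalization:

def normalize_edited_content(content: str) -> str:
    # An edit that blanks the message body is stored as "(deleted)"
    # rather than as an empty message.
    if content.rstrip() == "":
        return "(deleted)"
    return content

print(normalize_edited_content("   "))       # (deleted)
print(normalize_edited_content("new text"))  # new text
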
def build_message_send_dict(
    message: Message,
    stream: Optional[Stream] = None,
    local_id: Optional[str] = None,
    sender_queue_id: Optional[str] = None,
    widget_content_dict: Optional[Dict[str, Any]] = None,
    email_gateway: bool = False,
    mention_backend: Optional[MentionBackend] = None,
    limit_unread_user_ids: Optional[Set[int]] = None,
    disable_external_notifications: bool = False,
    recipients_for_user_creation_events: Optional[Dict[UserProfile, Set[int]]] = None,
) -> SendMessageRequest:
    """Returns a dictionary that can be passed into do_send_messages. In
    production, this is always called by check_message, but some
    testing code paths call it directly.
    """
    realm = message.realm

    if mention_backend is None:
        mention_backend = MentionBackend(realm.id)

    mention_data = MentionData(
        mention_backend=mention_backend,
        content=message.content,
        message_sender=message.sender,
    )

    if message.is_stream_message():
        stream_id = message.recipient.type_id
        stream_topic: Optional[StreamTopicTarget] = StreamTopicTarget(
            stream_id=stream_id,
            topic_name=message.topic_name(),
        )
    else:
        stream_topic = None

    info = get_recipient_info(
        realm_id=realm.id,
        recipient=message.recipient,
        sender_id=message.sender_id,
        stream_topic=stream_topic,
        possibly_mentioned_user_ids=mention_data.get_user_ids(),
        possible_topic_wildcard_mention=mention_data.message_has_topic_wildcards(),
        possible_stream_wildcard_mention=mention_data.message_has_stream_wildcards(),
    )

    # Render our message_dicts.
    assert message.rendered_content is None

    rendering_result = render_incoming_message(
        message,
        message.content,
        realm,
        mention_data=mention_data,
        email_gateway=email_gateway,
    )
    message.rendered_content = rendering_result.rendered_content
    message.rendered_content_version = markdown_version
    links_for_embed = rendering_result.links_for_preview

    mentioned_user_groups_map = get_user_group_mentions_data(
        mentioned_user_ids=rendering_result.mentions_user_ids,
        mentioned_user_group_ids=list(rendering_result.mentions_user_group_ids),
        mention_data=mention_data,
    )

    # For single user as well as user group mentions, we set the `mentioned`
    # flag on `UserMessage`.
    for group_id in rendering_result.mentions_user_group_ids:
        members = mention_data.get_group_members(group_id)
        rendering_result.mentions_user_ids.update(members)

    # Only send data to Tornado about stream or topic wildcard mentions if message
    # rendering determined the message had an actual stream or topic wildcard
    # mention in it (and not e.g. stream or topic wildcard mention syntax inside a
    # code block).
    if rendering_result.mentions_stream_wildcard:
        stream_wildcard_mention_user_ids = info.stream_wildcard_mention_user_ids
        stream_wildcard_mention_in_followed_topic_user_ids = (
            info.stream_wildcard_mention_in_followed_topic_user_ids
        )
    else:
        stream_wildcard_mention_user_ids = set()
        stream_wildcard_mention_in_followed_topic_user_ids = set()

    if rendering_result.mentions_topic_wildcard:
        topic_wildcard_mention_user_ids = info.topic_wildcard_mention_user_ids
        topic_wildcard_mention_in_followed_topic_user_ids = (
            info.topic_wildcard_mention_in_followed_topic_user_ids
        )
        topic_participant_user_ids = info.topic_participant_user_ids
    else:
        topic_wildcard_mention_user_ids = set()
        topic_wildcard_mention_in_followed_topic_user_ids = set()
        topic_participant_user_ids = set()

    """
    Once we have the actual list of mentioned ids from message
    rendering, we can patch in "default bots" (aka normal bots)
    who were directly mentioned in this message as eligible to
    get UserMessage rows.
    """
    mentioned_user_ids = rendering_result.mentions_user_ids
    default_bot_user_ids = info.default_bot_user_ids
    mentioned_bot_user_ids = default_bot_user_ids & mentioned_user_ids
    info.um_eligible_user_ids |= mentioned_bot_user_ids

    message_send_dict = SendMessageRequest(
        stream=stream,
        sender_muted_stream=info.sender_muted_stream,
        local_id=local_id,
        sender_queue_id=sender_queue_id,
        realm=realm,
        mention_data=mention_data,
        mentioned_user_groups_map=mentioned_user_groups_map,
        message=message,
        rendering_result=rendering_result,
        active_user_ids=info.active_user_ids,
        online_push_user_ids=info.online_push_user_ids,
        dm_mention_email_disabled_user_ids=info.dm_mention_email_disabled_user_ids,
        dm_mention_push_disabled_user_ids=info.dm_mention_push_disabled_user_ids,
        stream_push_user_ids=info.stream_push_user_ids,
        stream_email_user_ids=info.stream_email_user_ids,
        followed_topic_push_user_ids=info.followed_topic_push_user_ids,
        followed_topic_email_user_ids=info.followed_topic_email_user_ids,
        muted_sender_user_ids=info.muted_sender_user_ids,
        um_eligible_user_ids=info.um_eligible_user_ids,
        long_term_idle_user_ids=info.long_term_idle_user_ids,
        default_bot_user_ids=info.default_bot_user_ids,
        service_bot_tuples=info.service_bot_tuples,
        all_bot_user_ids=info.all_bot_user_ids,
        topic_wildcard_mention_user_ids=topic_wildcard_mention_user_ids,
        stream_wildcard_mention_user_ids=stream_wildcard_mention_user_ids,
        topic_wildcard_mention_in_followed_topic_user_ids=topic_wildcard_mention_in_followed_topic_user_ids,
        stream_wildcard_mention_in_followed_topic_user_ids=stream_wildcard_mention_in_followed_topic_user_ids,
        links_for_embed=links_for_embed,
        widget_content=widget_content_dict,
        limit_unread_user_ids=limit_unread_user_ids,
        disable_external_notifications=disable_external_notifications,
        topic_participant_user_ids=topic_participant_user_ids,
        recipients_for_user_creation_events=recipients_for_user_creation_events,
    )

    return message_send_dict
def get_active_presence_idle_user_ids(
    realm: Realm,
    sender_id: int,
    user_notifications_data_list: List[UserMessageNotificationsData],
) -> List[int]:
    """
    Given a list of active_user_ids, we build up a subset of those users
    who fit these criteria:

        * They are likely to receive push or email notifications.
        * They are no longer "present" according to the UserPresence table.
    """
    if realm.presence_disabled:
        return []

    user_ids = set()
    for user_notifications_data in user_notifications_data_list:
        # We only need to know the presence idle state for a user if this
        # message would be notifiable for them if they were indeed idle.
        # Only including those users in the calculation below is a very
        # important optimization for open communities with many inactive users.
        if user_notifications_data.is_notifiable(sender_id, idle=True):
            user_ids.add(user_notifications_data.user_id)

    return filter_presence_idle_user_ids(user_ids)
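
The key optimization here is the order of operations: first filter to users for whom an idle notification would matter at all, and only then consult presence. A minimal sketch with a stubbed presence lookup (the names are illustrative, not Zulip APIs):

from typing import List, Set

def presence_idle_ids(
    notifiable_if_idle: Set[int],
    present_user_ids: Set[int],  # stand-in for the UserPresence lookup
) -> List[int]:
    # Only users who would actually be notified are worth a presence check.
    return sorted(uid for uid in notifiable_if_idle if uid not in present_user_ids)

# Users 1 and 2 would be notified if idle; user 2 is currently present.
print(presence_idle_ids({1, 2}, present_user_ids={2, 5}))  # [1]
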
def do_send_messages(
    send_message_requests_maybe_none: Sequence[Optional[SendMessageRequest]],
    *,
    mark_as_read: Sequence[int] = [],
) -> List[SentMessageResult]:
    """See
    https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html
    for high-level documentation on this subsystem.
    """

    # Filter out messages which didn't pass internal_prep_message properly.
    send_message_requests = [
        send_request
        for send_request in send_message_requests_maybe_none
        if send_request is not None
    ]

    # Save the message receipts in the database.
    user_message_flags: Dict[int, Dict[int, List[str]]] = defaultdict(dict)

    with transaction.atomic(savepoint=False):
        Message.objects.bulk_create(
            send_request.message for send_request in send_message_requests
        )

        # Claim attachments in message
        for send_request in send_message_requests:
            if do_claim_attachments(
                send_request.message,
                send_request.rendering_result.potential_attachment_path_ids,
            ):
                send_request.message.has_attachment = True
                send_request.message.save(update_fields=["has_attachment"])

        ums: List[UserMessageLite] = []
        for send_request in send_message_requests:
            # Service bots (outgoing webhook bots and embedded bots) don't
            # store UserMessage rows; they will be processed later.
            mentioned_user_ids = send_request.rendering_result.mentions_user_ids

            # Extend the set with users who have muted the sender.
            mark_as_read_user_ids = send_request.muted_sender_user_ids
            mark_as_read_user_ids.update(mark_as_read)

            user_messages = create_user_messages(
                message=send_request.message,
                rendering_result=send_request.rendering_result,
                um_eligible_user_ids=send_request.um_eligible_user_ids,
                long_term_idle_user_ids=send_request.long_term_idle_user_ids,
                stream_push_user_ids=send_request.stream_push_user_ids,
                stream_email_user_ids=send_request.stream_email_user_ids,
                mentioned_user_ids=mentioned_user_ids,
                followed_topic_push_user_ids=send_request.followed_topic_push_user_ids,
                followed_topic_email_user_ids=send_request.followed_topic_email_user_ids,
                mark_as_read_user_ids=mark_as_read_user_ids,
                limit_unread_user_ids=send_request.limit_unread_user_ids,
                topic_participant_user_ids=send_request.topic_participant_user_ids,
            )

            for um in user_messages:
                user_message_flags[send_request.message.id][um.user_profile_id] = (
                    um.flags_list()
                )

            ums.extend(user_messages)

            send_request.service_queue_events = get_service_bot_events(
                sender=send_request.message.sender,
                service_bot_tuples=send_request.service_bot_tuples,
                mentioned_user_ids=mentioned_user_ids,
                active_user_ids=send_request.active_user_ids,
                recipient_type=send_request.message.recipient.type,
            )

        bulk_insert_ums(ums)

        for send_request in send_message_requests:
            do_widget_post_save_actions(send_request)

    # This next loop is responsible for notifying other parts of the
    # Zulip system about the messages we just committed to the database:
    # * Sender automatically follows or unmutes the topic depending on the
    #   'automatically_follow_topics_policy' and
    #   'automatically_unmute_topics_in_muted_streams_policy' user settings.
    # * Notifying clients via send_event
    # * Triggering outgoing webhooks via the service event queue.
    # * Updating the `first_message_id` field for streams without any message history.
    # * Implementing the Welcome Bot reply hack
    # * Adding links to the embed_links queue for open graph processing.
    for send_request in send_message_requests:
        realm_id: Optional[int] = None
        if send_request.message.is_stream_message():
            if send_request.stream is None:
                stream_id = send_request.message.recipient.type_id
                send_request.stream = Stream.objects.get(id=stream_id)
            # assert needed because stubs for django are missing
            assert send_request.stream is not None
            realm_id = send_request.stream.realm_id
        sender = send_request.message.sender

        # Determine and set the visibility_policy depending on
        # 'automatically_follow_topics_policy' and
        # 'automatically_unmute_topics_in_muted_streams_policy'.
        if set_visibility_policy_possible(sender, send_request.message) and not (
            sender.automatically_follow_topics_policy
            == sender.automatically_unmute_topics_in_muted_streams_policy
            == UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_NEVER
        ):
            try:
                user_topic = UserTopic.objects.get(
                    user_profile=sender,
                    stream_id=send_request.stream.id,
                    topic_name__iexact=send_request.message.topic_name(),
                )
                visibility_policy = user_topic.visibility_policy
            except UserTopic.DoesNotExist:
                visibility_policy = UserTopic.VisibilityPolicy.INHERIT

            new_visibility_policy = visibility_policy_for_send_message(
                sender,
                send_request.message,
                send_request.stream,
                send_request.sender_muted_stream,
                visibility_policy,
            )
            if new_visibility_policy:
                do_set_user_topic_visibility_policy(
                    user_profile=sender,
                    stream=send_request.stream,
                    topic_name=send_request.message.topic_name(),
                    visibility_policy=new_visibility_policy,
                )
                send_request.automatic_new_visibility_policy = new_visibility_policy

        # Set the visibility_policy of the users mentioned in the message
        # to "FOLLOWED" if "automatically_follow_topics_where_mentioned" is "True".
        human_user_personal_mentions = send_request.rendering_result.mentions_user_ids & (
            send_request.active_user_ids - send_request.all_bot_user_ids
        )
        expect_follow_user_profiles: Set[UserProfile] = set()

        if len(human_user_personal_mentions) > 0:
            expect_follow_user_profiles = set(
                UserProfile.objects.filter(
                    realm_id=realm_id,
                    id__in=human_user_personal_mentions,
                    automatically_follow_topics_where_mentioned=True,
                )
            )
        if len(expect_follow_user_profiles) > 0:
            user_topics_query_set = UserTopic.objects.filter(
                user_profile__in=expect_follow_user_profiles,
                stream_id=send_request.stream.id,
                topic_name__iexact=send_request.message.topic_name(),
                visibility_policy__in=[
                    # Explicitly muted takes precedence over this setting.
                    UserTopic.VisibilityPolicy.MUTED,
                    # Already followed
                    UserTopic.VisibilityPolicy.FOLLOWED,
                ],
            )
            skip_follow_users = {
                user_topic.user_profile for user_topic in user_topics_query_set
            }

            to_follow_users = list(expect_follow_user_profiles - skip_follow_users)

            if to_follow_users:
                bulk_do_set_user_topic_visibility_policy(
                    user_profiles=to_follow_users,
                    stream=send_request.stream,
                    topic_name=send_request.message.topic_name(),
                    visibility_policy=UserTopic.VisibilityPolicy.FOLLOWED,
                )

        # Deliver events to the real-time push system, as well as
        # enqueuing any additional processing triggered by the message.
        wide_message_dict = MessageDict.wide_dict(send_request.message, realm_id)

        user_flags = user_message_flags.get(send_request.message.id, {})

        """
        TODO: We may want to limit user_ids to only those users who have
        UserMessage rows, if only for minor performance reasons. For now
        we queue events for all subscribers/sendees of the message, since
        downstream code may still do notifications that don't require
        UserMessage rows.

        Our automated tests have gotten better on this codepath, but we
        may have coverage gaps, so we should be careful about changing
        the next line.
        """
        user_ids = send_request.active_user_ids | set(user_flags.keys())
        sender_id = send_request.message.sender_id

        # We make sure the sender is listed first in the `users` list;
        # this results in the sender receiving the message first if
        # there are thousands of recipients, decreasing perceived latency.
        if sender_id in user_ids:
            user_list = [sender_id, *user_ids - {sender_id}]
        else:
            user_list = list(user_ids)

        class UserData(TypedDict):
            id: int
            flags: List[str]
            mentioned_user_group_id: Optional[int]

        users: List[UserData] = []
        for user_id in user_list:
            flags = user_flags.get(user_id, [])
            # TODO/compatibility: The `wildcard_mentioned` flag was deprecated
            # in favor of the `stream_wildcard_mentioned` and
            # `topic_wildcard_mentioned` flags. The `wildcard_mentioned` flag
            # exists for backwards-compatibility with older clients. Remove
            # this when we no longer support legacy clients that have not been
            # updated to access `stream_wildcard_mentioned`.
            if "stream_wildcard_mentioned" in flags or "topic_wildcard_mentioned" in flags:
                flags.append("wildcard_mentioned")
            user_data: UserData = dict(id=user_id, flags=flags, mentioned_user_group_id=None)

            if user_id in send_request.mentioned_user_groups_map:
                user_data["mentioned_user_group_id"] = send_request.mentioned_user_groups_map[
                    user_id
                ]

            users.append(user_data)

        sender = send_request.message.sender
        message_type = wide_message_dict["type"]
        user_notifications_data_list = [
            UserMessageNotificationsData.from_user_id_sets(
                user_id=user_id,
                flags=user_flags.get(user_id, []),
                private_message=message_type == "private",
                disable_external_notifications=send_request.disable_external_notifications,
                online_push_user_ids=send_request.online_push_user_ids,
                dm_mention_push_disabled_user_ids=send_request.dm_mention_push_disabled_user_ids,
                dm_mention_email_disabled_user_ids=send_request.dm_mention_email_disabled_user_ids,
                stream_push_user_ids=send_request.stream_push_user_ids,
                stream_email_user_ids=send_request.stream_email_user_ids,
                topic_wildcard_mention_user_ids=send_request.topic_wildcard_mention_user_ids,
                stream_wildcard_mention_user_ids=send_request.stream_wildcard_mention_user_ids,
                followed_topic_push_user_ids=send_request.followed_topic_push_user_ids,
                followed_topic_email_user_ids=send_request.followed_topic_email_user_ids,
                topic_wildcard_mention_in_followed_topic_user_ids=send_request.topic_wildcard_mention_in_followed_topic_user_ids,
                stream_wildcard_mention_in_followed_topic_user_ids=send_request.stream_wildcard_mention_in_followed_topic_user_ids,
                muted_sender_user_ids=send_request.muted_sender_user_ids,
                all_bot_user_ids=send_request.all_bot_user_ids,
            )
            for user_id in send_request.active_user_ids
        ]

        presence_idle_user_ids = get_active_presence_idle_user_ids(
            realm=send_request.realm,
            sender_id=sender.id,
            user_notifications_data_list=user_notifications_data_list,
        )

        if send_request.recipients_for_user_creation_events is not None:
            from zerver.actions.create_user import notify_created_user

            for (
                new_accessible_user,
                notify_user_ids,
            ) in send_request.recipients_for_user_creation_events.items():
                notify_created_user(new_accessible_user, list(notify_user_ids))

        event = dict(
            type="message",
            message=send_request.message.id,
            message_dict=wide_message_dict,
            presence_idle_user_ids=presence_idle_user_ids,
            online_push_user_ids=list(send_request.online_push_user_ids),
            dm_mention_push_disabled_user_ids=list(send_request.dm_mention_push_disabled_user_ids),
            dm_mention_email_disabled_user_ids=list(
                send_request.dm_mention_email_disabled_user_ids
            ),
            stream_push_user_ids=list(send_request.stream_push_user_ids),
            stream_email_user_ids=list(send_request.stream_email_user_ids),
            topic_wildcard_mention_user_ids=list(send_request.topic_wildcard_mention_user_ids),
            stream_wildcard_mention_user_ids=list(send_request.stream_wildcard_mention_user_ids),
            followed_topic_push_user_ids=list(send_request.followed_topic_push_user_ids),
            followed_topic_email_user_ids=list(send_request.followed_topic_email_user_ids),
            topic_wildcard_mention_in_followed_topic_user_ids=list(
                send_request.topic_wildcard_mention_in_followed_topic_user_ids
            ),
            stream_wildcard_mention_in_followed_topic_user_ids=list(
                send_request.stream_wildcard_mention_in_followed_topic_user_ids
            ),
            muted_sender_user_ids=list(send_request.muted_sender_user_ids),
            all_bot_user_ids=list(send_request.all_bot_user_ids),
            disable_external_notifications=send_request.disable_external_notifications,
            realm_host=send_request.realm.host,
        )

        if send_request.message.is_stream_message():
            # Note: This is where authorization for single-stream
            # get_updates happens! We only attach stream data to the
            # notify new_message request if it's a public stream,
            # ensuring that in the tornado server, non-public stream
            # messages are only associated to their subscribed users.

            # assert needed because stubs for django are missing
            assert send_request.stream is not None
            if send_request.stream.is_public():
                event["realm_id"] = send_request.stream.realm_id
                event["stream_name"] = send_request.stream.name
            if send_request.stream.invite_only:
                event["invite_only"] = True
            if send_request.stream.first_message_id is None:
                send_request.stream.first_message_id = send_request.message.id
                send_request.stream.save(update_fields=["first_message_id"])

        # Performance note: This check can theoretically do
        # database queries in a loop if many messages are being
        # sent via a single do_send_messages call.
        #
        # This is not a practical concern at present, because our
        # only use case for bulk-sending messages via this API
        # endpoint is for direct messages bulk-sent by system
        # bots; and for system bots,
        # "user_access_restricted_in_realm" will always return
        # False without doing any database queries at all.
if user_access_restricted_in_realm( send_request.message.sender ) and not subscribed_to_stream(send_request.message.sender, send_request.stream.id): user_ids_who_can_access_sender = get_user_ids_who_can_access_user( send_request.message.sender ) user_ids_receiving_event = {user["id"] for user in users} user_ids_without_access_to_sender = user_ids_receiving_event - set( user_ids_who_can_access_sender ) event["user_ids_without_access_to_sender"] = user_ids_without_access_to_sender if send_request.local_id is not None: event["local_id"] = send_request.local_id if send_request.sender_queue_id is not None: event["sender_queue_id"] = send_request.sender_queue_id send_event(send_request.realm, event, users) if send_request.links_for_embed: event_data = { "message_id": send_request.message.id, "message_content": send_request.message.content, "message_realm_id": send_request.realm.id, "urls": list(send_request.links_for_embed), } queue_json_publish("embed_links", event_data) if send_request.message.recipient.type == Recipient.PERSONAL: welcome_bot_id = get_system_bot(settings.WELCOME_BOT, send_request.realm.id).id if ( welcome_bot_id in send_request.active_user_ids and welcome_bot_id != send_request.message.sender_id ): from zerver.lib.onboarding import send_welcome_bot_response send_welcome_bot_response(send_request) assert send_request.service_queue_events is not None for queue_name, events in send_request.service_queue_events.items(): for event in events: queue_json_publish( queue_name, { "message": wide_message_dict, "trigger": event["trigger"], "user_profile_id": event["user_profile_id"], }, ) sent_message_results = [ SentMessageResult( message_id=send_request.message.id, automatic_new_visibility_policy=send_request.automatic_new_visibility_policy, ) for send_request in send_message_requests ] return sent_message_results
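The sender-first ordering used when building the `users` list above is worth seeing in isolation. Below is a minimal, dependency-free sketch of the same idea (the function name and asserts are illustrative, not part of this module):

from typing import List, Set


def order_recipients(sender_id: int, user_ids: Set[int]) -> List[int]:
    # Illustrative sketch: move the sender, if present, to the front so
    # their own client is notified first on large recipient lists.
    if sender_id in user_ids:
        return [sender_id, *(user_ids - {sender_id})]
    return list(user_ids)


assert order_recipients(7, {3, 7, 11})[0] == 7
assert sorted(order_recipients(7, {3, 11})) == [3, 11]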
Sends a direct message error notification to a bot's owner if one hasn't already been sent in the last 5 minutes.
def send_rate_limited_pm_notification_to_bot_owner( sender: UserProfile, realm: Realm, content: str ) -> None: """ Sends a direct message error notification to a bot's owner if one hasn't already been sent in the last 5 minutes. """ if sender.realm.is_zephyr_mirror_realm or sender.realm.deactivated: return if not sender.is_bot or sender.bot_owner is None: return # Don't send these notifications for cross-realm bot messages # (e.g. from EMAIL_GATEWAY_BOT) since the owner for # EMAIL_GATEWAY_BOT is probably the server administrator, not # the owner of the bot who could potentially fix the problem. if sender.realm != realm: return # We warn the user once every 5 minutes to avoid a flood of # direct messages on a misconfigured integration, re-using the # UserProfile.last_reminder field, which is not used for bots. last_reminder = sender.last_reminder waitperiod = timedelta(minutes=UserProfile.BOT_OWNER_STREAM_ALERT_WAITPERIOD) if last_reminder and timezone_now() - last_reminder <= waitperiod: return internal_send_private_message( get_system_bot(settings.NOTIFICATION_BOT, sender.bot_owner.realm_id), sender.bot_owner, content, ) sender.last_reminder = timezone_now() sender.save(update_fields=["last_reminder"])
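The throttling above boils down to a timestamp comparison against a 5-minute window. A self-contained sketch of that check (the constant mirrors UserProfile.BOT_OWNER_STREAM_ALERT_WAITPERIOD; the function name is illustrative):

from datetime import datetime, timedelta, timezone
from typing import Optional

BOT_OWNER_STREAM_ALERT_WAITPERIOD = 5  # minutes


def should_notify_owner(last_reminder: Optional[datetime]) -> bool:
    # Notify only if we haven't reminded the owner within the window.
    waitperiod = timedelta(minutes=BOT_OWNER_STREAM_ALERT_WAITPERIOD)
    now = datetime.now(timezone.utc)
    return last_reminder is None or now - last_reminder > waitperiod


assert should_notify_owner(None)
assert not should_notify_owner(datetime.now(timezone.utc) - timedelta(minutes=1))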
If a bot sends a message to a stream that doesn't exist or has no subscribers, sends a notification to the bot owner (if not a cross-realm bot) so that the owner can correct the issue.
def send_pm_if_empty_stream( stream: Optional[Stream], realm: Realm, sender: UserProfile, stream_name: Optional[str] = None, stream_id: Optional[int] = None, ) -> None: """If a bot sends a message to a stream that doesn't exist or has no subscribers, sends a notification to the bot owner (if not a cross-realm bot) so that the owner can correct the issue.""" if not sender.is_bot or sender.bot_owner is None: return if sender.bot_owner is not None: with override_language(sender.bot_owner.default_language): arg_dict: Dict[str, Any] = { "bot_identity": f"`{sender.delivery_email}`", } if stream is None: if stream_id is not None: arg_dict = { **arg_dict, "channel_id": stream_id, } content = _( "Your bot {bot_identity} tried to send a message to channel ID " "{channel_id}, but there is no channel with that ID." ).format(**arg_dict) else: assert stream_name is not None arg_dict = { **arg_dict, "channel_name": f"#**{stream_name}**", "new_channel_link": "#channels/new", } content = _( "Your bot {bot_identity} tried to send a message to channel " "{channel_name}, but that channel does not exist. " "Click [here]({new_channel_link}) to create it." ).format(**arg_dict) else: if num_subscribers_for_stream_id(stream.id) > 0: return arg_dict = { **arg_dict, "channel_name": f"#**{stream.name}**", } content = _( "Your bot {bot_identity} tried to send a message to " "channel {channel_name}. The channel exists but " "does not have any subscribers." ).format(**arg_dict) send_rate_limited_pm_notification_to_bot_owner(sender, realm, content)
This function returns a dictionary with data about which users would receive user creation events due to gaining access to a user. The key of the dictionary is a user object and the value is a set of user_ids that would gain access to that user.
def get_recipients_for_user_creation_events(
    realm: Realm, sender: UserProfile, user_profiles: Sequence[UserProfile]
) -> Dict[UserProfile, Set[int]]:
    """
    This function returns a dictionary with data about which users would
    receive user creation events due to gaining access to a user.
    The key of the dictionary is a user object and the value is a set of
    user_ids that would gain access to that user.
    """
    recipients_for_user_creation_events: Dict[UserProfile, Set[int]] = defaultdict(set)

    # If none of the users in the direct message conversation are
    # guests, then there is no possible can_access_all_users_group
    # policy that would mean sending this message changes any user's
    # user access to other users.
    guest_recipients = [user for user in user_profiles if user.is_guest]
    if len(guest_recipients) == 0:
        return recipients_for_user_creation_events

    if realm.can_access_all_users_group.named_user_group.name == SystemGroups.EVERYONE:
        return recipients_for_user_creation_events

    if len(user_profiles) == 1:
        if not check_can_access_user(sender, user_profiles[0]):
            recipients_for_user_creation_events[sender].add(user_profiles[0].id)
        return recipients_for_user_creation_events

    users_involved_in_dms = get_users_involved_in_dms_with_target_users(guest_recipients, realm)
    subscribers_of_guest_recipient_subscriptions = get_subscribers_of_target_user_subscriptions(
        guest_recipients
    )

    for recipient_user in guest_recipients:
        for user in user_profiles:
            if user.id == recipient_user.id or user.is_bot:
                continue
            if (
                user.id not in users_involved_in_dms[recipient_user.id]
                and user.id not in subscribers_of_guest_recipient_subscriptions[recipient_user.id]
            ):
                recipients_for_user_creation_events[user].add(recipient_user.id)

        if (
            not sender.is_bot
            and sender.id not in users_involved_in_dms[recipient_user.id]
            and sender.id not in subscribers_of_guest_recipient_subscriptions[recipient_user.id]
        ):
            recipients_for_user_creation_events[sender].add(recipient_user.id)

    return recipients_for_user_creation_events
See https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html for high-level documentation on this subsystem.
def check_message( sender: UserProfile, client: Client, addressee: Addressee, message_content_raw: str, realm: Optional[Realm] = None, forged: bool = False, forged_timestamp: Optional[float] = None, forwarder_user_profile: Optional[UserProfile] = None, local_id: Optional[str] = None, sender_queue_id: Optional[str] = None, widget_content: Optional[str] = None, email_gateway: bool = False, *, skip_stream_access_check: bool = False, mention_backend: Optional[MentionBackend] = None, limit_unread_user_ids: Optional[Set[int]] = None, disable_external_notifications: bool = False, ) -> SendMessageRequest: """See https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html for high-level documentation on this subsystem. """ stream = None message_content = normalize_body(message_content_raw) if realm is None: realm = sender.realm recipients_for_user_creation_events = None if addressee.is_stream(): topic_name = addressee.topic_name() topic_name = truncate_topic(topic_name) stream_name = addressee.stream_name() stream_id = addressee.stream_id() if stream_name is not None: stream = validate_stream_name_with_pm_notification(stream_name, realm, sender) elif stream_id is not None: stream = validate_stream_id_with_pm_notification(stream_id, realm, sender) else: stream = addressee.stream() assert stream is not None # To save a database round trip, we construct the Recipient # object for the Stream rather than fetching it from the # database using the stream.recipient foreign key. # # This is simpler than ensuring that code paths that fetch a # Stream that will be used for sending a message have a # `select_related("recipient"), which would also needlessly # expand Stream objects in memory (all the fields of Recipient # are already known given the Stream object). recipient = Recipient( id=stream.recipient_id, type_id=stream.id, type=Recipient.STREAM, ) if not skip_stream_access_check: access_stream_for_send_message( sender=sender, stream=stream, forwarder_user_profile=forwarder_user_profile ) else: # Defensive assertion - the only currently supported use case # for this option is for outgoing webhook bots and since this # is security-sensitive code, it's beneficial to ensure nothing # else can sneak past the access check. assert sender.bot_type == sender.OUTGOING_WEBHOOK_BOT if realm.mandatory_topics and topic_name == "(no topic)": raise JsonableError(_("Topics are required in this organization")) elif addressee.is_private(): user_profiles = addressee.user_profiles() mirror_message = client.name in [ "zephyr_mirror", "irc_mirror", "jabber_mirror", "JabberMirror", ] check_sender_can_access_recipients(realm, sender, user_profiles) check_private_message_policy(realm, sender, user_profiles) recipients_for_user_creation_events = get_recipients_for_user_creation_events( realm, sender, user_profiles ) # API super-users who set the `forged` flag are allowed to # forge messages sent by any user, so we disable the # `forwarded_mirror_message` security check in that case. forwarded_mirror_message = mirror_message and not forged try: recipient = recipient_for_user_profiles( user_profiles, forwarded_mirror_message, forwarder_user_profile, sender ) except ValidationError as e: assert isinstance(e.messages[0], str) raise JsonableError(e.messages[0]) else: # This is defensive code--Addressee already validates # the message type. 
raise AssertionError("Invalid message type") message = Message() message.sender = sender message.content = message_content message.recipient = recipient message.realm = realm if addressee.is_stream(): message.set_topic_name(topic_name) if forged and forged_timestamp is not None: # Forged messages come with a timestamp message.date_sent = timestamp_to_datetime(forged_timestamp) else: message.date_sent = timezone_now() message.sending_client = client # We render messages later in the process. assert message.rendered_content is None if client.name == "zephyr_mirror": id = already_sent_mirrored_message_id(message) if id is not None: raise ZephyrMessageAlreadySentError(id) widget_content_dict = None if widget_content is not None: try: widget_content_dict = orjson.loads(widget_content) except orjson.JSONDecodeError: raise JsonableError(_("Widgets: API programmer sent invalid JSON content")) try: check_widget_content(widget_content_dict) except ValidationError as error: raise JsonableError( _("Widgets: {error_msg}").format( error_msg=error.message, ) ) message_send_dict = build_message_send_dict( message=message, stream=stream, local_id=local_id, sender_queue_id=sender_queue_id, widget_content_dict=widget_content_dict, email_gateway=email_gateway, mention_backend=mention_backend, limit_unread_user_ids=limit_unread_user_ids, disable_external_notifications=disable_external_notifications, recipients_for_user_creation_events=recipients_for_user_creation_events, ) if ( stream is not None and message_send_dict.rendering_result.mentions_stream_wildcard and not stream_wildcard_mention_allowed(sender, stream, realm) ): raise StreamWildcardMentionNotAllowedError topic_participant_count = len(message_send_dict.topic_participant_user_ids) if ( stream is not None and message_send_dict.rendering_result.mentions_topic_wildcard and not topic_wildcard_mention_allowed(sender, topic_participant_count, realm) ): raise TopicWildcardMentionNotAllowedError if message_send_dict.rendering_result.mentions_user_group_ids: mentioned_group_ids = list(message_send_dict.rendering_result.mentions_user_group_ids) check_user_group_mention_allowed(sender, mentioned_group_ids) return message_send_dict
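As a usage sketch (hedged: the variable names `user` and `stream` are assumed, and this is meant to run inside a configured Zulip development environment rather than standalone), a caller validates first and only then hands the resulting SendMessageRequest to do_send_messages:

# Hedged sketch: validate-then-send for a stream message. `user` and
# `stream` are assumed to be an existing UserProfile and Stream.
addressee = Addressee.for_stream(stream, "greetings")
send_request = check_message(
    user,
    get_client("Internal"),
    addressee,
    "hello world",
)
# check_message only validates and builds the request; nothing is
# saved until do_send_messages runs.
do_send_messages([send_request])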
Creates a message object and checks it, but doesn't send it or save it to the database. The internal functions that call this can therefore batch-send a bunch of created messages together as one database query. Call do_send_messages with a list of the return values of this method.
def _internal_prep_message( realm: Realm, sender: UserProfile, addressee: Addressee, content: str, *, email_gateway: bool = False, mention_backend: Optional[MentionBackend] = None, limit_unread_user_ids: Optional[Set[int]] = None, disable_external_notifications: bool = False, ) -> Optional[SendMessageRequest]: """ Create a message object and checks it, but doesn't send it or save it to the database. The internal function that calls this can therefore batch send a bunch of created messages together as one database query. Call do_send_messages with a list of the return values of this method. """ # If we have a stream name, and the stream doesn't exist, we # create it here (though this code path should probably be removed # eventually, moving that responsibility to the caller). If # addressee.stream_name() is None (i.e. we're sending to a stream # by ID), we skip this, as the stream object must already exist. if addressee.is_stream(): stream_name = addressee.stream_name() if stream_name is not None: ensure_stream(realm, stream_name, acting_user=sender) try: return check_message( sender, get_client("Internal"), addressee, content, realm=realm, email_gateway=email_gateway, mention_backend=mention_backend, limit_unread_user_ids=limit_unread_user_ids, disable_external_notifications=disable_external_notifications, ) except JsonableError as e: logging.exception( "Error queueing internal message by %s: %s", sender.delivery_email, e.msg, stack_info=True, ) return None
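The batching contract described in the docstring looks like this in practice (a hedged sketch; `realm`, `sender`, and `stream` are assumed to already exist):

# Hedged sketch of the batching pattern: prep several internal
# messages, drop any that failed validation (returned None), and send
# them with a single do_send_messages call.
send_requests = [
    _internal_prep_message(realm, sender, Addressee.for_stream(stream, topic), text)
    for topic, text in [("alerts", "disk almost full"), ("alerts", "disk usage normal")]
]
valid_requests = [request for request in send_requests if request is not None]
if valid_requests:
    do_send_messages(valid_requests)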
See _internal_prep_message for details of how this works.
def internal_prep_stream_message( sender: UserProfile, stream: Stream, topic_name: str, content: str, *, email_gateway: bool = False, limit_unread_user_ids: Optional[Set[int]] = None, ) -> Optional[SendMessageRequest]: """ See _internal_prep_message for details of how this works. """ realm = stream.realm addressee = Addressee.for_stream(stream, topic_name) return _internal_prep_message( realm=realm, sender=sender, addressee=addressee, content=content, email_gateway=email_gateway, limit_unread_user_ids=limit_unread_user_ids, )
See _internal_prep_message for details of how this works.
def internal_prep_stream_message_by_name( realm: Realm, sender: UserProfile, stream_name: str, topic_name: str, content: str, ) -> Optional[SendMessageRequest]: """ See _internal_prep_message for details of how this works. """ addressee = Addressee.for_stream_name(stream_name, topic_name) return _internal_prep_message( realm=realm, sender=sender, addressee=addressee, content=content, )
See _internal_prep_message for details of how this works.
def internal_prep_private_message( sender: UserProfile, recipient_user: UserProfile, content: str, *, mention_backend: Optional[MentionBackend] = None, disable_external_notifications: bool = False, ) -> Optional[SendMessageRequest]: """ See _internal_prep_message for details of how this works. """ addressee = Addressee.for_user_profile(recipient_user) if not is_cross_realm_bot_email(recipient_user.delivery_email): realm = recipient_user.realm else: realm = sender.realm return _internal_prep_message( realm=realm, sender=sender, addressee=addressee, content=content, mention_backend=mention_backend, disable_external_notifications=disable_external_notifications, )
Should be called while holding a SELECT FOR UPDATE lock (e.g. via access_message(..., lock_message=True)) on the Message row, to prevent race conditions.
def do_add_reaction( user_profile: UserProfile, message: Message, emoji_name: str, emoji_code: str, reaction_type: str, ) -> None: """Should be called while holding a SELECT FOR UPDATE lock (e.g. via access_message(..., lock_message=True)) on the Message row, to prevent race conditions. """ reaction = Reaction( user_profile=user_profile, message=message, emoji_name=emoji_name, emoji_code=emoji_code, reaction_type=reaction_type, ) reaction.save() # Determine and set the visibility_policy depending on 'automatically_follow_topics_policy' # and 'automatically_unmute_topics_in_muted_streams_policy'. if set_visibility_policy_possible( user_profile, message ) and UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_PARTICIPATION in [ user_profile.automatically_follow_topics_policy, user_profile.automatically_unmute_topics_in_muted_streams_policy, ]: stream_id = message.recipient.type_id (stream, sub) = access_stream_by_id(user_profile, stream_id) assert stream is not None if sub: new_visibility_policy = visibility_policy_for_participation(user_profile, sub.is_muted) if new_visibility_policy and should_change_visibility_policy( new_visibility_policy, user_profile, stream_id, topic_name=message.topic_name(), ): do_set_user_topic_visibility_policy( user_profile=user_profile, stream=stream, topic_name=message.topic_name(), visibility_policy=new_visibility_policy, ) notify_reaction_update(user_profile, message, reaction, "add")
Should be called while holding a SELECT FOR UPDATE lock (e.g. via access_message(..., lock_message=True)) on the Message row, to prevent race conditions.
def do_remove_reaction( user_profile: UserProfile, message: Message, emoji_code: str, reaction_type: str ) -> None: """Should be called while holding a SELECT FOR UPDATE lock (e.g. via access_message(..., lock_message=True)) on the Message row, to prevent race conditions. """ reaction = Reaction.objects.filter( user_profile=user_profile, message=message, emoji_code=emoji_code, reaction_type=reaction_type, ).get() reaction.delete() notify_reaction_update(user_profile, message, reaction, "remove")
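Both reaction helpers assume their caller already holds the row lock. A hedged sketch of that calling pattern (the exact access_message return shape varies across Zulip versions, so treat this as the pattern rather than a precise API reference):

from django.db import transaction

from zerver.lib.message import access_message

with transaction.atomic():
    # Locks the Message row (SELECT FOR UPDATE) for the duration of
    # the transaction, so concurrent reaction edits serialize.
    message = access_message(user_profile, message_id, lock_message=True)
    do_add_reaction(user_profile, message, "octopus", "1f419", "unicode_emoji")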
ordered_linkifier_ids should contain the ids of all existing linkifiers. In the rare situation where a linkifier has just been deleted, so that extra ids are passed, the checks below are sufficient to detect inconsistencies most of the time.
def check_reorder_linkifiers(
    realm: Realm, ordered_linkifier_ids: List[int], *, acting_user: Optional[UserProfile]
) -> None:
    """ordered_linkifier_ids should contain the ids of all existing linkifiers.
    In the rare situation where a linkifier has just been deleted, so that
    extra ids are passed, the checks below are sufficient to detect
    inconsistencies most of the time."""
    # Repeated IDs in the user request would collapse into the same key when
    # constructing the set.
    linkifier_id_set = set(ordered_linkifier_ids)
    if len(linkifier_id_set) < len(ordered_linkifier_ids):
        raise JsonableError(_("The ordered list must not contain duplicated linkifiers"))

    linkifiers = RealmFilter.objects.filter(realm=realm)
    if {linkifier.id for linkifier in linkifiers} != linkifier_id_set:
        raise JsonableError(
            _("The ordered list must enumerate all existing linkifiers exactly once")
        )

    # After the validation, we are sure that there is nothing to do. Return
    # early to avoid flushing the cache and populating the audit logs.
    if len(linkifiers) == 0:
        return

    id_to_new_order = {
        linkifier_id: order for order, linkifier_id in enumerate(ordered_linkifier_ids)
    }
    for linkifier in linkifiers:
        assert linkifier.id in id_to_new_order
        linkifier.order = id_to_new_order[linkifier.id]

    RealmFilter.objects.bulk_update(linkifiers, fields=["order"])
    flush_linkifiers(instance=linkifiers[0])

    # This roundtrip re-fetches the linkifiers sorted in the new order.
    realm_linkifiers = linkifiers_for_realm(realm.id)
    RealmAuditLog.objects.create(
        realm=realm,
        acting_user=acting_user,
        event_type=RealmAuditLog.REALM_LINKIFIERS_REORDERED,
        event_time=timezone_now(),
        extra_data={
            "realm_linkifiers": realm_linkifiers,
        },
    )
    notify_linkifiers(realm, realm_linkifiers)
Takes in a realm object, the name of an attribute to update, the value to update, and the user who initiated the update.
def do_set_realm_property(
    realm: Realm, name: str, value: Any, *, acting_user: Optional[UserProfile]
) -> None:
    """Takes in a realm object, the name of an attribute to update, the
    value to update, and the user who initiated the update.
    """
    property_type = Realm.property_types[name]
    assert isinstance(
        value, property_type
    ), f"Cannot update {name}: {value} is not an instance of {property_type}"

    old_value = getattr(realm, name)
    if old_value == value:
        return

    setattr(realm, name, value)
    realm.save(update_fields=[name])

    event = dict(
        type="realm",
        op="update",
        property=name,
        value=value,
    )
    # These settings have a different event format due to their history.
    message_edit_settings = [
        "allow_message_editing",
        "edit_topic_policy",
        "message_content_edit_limit_seconds",
    ]
    if name in message_edit_settings:
        event = dict(
            type="realm",
            op="update_dict",
            property="default",
            data={name: value},
        )
    send_event_on_commit(realm, event, active_user_ids(realm.id))

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=realm,
        event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
        event_time=event_time,
        acting_user=acting_user,
        extra_data={
            RealmAuditLog.OLD_VALUE: old_value,
            RealmAuditLog.NEW_VALUE: value,
            "property": name,
        },
    )

    if name == "waiting_period_threshold":
        update_users_in_full_members_system_group(realm, acting_user=acting_user)
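An illustrative call (with assumed `realm` and `admin_user` objects): the isinstance assertion at the top means a mistyped value fails fast instead of being written to the database.

# Hedged usage sketch: waiting_period_threshold is an integer realm
# property, so passing e.g. a str here would trip the assertion.
do_set_realm_property(
    realm,
    "waiting_period_threshold",
    10,
    acting_user=admin_user,
)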
Takes in a realm object, the name of an attribute to update, the user_group to update, and the user who initiated the update.
def do_change_realm_permission_group_setting(
    realm: Realm, setting_name: str, user_group: UserGroup, *, acting_user: Optional[UserProfile]
) -> None:
    """Takes in a realm object, the name of an attribute to update, the
    user_group to update, and the user who initiated the update.
    """
    assert setting_name in Realm.REALM_PERMISSION_GROUP_SETTINGS
    old_user_group_id = getattr(realm, setting_name).id

    setattr(realm, setting_name, user_group)
    realm.save(update_fields=[setting_name])

    event = dict(
        type="realm",
        op="update_dict",
        property="default",
        data={setting_name: user_group.id},
    )

    send_event_on_commit(realm, event, active_user_ids(realm.id))

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=realm,
        event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
        event_time=event_time,
        acting_user=acting_user,
        extra_data={
            RealmAuditLog.OLD_VALUE: old_user_group_id,
            RealmAuditLog.NEW_VALUE: user_group.id,
            "property": setting_name,
        },
    )
Deactivate this realm. Do NOT deactivate the users -- we need to be able to tell the difference between users that were intentionally deactivated, e.g. by a realm admin, and users who can't currently use Zulip because their realm has been deactivated.
def do_deactivate_realm(realm: Realm, *, acting_user: Optional[UserProfile]) -> None: """ Deactivate this realm. Do NOT deactivate the users -- we need to be able to tell the difference between users that were intentionally deactivated, e.g. by a realm admin, and users who can't currently use Zulip because their realm has been deactivated. """ if realm.deactivated: return with transaction.atomic(): realm.deactivated = True realm.save(update_fields=["deactivated"]) if settings.BILLING_ENABLED: billing_session = RealmBillingSession(user=acting_user, realm=realm) billing_session.downgrade_now_without_creating_additional_invoices() event_time = timezone_now() RealmAuditLog.objects.create( realm=realm, event_type=RealmAuditLog.REALM_DEACTIVATED, event_time=event_time, acting_user=acting_user, extra_data={ RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm), }, ) from zerver.lib.remote_server import maybe_enqueue_audit_log_upload maybe_enqueue_audit_log_upload(realm) ScheduledEmail.objects.filter(realm=realm).delete() # This event will only ever be received by clients with an active # longpoll connection, because by this point clients will be # unable to authenticate again to their event queue (triggering an # immediate reload into the page explaining the realm was # deactivated). So the purpose of sending this is to flush all # active longpoll connections for the realm. event = dict(type="realm", op="deactivated", realm_id=realm.id) send_event_on_commit(realm, event, active_user_ids(realm.id)) # Don't deactivate the users, as that would lose a lot of state if # the realm needs to be reactivated, but do delete their sessions # so they get bumped to the login screen, where they'll get a # realm deactivation notice when they try to log in. # # Note: This is intentionally outside the transaction because it # is unsafe to modify sessions inside transactions with the # cached_db session plugin we're using, and our session engine # declared in zerver/lib/safe_session_cached_db.py enforces this. delete_realm_user_sessions(realm)
Even though our submessage architecture is geared toward collaboration among all message readers, we still enforce that the first person to attach a submessage to a message must be the original sender of that message.
def verify_submessage_sender(
    *,
    message_id: int,
    message_sender_id: int,
    submessage_sender_id: int,
) -> None:
    """Even though our submessage architecture is geared toward
    collaboration among all message readers, we still enforce that
    the first person to attach a submessage to a message must be the
    original sender of that message.
    """
    if message_sender_id == submessage_sender_id:
        return

    if SubMessage.objects.filter(
        message_id=message_id,
        sender_id=message_sender_id,
    ).exists():
        return

    raise JsonableError(_("You cannot attach a submessage to this message."))
Should be called while holding a SELECT FOR UPDATE lock (e.g. via access_message(..., lock_message=True)) on the Message row, to prevent race conditions.
def do_add_submessage( realm: Realm, sender_id: int, message_id: int, msg_type: str, content: str, ) -> None: """Should be called while holding a SELECT FOR UPDATE lock (e.g. via access_message(..., lock_message=True)) on the Message row, to prevent race conditions. """ submessage = SubMessage( sender_id=sender_id, message_id=message_id, msg_type=msg_type, content=content, ) submessage.save() # Determine and set the visibility_policy depending on 'automatically_follow_topics_policy' # and 'automatically_unmute_topics_policy'. sender = submessage.sender if set_visibility_policy_possible( sender, submessage.message ) and UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_PARTICIPATION in [ sender.automatically_follow_topics_policy, sender.automatically_unmute_topics_in_muted_streams_policy, ]: stream_id = submessage.message.recipient.type_id (stream, sub) = access_stream_by_id(sender, stream_id) assert stream is not None if sub: new_visibility_policy = visibility_policy_for_participation(sender, sub.is_muted) if new_visibility_policy and should_change_visibility_policy( new_visibility_policy, sender, stream_id, topic_name=submessage.message.topic_name(), ): do_set_user_topic_visibility_policy( user_profile=sender, stream=stream, topic_name=submessage.message.topic_name(), visibility_policy=new_visibility_policy, ) event = dict( type="submessage", msg_type=msg_type, message_id=message_id, submessage_id=submessage.id, sender_id=sender_id, content=content, ) ums = UserMessage.objects.filter(message_id=message_id) target_user_ids = [um.user_profile_id for um in ums] send_event_on_commit(realm, event, target_user_ids)
This is a version of do_delete_user which does not delete messages that the user was a participant in, and thus is less potentially disruptive to other users.

The code is a bit tricky, because we want to, at some point, call user_profile.delete() to trigger cascading deletions of related models - but we need to avoid the cascades deleting all messages sent by the user to avoid messing up history of public stream conversations that they may have participated in.

Not recommended for general use due to the following quirks:
* Does not live-update other clients via `send_event` about the user's new name, email, or other attributes.
* Not guaranteed to clear caches containing the deleted users. The temporary user may be visible briefly in caches due to the UserProfile model's post_save hook.
* Deletes `acting_user`/`modified_user` entries in RealmAuditLog, potentially leading to corruption in audit tables if the user had, for example, changed organization-level settings previously.
* May violate invariants like deleting the only subscriber to a stream/group or the last owner in a realm.
* Will remove MutedUser records for other users who might have muted this user.
* Will destroy Attachment/ArchivedAttachment records for files uploaded by the user, making them inaccessible.
* Will destroy ArchivedMessage records associated with the user, making them impossible to restore from backups.
* Will destroy Reaction/Submessage objects for reactions/poll votes done by the user.

Most of these issues are not relevant for the common case that the user being deleted hasn't used Zulip extensively.

It is possible that a different algorithm, which worked by overwriting the UserProfile's values with RealmUserDefault values and performing a targeted set of deletions of cascading models (`Subscription`, `UserMessage`, `CustomProfileFieldValue`, etc.), would be a cleaner path to a high-quality system.

Other lesser quirks to be aware of:
* The deleted user will disappear from all "Read receipts" displays, as all UserMessage rows will have been deleted.
* Raw Markdown syntax mentioning the user still contains their original name (though modern clients will look up the user via `data-user-id` and display the current name). This is hard to change, and not important, since nothing prevents other users from just typing the user's name in their own messages.
* Consumes a user ID sequence number, resulting in gaps in the space of user IDs that contain actual users.
def do_delete_user_preserving_messages(user_profile: UserProfile) -> None: """This is a version of do_delete_user which does not delete messages that the user was a participant in, and thus is less potentially disruptive to other users. The code is a bit tricky, because we want to, at some point, call user_profile.delete() to trigger cascading deletions of related models - but we need to avoid the cascades deleting all messages sent by the user to avoid messing up history of public stream conversations that they may have participated in. Not recommended for general use due to the following quirks: * Does not live-update other clients via `send_event` about the user's new name, email, or other attributes. * Not guaranteed to clear caches containing the deleted users. The temporary user may be visible briefly in caches due to the UserProfile model's post_save hook. * Deletes `acting_user`/`modified_user` entries in RealmAuditLog, potentially leading to corruption in audit tables if the user had, for example, changed organization-level settings previously. * May violate invariants like deleting the only subscriber to a stream/group or the last owner in a realm. * Will remove MutedUser records for other users who might have muted this user. * Will destroy Attachment/ArchivedAttachment records for files uploaded by the user, making them inaccessible. * Will destroy ArchivedMessage records associated with the user, making them impossible to restore from backups. * Will destroy Reaction/Submessage objects for reactions/poll votes done by the user. Most of these issues are not relevant for the common case that the user being deleted hasn't used Zulip extensively. It is possible a different algorithm that worked via overwriting the UserProfile's values with RealmUserDefault values, as well as a targeted set of deletions of cascading models (`Subscription`, `UserMessage`, `CustomProfileFieldValue`, etc.) would be a cleaner path to a high quality system. Other lesser quirks to be aware of: * The deleted user will disappear from all "Read receipts" displays, as all UserMessage rows will have been deleted. * Raw Markdown syntax mentioning the user still contain their original name (though modern clients will look up the user via `data-user-id` and display the current name). This is hard to change, and not important, since nothing prevents other users from just typing the user's name in their own messages. * Consumes a user ID sequence number, resulting in gaps in the space of user IDs that contain actual users. """ if user_profile.realm.is_zephyr_mirror_realm: raise AssertionError("Deleting zephyr mirror users is not supported") do_deactivate_user(user_profile, acting_user=None) user_id = user_profile.id personal_recipient = user_profile.recipient realm = user_profile.realm date_joined = user_profile.date_joined with transaction.atomic(): # The strategy is that before calling user_profile.delete(), we need to # reassign Messages sent by the user to a dummy user, so that they don't # get affected by CASCADE. We cannot yet create a dummy user with .id # matching that of the user_profile, so the general scheme is: # 1. We create a *temporary* dummy for the initial re-assignment of messages. # 2. We delete the UserProfile. # 3. We create a replacement dummy user with its id matching what the UserProfile had. # 4. This is the intended, final replacement UserProfile, so we re-assign # the messages from step (1) to it and delete the temporary dummy. 
# # We also do the same for Subscriptions - while they could be handled like # in do_delete_user by re-creating the objects after CASCADE deletion, the code # is cleaner by using the same re-assignment approach for them together with Messages. random_token = secrets.token_hex(16) temp_replacement_user = create_user( email=f"temp_deleteduser{random_token}@{get_fake_email_domain(realm.host)}", password=None, realm=realm, full_name=f"Deleted User {user_id} (temp)", active=False, is_mirror_dummy=True, force_date_joined=date_joined, create_personal_recipient=False, ) # Uses index: zerver_message_realm_sender_recipient (prefix) Message.objects.filter(realm_id=realm.id, sender=user_profile).update( sender=temp_replacement_user ) Subscription.objects.filter( user_profile=user_profile, recipient__type=Recipient.DIRECT_MESSAGE_GROUP ).update(user_profile=temp_replacement_user) user_profile.delete() replacement_user = create_user( force_id=user_id, email=f"deleteduser{user_id}@{get_fake_email_domain(realm.host)}", password=None, realm=realm, full_name=f"Deleted User {user_id}", active=False, is_mirror_dummy=True, force_date_joined=date_joined, create_personal_recipient=False, ) # We don't delete the personal recipient to preserve personal messages! # Now, the personal recipient belong to replacement_user, because # personal_recipient.type_id is equal to replacement_user.id. replacement_user.recipient = personal_recipient replacement_user.save(update_fields=["recipient"]) # Uses index: zerver_message_realm_sender_recipient (prefix) Message.objects.filter(realm_id=realm.id, sender=temp_replacement_user).update( sender=replacement_user ) Subscription.objects.filter( user_profile=temp_replacement_user, recipient__type=Recipient.DIRECT_MESSAGE_GROUP ).update(user_profile=replacement_user, is_user_active=replacement_user.is_active) temp_replacement_user.delete() RealmAuditLog.objects.create( realm=replacement_user.realm, modified_user=replacement_user, acting_user=None, event_type=RealmAuditLog.USER_DELETED_PRESERVING_MESSAGES, event_time=timezone_now(), )
Helper function for changing the .is_active field. Not meant as a standalone function in production code, as properly activating/deactivating users requires more steps. This changes the is_active value and saves it, while ensuring Subscription.is_user_active values are updated in the same db transaction.
def change_user_is_active(user_profile: UserProfile, value: bool) -> None:
    """
    Helper function for changing the .is_active field. Not meant as a
    standalone function in production code, as properly
    activating/deactivating users requires more steps.
    This changes the is_active value and saves it, while ensuring
    Subscription.is_user_active values are updated in the same db transaction.
    """
    with transaction.atomic(savepoint=False):
        user_profile.is_active = value
        user_profile.save(update_fields=["is_active"])
        Subscription.objects.filter(user_profile=user_profile).update(is_user_active=value)
Verifies that the user's proposed full name is valid. The caller is responsible for checking permissions. Returns the new full name, which may differ from what was passed in (because this function strips whitespace).
def check_change_full_name(
    user_profile: UserProfile, full_name_raw: str, acting_user: Optional[UserProfile]
) -> str:
    """Verifies that the user's proposed full name is valid. The caller
    is responsible for checking permissions. Returns the new full name,
    which may differ from what was passed in (because this function
    strips whitespace)."""
    new_full_name = check_full_name(
        full_name_raw=full_name_raw, user_profile=user_profile, realm=user_profile.realm
    )
    do_change_full_name(user_profile, new_full_name, acting_user)
    return new_full_name
Returns:
1. realm, converted realm data
2. avatars, which is a list used to map avatars to Zulip's avatar records.json
3. user_map, which is a dictionary mapping Gitter user ids to Zulip user ids
4. stream_map, which is a dictionary mapping Gitter rooms to Zulip stream ids
def gitter_workspace_to_realm(
    domain_name: str, gitter_data: GitterDataT, realm_subdomain: str
) -> Tuple[ZerverFieldsT, List[ZerverFieldsT], Dict[str, int], Dict[str, int]]:
    """
    Returns:
    1. realm, converted realm data
    2. avatars, which is a list used to map avatars to Zulip's avatar records.json
    3. user_map, which is a dictionary mapping Gitter user ids to Zulip user ids
    4. stream_map, which is a dictionary mapping Gitter rooms to Zulip stream ids
    """
    NOW = float(timezone_now().timestamp())
    zerver_realm: List[ZerverFieldsT] = build_zerver_realm(realm_id, realm_subdomain, NOW, "Gitter")
    realm = build_realm(zerver_realm, realm_id, domain_name)

    # Users will have GitHub's generated noreply email addresses, so their
    # only way to log in at first is via GitHub. So we set GitHub to be the
    # only authentication method enabled by default, to avoid user confusion.
    realm["zerver_realmauthenticationmethod"] = [
        {
            "name": GitHubAuthBackend.auth_backend_name,
            "realm": realm_id,
            # The id doesn't matter since it gets set by the import later
            # properly, but we need to set it to something in the dict.
            "id": 1,
        }
    ]

    zerver_userprofile, avatars, user_map = build_userprofile(int(NOW), domain_name, gitter_data)
    zerver_stream, zerver_defaultstream, stream_map = build_stream_map(int(NOW), gitter_data)
    zerver_recipient, zerver_subscription = build_recipient_and_subscription(
        zerver_userprofile, zerver_stream
    )

    realm["zerver_userprofile"] = zerver_userprofile
    realm["zerver_stream"] = zerver_stream
    realm["zerver_defaultstream"] = zerver_defaultstream
    realm["zerver_recipient"] = zerver_recipient
    realm["zerver_subscription"] = zerver_subscription

    return realm, avatars, user_map, stream_map
Returns:
1. zerver_userprofile, which is a list of user profiles
2. avatar_list, which is a list used to map avatars to Zulip's avatar records.json
3. user_map, which is a dictionary mapping Gitter user ids to Zulip user ids
def build_userprofile(
    timestamp: Any, domain_name: str, gitter_data: GitterDataT
) -> Tuple[List[ZerverFieldsT], List[ZerverFieldsT], Dict[str, int]]:
    """
    Returns:
    1. zerver_userprofile, which is a list of user profiles
    2. avatar_list, which is a list used to map avatars to Zulip's avatar records.json
    3. user_map, which is a dictionary mapping Gitter user ids to Zulip user ids
    """
    logging.info("######### IMPORTING USERS STARTED #########\n")
    zerver_userprofile = []
    avatar_list: List[ZerverFieldsT] = []
    user_map: Dict[str, int] = {}
    user_id = 0

    for data in gitter_data:
        if get_user_from_message(data) not in user_map:
            user_data = data["fromUser"]
            user_map[user_data["id"]] = user_id
            email = get_user_email(user_data, domain_name)
            if user_data.get("avatarUrl"):
                build_avatar(
                    user_id, realm_id, email, user_data["avatarUrl"], timestamp, avatar_list
                )

            # Build userprofile object
            userprofile = UserProfile(
                full_name=user_data["displayName"],
                id=user_id,
                email=email,
                delivery_email=email,
                avatar_source="U",
                date_joined=timestamp,
                last_login=timestamp,
            )
            userprofile_dict = model_to_dict(userprofile)
            # Set realm id separately as the corresponding realm is not yet
            # a Realm model instance
            userprofile_dict["realm"] = realm_id

            # We use this later, even though Zulip doesn't support short_name
            userprofile_dict["short_name"] = user_data["username"]

            zerver_userprofile.append(userprofile_dict)
            user_id += 1
    logging.info("######### IMPORTING USERS FINISHED #########\n")
    return zerver_userprofile, avatar_list, user_map
Returns:
1. stream, which is the list of streams
2. defaultstreams, which is the list of default streams
3. stream_map, which is a dictionary mapping Gitter rooms to Zulip stream ids
def build_stream_map( timestamp: Any, gitter_data: GitterDataT ) -> Tuple[List[ZerverFieldsT], List[ZerverFieldsT], Dict[str, int]]: """ Returns: 1. stream, which is the list of streams 2. defaultstreams, which is the list of default streams 3. stream_map, which is a dictionary to map from Gitter rooms to Zulip stream id """ logging.info("######### IMPORTING STREAM STARTED #########\n") stream_id = 0 stream: List[ZerverFieldsT] = [] # Default stream when no "room" field is present stream.append( build_stream(timestamp, realm_id, "from gitter", "Imported from Gitter", stream_id) ) defaultstream = build_defaultstream(realm_id=realm_id, stream_id=stream_id, defaultstream_id=0) stream_id += 1 # Gathering streams from gitter_data stream_map: Dict[str, int] = {} for data in gitter_data: if "room" in data and data["room"] not in stream_map: stream.append( build_stream( timestamp, realm_id, data["room"], f'Gitter room {data["room"]}', stream_id ) ) stream_map[data["room"]] = stream_id stream_id += 1 logging.info("######### IMPORTING STREAMS FINISHED #########\n") return stream, [defaultstream], stream_map
Assumes that there is at least one stream with 'stream_id' = 0, and that this stream is the only defaultstream, with 'defaultstream_id' = 0.
Returns:
1. zerver_recipient, which is a list of mapped recipients
2. zerver_subscription, which is a list of mapped subscriptions
def build_recipient_and_subscription( zerver_userprofile: List[ZerverFieldsT], zerver_stream: List[ZerverFieldsT] ) -> Tuple[List[ZerverFieldsT], List[ZerverFieldsT]]: """ Assumes that there is at least one stream with 'stream_id' = 0, and that this stream is the only defaultstream, with 'defaultstream_id' = 0 Returns: 1. zerver_recipient, which is a list of mapped recipient 2. zerver_subscription, which is a list of mapped subscription """ zerver_recipient = [] zerver_subscription = [] recipient_id = subscription_id = 0 # For streams # Initial recipients correspond to initial streams # We enumerate all streams, and build a recipient for each # Hence 'recipient_id'=n corresponds to 'stream_id'=n for stream in zerver_stream: zerver_recipient.append(build_recipient(recipient_id, recipient_id, Recipient.STREAM)) recipient_id += 1 # For users for user in zerver_userprofile: zerver_recipient.append(build_recipient(user["id"], recipient_id, Recipient.PERSONAL)) zerver_subscription.append(build_subscription(recipient_id, user["id"], subscription_id)) recipient_id += 1 subscription_id += 1 # As suggested in #14830, we subscribe every user to every stream. # We rely on the above invariant: 'recipient_id'=n corresponds to 'stream_id'=n # # TODO: For multi-stream imports, subscribe users to streams # based either on Gitter API data or who sent messages where. for user in zerver_userprofile: for stream in zerver_stream: zerver_subscription.append( build_subscription(stream["id"], user["id"], subscription_id) ) subscription_id += 1 return zerver_recipient, zerver_subscription
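Given the invariant the comments rely on ('recipient_id'=n corresponds to 'stream_id'=n), a quick sanity check over the returned lists might look like this (field names assume build_recipient produces model_to_dict-style dicts with 'id', 'type', and 'type_id' keys, which is an assumption, not verified here):

# Hedged invariant check: every stream recipient's id should equal its
# type_id (the stream id), by construction above.
for recipient in zerver_recipient:
    if recipient["type"] == Recipient.STREAM:
        assert recipient["id"] == recipient["type_id"]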
Messages are stored in batches
def convert_gitter_workspace_messages( gitter_data: GitterDataT, output_dir: str, subscriber_map: Dict[int, Set[int]], user_map: Dict[str, int], stream_map: Dict[str, int], user_short_name_to_full_name: Dict[str, str], zerver_userprofile: List[ZerverFieldsT], realm_id: int, chunk_size: int = MESSAGE_BATCH_CHUNK_SIZE, ) -> None: """ Messages are stored in batches """ logging.info("######### IMPORTING MESSAGES STARTED #########\n") long_term_idle = long_term_idle_helper( iter(gitter_data), get_user_from_message, get_timestamp_from_message, lambda id: user_map[id], iter(user_map.keys()), zerver_userprofile, ) message_id = 0 low_index = 0 upper_index = low_index + chunk_size dump_file_id = 1 while True: message_json = {} zerver_message = [] zerver_usermessage: List[ZerverFieldsT] = [] message_data = gitter_data[low_index:upper_index] if len(message_data) == 0: break for message in message_data: message_time = get_timestamp_from_message(message) mentioned_user_ids = get_usermentions(message, user_map, user_short_name_to_full_name) rendered_content = None topic_name = "imported from Gitter" + ( f' room {message["room"]}' if "room" in message else "" ) user_id = user_map[get_user_from_message(message)] recipient_id = stream_map[message["room"]] if "room" in message else 0 zulip_message = build_message( topic_name=topic_name, date_sent=message_time, message_id=message_id, content=message["text"], rendered_content=rendered_content, user_id=user_id, recipient_id=recipient_id, realm_id=realm_id, ) zerver_message.append(zulip_message) build_usermessages( zerver_usermessage=zerver_usermessage, subscriber_map=subscriber_map, recipient_id=recipient_id, mentioned_user_ids=mentioned_user_ids, message_id=message_id, is_private=False, long_term_idle=long_term_idle, ) message_id += 1 message_json["zerver_message"] = zerver_message message_json["zerver_usermessage"] = zerver_usermessage message_filename = os.path.join(output_dir, f"messages-{dump_file_id:06}.json") logging.info("Writing messages to %s\n", message_filename) write_data_to_file(os.path.join(message_filename), message_json) low_index = upper_index upper_index = chunk_size + low_index dump_file_id += 1 logging.info("######### IMPORTING MESSAGES FINISHED #########\n")
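The batch slicing used above generalizes to a small helper; here it is as a standalone, runnable sketch (names are illustrative) whose half-open chunk boundaries guarantee every message is written exactly once:

from typing import Iterator, List, Sequence, TypeVar

T = TypeVar("T")


def iter_chunks(data: Sequence[T], chunk_size: int) -> Iterator[List[T]]:
    # Yield consecutive slices of `data` until it is exhausted.
    low_index = 0
    while True:
        chunk = list(data[low_index : low_index + chunk_size])
        if not chunk:
            break
        yield chunk
        low_index += chunk_size


assert list(iter_chunks(range(5), 2)) == [[0, 1], [2, 3], [4]]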
This can be convenient for building up UserMessage rows.
def make_subscriber_map(zerver_subscription: List[ZerverFieldsT]) -> Dict[int, Set[int]]: """ This can be convenient for building up UserMessage rows. """ subscriber_map: Dict[int, Set[int]] = {} for sub in zerver_subscription: user_id = sub["user_profile"] recipient_id = sub["recipient"] if recipient_id not in subscriber_map: subscriber_map[recipient_id] = set() subscriber_map[recipient_id].add(user_id) return subscriber_map
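A tiny runnable illustration, using dict rows shaped like zerver_subscription entries:

rows = [
    {"user_profile": 1, "recipient": 100},
    {"user_profile": 2, "recipient": 100},
    {"user_profile": 1, "recipient": 200},
]
# Two subscribers on recipient 100, one on recipient 200.
assert make_subscriber_map(rows) == {100: {1, 2}, 200: {1}}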
This function was only used by the HipChat import, but it may be useful for future conversions. The Slack and Gitter conversions build recipients in a way that is more tightly integrated with creating other objects.
def build_recipients(
    zerver_userprofile: Iterable[ZerverFieldsT],
    zerver_stream: Iterable[ZerverFieldsT],
    zerver_huddle: Iterable[ZerverFieldsT] = [],
) -> List[ZerverFieldsT]:
    """
    This function was only used by the HipChat import, but it may be useful
    for future conversions. The Slack and Gitter conversions build recipients
    in a way that is more tightly integrated with creating other objects.
    """
    recipients = []

    for user in zerver_userprofile:
        type_id = user["id"]
        type = Recipient.PERSONAL
        recipient = Recipient(
            type_id=type_id,
            id=NEXT_ID("recipient"),
            type=type,
        )
        recipient_dict = model_to_dict(recipient)
        recipients.append(recipient_dict)

    for stream in zerver_stream:
        type_id = stream["id"]
        type = Recipient.STREAM
        recipient = Recipient(
            type_id=type_id,
            id=NEXT_ID("recipient"),
            type=type,
        )
        recipient_dict = model_to_dict(recipient)
        recipients.append(recipient_dict)

    for huddle in zerver_huddle:
        type_id = huddle["id"]
        type = Recipient.DIRECT_MESSAGE_GROUP
        recipient = Recipient(
            type_id=type_id,
            id=NEXT_ID("recipient"),
            type=type,
        )
        recipient_dict = model_to_dict(recipient)
        recipients.append(recipient_dict)
    return recipients
This function should be passed a 'fileinfo' dictionary, which contains 'size', 'created' (creation time), and 'name' (filename) fields.
def build_attachment(
    realm_id: int,
    message_ids: Set[int],
    user_id: int,
    fileinfo: ZerverFieldsT,
    s3_path: str,
    zerver_attachment: List[ZerverFieldsT],
) -> None:
    """
    This function should be passed a 'fileinfo' dictionary, which contains
    'size', 'created' (creation time), and 'name' (filename) fields.
    """
    attachment_id = NEXT_ID("attachment")

    attachment = Attachment(
        id=attachment_id,
        size=fileinfo["size"],
        create_time=fileinfo["created"],
        is_realm_public=True,
        path_id=s3_path,
        file_name=fileinfo["name"],
    )

    attachment_dict = model_to_dict(attachment, exclude=["owner", "messages", "realm"])
    attachment_dict["owner"] = user_id
    attachment_dict["messages"] = list(message_ids)
    attachment_dict["realm"] = realm_id

    zerver_attachment.append(attachment_dict)
This function gets the avatar of the user and saves it in the user's avatar directory with both the extensions '.png' and '.original'.
Required parameters:
1. avatar_list: List of avatars to be mapped in the avatars records.json file
2. avatar_dir: Folder where the downloaded avatars are saved
3. realm_id: Realm ID.
We use this for Slack and Gitter conversions, where avatars need to be downloaded. For simpler conversions see write_avatar_png.
def process_avatars( avatar_list: List[ZerverFieldsT], avatar_dir: str, realm_id: int, threads: int, size_url_suffix: str = "", ) -> List[ZerverFieldsT]: """ This function gets the avatar of the user and saves it in the user's avatar directory with both the extensions '.png' and '.original' Required parameters: 1. avatar_list: List of avatars to be mapped in avatars records.json file 2. avatar_dir: Folder where the downloaded avatars are saved 3. realm_id: Realm ID. We use this for Slack and Gitter conversions, where avatars need to be downloaded. For simpler conversions see write_avatar_png. """ logging.info("######### GETTING AVATARS #########\n") logging.info("DOWNLOADING AVATARS .......\n") avatar_original_list = [] avatar_upload_list = [] for avatar in avatar_list: avatar_hash = user_avatar_path_from_ids(avatar["user_profile_id"], realm_id) avatar_url = avatar["path"] avatar_original = dict(avatar) image_path = f"{avatar_hash}.png" original_image_path = f"{avatar_hash}.original" avatar_upload_list.append([avatar_url, image_path, original_image_path]) # We don't add the size field here in avatar's records.json, # since the metadata is not needed on the import end, and we # don't have it until we've downloaded the files anyway. avatar["path"] = image_path avatar["s3_path"] = image_path avatar_original["path"] = original_image_path avatar_original["s3_path"] = original_image_path avatar_original_list.append(avatar_original) # Run downloads in parallel run_parallel_wrapper( partial(get_avatar, avatar_dir, size_url_suffix), avatar_upload_list, threads=threads ) logging.info("######### GETTING AVATARS FINISHED #########\n") return avatar_list + avatar_original_list
This function downloads the uploads and saves them in the realm's upload directory.
Required parameters:
1. upload_list: List of uploads to be mapped in the uploads records.json file
2. upload_dir: Folder where the downloaded uploads are saved
def process_uploads(
    upload_list: List[ZerverFieldsT], upload_dir: str, threads: int
) -> List[ZerverFieldsT]:
    """
    This function downloads the uploads and saves them in the realm's upload
    directory.
    Required parameters:

    1. upload_list: List of uploads to be mapped in the uploads records.json file
    2. upload_dir: Folder where the downloaded uploads are saved
    """
    logging.info("######### GETTING ATTACHMENTS #########\n")
    logging.info("DOWNLOADING ATTACHMENTS .......\n")
    upload_url_list = []
    for upload in upload_list:
        upload_url = upload["path"]
        upload_s3_path = upload["s3_path"]
        upload_url_list.append([upload_url, upload_s3_path])
        upload["path"] = upload_s3_path

    # Run downloads in parallel
    run_parallel_wrapper(partial(get_uploads, upload_dir), upload_url_list, threads=threads)

    logging.info("######### GETTING ATTACHMENTS FINISHED #########\n")
    return upload_list
This function downloads the custom emojis and saves them in the output emoji folder.
Required parameters:
1. zerver_realmemoji: List of all RealmEmoji objects to be imported
2. emoji_dir: Folder where the downloaded emojis are saved
3. emoji_url_map: Maps emoji names to their URLs
def process_emojis( zerver_realmemoji: List[ZerverFieldsT], emoji_dir: str, emoji_url_map: ZerverFieldsT, threads: int, ) -> List[ZerverFieldsT]: """ This function downloads the custom emojis and saves in the output emoji folder. Required parameters: 1. zerver_realmemoji: List of all RealmEmoji objects to be imported 2. emoji_dir: Folder where the downloaded emojis are saved 3. emoji_url_map: Maps emoji name to its url """ emoji_records = [] upload_emoji_list = [] logging.info("######### GETTING EMOJIS #########\n") logging.info("DOWNLOADING EMOJIS .......\n") for emoji in zerver_realmemoji: emoji_url = emoji_url_map[emoji["name"]] emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format( realm_id=emoji["realm"], emoji_file_name=emoji["name"] ) upload_emoji_list.append([emoji_url, emoji_path]) emoji_record = dict(emoji) emoji_record["path"] = emoji_path emoji_record["s3_path"] = emoji_path emoji_record["realm_id"] = emoji_record["realm"] emoji_record.pop("realm") emoji_records.append(emoji_record) # Run downloads in parallel run_parallel_wrapper(partial(get_emojis, emoji_dir), upload_emoji_list, threads=threads) logging.info("######### GETTING EMOJIS FINISHED #########\n") return emoji_records
Algorithmically, we treat users who have sent more than 10 messages or have sent a message within the last 60 days as active. Everyone else is treated as long-term idle, which means they will have a slightly slower first page load when coming back to Zulip.
def long_term_idle_helper(
    message_iterator: Iterator[ZerverFieldsT],
    user_from_message: Callable[[ZerverFieldsT], Optional[ExternalId]],
    timestamp_from_message: Callable[[ZerverFieldsT], float],
    zulip_user_id_from_user: Callable[[ExternalId], int],
    all_user_ids_iterator: Iterator[ExternalId],
    zerver_userprofile: List[ZerverFieldsT],
) -> Set[int]:
    """Algorithmically, we treat users who have sent more than 10 messages
    or have sent a message within the last 60 days as active.
    Everyone else is treated as long-term idle, which means they
    will have a slightly slower first page load when coming back to
    Zulip.
    """
    sender_counts: Dict[ExternalId, int] = defaultdict(int)
    recent_senders: Set[ExternalId] = set()
    NOW = float(timezone_now().timestamp())
    for message in message_iterator:
        timestamp = timestamp_from_message(message)
        user = user_from_message(message)
        if user is None:
            continue

        if user in recent_senders:
            continue

        if NOW - timestamp < 60 * 24 * 60 * 60:
            recent_senders.add(user)

        sender_counts[user] += 1
    for user, count in sender_counts.items():
        if count > 10:
            recent_senders.add(user)

    long_term_idle = set()

    for user_id in all_user_ids_iterator:
        if user_id in recent_senders:
            continue
        zulip_user_id = zulip_user_id_from_user(user_id)
        long_term_idle.add(zulip_user_id)

    for user_profile_row in zerver_userprofile:
        if user_profile_row["id"] in long_term_idle:
            user_profile_row["long_term_idle"] = True
            # Setting last_active_message_id to 1 means the user, if
            # imported, will get the full message history for the
            # streams they were on.
            user_profile_row["last_active_message_id"] = 1

    return long_term_idle
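A toy, self-contained walk-through of the heuristic (fake data; the dict keys simply match the accessor callbacks passed in, and this assumes a configured environment where this module's helpers are importable):

import time

now = time.time()
fake_messages = [
    {"sender": "alice", "ts": now - 10},        # recent sender: stays active
    {"sender": "bob", "ts": now - 90 * 86400},  # one old message: long-term idle
]
idle_zulip_ids = long_term_idle_helper(
    iter(fake_messages),
    lambda message: message["sender"],
    lambda message: message["ts"],
    {"alice": 1, "bob": 2}.__getitem__,
    iter(["alice", "bob"]),
    [{"id": 1}, {"id": 2}],
)
assert idle_zulip_ids == {2}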
This function does most of the work for processing emoticons, the bulk of which is copying files. We also write a json file with metadata. Finally, we return a list of RealmEmoji dicts to our caller. In our data_dir we have a pretty simple setup: The exported JSON file will have emoji rows if it contains any custom emoji { "type": "emoji", "emoji": {"name": "peerdium", "image": "exported_emoji/h15ni7kf1bnj7jeua4qhmctsdo/image"} } { "type": "emoji", "emoji": {"name": "tick", "image": "exported_emoji/7u7x8ytgp78q8jir81o9ejwwnr/image"} } exported_emoji/ - contains a bunch of image files: exported_emoji/7u7x8ytgp78q8jir81o9ejwwnr/image exported_emoji/h15ni7kf1bnj7jeua4qhmctsdo/image We move all the relevant files to Zulip's more nested directory structure.
def write_emoticon_data( realm_id: int, custom_emoji_data: List[Dict[str, Any]], data_dir: str, output_dir: str ) -> List[ZerverFieldsT]: """ This function does most of the work for processing emoticons, the bulk of which is copying files. We also write a json file with metadata. Finally, we return a list of RealmEmoji dicts to our caller. In our data_dir we have a pretty simple setup: The exported JSON file will have emoji rows if it contains any custom emoji { "type": "emoji", "emoji": {"name": "peerdium", "image": "exported_emoji/h15ni7kf1bnj7jeua4qhmctsdo/image"} } { "type": "emoji", "emoji": {"name": "tick", "image": "exported_emoji/7u7x8ytgp78q8jir81o9ejwwnr/image"} } exported_emoji/ - contains a bunch of image files: exported_emoji/7u7x8ytgp78q8jir81o9ejwwnr/image exported_emoji/h15ni7kf1bnj7jeua4qhmctsdo/image We move all the relevant files to Zulip's more nested directory structure. """ logging.info("Starting to process emoticons") flat_data = [ dict( path=d["image"], name=d["name"], ) for d in custom_emoji_data ] emoji_folder = os.path.join(output_dir, "emoji") os.makedirs(emoji_folder, exist_ok=True) def process(data: ZerverFieldsT) -> ZerverFieldsT: source_sub_path = data["path"] source_path = os.path.join(data_dir, source_sub_path) target_fn = data["name"] target_sub_path = RealmEmoji.PATH_ID_TEMPLATE.format( realm_id=realm_id, emoji_file_name=target_fn, ) target_path = os.path.join(emoji_folder, target_sub_path) os.makedirs(os.path.dirname(target_path), exist_ok=True) source_path = os.path.abspath(source_path) target_path = os.path.abspath(target_path) shutil.copyfile(source_path, target_path) return dict( path=target_path, s3_path=target_path, file_name=target_fn, realm_id=realm_id, name=data["name"], ) emoji_records = list(map(process, flat_data)) create_converted_data_files(emoji_records, output_dir, "/emoji/records.json") realmemoji = [ build_realm_emoji( realm_id=realm_id, name=rec["name"], id=NEXT_ID("realmemoji"), file_name=rec["file_name"], ) for rec in emoji_records ] logging.info("Done processing emoticons") return realmemoji
Use like this: NEXT_ID = sequencer() message_id = NEXT_ID('message')
def sequencer() -> Callable[[str], int]: """ Use like this: NEXT_ID = sequencer() message_id = NEXT_ID('message') """ seq_dict: Dict[str, Callable[[], int]] = {} def next_one(name: str) -> int: if name not in seq_dict: seq_dict[name] = _seq() seq = seq_dict[name] return seq() return next_one
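The `_seq` helper used above is defined elsewhere in this module; here is a minimal sketch consistent with how sequencer calls it (a factory returning a zero-argument callable yielding successive integers):

import itertools
from typing import Callable

def _seq() -> Callable[[], int]:
    # Each independent sequence counts up from 1 in this sketch.
    counter = itertools.count(1)
    return lambda: next(counter)

NEXT_ID = sequencer()
assert NEXT_ID("message") == 1
assert NEXT_ID("message") == 2
assert NEXT_ID("user") == 1  # each name gets its own sequence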
Returns:
1. realm, converted realm data
2. slack_user_id_to_zulip_user_id, which is a dictionary to map from Slack user id to Zulip user id
3. slack_recipient_name_to_zulip_recipient_id, which is a dictionary to map from Slack recipient name (channel names, mpim names, usernames, etc.) to Zulip recipient id
4. added_channels, which is a dictionary to map from channel name to channel id, Zulip stream_id
5. added_mpims, which is a dictionary to map from MPIM name to MPIM id, Zulip huddle_id
6. dm_members, which is a dictionary to map from DM id to tuple of DM participants
7. avatars, which is a list used to map avatars to Zulip's avatar records.json
8. emoji_url_map, which maps emoji names to their Slack URLs
def slack_workspace_to_realm(
    domain_name: str,
    realm_id: int,
    user_list: List[ZerverFieldsT],
    realm_subdomain: str,
    slack_data_dir: str,
    custom_emoji_list: ZerverFieldsT,
) -> Tuple[
    ZerverFieldsT,
    SlackToZulipUserIDT,
    SlackToZulipRecipientT,
    AddedChannelsT,
    AddedMPIMsT,
    DMMembersT,
    List[ZerverFieldsT],
    ZerverFieldsT,
]:
    """
    Returns:
    1. realm, converted realm data
    2. slack_user_id_to_zulip_user_id, which is a dictionary to map from Slack user id to Zulip user id
    3. slack_recipient_name_to_zulip_recipient_id, which is a dictionary to map from Slack recipient
       name (channel names, mpim names, usernames, etc.) to Zulip recipient id
    4. added_channels, which is a dictionary to map from channel name to channel id, Zulip stream_id
    5. added_mpims, which is a dictionary to map from MPIM name to MPIM id, Zulip huddle_id
    6. dm_members, which is a dictionary to map from DM id to tuple of DM participants
    7. avatars, which is a list used to map avatars to Zulip's avatar records.json
    8. emoji_url_map, which maps emoji names to their Slack URLs
    """
    NOW = float(timezone_now().timestamp())

    zerver_realm: List[ZerverFieldsT] = build_zerver_realm(realm_id, realm_subdomain, NOW, "Slack")
    realm = build_realm(zerver_realm, realm_id, domain_name)

    (
        zerver_userprofile,
        avatars,
        slack_user_id_to_zulip_user_id,
        zerver_customprofilefield,
        zerver_customprofilefield_value,
    ) = users_to_zerver_userprofile(slack_data_dir, user_list, realm_id, int(NOW), domain_name)
    (
        realm,
        added_channels,
        added_mpims,
        dm_members,
        slack_recipient_name_to_zulip_recipient_id,
    ) = channels_to_zerver_stream(
        slack_data_dir, realm_id, realm, slack_user_id_to_zulip_user_id, zerver_userprofile
    )

    zerver_realmemoji, emoji_url_map = build_realmemoji(custom_emoji_list, realm_id)
    realm["zerver_realmemoji"] = zerver_realmemoji

    # See https://zulip.com/help/set-default-streams-for-new-users
    # for documentation on zerver_defaultstream
    realm["zerver_userprofile"] = zerver_userprofile

    realm["zerver_customprofilefield"] = zerver_customprofilefield
    realm["zerver_customprofilefieldvalue"] = zerver_customprofilefield_value

    return (
        realm,
        slack_user_id_to_zulip_user_id,
        slack_recipient_name_to_zulip_recipient_id,
        added_channels,
        added_mpims,
        dm_members,
        avatars,
        emoji_url_map,
    )
Returns:
1. zerver_userprofile, which is a list of user profiles
2. avatar_list, which is a list used to map avatars to Zulip's avatar records.json
3. slack_user_id_to_zulip_user_id, which is a dictionary to map from Slack user ID to Zulip user ID
4. zerver_customprofilefield, which is a list of all custom profile fields
5. zerver_customprofilefield_values, which is a list of the users' custom profile field values
def users_to_zerver_userprofile(
    slack_data_dir: str, users: List[ZerverFieldsT], realm_id: int, timestamp: Any, domain_name: str
) -> Tuple[
    List[ZerverFieldsT],
    List[ZerverFieldsT],
    SlackToZulipUserIDT,
    List[ZerverFieldsT],
    List[ZerverFieldsT],
]:
    """
    Returns:
    1. zerver_userprofile, which is a list of user profiles
    2. avatar_list, which is a list used to map avatars to Zulip's avatar records.json
    3. slack_user_id_to_zulip_user_id, which is a dictionary to map from Slack user ID to Zulip user ID
    4. zerver_customprofilefield, which is a list of all custom profile fields
    5. zerver_customprofilefield_values, which is a list of the users' custom profile field values
    """
    logging.info("######### IMPORTING USERS STARTED #########\n")
    zerver_userprofile = []
    zerver_customprofilefield: List[ZerverFieldsT] = []
    zerver_customprofilefield_values: List[ZerverFieldsT] = []
    avatar_list: List[ZerverFieldsT] = []
    slack_user_id_to_zulip_user_id = {}

    # The user data we get from the Slack API does not contain custom profile data.
    # Hence we get it from the Slack zip file.
    slack_data_file_user_list = get_data_file(slack_data_dir + "/users.json")

    slack_user_id_to_custom_profile_fields: ZerverFieldsT = {}
    slack_custom_field_name_to_zulip_custom_field_id: ZerverFieldsT = {}

    for user in slack_data_file_user_list:
        process_slack_custom_fields(user, slack_user_id_to_custom_profile_fields)

    # Slack has exactly one primary owner; see
    # https://get.slack.help/hc/en-us/articles/201912948-Owners-and-Administrators
    # We reserve the first user id so that the primary owner is imported
    # before all other users.
    user_id_count = custom_profile_field_value_id_count = custom_profile_field_id_count = 0
    primary_owner_id = user_id_count
    user_id_count += 1

    found_emails: Dict[str, int] = {}
    for user in users:
        slack_user_id = user["id"]

        if user.get("is_primary_owner", False):
            user_id = primary_owner_id
        else:
            user_id = user_id_count

        email = get_user_email(user, domain_name)
        if email.lower() in found_emails:
            slack_user_id_to_zulip_user_id[slack_user_id] = found_emails[email.lower()]
            logging.info("%s: %s MERGED", slack_user_id, email)
            continue
        found_emails[email.lower()] = user_id

        # ref: https://zulip.com/help/change-your-profile-picture
        avatar_url = build_avatar_url(
            slack_user_id, user["team_id"], user["profile"]["avatar_hash"]
        )
        build_avatar(user_id, realm_id, email, avatar_url, timestamp, avatar_list)
        role = UserProfile.ROLE_MEMBER
        if get_owner(user):
            role = UserProfile.ROLE_REALM_OWNER
        elif get_admin(user):
            role = UserProfile.ROLE_REALM_ADMINISTRATOR
        if get_guest(user):
            role = UserProfile.ROLE_GUEST
        timezone = get_user_timezone(user)

        if slack_user_id in slack_user_id_to_custom_profile_fields:
            (
                slack_custom_field_name_to_zulip_custom_field_id,
                custom_profile_field_id_count,
            ) = build_customprofile_field(
                zerver_customprofilefield,
                slack_user_id_to_custom_profile_fields[slack_user_id],
                custom_profile_field_id_count,
                realm_id,
                slack_custom_field_name_to_zulip_custom_field_id,
            )
            custom_profile_field_value_id_count = build_customprofilefields_values(
                slack_custom_field_name_to_zulip_custom_field_id,
                slack_user_id_to_custom_profile_fields[slack_user_id],
                user_id,
                custom_profile_field_value_id_count,
                zerver_customprofilefield_values,
            )

        userprofile = UserProfile(
            full_name=get_user_full_name(user),
            is_active=not user.get("deleted", False) and not user["is_mirror_dummy"],
            is_mirror_dummy=user["is_mirror_dummy"],
            id=user_id,
            email=email,
            delivery_email=email,
            avatar_source="U",
            is_bot=user.get("is_bot", False),
            role=role,
            bot_type=1 if user.get("is_bot", False) else None,  # 1 is UserProfile.DEFAULT_BOT
            date_joined=timestamp,
            timezone=timezone,
            last_login=timestamp,
        )
        userprofile_dict = model_to_dict(userprofile)
        # Set realm id separately as the corresponding realm is not yet a Realm model instance
        userprofile_dict["realm"] = realm_id

        zerver_userprofile.append(userprofile_dict)
        slack_user_id_to_zulip_user_id[slack_user_id] = user_id
        if not user.get("is_primary_owner", False):
            user_id_count += 1

        logging.info("%s: %s -> %s", slack_user_id, user["name"], userprofile_dict["email"])

    process_customprofilefields(zerver_customprofilefield, zerver_customprofilefield_values)
    logging.info("######### IMPORTING USERS FINISHED #########\n")
    return (
        zerver_userprofile,
        avatar_list,
        slack_user_id_to_zulip_user_id,
        zerver_customprofilefield,
        zerver_customprofilefield_values,
    )
Returns:
1. realm, converted realm data
2. added_channels, which is a dictionary to map from channel name to channel id, Zulip stream_id
3. added_mpims, which is a dictionary to map from MPIM (multiparty IM) name to MPIM id, Zulip huddle_id
4. dm_members, which is a dictionary to map from DM id to tuple of DM participants
5. slack_recipient_name_to_zulip_recipient_id, which is a dictionary to map from Slack recipient name (channel names, mpim names, usernames, etc.) to Zulip recipient_id
def channels_to_zerver_stream(
    slack_data_dir: str,
    realm_id: int,
    realm: Dict[str, Any],
    slack_user_id_to_zulip_user_id: SlackToZulipUserIDT,
    zerver_userprofile: List[ZerverFieldsT],
) -> Tuple[
    Dict[str, List[ZerverFieldsT]], AddedChannelsT, AddedMPIMsT, DMMembersT, SlackToZulipRecipientT
]:
    """
    Returns:
    1. realm, converted realm data
    2. added_channels, which is a dictionary to map from channel name to channel id, Zulip stream_id
    3. added_mpims, which is a dictionary to map from MPIM (multiparty IM) name to MPIM id, Zulip huddle_id
    4. dm_members, which is a dictionary to map from DM id to tuple of DM participants
    5. slack_recipient_name_to_zulip_recipient_id, which is a dictionary to map from Slack recipient
       name (channel names, mpim names, usernames, etc.) to Zulip recipient_id
    """
    logging.info("######### IMPORTING CHANNELS STARTED #########\n")

    added_channels = {}
    added_mpims = {}
    dm_members = {}
    slack_recipient_name_to_zulip_recipient_id = {}

    realm["zerver_stream"] = []
    realm["zerver_huddle"] = []
    realm["zerver_subscription"] = []
    realm["zerver_recipient"] = []
    realm["zerver_defaultstream"] = []

    subscription_id_count = recipient_id_count = 0
    stream_id_count = defaultstream_id = 0
    huddle_id_count = 0

    def process_channels(channels: List[Dict[str, Any]], invite_only: bool = False) -> None:
        nonlocal stream_id_count
        nonlocal recipient_id_count
        nonlocal defaultstream_id
        nonlocal subscription_id_count

        for channel in channels:
            # Map Slack's topic and purpose content into Zulip's stream description.
            # WARN: This mapping is lossy, since the topic.creator, topic.last_set,
            # purpose.creator, and purpose.last_set fields are not preserved.
            description = channel["purpose"]["value"]
            stream_id = stream_id_count
            recipient_id = recipient_id_count

            stream = build_stream(
                float(channel["created"]),
                realm_id,
                channel["name"],
                description,
                stream_id,
                channel["is_archived"],
                invite_only,
            )
            realm["zerver_stream"].append(stream)

            slack_default_channels = ["general", "random"]
            if channel["name"] in slack_default_channels and not stream["deactivated"]:
                defaultstream = build_defaultstream(realm_id, stream_id, defaultstream_id)
                realm["zerver_defaultstream"].append(defaultstream)
                defaultstream_id += 1

            added_channels[stream["name"]] = (channel["id"], stream_id)

            recipient = build_recipient(stream_id, recipient_id, Recipient.STREAM)
            realm["zerver_recipient"].append(recipient)
            slack_recipient_name_to_zulip_recipient_id[stream["name"]] = recipient_id

            subscription_id_count = get_subscription(
                channel["members"],
                realm["zerver_subscription"],
                recipient_id,
                slack_user_id_to_zulip_user_id,
                subscription_id_count,
            )

            stream_id_count += 1
            recipient_id_count += 1
            logging.info("%s -> created", channel["name"])

            # TODO: map Slack's pins to Zulip's stars.
            # Note the security model: Slack's pins are known to the team owner,
            # as is evident from where they are stored (with the channel data):
            # "pins": [
            #     {
            #         "id": "1444755381.000003",
            #         "type": "C",
            #         "user": "U061A5N1G",
            #         "owner": "U061A5N1G",
            #         "created": "1444755463"
            #     }
            # ],

    public_channels = get_data_file(slack_data_dir + "/channels.json")
    process_channels(public_channels)

    try:
        private_channels = get_data_file(slack_data_dir + "/groups.json")
    except FileNotFoundError:
        private_channels = []
    process_channels(private_channels, True)

    # mpim is the Slack equivalent of huddle.
    def process_mpims(mpims: List[Dict[str, Any]]) -> None:
        nonlocal huddle_id_count
        nonlocal recipient_id_count
        nonlocal subscription_id_count

        for mpim in mpims:
            huddle = build_huddle(huddle_id_count)
            realm["zerver_huddle"].append(huddle)

            added_mpims[mpim["name"]] = (mpim["id"], huddle_id_count)

            recipient = build_recipient(
                huddle_id_count, recipient_id_count, Recipient.DIRECT_MESSAGE_GROUP
            )
            realm["zerver_recipient"].append(recipient)
            slack_recipient_name_to_zulip_recipient_id[mpim["name"]] = recipient_id_count

            subscription_id_count = get_subscription(
                mpim["members"],
                realm["zerver_subscription"],
                recipient_id_count,
                slack_user_id_to_zulip_user_id,
                subscription_id_count,
            )

            huddle_id_count += 1
            recipient_id_count += 1
            logging.info("%s -> created", mpim["name"])

    try:
        mpims = get_data_file(slack_data_dir + "/mpims.json")
    except FileNotFoundError:
        mpims = []
    process_mpims(mpims)

    # This may contain duplicate Zulip user ids, since we merge multiple
    # Slack same-email shared-channel users into one Zulip dummy user.
    zulip_user_to_recipient: Dict[int, int] = {}
    for slack_user_id, zulip_user_id in slack_user_id_to_zulip_user_id.items():
        if zulip_user_id in zulip_user_to_recipient:
            slack_recipient_name_to_zulip_recipient_id[slack_user_id] = zulip_user_to_recipient[
                zulip_user_id
            ]
            continue
        recipient = build_recipient(zulip_user_id, recipient_id_count, Recipient.PERSONAL)
        slack_recipient_name_to_zulip_recipient_id[slack_user_id] = recipient_id_count
        zulip_user_to_recipient[zulip_user_id] = recipient_id_count
        sub = build_subscription(recipient_id_count, zulip_user_id, subscription_id_count)
        realm["zerver_recipient"].append(recipient)
        realm["zerver_subscription"].append(sub)
        recipient_id_count += 1
        subscription_id_count += 1

    def process_dms(dms: List[Dict[str, Any]]) -> None:
        for dm in dms:
            user_a = dm["members"][0]
            user_b = dm["members"][1]
            dm_members[dm["id"]] = (user_a, user_b)

    try:
        dms = get_data_file(slack_data_dir + "/dms.json")
    except FileNotFoundError:
        dms = []
    process_dms(dms)

    logging.info("######### IMPORTING CHANNELS FINISHED #########\n")
    return (
        realm,
        added_channels,
        added_mpims,
        dm_members,
        slack_recipient_name_to_zulip_recipient_id,
    )
Returns: 1. reactions, which is a list of the reactions 2. uploads, which is a list of uploads to be mapped in uploads records.json 3. attachment, which is a list of the attachments
def convert_slack_workspace_messages( slack_data_dir: str, users: List[ZerverFieldsT], realm_id: int, slack_user_id_to_zulip_user_id: SlackToZulipUserIDT, slack_recipient_name_to_zulip_recipient_id: SlackToZulipRecipientT, added_channels: AddedChannelsT, added_mpims: AddedMPIMsT, dm_members: DMMembersT, realm: ZerverFieldsT, zerver_userprofile: List[ZerverFieldsT], zerver_realmemoji: List[ZerverFieldsT], domain_name: str, output_dir: str, convert_slack_threads: bool, chunk_size: int = MESSAGE_BATCH_CHUNK_SIZE, ) -> Tuple[List[ZerverFieldsT], List[ZerverFieldsT], List[ZerverFieldsT]]: """ Returns: 1. reactions, which is a list of the reactions 2. uploads, which is a list of uploads to be mapped in uploads records.json 3. attachment, which is a list of the attachments """ long_term_idle = process_long_term_idle_users( slack_data_dir, users, slack_user_id_to_zulip_user_id, added_channels, added_mpims, dm_members, zerver_userprofile, ) all_messages = get_messages_iterator(slack_data_dir, added_channels, added_mpims, dm_members) logging.info("######### IMPORTING MESSAGES STARTED #########\n") total_reactions: List[ZerverFieldsT] = [] total_attachments: List[ZerverFieldsT] = [] total_uploads: List[ZerverFieldsT] = [] dump_file_id = 1 subscriber_map = make_subscriber_map( zerver_subscription=realm["zerver_subscription"], ) while message_data := list(itertools.islice(all_messages, chunk_size)): ( zerver_message, zerver_usermessage, attachment, uploads, reactions, ) = channel_message_to_zerver_message( realm_id, users, slack_user_id_to_zulip_user_id, slack_recipient_name_to_zulip_recipient_id, message_data, zerver_realmemoji, subscriber_map, added_channels, dm_members, domain_name, long_term_idle, convert_slack_threads, ) message_json = dict(zerver_message=zerver_message, zerver_usermessage=zerver_usermessage) message_file = f"/messages-{dump_file_id:06}.json" logging.info("Writing messages to %s\n", output_dir + message_file) create_converted_data_files(message_json, output_dir, message_file) total_reactions += reactions total_attachments += attachment total_uploads += uploads dump_file_id += 1 logging.info("######### IMPORTING MESSAGES FINISHED #########\n") return total_reactions, total_uploads, total_attachments
This function is an iterator that returns all the messages across all Slack channels, in order by timestamp. It's important not to read all the messages into memory at once, because for large imports doing so can get the process OOM-killed.
def get_messages_iterator(
    slack_data_dir: str,
    added_channels: Dict[str, Any],
    added_mpims: AddedMPIMsT,
    dm_members: DMMembersT,
) -> Iterator[ZerverFieldsT]:
    """This function is an iterator that returns all the messages across
    all Slack channels, in order by timestamp.  It's important not to
    read all the messages into memory at once, because for large
    imports doing so can get the process OOM-killed."""
    dir_names = [*added_channels, *added_mpims, *dm_members]
    all_json_names: Dict[str, List[str]] = defaultdict(list)
    for dir_name in dir_names:
        dir_path = os.path.join(slack_data_dir, dir_name)
        json_names = os.listdir(dir_path)
        for json_name in json_names:
            if json_name.endswith(".json"):
                all_json_names[json_name].append(dir_path)

    # Sort the JSON file names, which are named by date.
    for json_name in sorted(all_json_names.keys()):
        messages_for_one_day: List[ZerverFieldsT] = []
        for dir_path in all_json_names[json_name]:
            message_dir = os.path.join(dir_path, json_name)
            dir_name = os.path.basename(dir_path)
            messages = []
            for message in get_data_file(message_dir):
                if message.get("user") == "U00":
                    # Skip messages involving the "U00" user,
                    # which is apparently used in some channel rename
                    # messages.  It's likely just the result of some
                    # bug in Slack's export system.  Arguably we could
                    # change this to point to slackbot instead, but
                    # skipping those messages is simpler.
                    continue
                if message.get("mimetype") == "application/vnd.slack-docs":
                    # This is a Slack "Post" which is HTML-formatted,
                    # and we don't have a clean way to import at the
                    # moment.  We skip them on import.
                    continue
                if dir_name in added_channels:
                    message["channel_name"] = dir_name
                elif dir_name in added_mpims:
                    message["mpim_name"] = dir_name
                elif dir_name in dm_members:
                    message["pm_name"] = dir_name
                messages.append(message)
            messages_for_one_day += messages

        # We sort the messages by timestamp so they are yielded in the
        # proper date order.
        yield from sorted(messages_for_one_day, key=get_timestamp_from_message)
Returns: 1. zerver_message, which is a list of the messages 2. zerver_usermessage, which is a list of the usermessages 3. zerver_attachment, which is a list of the attachments 4. uploads_list, which is a list of uploads to be mapped in uploads records.json 5. reaction_list, which is a list of all user reactions
def channel_message_to_zerver_message(
    realm_id: int,
    users: List[ZerverFieldsT],
    slack_user_id_to_zulip_user_id: SlackToZulipUserIDT,
    slack_recipient_name_to_zulip_recipient_id: SlackToZulipRecipientT,
    all_messages: List[ZerverFieldsT],
    zerver_realmemoji: List[ZerverFieldsT],
    subscriber_map: Dict[int, Set[int]],
    added_channels: AddedChannelsT,
    dm_members: DMMembersT,
    domain_name: str,
    long_term_idle: Set[int],
    convert_slack_threads: bool,
) -> Tuple[
    List[ZerverFieldsT],
    List[ZerverFieldsT],
    List[ZerverFieldsT],
    List[ZerverFieldsT],
    List[ZerverFieldsT],
]:
    """
    Returns:
    1. zerver_message, which is a list of the messages
    2. zerver_usermessage, which is a list of the usermessages
    3. zerver_attachment, which is a list of the attachments
    4. uploads_list, which is a list of uploads to be mapped in uploads records.json
    5. reaction_list, which is a list of all user reactions
    """
    zerver_message = []
    zerver_usermessage: List[ZerverFieldsT] = []
    uploads_list: List[ZerverFieldsT] = []
    zerver_attachment: List[ZerverFieldsT] = []
    reaction_list: List[ZerverFieldsT] = []

    total_user_messages = 0
    total_skipped_user_messages = 0
    thread_counter: Dict[str, int] = defaultdict(int)
    thread_map: Dict[str, str] = {}
    for message in all_messages:
        slack_user_id = get_message_sending_user(message)
        if not slack_user_id:
            # Ignore messages without a slack_user_id.
            # These are sometimes produced by Slack.
            continue

        subtype = message.get("subtype", False)
        if subtype in [
            # Zulip doesn't have a pinned_item concept
            "pinned_item",
            "unpinned_item",
            # Slack's channel join/leave notices are spammy
            "channel_join",
            "channel_leave",
            "channel_name",
        ]:
            continue

        try:
            content, mentioned_user_ids, has_link = convert_to_zulip_markdown(
                message["text"], users, added_channels, slack_user_id_to_zulip_user_id
            )
        except Exception:
            print("Slack message unexpectedly missing text representation:")
            print(orjson.dumps(message, option=orjson.OPT_INDENT_2).decode())
            continue
        rendered_content = None

        if "channel_name" in message:
            is_private = False
            recipient_id = slack_recipient_name_to_zulip_recipient_id[message["channel_name"]]
        elif "mpim_name" in message:
            is_private = True
            recipient_id = slack_recipient_name_to_zulip_recipient_id[message["mpim_name"]]
        elif "pm_name" in message:
            is_private = True
            sender = get_message_sending_user(message)
            members = dm_members[message["pm_name"]]
            if sender == members[0]:
                recipient_id = slack_recipient_name_to_zulip_recipient_id[members[1]]
                sender_recipient_id = slack_recipient_name_to_zulip_recipient_id[members[0]]
            else:
                recipient_id = slack_recipient_name_to_zulip_recipient_id[members[0]]
                sender_recipient_id = slack_recipient_name_to_zulip_recipient_id[members[1]]

        message_id = NEXT_ID("message")

        if "reactions" in message:
            build_reactions(
                reaction_list,
                message["reactions"],
                slack_user_id_to_zulip_user_id,
                message_id,
                zerver_realmemoji,
            )

        # Process different subtypes of Slack messages.
        # Subtypes which have only the action in the message should
        # be rendered with '/me' in the content initially.
        # For example, "sh_room_created" has the message 'started a call',
        # which should be displayed as '/me started a call'.
        if subtype in ["bot_add", "sh_room_created", "me_message"]:
            content = f"/me {content}"
        if subtype == "file_comment":
            # The file_comment message type only indicates the
            # responsible user in a subfield.
            message["user"] = message["comment"]["user"]

        file_info = process_message_files(
            message=message,
            domain_name=domain_name,
            realm_id=realm_id,
            message_id=message_id,
            slack_user_id=slack_user_id,
            users=users,
            slack_user_id_to_zulip_user_id=slack_user_id_to_zulip_user_id,
            zerver_attachment=zerver_attachment,
            uploads_list=uploads_list,
        )

        content += file_info["content"]
        has_link = has_link or file_info["has_link"]

        has_attachment = file_info["has_attachment"]
        has_image = file_info["has_image"]

        # Slack's unthreaded messages go into a single topic, while
        # threads each generate a unique topic labeled by the date and
        # a counter among topics on that day.
        topic_name = "imported from Slack"
        if convert_slack_threads and "thread_ts" in message:
            thread_ts = datetime.fromtimestamp(float(message["thread_ts"]), tz=timezone.utc)
            thread_ts_str = thread_ts.strftime(r"%Y/%m/%d %H:%M:%S")
            # The topic name is "2015-08-18 Slack thread 2", where the counter
            # at the end disambiguates threads started on the same day.
            if thread_ts_str in thread_map:
                topic_name = thread_map[thread_ts_str]
            else:
                thread_date = thread_ts.strftime(r"%Y-%m-%d")
                thread_counter[thread_date] += 1
                count = thread_counter[thread_date]
                topic_name = f"{thread_date} Slack thread {count}"
                thread_map[thread_ts_str] = topic_name

        zulip_message = build_message(
            topic_name=topic_name,
            date_sent=get_timestamp_from_message(message),
            message_id=message_id,
            content=content,
            rendered_content=rendered_content,
            user_id=slack_user_id_to_zulip_user_id[slack_user_id],
            recipient_id=recipient_id,
            realm_id=realm_id,
            has_image=has_image,
            has_link=has_link,
            has_attachment=has_attachment,
        )
        zerver_message.append(zulip_message)

        (num_created, num_skipped) = build_usermessages(
            zerver_usermessage=zerver_usermessage,
            subscriber_map=subscriber_map,
            recipient_id=recipient_id,
            mentioned_user_ids=mentioned_user_ids,
            message_id=message_id,
            is_private=is_private,
            long_term_idle=long_term_idle,
        )
        total_user_messages += num_created
        total_skipped_user_messages += num_skipped

        if "pm_name" in message and recipient_id != sender_recipient_id:
            (num_created, num_skipped) = build_usermessages(
                zerver_usermessage=zerver_usermessage,
                subscriber_map=subscriber_map,
                recipient_id=sender_recipient_id,
                mentioned_user_ids=mentioned_user_ids,
                message_id=message_id,
                is_private=is_private,
                long_term_idle=long_term_idle,
            )
            total_user_messages += num_created
            total_skipped_user_messages += num_skipped

    logging.debug(
        "Created %s UserMessages; deferred %s due to long-term idle",
        total_user_messages,
        total_skipped_user_messages,
    )
    return zerver_message, zerver_usermessage, zerver_attachment, uploads_list, reaction_list
Returns: 1. For strikethrough formatting: This maps Slack's '~strike~' to Zulip's '~~strike~~' 2. For bold formatting: This maps Slack's '*bold*' to Zulip's '**bold**' 3. For italic formatting: This maps Slack's '_italic_' to Zulip's '*italic*'
def convert_markdown_syntax(text: str, regex: str, zulip_keyword: str) -> str: """ Returns: 1. For strikethrough formatting: This maps Slack's '~strike~' to Zulip's '~~strike~~' 2. For bold formatting: This maps Slack's '*bold*' to Zulip's '**bold**' 3. For italic formatting: This maps Slack's '_italic_' to Zulip's '*italic*' """ for match in re.finditer(regex, text, re.VERBOSE): converted_token = ( match.group(1) + zulip_keyword + match.group(3) + match.group(4) + zulip_keyword + match.group(6) ) text = text.replace(match.group(0), converted_token) return text
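The real BOLD_REGEX/STRIKETHROUGH_REGEX/ITALIC_REGEX patterns live elsewhere in this module; the hypothetical pattern below merely mimics their six-group shape (prefix, opening delimiter, leading spaces, body, closing delimiter, suffix) so the group arithmetic above is easy to follow:

import re

# Illustrative only: a six-group verbose pattern matching *bold* text.
FAKE_BOLD_REGEX = r"""
    (^|\s)       # group 1: start of string or preceding whitespace
    (\*)         # group 2: opening Slack delimiter (dropped)
    (\s*)        # group 3: spaces kept verbatim inside the markers
    ([^*]+)      # group 4: the emphasized text itself
    (\*)         # group 5: closing Slack delimiter (dropped)
    (\s|$)       # group 6: trailing whitespace or end of string
"""

print(convert_markdown_syntax("this is *important* stuff", FAKE_BOLD_REGEX, "**"))
# -> this is **important** stuff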
1. Converts '<https://foo.com>' to 'https://foo.com' 2. Converts '<https://foo.com|foo>' to 'https://foo.com|foo'
def convert_link_format(text: str) -> Tuple[str, bool]: """ 1. Converts '<https://foo.com>' to 'https://foo.com' 2. Converts '<https://foo.com|foo>' to 'https://foo.com|foo' """ has_link = False for match in re.finditer(LINK_REGEX, text, re.VERBOSE): converted_text = match.group(0).replace(">", "").replace("<", "") has_link = True text = text.replace(match.group(0), converted_text) return text, has_link
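For example (LINK_REGEX is defined elsewhere in this module):

text, has_link = convert_link_format("docs at <https://zulip.com|Zulip> and <https://example.com>")
# text == "docs at https://zulip.com|Zulip and https://example.com"
# has_link == True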
1. Converts '<mailto:[email protected]>' to 'mailto:[email protected]' 2. Converts '<mailto:[email protected]|[email protected]>' to 'mailto:[email protected]'
def convert_mailto_format(text: str) -> Tuple[str, bool]: """ 1. Converts '<mailto:[email protected]>' to 'mailto:[email protected]' 2. Converts '<mailto:[email protected]|[email protected]>' to 'mailto:[email protected]' """ has_link = False for match in re.finditer(SLACK_MAILTO_REGEX, text, re.VERBOSE): has_link = True text = text.replace(match.group(0), match.group(1)) return text, has_link
The logic in this function is fairly tricky. The essence is that a file should be cleaned up if and only if it is not referenced by any Message, ScheduledMessage or ArchivedMessage. The way to find that out is through the Attachment and ArchivedAttachment tables. The queries are complicated by the fact that an uploaded file may have either only an Attachment row, only an ArchivedAttachment row, or both - depending on whether some, all or none of the messages linking to it have been archived.
def get_old_unclaimed_attachments(
    weeks_ago: int,
) -> Tuple[QuerySet[Attachment], QuerySet[ArchivedAttachment]]:
    """
    The logic in this function is fairly tricky.  The essence is that
    a file should be cleaned up if and only if it is not referenced by any
    Message, ScheduledMessage or ArchivedMessage.  The way to find that
    out is through the Attachment and ArchivedAttachment tables.
    The queries are complicated by the fact that an uploaded file
    may have either only an Attachment row, only an ArchivedAttachment row,
    or both - depending on whether some, all or none of the messages
    linking to it have been archived.
    """
    delta_weeks_ago = timezone_now() - timedelta(weeks=weeks_ago)

    # The Attachment vs ArchivedAttachment queries are asymmetric because only
    # Attachment has the scheduled_messages relation.
    old_attachments = Attachment.objects.annotate(
        has_other_messages=Exists(
            ArchivedAttachment.objects.filter(id=OuterRef("id")).exclude(messages=None)
        )
    ).filter(
        messages=None,
        scheduled_messages=None,
        create_time__lt=delta_weeks_ago,
        has_other_messages=False,
    )
    old_archived_attachments = ArchivedAttachment.objects.annotate(
        has_other_messages=Exists(
            Attachment.objects.filter(id=OuterRef("id")).exclude(
                messages=None, scheduled_messages=None
            )
        )
    ).filter(messages=None, create_time__lt=delta_weeks_ago, has_other_messages=False)

    return old_attachments, old_archived_attachments
DEPRECATED: We should start using get_avatar_field to populate users, particularly for codepaths where the client can compute gravatar URLs on the client side.
def avatar_url_from_dict(userdict: Dict[str, Any], medium: bool = False) -> str: """ DEPRECATED: We should start using get_avatar_field to populate users, particularly for codepaths where the client can compute gravatar URLs on the client side. """ url = _get_unversioned_avatar_url( userdict["id"], userdict["avatar_source"], userdict["realm_id"], email=userdict["email"], medium=medium, ) return append_url_query_string(url, "version={:d}".format(userdict["avatar_version"]))
Most of the parameters to this function map to fields by the same name in UserProfile (avatar_source, realm_id, email, etc.). Then there are these: medium - This means we want a medium-sized avatar. This can affect the "s" parameter for gravatar avatars, or it can give us something like foo-medium.png for user-uploaded avatars. client_gravatar - If the client can compute their own gravatars, this will be set to True, and we'll avoid computing them on the server (mostly to save bandwidth).
def get_avatar_field( user_id: int, realm_id: int, email: str, avatar_source: str, avatar_version: int, medium: bool, client_gravatar: bool, ) -> Optional[str]: """ Most of the parameters to this function map to fields by the same name in UserProfile (avatar_source, realm_id, email, etc.). Then there are these: medium - This means we want a medium-sized avatar. This can affect the "s" parameter for gravatar avatars, or it can give us something like foo-medium.png for user-uploaded avatars. client_gravatar - If the client can compute their own gravatars, this will be set to True, and we'll avoid computing them on the server (mostly to save bandwidth). """ if client_gravatar: """ If our client knows how to calculate gravatar hashes, we will return None and let the client compute the gravatar url. """ if settings.ENABLE_GRAVATAR and avatar_source == UserProfile.AVATAR_FROM_GRAVATAR: return None """ If we get this far, we'll compute an avatar URL that may be either user-uploaded or a gravatar, and then we'll add version info to try to avoid stale caches. """ url = _get_unversioned_avatar_url( user_profile_id=user_id, avatar_source=avatar_source, realm_id=realm_id, email=email, medium=medium, ) return append_url_query_string(url, f"version={avatar_version:d}")
Absolute URLs are used to simplify logic for applications that won't be served by browsers, such as rendering GCM notifications.
def absolute_avatar_url(user_profile: UserProfile) -> str:
    """
    Absolute URLs are used to simplify logic for applications that
    won't be served by browsers, such as rendering GCM notifications.
    """
    avatar = avatar_url(user_profile)
    # avatar_url can return None if client_gravatar=True; however, here
    # we use its default value of False.
    assert avatar is not None
    return urljoin(user_profile.realm.uri, avatar)
Compute the Gravatar hash for an email address.
def gravatar_hash(email: str) -> str: """Compute the Gravatar hash for an email address.""" # Non-ASCII characters aren't permitted by the currently active e-mail # RFCs. However, the IETF has published https://tools.ietf.org/html/rfc4952, # outlining internationalization of email addresses, and regardless if we # typo an address or someone manages to give us a non-ASCII address, let's # not error out on it. return hashlib.md5(email.lower().encode()).hexdigest()
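For reference, the resulting hash is the path component Gravatar's image endpoint expects; for example:

h = gravatar_hash("[email protected]")
# Standard Gravatar image URL shape; the size and default-image query
# parameters follow Gravatar's public API conventions.
url = f"https://secure.gravatar.com/avatar/{h}?d=identicon&s=80"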
Creates and saves a UserProfile with the given email. Has some code based off of UserManager.create_user, but doesn't .save()
def bulk_create_users(
    realm: Realm,
    users_raw: Set[Tuple[str, str, bool]],
    bot_type: Optional[int] = None,
    bot_owner: Optional[UserProfile] = None,
    tos_version: Optional[str] = None,
    timezone: str = "",
) -> None:
    """
    Creates and saves a UserProfile with the given email.
    Has some code based off of UserManager.create_user, but doesn't .save()
    """
    existing_users = frozenset(
        UserProfile.objects.filter(realm=realm).values_list("email", flat=True)
    )
    users = sorted(user_raw for user_raw in users_raw if user_raw[0] not in existing_users)

    realm_user_default = RealmUserDefault.objects.get(realm=realm)
    if bot_type is None:
        email_address_visibility = realm_user_default.email_address_visibility
    else:
        # There is no privacy motivation for limiting access to bot email addresses,
        # so we hardcode them to EMAIL_ADDRESS_VISIBILITY_EVERYONE.
        email_address_visibility = UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE

    # Now create user_profiles
    profiles_to_create: List[UserProfile] = []
    for email, full_name, active in users:
        profile = create_user_profile(
            realm,
            email,
            initial_password(email),
            active,
            bot_type,
            full_name,
            bot_owner,
            False,
            tos_version,
            timezone,
            default_language=realm.default_language,
            tutorial_status=UserProfile.TUTORIAL_FINISHED,
            email_address_visibility=email_address_visibility,
        )

        if bot_type is None:
            # This block simulates copy_default_settings from
            # zerver/lib/create_user.py.
            #
            # We cannot use 'copy_default_settings' directly here
            # because it calls '.save' after copying the settings, and
            # we are bulk creating the objects here instead.
            for settings_name in RealmUserDefault.property_types:
                if settings_name in ["default_language", "enable_login_emails"]:
                    continue
                value = getattr(realm_user_default, settings_name)
                setattr(profile, settings_name, value)

        profiles_to_create.append(profile)

    if email_address_visibility == UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE:
        UserProfile.objects.bulk_create(profiles_to_create)
    else:
        for user_profile in profiles_to_create:
            user_profile.email = user_profile.delivery_email

        UserProfile.objects.bulk_create(profiles_to_create)

        for user_profile in profiles_to_create:
            user_profile.email = get_display_email_address(user_profile)
        UserProfile.objects.bulk_update(profiles_to_create, ["email"])

    user_ids = {user.id for user in profiles_to_create}

    RealmAuditLog.objects.bulk_create(
        RealmAuditLog(
            realm=realm,
            modified_user=profile_,
            event_type=RealmAuditLog.USER_CREATED,
            event_time=profile_.date_joined,
        )
        for profile_ in profiles_to_create
    )

    recipients_to_create = [
        Recipient(type_id=user_id, type=Recipient.PERSONAL) for user_id in user_ids
    ]
    Recipient.objects.bulk_create(recipients_to_create)

    bulk_set_users_or_streams_recipient_fields(
        UserProfile, profiles_to_create, recipients_to_create
    )

    recipients_by_user_id: Dict[int, Recipient] = {}
    for recipient in recipients_to_create:
        recipients_by_user_id[recipient.type_id] = recipient

    subscriptions_to_create = [
        Subscription(
            user_profile_id=user_profile.id,
            recipient=recipients_by_user_id[user_profile.id],
            is_user_active=user_profile.is_active,
        )
        for user_profile in profiles_to_create
    ]
    Subscription.objects.bulk_create(subscriptions_to_create)

    full_members_system_group = NamedUserGroup.objects.get(
        name=SystemGroups.FULL_MEMBERS, realm=realm, is_system_group=True
    )
    members_system_group = NamedUserGroup.objects.get(
        name=SystemGroups.MEMBERS, realm=realm, is_system_group=True
    )
    group_memberships_to_create: List[UserGroupMembership] = []
    for user_profile in profiles_to_create:
        # All users are members since this function is only used to
        # create bots and test and development environment users.
        assert user_profile.role == UserProfile.ROLE_MEMBER
        group_memberships_to_create.append(
            UserGroupMembership(user_profile=user_profile, user_group=members_system_group)
        )
        if not user_profile.is_provisional_member:
            group_memberships_to_create.append(
                UserGroupMembership(user_profile=user_profile, user_group=full_members_system_group)
            )
    UserGroupMembership.objects.bulk_create(group_memberships_to_create)
    now = timezone_now()
    RealmAuditLog.objects.bulk_create(
        RealmAuditLog(
            realm=realm,
            modified_user=membership.user_profile,
            modified_user_group=membership.user_group.named_user_group,
            event_type=RealmAuditLog.USER_GROUP_DIRECT_USER_MEMBERSHIP_ADDED,
            event_time=now,
            acting_user=None,
        )
        for membership in group_memberships_to_create
    )
Decorator which applies Django caching to a function. Decorator argument is a function which computes a cache key from the original function's arguments. You are responsible for avoiding collisions with other uses of this decorator or other uses of caching.
def cache_with_key( keyfunc: Callable[ParamT, str], cache_name: Optional[str] = None, timeout: Optional[int] = None, ) -> Callable[[Callable[ParamT, ReturnT]], Callable[ParamT, ReturnT]]: """Decorator which applies Django caching to a function. Decorator argument is a function which computes a cache key from the original function's arguments. You are responsible for avoiding collisions with other uses of this decorator or other uses of caching.""" def decorator(func: Callable[ParamT, ReturnT]) -> Callable[ParamT, ReturnT]: @wraps(func) def func_with_caching(*args: ParamT.args, **kwargs: ParamT.kwargs) -> ReturnT: key = keyfunc(*args, **kwargs) try: val = cache_get(key, cache_name=cache_name) except InvalidCacheKeyError: stack_trace = traceback.format_exc() log_invalid_cache_keys(stack_trace, [key]) return func(*args, **kwargs) # Values are singleton tuples so that we can distinguish # a result of None from a missing key. if val is not None: return val[0] val = func(*args, **kwargs) if isinstance(val, QuerySetAny): logging.error( "cache_with_key attempted to store a full QuerySet object -- declining to cache", stack_info=True, ) else: cache_set(key, val, cache_name=cache_name, timeout=timeout) return val return func_with_caching return decorator
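A sketch of typical usage with an assumed key function; in real code the key function lives next to the matching cache-invalidation logic so the two stay in sync:

from typing import Any, Dict

def user_settings_cache_key(user_id: int) -> str:
    # Hypothetical key function; avoiding collisions with other cache
    # users is the caller's responsibility, per the docstring above.
    return f"user_settings:{user_id}"

@cache_with_key(user_settings_cache_key, timeout=3600 * 24 * 7)
def get_user_settings(user_id: int) -> Dict[str, Any]:
    # Imagine an expensive database lookup here; the decorator caches
    # the returned dict for a week under the key above.
    return {"user_id": user_id, "theme": "automatic"}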
Variant of cache_get_many that drops any keys that fail validation, rather than throwing an exception visible to the caller.
def safe_cache_get_many(keys: List[str], cache_name: Optional[str] = None) -> Dict[str, Any]: """Variant of cache_get_many that drops any keys that fail validation, rather than throwing an exception visible to the caller.""" try: # Almost always the keys will all be correct, so we just try # to do normal cache_get_many to avoid the overhead of # validating all the keys here. return cache_get_many(keys, cache_name) except InvalidCacheKeyError: stack_trace = traceback.format_exc() good_keys, bad_keys = filter_good_and_bad_keys(keys) log_invalid_cache_keys(stack_trace, bad_keys) return cache_get_many(good_keys, cache_name)
Variant of cache_set_many that drops saving any keys that fail validation, rather than throwing an exception visible to the caller.
def safe_cache_set_many( items: Dict[str, Any], cache_name: Optional[str] = None, timeout: Optional[int] = None ) -> None: """Variant of cache_set_many that drops saving any keys that fail validation, rather than throwing an exception visible to the caller.""" try: # Almost always the keys will all be correct, so we just try # to do normal cache_set_many to avoid the overhead of # validating all the keys here. return cache_set_many(items, cache_name, timeout) except InvalidCacheKeyError: stack_trace = traceback.format_exc() good_keys, bad_keys = filter_good_and_bad_keys(list(items.keys())) log_invalid_cache_keys(stack_trace, bad_keys) good_items = {key: items[key] for key in good_keys} return cache_set_many(good_items, cache_name, timeout)
This is a wrapper over the lru_cache function. It adds the following features on top of lru_cache: * It will not cache the result of functions called with unhashable arguments. * It will clear the cache whenever zerver.lib.cache.KEY_PREFIX changes.
def ignore_unhashable_lru_cache(
    maxsize: int = 128, typed: bool = False
) -> Callable[[Callable[ParamT, ReturnT]], IgnoreUnhashableLruCacheWrapper[ParamT, ReturnT]]:
    """
    This is a wrapper over the lru_cache function.  It adds the
    following features on top of lru_cache:

    * It will not cache the result of functions called with unhashable arguments.
    * It will clear the cache whenever zerver.lib.cache.KEY_PREFIX changes.
    """
    internal_decorator = lru_cache(maxsize=maxsize, typed=typed)

    def decorator(
        user_function: Callable[ParamT, ReturnT],
    ) -> IgnoreUnhashableLruCacheWrapper[ParamT, ReturnT]:
        return IgnoreUnhashableLruCacheWrapper(user_function, internal_decorator(user_function))

    return decorator
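Usage looks like the standard lru_cache decorator; a sketch (assuming the wrapper object is callable like the function it wraps):

@ignore_unhashable_lru_cache(maxsize=1024)
def render_banner(realm_name: str) -> str:
    # Results are cached per realm_name; calls with unhashable
    # arguments fall through to the plain function, and the whole
    # cache is dropped when KEY_PREFIX changes.
    return f"<h1>{realm_name}</h1>"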
Wrapper that converts any dict args to dict item tuples.
def dict_to_items_tuple(user_function: Callable[..., Any]) -> Callable[..., Any]: """Wrapper that converts any dict args to dict item tuples.""" def dict_to_tuple(arg: Any) -> Any: if isinstance(arg, dict): return tuple(sorted(arg.items())) return arg def wrapper(*args: Any, **kwargs: Any) -> Any: new_args = (dict_to_tuple(arg) for arg in args) return user_function(*new_args, **kwargs) return wrapper
Wrapper that converts any dict items tuple args to dicts.
def items_tuple_to_dict(user_function: Callable[..., Any]) -> Callable[..., Any]: """Wrapper that converts any dict items tuple args to dicts.""" def dict_items_to_dict(arg: Any) -> Any: if isinstance(arg, tuple): try: return dict(arg) except TypeError: pass return arg def wrapper(*args: Any, **kwargs: Any) -> Any: new_args = (dict_items_to_dict(arg) for arg in args) new_kwargs = {key: dict_items_to_dict(val) for key, val in kwargs.items()} return user_function(*new_args, **new_kwargs) return wrapper
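These two wrappers are designed to sandwich a cache layer: the outer one turns dict arguments into hashable item tuples so the cache can key on them, and the inner one restores real dicts before the wrapped function runs. A sketch with a plain lru_cache in the middle, assuming the function takes only positional dict-or-hashable arguments:

from functools import lru_cache
from typing import Dict

@dict_to_items_tuple       # dict -> tuple(sorted(items)) on the way in
@lru_cache(maxsize=None)   # keys on the now-hashable tuple
@items_tuple_to_dict       # tuple of items -> dict before the body runs
def summarize(options: Dict[str, int]) -> str:
    return ", ".join(f"{k}={v}" for k, v in sorted(options.items()))

summarize({"a": 1, "b": 2})  # computed
summarize({"b": 2, "a": 1})  # cache hit: same sorted item tuple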
For installations like Zulip Cloud hosting a lot of realms, it only makes sense to do cache-filling work for realms that have any currently active users/clients. Otherwise, we end up with every single-user trial organization that has ever been created costing us N streams worth of cache work (where N is the number of default streams for a new organization).
def get_active_realm_ids() -> ValuesQuerySet[RealmCount, int]: """For installations like Zulip Cloud hosting a lot of realms, it only makes sense to do cache-filling work for realms that have any currently active users/clients. Otherwise, we end up with every single-user trial organization that has ever been created costing us N streams worth of cache work (where N is the number of default streams for a new organization). """ date = timezone_now() - timedelta(days=2) return ( RealmCount.objects.filter(end_time__gte=date, property="1day_actives::day", value__gt=0) .distinct("realm_id") .values_list("realm_id", flat=True) )
Compare two Zulip-style version strings. Versions are dot-separated sequences of decimal integers, followed by arbitrary trailing decoration. Comparison is lexicographic on the integer sequences, and refuses to guess how any trailing decoration compares to any other, to further numerals, or to nothing. Returns: True if ver1 < ver2 False if ver1 >= ver2 None if can't tell.
def version_lt(ver1: str, ver2: str) -> Optional[bool]: """ Compare two Zulip-style version strings. Versions are dot-separated sequences of decimal integers, followed by arbitrary trailing decoration. Comparison is lexicographic on the integer sequences, and refuses to guess how any trailing decoration compares to any other, to further numerals, or to nothing. Returns: True if ver1 < ver2 False if ver1 >= ver2 None if can't tell. """ num1, rest1 = pop_numerals(ver1) num2, rest2 = pop_numerals(ver2) if not num1 or not num2: return None common_len = min(len(num1), len(num2)) common_num1, rest_num1 = num1[:common_len], num1[common_len:] common_num2, rest_num2 = num2[:common_len], num2[common_len:] # Leading numbers win. if common_num1 != common_num2: return common_num1 < common_num2 # More numbers beats end-of-string, but ??? vs trailing text. # (NB at most one of rest_num1, rest_num2 is nonempty.) if not rest1 and rest_num2: return True if rest_num1 and not rest2: return False if rest_num1 or rest_num2: return None # Trailing text we can only compare for equality. if rest1 == rest2: return False return None
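Concrete outcomes, assuming pop_numerals splits a version string into its leading dot-separated integers and the remaining text:

version_lt("2.1", "2.10")         # True: [2, 1] < [2, 10]
version_lt("2.1", "2.1.5")        # True: more numerals beat end-of-string
version_lt("2.1", "2.1")          # False: equal
version_lt("2.1-rc1", "2.1-rc1")  # False: identical decoration, so not less
version_lt("2.1-rc1", "2.1")      # None: can't rank trailing text vs nothing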
Lock a file object using flock(2) for the duration of a 'with' statement. If shared is True, use a LOCK_SH lock, otherwise LOCK_EX.
def flock(lockfile: Union[int, IO[Any]], shared: bool = False) -> Iterator[None]: """Lock a file object using flock(2) for the duration of a 'with' statement. If shared is True, use a LOCK_SH lock, otherwise LOCK_EX.""" fcntl.flock(lockfile, fcntl.LOCK_SH if shared else fcntl.LOCK_EX) try: yield finally: fcntl.flock(lockfile, fcntl.LOCK_UN)
Lock a file using flock(2) for the duration of a 'with' statement. If shared is True, use a LOCK_SH lock, otherwise LOCK_EX. The file is given by name and will be created if it does not exist.
def lockfile(filename: str, shared: bool = False) -> Iterator[None]: """Lock a file using flock(2) for the duration of a 'with' statement. If shared is True, use a LOCK_SH lock, otherwise LOCK_EX. The file is given by name and will be created if it does not exist.""" with open(filename, "w") as lock: with flock(lock, shared=shared): yield
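Typical usage (flock and lockfile yield, so in the real module they are presumably wrapped with contextlib.contextmanager, which this listing omits); the lock path and the function called inside are hypothetical:

with lockfile("/srv/zulip-locks/deploy.lock"):
    # Only one process at a time runs this block; the lock is released
    # automatically when the block exits, even on an exception.
    perform_deploy_step()  # hypothetical function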
Lock a file using flock(2) for the duration of a 'with' statement. Doesn't block, yields False immediately if the lock can't be acquired.
def lockfile_nonblocking(filename: str) -> Iterator[bool]: # nocoverage """Lock a file using flock(2) for the duration of a 'with' statement. Doesn't block, yields False immediately if the lock can't be acquired.""" with open(filename, "w") as f: lock_acquired = False try: fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB) lock_acquired = True yield lock_acquired except BlockingIOError: yield False finally: if lock_acquired: fcntl.flock(f, fcntl.LOCK_UN)
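And the non-blocking variant, for periodic jobs that should simply skip a run when another instance already holds the lock (a sketch; the lock path and exit policy are up to the caller):

import sys

with lockfile_nonblocking("/srv/zulip-locks/cron.lock") as acquired:
    if not acquired:
        sys.exit(0)  # another instance is running; nothing to do
    run_periodic_job()  # hypothetical function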
This is just a tiny wrapper on DictType, but it provides some minor benefits: - mark clearly that the schema is for a Zulip event - make sure there's a type field - add id field automatically - sanity check that we have no duplicate keys
def event_dict_type( required_keys: Sequence[Tuple[str, Any]], optional_keys: Sequence[Tuple[str, Any]] = [], ) -> DictType: """ This is just a tiny wrapper on DictType, but it provides some minor benefits: - mark clearly that the schema is for a Zulip event - make sure there's a type field - add id field automatically - sanity check that we have no duplicate keys """ rkeys = [key[0] for key in required_keys] okeys = [key[0] for key in optional_keys] keys = rkeys + okeys assert len(keys) == len(set(keys)) assert "type" in rkeys assert "id" not in keys return DictType( required_keys=[*required_keys, ("id", int)], optional_keys=optional_keys, )
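A sketch of defining an event schema with it; plain str/int are the simplest checkers check_data accepts, and the event fields here are made up:

# Hypothetical event schema; the ("id", int) field is appended
# automatically by event_dict_type.
example_event = event_dict_type(
    required_keys=[
        ("type", str),
        ("user_id", int),
    ],
    optional_keys=[
        ("reason", str),
    ],
)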
Returns a YAML-like string for our data type; these are used for pretty-printing and comparison between the OpenAPI type definitions and these Python data types, as part of checking that the two stay consistent. schema is a glorified repr of a data type, but it also includes a var_name you pass in, plus we dumb things down a bit to match our current OpenAPI spec.
def schema(
    var_name: str,
    data_type: Any,
) -> str:
    """Returns a YAML-like string for our data type; these are used
    for pretty-printing and comparison between the OpenAPI type
    definitions and these Python data types, as part of checking
    that the two stay consistent.

    schema is a glorified repr of a data type, but it also includes a
    var_name you pass in, plus we dumb things down a bit to match our
    current OpenAPI spec.
    """
    if hasattr(data_type, "schema"):
        return data_type.schema(var_name)
    if data_type in [bool, dict, int, float, list, str]:
        return f"{var_name}: {data_type.__name__}"
    raise AssertionError(f"unknown type {data_type}")
Check that val conforms to our data_type
def check_data( data_type: Any, var_name: str, val: Any, ) -> None: """Check that val conforms to our data_type""" if hasattr(data_type, "check_data"): data_type.check_data(var_name, val) return if not isinstance(val, data_type): raise AssertionError(f"{var_name} is not type {data_type}")
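For example, with the primitive checkers:

check_data(int, "count", 5)  # passes silently
check_data(str, "count", 5)  # raises AssertionError: count is not type <class 'str'>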
Interrupt the running process and provide a Python prompt for interactive debugging.
def interactive_debug(sig: int, frame: Optional[FrameType]) -> None:
    """Interrupt the running process and provide a Python prompt for
    interactive debugging."""
    d = {"_frame": frame}  # Allow access to frame object.
    if frame is not None:
        d.update(frame.f_globals)  # Unless shadowed by global
        d.update(frame.f_locals)

    message = "Signal received : entering python shell.\nTraceback:\n"
    message += "".join(traceback.format_stack(frame))
    i = code.InteractiveConsole(d)
    i.interact(message)
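To make this available, register it as a signal handler at process startup (a sketch; SIGUSR1 is a conventional choice for this purpose):

import signal

signal.signal(signal.SIGUSR1, interactive_debug)
# Then `kill -USR1 <pid>` drops that process into an interactive
# Python shell with the interrupted frame's locals available.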
If tracemalloc tracing is enabled, listen for requests to dump a snapshot. To trigger once this is listening: echo | socat -u stdin unix-sendto:/var/log/zulip/tracemalloc/tracemalloc.$pid To enable in the Zulip web server: edit /etc/zulip/uwsgi.ini and add e.g. ` PYTHONTRACEMALLOC=5` to the `env=` line. This function is called in middleware, so the process will automatically start listening. To enable in other contexts: see the upstream docs at https://docs.python.org/3/library/tracemalloc . You may also have to add a call to this function somewhere.
def maybe_tracemalloc_listen() -> None:
    """If tracemalloc tracing is enabled, listen for requests to dump a snapshot.

    To trigger once this is listening:
        echo | socat -u stdin unix-sendto:/var/log/zulip/tracemalloc/tracemalloc.$pid

    To enable in the Zulip web server: edit /etc/zulip/uwsgi.ini and add
    e.g. ` PYTHONTRACEMALLOC=5` to the `env=` line.
    This function is called in middleware, so the process will
    automatically start listening.

    To enable in other contexts: see the upstream docs at
    https://docs.python.org/3/library/tracemalloc .  You may also have
    to add a call to this function somewhere.
    """
    if os.environ.get("PYTHONTRACEMALLOC"):
        # If the server was started with `tracemalloc` tracing on, then
        # listen for a signal to dump `tracemalloc` snapshots.
        tracemalloc_listen()
Return all the default streams for a realm, as a list of dictionaries sorted by stream name.
def get_default_streams_for_realm_as_dicts(realm_id: int) -> List[DefaultStreamDict]:
    """
    Return all the default streams for a realm, as a list of
    dictionaries sorted by stream name.
    """
    streams = get_slim_realm_default_streams(realm_id)
    stream_dicts = [stream.to_dict() for stream in streams]
    return sorted(stream_dicts, key=lambda stream: stream["name"])
Skipping streams where the user's subscription status has changed when constructing digests is critical to ensure correctness for streams without shared history, guest users, and long-term idle users, because it means that every user has the same view of the history of a given stream whose message history is being included (and thus we can share a lot of work). The downside is that newly created streams are never included in the first digest email after their creation. Should we wish to change that, we will need to be very careful to avoid creating bugs for any of those classes of users.
def get_user_stream_map(user_ids: List[int], cutoff_date: datetime) -> Dict[int, Set[int]]: """Skipping streams where the user's subscription status has changed when constructing digests is critical to ensure correctness for streams without shared history, guest users, and long-term idle users, because it means that every user has the same view of the history of a given stream whose message history is being included (and thus we can share a lot of work). The downside is that newly created streams are never included in the first digest email after their creation. Should we wish to change that, we will need to be very careful to avoid creating bugs for any of those classes of users. """ events = [ RealmAuditLog.SUBSCRIPTION_CREATED, RealmAuditLog.SUBSCRIPTION_ACTIVATED, RealmAuditLog.SUBSCRIPTION_DEACTIVATED, ] # This uses the zerver_realmauditlog_user_subscriptions_idx # partial index on RealmAuditLog which is specifically for those # three event types. rows = ( Subscription.objects.filter( user_profile_id__in=user_ids, recipient__type=Recipient.STREAM, active=True, is_muted=False, ) .alias( was_modified=Exists( RealmAuditLog.objects.filter( modified_stream_id=OuterRef("recipient__type_id"), modified_user_id=OuterRef("user_profile_id"), event_time__gt=cutoff_date, event_type__in=events, ) ) ) .filter(was_modified=False) .values("user_profile_id", "recipient__type_id") ) # maps user_id -> {stream_id, stream_id, ...} dct: Dict[int, Set[int]] = defaultdict(set) for row in rows: dct[row["user_profile_id"]].add(row["recipient__type_id"]) return dct
This returns an appropriate object describing the recipient of a direct message (whether individual or group). It will be an array of dicts, one for each recipient. Do not use this for streams.
def get_display_recipient_remote_cache(
    recipient_id: int, recipient_type: int, recipient_type_id: Optional[int]
) -> List[UserDisplayRecipient]:
    """
    This returns an appropriate object describing the recipient of a
    direct message (whether individual or group).

    It will be an array of dicts, one for each recipient.

    Do not use this for streams.
    """
    from zerver.models import Recipient, UserProfile

    assert recipient_type != Recipient.STREAM

    # The main priority for ordering here is being deterministic.
    # Right now, we order by ID, which matches the ordering of user
    # names in the left sidebar.
    user_profile_list = (
        UserProfile.objects.filter(
            subscription__recipient_id=recipient_id,
        )
        .order_by("id")
        .values(*display_recipient_fields)
    )
    return list(user_profile_list)
Takes a set of tuples of the form (recipient_id, recipient_type, recipient_type_id). Returns a dict mapping recipient_id to the corresponding display_recipient.
def bulk_fetch_stream_names(
    recipient_tuples: Set[Tuple[int, int, int]],
) -> Dict[int, str]:
    """
    Takes a set of tuples of the form (recipient_id, recipient_type, recipient_type_id).
    Returns a dict mapping recipient_id to the corresponding display_recipient.
    """
    from zerver.models import Stream

    if len(recipient_tuples) == 0:
        return {}

    recipient_id_to_stream_id = {tup[0]: tup[2] for tup in recipient_tuples}
    recipient_ids = [tup[0] for tup in recipient_tuples]

    def get_tiny_stream_rows(
        recipient_ids: List[int],
    ) -> ValuesQuerySet[Stream, TinyStreamResult]:
        stream_ids = [recipient_id_to_stream_id[recipient_id] for recipient_id in recipient_ids]
        return Stream.objects.filter(id__in=stream_ids).values("recipient_id", "name")

    def get_recipient_id(row: TinyStreamResult) -> int:
        return row["recipient_id"]

    def get_name(row: TinyStreamResult) -> str:
        return row["name"]

    # ItemT = TinyStreamResult, CacheItemT = str (name), ObjKT = int (recipient_id)
    stream_display_recipients: Dict[int, str] = generic_bulk_cached_fetch(
        cache_key_function=display_recipient_cache_key,
        query_function=get_tiny_stream_rows,
        object_ids=recipient_ids,
        id_fetcher=get_recipient_id,
        cache_transformer=get_name,
        setter=lambda obj: obj,
        extractor=lambda obj: obj,
    )

    return stream_display_recipients
Takes a set of tuples of the form (recipient_id, recipient_type, recipient_type_id). Returns a dict mapping recipient_id to the corresponding display_recipient.
def bulk_fetch_user_display_recipients( recipient_tuples: Set[Tuple[int, int, int]], ) -> Dict[int, List[UserDisplayRecipient]]: """ Takes set of tuples of the form (recipient_id, recipient_type, recipient_type_id) Returns dict mapping recipient_id to corresponding display_recipient """ from zerver.models import Recipient from zerver.models.recipients import bulk_get_huddle_user_ids if len(recipient_tuples) == 0: return {} get_recipient_id = lambda tup: tup[0] get_type = lambda tup: tup[1] personal_tuples = [tup for tup in recipient_tuples if get_type(tup) == Recipient.PERSONAL] huddle_tuples = [ tup for tup in recipient_tuples if get_type(tup) == Recipient.DIRECT_MESSAGE_GROUP ] huddle_recipient_ids = [get_recipient_id(tup) for tup in huddle_tuples] huddle_recipient_id_to_user_ids = bulk_get_huddle_user_ids(huddle_recipient_ids) # Find all user ids whose UserProfiles we will need to fetch: user_ids_to_fetch: Set[int] = set() for ignore_recipient_id, ignore_recipient_type, user_id in personal_tuples: user_ids_to_fetch.add(user_id) for recipient_id in huddle_recipient_ids: huddle_user_ids = huddle_recipient_id_to_user_ids[recipient_id] user_ids_to_fetch |= huddle_user_ids # Fetch the needed user dictionaries. user_display_recipients = bulk_fetch_single_user_display_recipients(list(user_ids_to_fetch)) result = {} for recipient_id, ignore_recipient_type, user_id in personal_tuples: display_recipients = [user_display_recipients[user_id]] result[recipient_id] = display_recipients for recipient_id in huddle_recipient_ids: user_ids = sorted(huddle_recipient_id_to_user_ids[recipient_id]) display_recipients = [user_display_recipients[user_id] for user_id in user_ids] result[recipient_id] = display_recipients return result
Takes a set of tuples of the form (recipient_id, recipient_type, recipient_type_id). Returns a dict mapping recipient_id to the corresponding display_recipient.
def bulk_fetch_display_recipients( recipient_tuples: Set[Tuple[int, int, int]], ) -> Dict[int, DisplayRecipientT]: """ Takes set of tuples of the form (recipient_id, recipient_type, recipient_type_id) Returns dict mapping recipient_id to corresponding display_recipient """ from zerver.models import Recipient stream_recipients = { recipient for recipient in recipient_tuples if recipient[1] == Recipient.STREAM } personal_and_huddle_recipients = recipient_tuples - stream_recipients stream_display_recipients = bulk_fetch_stream_names(stream_recipients) personal_and_huddle_display_recipients = bulk_fetch_user_display_recipients( personal_and_huddle_recipients ) # Glue the dicts together and return: return {**stream_display_recipients, **personal_and_huddle_display_recipients}
returns: an object describing the recipient (using a cache). If the type is a stream, the type_id must be an int; a string is returned. Otherwise, type_id may be None; an array of recipient dicts is returned.
def get_display_recipient_by_id( recipient_id: int, recipient_type: int, recipient_type_id: Optional[int] ) -> List[UserDisplayRecipient]: """ returns: an object describing the recipient (using a cache). If the type is a stream, the type_id must be an int; a string is returned. Otherwise, type_id may be None; an array of recipient dicts is returned. """ # Have to import here, to avoid circular dependency. from zerver.lib.display_recipient import get_display_recipient_remote_cache return get_display_recipient_remote_cache(recipient_id, recipient_type, recipient_type_id)
Take a DraftData object that was already validated by the @typed_endpoint decorator, then further sanitize, validate, and transform it. Ultimately return this "further validated" draft dict. It will have a slightly different set of keys, whose values can be used to directly create a Draft object.
def further_validated_draft_dict( draft_dict: DraftData, user_profile: UserProfile ) -> Dict[str, Any]: """Take a DraftData object that was already validated by the @typed_endpoint decorator then further sanitize, validate, and transform it. Ultimately return this "further validated" draft dict. It will have a slightly different set of keys the values for which can be used to directly create a Draft object.""" content = normalize_body(draft_dict.content) timestamp = draft_dict.timestamp if timestamp is None: timestamp = time.time() timestamp = round(timestamp, 6) if timestamp < 0: # While it's not exactly an invalid timestamp, it's not something # we want to allow either. raise JsonableError(_("Timestamp must not be negative.")) last_edit_time = timestamp_to_datetime(timestamp) topic_name = "" recipient_id = None to = draft_dict.to if draft_dict.type == "stream": topic_name = truncate_topic(draft_dict.topic) if "\0" in topic_name: raise JsonableError(_("Topic must not contain null bytes")) if len(to) != 1: raise JsonableError(_("Must specify exactly 1 channel ID for channel messages")) stream, sub = access_stream_by_id(user_profile, to[0]) recipient_id = stream.recipient_id elif draft_dict.type == "private" and len(to) != 0: to_users = get_user_profiles_by_ids(set(to), user_profile.realm) try: recipient_id = recipient_for_user_profiles(to_users, False, None, user_profile).id except ValidationError as e: # nocoverage raise JsonableError(e.messages[0]) return { "recipient_id": recipient_id, "topic": topic_name, "content": content, "last_edit_time": last_edit_time, }
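The timestamp normalization above is easy to demonstrate in isolation; here is a minimal sketch of the same rounding and validation steps:

import time

timestamp = time.time()          # e.g. 1700000000.1234567
timestamp = round(timestamp, 6)  # keep microsecond precision, as above
if timestamp < 0:
    raise ValueError("Timestamp must not be negative.")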
Create drafts in bulk for a given user based on the DraftData objects. Since the only place this method is currently used (apart from tests) is the create_draft view, we assume that these are syntactically valid (i.e. they satisfy the @typed_endpoint validation for DraftData).
def do_create_drafts(drafts: List[DraftData], user_profile: UserProfile) -> List[Draft]: """Create drafts in bulk for a given user based on the DraftData objects. Since currently, the only place this method is being used (apart from tests) is from the create_draft view, we assume that these are syntactically valid (i.e. they satisfy the @typed_endpoint validation for DraftData).""" draft_objects = [] for draft in drafts: valid_draft_dict = further_validated_draft_dict(draft, user_profile) draft_objects.append( Draft( user_profile=user_profile, recipient_id=valid_draft_dict["recipient_id"], topic=valid_draft_dict["topic"], content=valid_draft_dict["content"], last_edit_time=valid_draft_dict["last_edit_time"], ) ) created_draft_objects = Draft.objects.bulk_create(draft_objects) event = { "type": "drafts", "op": "add", "drafts": [draft.to_dict() for draft in created_draft_objects], } send_event(user_profile.realm, event, [user_profile.id]) return created_draft_objects
Edit/update a single draft for a given user. Since the only place this method is currently used (apart from tests) is the edit_draft view, we assume that the DraftData object is syntactically valid (i.e. it satisfies the @typed_endpoint validation for DraftData).
def do_edit_draft(draft_id: int, draft: DraftData, user_profile: UserProfile) -> None: """Edit/update a single draft for a given user. Since the only place this method is being used from (apart from tests) is the edit_draft view, we assume that the DraftData object is syntactically valid (i.e. it satisfies the @typed_endpoint validation for DraftData).""" try: draft_object = Draft.objects.get(id=draft_id, user_profile=user_profile) except Draft.DoesNotExist: raise ResourceNotFoundError(_("Draft does not exist")) valid_draft_dict = further_validated_draft_dict(draft, user_profile) draft_object.content = valid_draft_dict["content"] draft_object.topic = valid_draft_dict["topic"] draft_object.recipient_id = valid_draft_dict["recipient_id"] draft_object.last_edit_time = valid_draft_dict["last_edit_time"] draft_object.save() event = {"type": "drafts", "op": "update", "draft": draft_object.to_dict()} send_event(user_profile.realm, event, [user_profile.id])
Delete a draft belonging to a particular user.
def do_delete_draft(draft_id: int, user_profile: UserProfile) -> None: """Delete a draft belonging to a particular user.""" try: draft_object = Draft.objects.get(id=draft_id, user_profile=user_profile) except Draft.DoesNotExist: raise ResourceNotFoundError(_("Draft does not exist")) draft_id = draft_object.id draft_object.delete() event = {"type": "drafts", "op": "remove", "draft_id": draft_id} send_event(user_profile.realm, event, [user_profile.id])
Missed message strings are formatted with a little "mm" prefix followed by a randomly generated 32-character string.
def is_mm_32_format(msg_string: Optional[str]) -> bool: """ Missed message strings are formatted with a little "mm" prefix followed by a randomly generated 32-character string. """ return msg_string is not None and msg_string.startswith("mm") and len(msg_string) == 34
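A few illustrative checks of that format (the strings are invented):

assert is_mm_32_format("mm" + "a" * 32)      # "mm" plus 32 characters
assert not is_mm_32_format("mm" + "a" * 31)  # too short
assert not is_mm_32_format("xx" + "a" * 32)  # wrong prefix
assert not is_mm_32_format(None)             # None is handled gracefully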
We add the quote prefix ">" to each line of the message in plain text format, so that email clients render the message as a quote.
def add_quote_prefix_in_text(content: str) -> str: """ We add quote prefix ">" to each line of the message in plain text format, such that email clients render the message as quote. """ lines = content.split("\n") output = [] for line in lines: quoted_line = f"> {line}" output.append(quoted_line) return "\n".join(output)
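For example:

content = "first line\nsecond line"
print(add_quote_prefix_in_text(content))
# Output:
# > first line
# > second line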
Builds the message list object for the message notification email template. The messages are collapsed into per-recipient and per-sender blocks, like our web interface does.
def build_message_list( user: UserProfile, messages: List[Message], stream_id_map: Optional[Dict[int, Stream]] = None, # only needs id, name ) -> List[Dict[str, Any]]: """ Builds the message list object for the message notification email template. The messages are collapsed into per-recipient and per-sender blocks, like our web interface """ messages_to_render: List[Dict[str, Any]] = [] def sender_string(message: Message) -> str: if message.recipient.type in (Recipient.STREAM, Recipient.DIRECT_MESSAGE_GROUP): return message.sender.full_name else: return "" def fix_plaintext_image_urls(content: str) -> str: # Replace image URLs in plaintext content of the form # [image name](image url) # with a simple hyperlink. return re.sub(r"\[(\S*)\]\((\S*)\)", r"\2", content) def prepend_sender_to_message( message_plain: str, message_html: str, sender: str ) -> Tuple[str, str]: message_plain = f"{sender}:\n{message_plain}" message_soup = BeautifulSoup(message_html, "html.parser") sender_name_soup = BeautifulSoup(f"<b>{sender}</b>: ", "html.parser") first_tag = message_soup.find() if first_tag and first_tag.name == "div": first_tag = first_tag.find() if first_tag and first_tag.name == "p": first_tag.insert(0, sender_name_soup) else: message_soup.insert(0, sender_name_soup) return message_plain, str(message_soup) def build_message_payload(message: Message, sender: Optional[str] = None) -> Dict[str, str]: plain = message.content plain = fix_plaintext_image_urls(plain) # There's a small chance of colliding with non-Zulip URLs containing # "/user_uploads/", but we don't have much information about the # structure of the URL to leverage. We can't use `relative_to_full_url()` # function here because it uses a stricter regex which will not work for # plain text. plain = re.sub(r"/user_uploads/(\S*)", user.realm.uri + r"/user_uploads/\1", plain) plain = fix_spoilers_in_text(plain, user.default_language) plain = add_quote_prefix_in_text(plain) assert message.rendered_content is not None fragment = lxml.html.fragment_fromstring(message.rendered_content, create_parent=True) relative_to_full_url(fragment, user.realm.uri) fix_emojis(fragment, user.emojiset) fix_spoilers_in_html(fragment, user.default_language) change_katex_to_raw_latex(fragment) html = lxml.html.tostring(fragment, encoding="unicode") if sender: plain, html = prepend_sender_to_message(plain, html, sender) return {"plain": plain, "html": html} def build_sender_payload(message: Message) -> Dict[str, Any]: sender = sender_string(message) return {"sender": sender, "content": [build_message_payload(message, sender)]} def message_header(message: Message) -> Dict[str, Any]: if message.recipient.type == Recipient.PERSONAL: grouping: Dict[str, Any] = {"user": message.sender_id} narrow_link = personal_narrow_url( realm=user.realm, sender=message.sender, ) header = f"You and {message.sender.full_name}" header_html = f"<a style='color: #ffffff;' href='{narrow_link}'>{header}</a>" elif message.recipient.type == Recipient.DIRECT_MESSAGE_GROUP: grouping = {"huddle": message.recipient_id} display_recipient = get_display_recipient(message.recipient) narrow_link = huddle_narrow_url( user=user, display_recipient=display_recipient, ) other_recipients = [r["full_name"] for r in display_recipient if r["id"] != user.id] header = "You and {}".format(", ".join(other_recipients)) header_html = f"<a style='color: #ffffff;' href='{narrow_link}'>{header}</a>" else: assert message.recipient.type == Recipient.STREAM grouping = {"stream": message.recipient_id, "topic": 
message.topic_name().lower()} stream_id = message.recipient.type_id if stream_id_map is not None and stream_id in stream_id_map: stream = stream_id_map[stream_id] else: # Some of our callers don't populate stream_map, so # we just populate the stream from the database. stream = Stream.objects.only("id", "name").get(id=stream_id) narrow_link = topic_narrow_url( realm=user.realm, stream=stream, topic_name=message.topic_name(), ) header = f"{stream.name} > {message.topic_name()}" stream_link = stream_narrow_url(user.realm, stream) header_html = f"<a href='{stream_link}'>{stream.name}</a> > <a href='{narrow_link}'>{message.topic_name()}</a>" return { "grouping": grouping, "plain": header, "html": header_html, "stream_message": message.recipient.type_name() == "stream", } # # Collapse message list to # [ # { # "header": { # "plain":"header", # "html":"htmlheader" # } # "senders":[ # { # "sender":"sender_name", # "content":[ # { # "plain":"content", # "html":"htmlcontent" # } # { # "plain":"content", # "html":"htmlcontent" # } # ] # } # ] # }, # ] messages.sort(key=lambda message: message.date_sent) for message in messages: header = message_header(message) # If we want to collapse into the previous recipient block if ( len(messages_to_render) > 0 and messages_to_render[-1]["header"]["grouping"] == header["grouping"] ): sender = sender_string(message) sender_block = messages_to_render[-1]["senders"] # Same message sender, collapse again if sender_block[-1]["sender"] == sender: sender_block[-1]["content"].append(build_message_payload(message)) else: # Start a new sender block sender_block.append(build_sender_payload(message)) else: # New recipient and sender block recipient_block = {"header": header, "senders": [build_sender_payload(message)]} messages_to_render.append(recipient_block) return messages_to_render
Send a reminder email to a user if she's missed some direct messages by being offline. The email will have its reply-to address set to a limited-use email address that will send a Zulip message to the correct recipient. This allows the user to respond to missed direct messages, huddles, and @-mentions directly from the email. `user_profile` is the user to send the reminder to. `missed_messages` is a list of dictionaries containing Message objects and other data for a group of messages that share a recipient (and topic).
def do_send_missedmessage_events_reply_in_zulip( user_profile: UserProfile, missed_messages: List[Dict[str, Any]], message_count: int ) -> None: """ Send a reminder email to a user if she's missed some direct messages by being offline. The email will have its reply to address set to a limited used email address that will send a Zulip message to the correct recipient. This allows the user to respond to missed direct messages, huddles, and @-mentions directly from the email. `user_profile` is the user to send the reminder to `missed_messages` is a list of dictionaries to Message objects and other data for a group of messages that share a recipient (and topic) """ from zerver.context_processors import common_context recipients = { (msg["message"].recipient_id, msg["message"].topic_name().lower()) for msg in missed_messages } assert len(recipients) == 1, f"Unexpectedly multiple recipients: {recipients!r}" # This link is no longer a part of the email, but keeping the code in case # we find a clean way to add it back in the future unsubscribe_link = one_click_unsubscribe_link(user_profile, "missed_messages") context = common_context(user_profile) context.update( name=user_profile.full_name, message_count=message_count, unsubscribe_link=unsubscribe_link, include_realm_name_in_missedmessage_emails_subject=include_realm_name_in_missedmessage_emails_subject( user_profile ), ) mentioned_user_group_name = None mentioned_user_group_members_count = None mentioned_user_group = get_mentioned_user_group(missed_messages, user_profile) if mentioned_user_group is not None: mentioned_user_group_name = mentioned_user_group.name mentioned_user_group_members_count = mentioned_user_group.members_count triggers = [message["trigger"] for message in missed_messages] unique_triggers = set(triggers) personal_mentioned = any( message["trigger"] == NotificationTriggers.MENTION and message["mentioned_user_group_id"] is None for message in missed_messages ) mention = ( NotificationTriggers.MENTION in unique_triggers or NotificationTriggers.TOPIC_WILDCARD_MENTION in unique_triggers or NotificationTriggers.STREAM_WILDCARD_MENTION in unique_triggers or NotificationTriggers.TOPIC_WILDCARD_MENTION_IN_FOLLOWED_TOPIC in unique_triggers or NotificationTriggers.STREAM_WILDCARD_MENTION_IN_FOLLOWED_TOPIC in unique_triggers ) context.update( mention=mention, personal_mentioned=personal_mentioned, topic_wildcard_mentioned=NotificationTriggers.TOPIC_WILDCARD_MENTION in unique_triggers, stream_wildcard_mentioned=NotificationTriggers.STREAM_WILDCARD_MENTION in unique_triggers, stream_email_notify=NotificationTriggers.STREAM_EMAIL in unique_triggers, followed_topic_email_notify=NotificationTriggers.FOLLOWED_TOPIC_EMAIL in unique_triggers, topic_wildcard_mentioned_in_followed_topic=NotificationTriggers.TOPIC_WILDCARD_MENTION_IN_FOLLOWED_TOPIC in unique_triggers, stream_wildcard_mentioned_in_followed_topic=NotificationTriggers.STREAM_WILDCARD_MENTION_IN_FOLLOWED_TOPIC in unique_triggers, mentioned_user_group_name=mentioned_user_group_name, ) # If this setting (email mirroring integration) is enabled, only then # can users reply to email to send message to Zulip. Thus, one must # ensure to display warning in the template. 
if settings.EMAIL_GATEWAY_PATTERN: context.update( reply_to_zulip=True, ) else: context.update( reply_to_zulip=False, ) from zerver.lib.email_mirror import create_missed_message_address reply_to_address = create_missed_message_address(user_profile, missed_messages[0]["message"]) if reply_to_address == FromAddress.NOREPLY: reply_to_name = "" else: reply_to_name = "Zulip" senders = list({m["message"].sender for m in missed_messages}) if missed_messages[0]["message"].recipient.type == Recipient.DIRECT_MESSAGE_GROUP: display_recipient = get_display_recipient(missed_messages[0]["message"].recipient) narrow_url = huddle_narrow_url( user=user_profile, display_recipient=display_recipient, ) context.update(narrow_url=narrow_url) other_recipients = [r["full_name"] for r in display_recipient if r["id"] != user_profile.id] context.update(group_pm=True) if len(other_recipients) == 2: huddle_display_name = " and ".join(other_recipients) context.update(huddle_display_name=huddle_display_name) elif len(other_recipients) == 3: huddle_display_name = ( f"{other_recipients[0]}, {other_recipients[1]}, and {other_recipients[2]}" ) context.update(huddle_display_name=huddle_display_name) else: huddle_display_name = "{}, and {} others".format( ", ".join(other_recipients[:2]), len(other_recipients) - 2 ) context.update(huddle_display_name=huddle_display_name) elif missed_messages[0]["message"].recipient.type == Recipient.PERSONAL: narrow_url = personal_narrow_url( realm=user_profile.realm, sender=missed_messages[0]["message"].sender, ) context.update(narrow_url=narrow_url) context.update(private_message=True) elif ( context["mention"] or context["stream_email_notify"] or context["followed_topic_email_notify"] ): # Keep only the senders who actually mentioned the user if context["mention"]: senders = list( { m["message"].sender for m in missed_messages if m["trigger"] in [ NotificationTriggers.MENTION, NotificationTriggers.TOPIC_WILDCARD_MENTION, NotificationTriggers.STREAM_WILDCARD_MENTION, NotificationTriggers.TOPIC_WILDCARD_MENTION_IN_FOLLOWED_TOPIC, NotificationTriggers.STREAM_WILDCARD_MENTION_IN_FOLLOWED_TOPIC, ] } ) message = missed_messages[0]["message"] assert message.recipient.type == Recipient.STREAM stream = Stream.objects.only("id", "name").get(id=message.recipient.type_id) narrow_url = topic_narrow_url( realm=user_profile.realm, stream=stream, topic_name=message.topic_name(), ) context.update(narrow_url=narrow_url) topic_resolved, topic_name = get_topic_resolution_and_bare_name(message.topic_name()) context.update( channel_name=stream.name, topic_name=topic_name, topic_resolved=topic_resolved, ) else: raise AssertionError("Invalid messages!") # If message content is disabled, then flush all information we pass to email. 
if not message_content_allowed_in_missedmessage_emails(user_profile): realm = user_profile.realm context.update( reply_to_zulip=False, messages=[], sender_str="", realm_str=realm.name, huddle_display_name="", show_message_content=False, message_content_disabled_by_user=not user_profile.message_content_in_email_notifications, message_content_disabled_by_realm=not realm.message_content_allowed_in_email_notifications, ) else: context.update( messages=build_message_list( user=user_profile, messages=[m["message"] for m in missed_messages], ), sender_str=", ".join(sender.full_name for sender in senders), realm_str=user_profile.realm.name, show_message_content=True, ) # Soft reactivate the long_term_idle user personally mentioned soft_reactivate_if_personal_notification( user_profile, unique_triggers, mentioned_user_group_members_count ) with override_language(user_profile.default_language): from_name: str = _("{service_name} notifications").format( service_name=settings.INSTALLATION_NAME ) from_address = FromAddress.NOREPLY email_dict = { "template_prefix": "zerver/emails/missed_message", "to_user_ids": [user_profile.id], "from_name": from_name, "from_address": from_address, "reply_to_email": str(Address(display_name=reply_to_name, addr_spec=reply_to_address)), "context": context, } queue_json_publish("email_senders", email_dict) user_profile.last_reminder = timezone_now() user_profile.save(update_fields=["last_reminder"])
Avoid calling this in a loop! Instead, call get_realm_email_validator() outside of the loop.
def email_allowed_for_realm(email: str, realm: Realm) -> None: """ Avoid calling this in a loop! Instead, call get_realm_email_validator() outside of the loop. """ get_realm_email_validator(realm)(email)
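The recommended pattern looks roughly like this; `realm` and `emails` are assumed to exist in the caller's scope:

# Build the validator once, outside the loop.
validate_email = get_realm_email_validator(realm)
for email in emails:
    # Raises an appropriate error for a disallowed email.
    validate_email(email)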
We use this function even for a list containing a single email. It checks "new" emails to make sure that they don't already exist. There's a bit of fiddly logic related to cross-realm bots and mirror dummies too.
def get_existing_user_errors( target_realm: Realm, emails: Set[str], verbose: bool = False, ) -> Dict[str, Tuple[str, bool]]: """ We use this function even for a list of one emails. It checks "new" emails to make sure that they don't already exist. There's a bit of fiddly logic related to cross-realm bots and mirror dummies too. """ errors: Dict[str, Tuple[str, bool]] = {} users = get_users_by_delivery_email(emails, target_realm).only( "delivery_email", "is_active", "is_mirror_dummy", ) """ A note on casing: We will preserve the casing used by the user for email in most of this code. The only exception is when we do existence checks against the `user_dict` dictionary. (We don't allow two users in the same realm to have the same effective delivery email.) """ user_dict = {user.delivery_email.lower(): user for user in users} def process_email(email: str) -> None: if is_cross_realm_bot_email(email): if verbose: msg = email_reserved_for_system_bots_error(email) else: msg = _("Reserved for system bots.") deactivated = False errors[email] = (msg, deactivated) return existing_user_profile = user_dict.get(email.lower()) if existing_user_profile is None: # HAPPY PATH! Most people invite users that don't exist yet. return if existing_user_profile.is_mirror_dummy: if existing_user_profile.is_active: raise AssertionError("Mirror dummy user is already active!") return """ Email has already been taken by a "normal" user. """ deactivated = not existing_user_profile.is_active if existing_user_profile.is_active: if verbose: msg = _("{email} already has an account").format(email=email) else: msg = _("Already has an account.") else: msg = _("Account has been deactivated.") errors[email] = (msg, deactivated) for email in emails: process_email(email) return errors
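A sketch of consuming the return value; the realm variable and email address are assumptions for the example:

errors = get_existing_user_errors(realm, {"[email protected]"}, verbose=False)
for email, (msg, deactivated) in errors.items():
    # e.g. msg="Already has an account." with deactivated=False
    print(email, msg, deactivated)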
NOTE: Only use this to validate that a single email is not already used in the realm. We should start using bulk_check_new_emails() for any endpoint that takes multiple emails, such as the "invite" interface.
def validate_email_not_already_in_realm( target_realm: Realm, email: str, verbose: bool = True ) -> None: """ NOTE: Only use this to validate that a single email is not already used in the realm. We should start using bulk_check_new_emails() for any endpoint that takes multiple emails, such as the "invite" interface. """ error_dict = get_existing_user_errors(target_realm, {email}, verbose) # Loop through errors, the only key should be our email. for key, error_info in error_dict.items(): assert key == email msg, deactivated = error_info raise ValidationError(msg)
This function is used as a helper in fetch_initial_state_data, when the user passes in None for event_types, and we want to fetch info for every event type. Defining this at module level makes it easier to mock.
def always_want(msg_type: str) -> bool: """ This function is used as a helper in fetch_initial_state_data, when the user passes in None for event_types, and we want to fetch info for every event type. Defining this at module level makes it easier to mock. """ return True
When `event_types` is None, fetches the core data powering the web app's `page_params` and `/api/v1/register` (for mobile/terminal apps). Can also fetch a subset as determined by `event_types`. The user_profile=None code path is used for logged-out public access to streams with is_web_public=True. Whenever you add new code to this function, you should also add corresponding events for changes in the data structures and new code to apply_events (and add a test in test_events.py).
def fetch_initial_state_data( user_profile: Optional[UserProfile], *, realm: Optional[Realm] = None, event_types: Optional[Iterable[str]] = None, queue_id: Optional[str] = "", client_gravatar: bool = False, user_avatar_url_field_optional: bool = False, user_settings_object: bool = False, slim_presence: bool = False, include_subscribers: bool = True, include_streams: bool = True, spectator_requested_language: Optional[str] = None, pronouns_field_type_supported: bool = True, linkifier_url_template: bool = False, user_list_incomplete: bool = False, ) -> Dict[str, Any]: """When `event_types` is None, fetches the core data powering the web app's `page_params` and `/api/v1/register` (for mobile/terminal apps). Can also fetch a subset as determined by `event_types`. The user_profile=None code path is used for logged-out public access to streams with is_web_public=True. Whenever you add new code to this function, you should also add corresponding events for changes in the data structures and new code to apply_events (and add a test in test_events.py). """ if realm is None: assert user_profile is not None realm = user_profile.realm state: Dict[str, Any] = {"queue_id": queue_id} if event_types is None: # return True always want: Callable[[str], bool] = always_want else: want = set(event_types).__contains__ # Show the version info unconditionally. state["zulip_version"] = ZULIP_VERSION state["zulip_feature_level"] = API_FEATURE_LEVEL state["zulip_merge_base"] = ZULIP_MERGE_BASE if want("alert_words"): state["alert_words"] = [] if user_profile is None else user_alert_words(user_profile) if want("custom_profile_fields"): if user_profile is None: # Spectators can't access full user profiles or # personal settings, so we send an empty list. state["custom_profile_fields"] = [] else: fields = custom_profile_fields_for_realm(realm.id) state["custom_profile_fields"] = [f.as_dict() for f in fields] state["custom_profile_field_types"] = { item[4]: {"id": item[0], "name": str(item[1])} for item in CustomProfileField.ALL_FIELD_TYPES } if not pronouns_field_type_supported: for field in state["custom_profile_fields"]: if field["type"] == CustomProfileField.PRONOUNS: field["type"] = CustomProfileField.SHORT_TEXT del state["custom_profile_field_types"]["PRONOUNS"] if want("onboarding_steps"): # Even if we offered special onboarding steps for guests without an # account, we'd maybe need to store their state using cookies # or local storage, rather than in the database. state["onboarding_steps"] = ( [] if user_profile is None else get_next_onboarding_steps(user_profile) ) if want("message"): # Since the introduction of `anchor="latest"` in the API, # `max_message_id` is primarily used for generating `local_id` # values that are higher than this. We likely can eventually # remove this parameter from the API. state["max_message_id"] = max_message_id_for_user(user_profile) if want("drafts"): if user_profile is None: state["drafts"] = [] else: # Note: if a user ever disables syncing drafts then all of # their old drafts stored on the server will be deleted and # simply retained in local storage. In which case user_drafts # would just be an empty queryset. 
user_draft_objects = Draft.objects.filter(user_profile=user_profile).order_by( "-last_edit_time" )[: settings.MAX_DRAFTS_IN_REGISTER_RESPONSE] user_draft_dicts = [draft.to_dict() for draft in user_draft_objects] state["drafts"] = user_draft_dicts if want("scheduled_messages"): state["scheduled_messages"] = ( [] if user_profile is None else get_undelivered_scheduled_messages(user_profile) ) if want("muted_topics") and ( # Suppress muted_topics data for clients that explicitly # support user_topic. This allows clients to request both the # user_topic and muted_topics, and receive the duplicate # muted_topics data only from older servers that don't yet # support user_topic. event_types is None or not want("user_topic") ): state["muted_topics"] = [] if user_profile is None else get_topic_mutes(user_profile) if want("muted_users"): state["muted_users"] = [] if user_profile is None else get_user_mutes(user_profile) if want("presence"): state["presences"] = ( {} if user_profile is None else get_presences_for_realm(realm, slim_presence, user_profile) ) # Send server_timestamp, to match the format of `GET /presence` requests. state["server_timestamp"] = time.time() if want("realm"): # The realm bundle includes both realm properties and server # properties, since it's rare that one would want one and not # the other. We expect most clients to want it. # # A note on naming: For some settings, one could imagine # having a server-level value and a realm-level value (with # the server value serving as the default for the realm # value). For such settings, we prefer the following naming # scheme: # # * realm_inline_image_preview (current realm setting) # * server_inline_image_preview (server-level default) # # In situations where for backwards-compatibility reasons we # have an unadorned name, we should arrange that clients using # that unadorned name work correctly (i.e. that should be the # currently active setting, not a server-level default). # # Other settings, which are just server-level settings or data # about the version of Zulip, can be named without prefixes, # e.g. giphy_rating_options or development_environment. for property_name in Realm.property_types: state["realm_" + property_name] = getattr(realm, property_name) for ( setting_name, permission_configuration, ) in Realm.REALM_PERMISSION_GROUP_SETTINGS.items(): state["realm_" + setting_name] = getattr(realm, permission_configuration.id_field_name) # Most state is handled via the property_types framework; # these manual entries are for those realm settings that don't # fit into that framework. realm_authentication_methods_dict = realm.authentication_methods_dict() state["realm_authentication_methods"] = ( get_realm_authentication_methods_for_page_params_api( realm, realm_authentication_methods_dict ) ) # We pretend these features are disabled because anonymous # users can't access them. In the future, we may want to move # this logic to the frontends, so that we can correctly # display what these fields are in the settings. state["realm_allow_message_editing"] = ( False if user_profile is None else realm.allow_message_editing ) state["realm_edit_topic_policy"] = ( Realm.POLICY_ADMINS_ONLY if user_profile is None else realm.edit_topic_policy ) state["realm_delete_own_message_policy"] = ( Realm.POLICY_ADMINS_ONLY if user_profile is None else realm.delete_own_message_policy ) # This setting determines whether to send presence and also # whether to display of users list in the right sidebar; we # want both behaviors for logged-out users. 
        # We may in the future choose to move this logic to the frontend.
        state["realm_presence_disabled"] = True if user_profile is None else realm.presence_disabled

        # Important: Encode units in the client-facing API name.
        state["max_avatar_file_size_mib"] = settings.MAX_AVATAR_FILE_SIZE_MIB
        state["max_file_upload_size_mib"] = settings.MAX_FILE_UPLOAD_SIZE
        state["max_icon_file_size_mib"] = settings.MAX_ICON_FILE_SIZE_MIB
        upload_quota_bytes = realm.upload_quota_bytes()
        state["realm_upload_quota_mib"] = optional_bytes_to_mib(upload_quota_bytes)

        state["realm_icon_url"] = realm_icon_url(realm)
        state["realm_icon_source"] = realm.icon_source
        add_realm_logo_fields(state, realm)

        state["realm_uri"] = realm.uri
        state["realm_bot_domain"] = realm.get_bot_domain()
        state["realm_available_video_chat_providers"] = realm.VIDEO_CHAT_PROVIDERS
        state["settings_send_digest_emails"] = settings.SEND_DIGEST_EMAILS

        state["realm_digest_emails_enabled"] = (
            realm.digest_emails_enabled and settings.SEND_DIGEST_EMAILS
        )
        state["realm_email_auth_enabled"] = email_auth_enabled(
            realm, realm_authentication_methods_dict
        )
        state["realm_password_auth_enabled"] = password_auth_enabled(
            realm, realm_authentication_methods_dict
        )

        state["server_generation"] = settings.SERVER_GENERATION
        state["realm_is_zephyr_mirror_realm"] = realm.is_zephyr_mirror_realm
        state["development_environment"] = settings.DEVELOPMENT
        state["realm_org_type"] = realm.org_type
        state["realm_plan_type"] = realm.plan_type
        state["zulip_plan_is_not_limited"] = realm.plan_type != Realm.PLAN_TYPE_LIMITED
        state["upgrade_text_for_wide_organization_logo"] = str(Realm.UPGRADE_TEXT_STANDARD)

        if realm.push_notifications_enabled_end_timestamp is not None:
            state["realm_push_notifications_enabled_end_timestamp"] = datetime_to_timestamp(
                realm.push_notifications_enabled_end_timestamp
            )
        else:
            state["realm_push_notifications_enabled_end_timestamp"] = None

        state["password_min_length"] = settings.PASSWORD_MIN_LENGTH
        state["password_min_guesses"] = settings.PASSWORD_MIN_GUESSES
        state["server_inline_image_preview"] = settings.INLINE_IMAGE_PREVIEW
        state["server_inline_url_embed_preview"] = settings.INLINE_URL_EMBED_PREVIEW
        state["server_avatar_changes_disabled"] = settings.AVATAR_CHANGES_DISABLED
        state["server_name_changes_disabled"] = settings.NAME_CHANGES_DISABLED
        state["server_web_public_streams_enabled"] = settings.WEB_PUBLIC_STREAMS_ENABLED
        state["giphy_rating_options"] = realm.get_giphy_rating_options()

        state["server_emoji_data_url"] = emoji.data_url()

        state["server_needs_upgrade"] = is_outdated_server(user_profile)
        state["event_queue_longpoll_timeout_seconds"] = (
            settings.EVENT_QUEUE_LONGPOLL_TIMEOUT_SECONDS
        )

        # TODO: This probably belongs on the server object.
state["realm_default_external_accounts"] = get_default_external_accounts() server_default_jitsi_server_url = ( settings.JITSI_SERVER_URL.rstrip("/") if settings.JITSI_SERVER_URL is not None else None ) state["server_jitsi_server_url"] = server_default_jitsi_server_url state["jitsi_server_url"] = ( realm.jitsi_server_url if realm.jitsi_server_url is not None else server_default_jitsi_server_url ) new_stream_announcements_stream = realm.get_new_stream_announcements_stream() if new_stream_announcements_stream: state["realm_new_stream_announcements_stream_id"] = new_stream_announcements_stream.id else: state["realm_new_stream_announcements_stream_id"] = -1 signup_announcements_stream = realm.get_signup_announcements_stream() if signup_announcements_stream: state["realm_signup_announcements_stream_id"] = signup_announcements_stream.id else: state["realm_signup_announcements_stream_id"] = -1 zulip_update_announcements_stream = realm.get_zulip_update_announcements_stream() if zulip_update_announcements_stream: state["realm_zulip_update_announcements_stream_id"] = ( zulip_update_announcements_stream.id ) else: state["realm_zulip_update_announcements_stream_id"] = -1 state["max_stream_name_length"] = Stream.MAX_NAME_LENGTH state["max_stream_description_length"] = Stream.MAX_DESCRIPTION_LENGTH state["max_topic_length"] = MAX_TOPIC_NAME_LENGTH state["max_message_length"] = settings.MAX_MESSAGE_LENGTH if realm.demo_organization_scheduled_deletion_date is not None: state["demo_organization_scheduled_deletion_date"] = datetime_to_timestamp( realm.demo_organization_scheduled_deletion_date ) state["realm_date_created"] = datetime_to_timestamp(realm.date_created) # Presence system parameters for client behavior. state["server_presence_ping_interval_seconds"] = settings.PRESENCE_PING_INTERVAL_SECS state["server_presence_offline_threshold_seconds"] = settings.OFFLINE_THRESHOLD_SECS # Typing notifications protocol parameters for client behavior. state["server_typing_started_expiry_period_milliseconds"] = ( settings.TYPING_STARTED_EXPIRY_PERIOD_MILLISECONDS ) state["server_typing_stopped_wait_period_milliseconds"] = ( settings.TYPING_STOPPED_WAIT_PERIOD_MILLISECONDS ) state["server_typing_started_wait_period_milliseconds"] = ( settings.TYPING_STARTED_WAIT_PERIOD_MILLISECONDS ) state["server_supported_permission_settings"] = get_server_supported_permission_settings() if want("realm_user_settings_defaults"): realm_user_default = RealmUserDefault.objects.get(realm=realm) state["realm_user_settings_defaults"] = {} for property_name in RealmUserDefault.property_types: state["realm_user_settings_defaults"][property_name] = getattr( realm_user_default, property_name ) state["realm_user_settings_defaults"]["emojiset_choices"] = ( RealmUserDefault.emojiset_choices() ) state["realm_user_settings_defaults"]["available_notification_sounds"] = ( get_available_notification_sounds() ) if want("realm_domains"): state["realm_domains"] = get_realm_domains(realm) if want("realm_emoji"): state["realm_emoji"] = get_all_custom_emoji_for_realm(realm.id) if want("realm_linkifiers"): if linkifier_url_template: state["realm_linkifiers"] = linkifiers_for_realm(realm.id) else: # When URL template is not supported by the client, return an empty list # because the new format is incompatible with the old URL format strings # and the client would not render it properly. state["realm_linkifiers"] = [] # Backwards compatibility code. 
if want("realm_filters"): # Always return an empty list because the new URL template format is incompatible # with the old URL format string, because legacy clients that use the # backwards-compatible `realm_filters` event would not render the it properly. state["realm_filters"] = [] if want("realm_playgrounds"): state["realm_playgrounds"] = get_realm_playgrounds(realm) if want("realm_user_groups"): state["realm_user_groups"] = user_groups_in_realm_serialized(realm) if user_profile is not None: settings_user = user_profile else: assert spectator_requested_language is not None # When UserProfile=None, we want to serve the values for various # settings as the defaults. Instead of copying the default values # from models/users.py here, we access these default values from a # temporary UserProfile object that will not be saved to the database. # # We also can set various fields to avoid duplicating code # unnecessarily. settings_user = UserProfile( full_name="Anonymous User", email="[email protected]", delivery_email="[email protected]", realm=realm, # We tag logged-out users as guests because most guest # restrictions apply to these users as well, and it lets # us avoid unnecessary conditionals. role=UserProfile.ROLE_GUEST, is_billing_admin=False, avatar_source=UserProfile.AVATAR_FROM_GRAVATAR, # ID=0 is not used in real Zulip databases, ensuring this is unique. id=0, default_language=spectator_requested_language, # Set home view to recent conversations for spectators regardless of default. web_home_view="recent_topics", ) if want("realm_user"): state["raw_users"] = get_users_for_api( realm, user_profile, client_gravatar=client_gravatar, user_avatar_url_field_optional=user_avatar_url_field_optional, # Don't send custom profile field values to spectators. include_custom_profile_fields=user_profile is not None, user_list_incomplete=user_list_incomplete, ) state["cross_realm_bots"] = list(get_cross_realm_dicts()) # For the user's own avatar URL, we force # client_gravatar=False, since that saves some unnecessary # client-side code for handing medium-size avatars. See #8253 # for details. state["avatar_source"] = settings_user.avatar_source state["avatar_url_medium"] = avatar_url( settings_user, medium=True, client_gravatar=False, ) state["avatar_url"] = avatar_url( settings_user, medium=False, client_gravatar=False, ) state["can_create_private_streams"] = settings_user.can_create_private_streams() state["can_create_public_streams"] = settings_user.can_create_public_streams() # TODO/compatibility: Deprecated in Zulip 5.0 (feature level # 102); we can remove this once we no longer need to support # legacy mobile app versions that read the old property. 
state["can_create_streams"] = ( settings_user.can_create_private_streams() or settings_user.can_create_public_streams() or settings_user.can_create_web_public_streams() ) state["can_create_web_public_streams"] = settings_user.can_create_web_public_streams() state["can_subscribe_other_users"] = settings_user.can_subscribe_other_users() state["can_invite_others_to_realm"] = settings_user.can_invite_users_by_email() state["is_admin"] = settings_user.is_realm_admin state["is_owner"] = settings_user.is_realm_owner state["is_moderator"] = settings_user.is_moderator state["is_guest"] = settings_user.is_guest state["is_billing_admin"] = settings_user.is_billing_admin state["user_id"] = settings_user.id state["email"] = settings_user.email state["delivery_email"] = settings_user.delivery_email state["full_name"] = settings_user.full_name if want("realm_bot"): state["realm_bots"] = [] if user_profile is None else get_owned_bot_dicts(user_profile) # This does not yet have an apply_event counterpart, since currently, # new entries for EMBEDDED_BOTS can only be added directly in the codebase. if want("realm_embedded_bots"): state["realm_embedded_bots"] = [ {"name": bot.name, "config": load_bot_config_template(bot.name)} for bot in EMBEDDED_BOTS ] # This does not have an apply_events counterpart either since this # data is mostly static. This excludes the legacy webhook # integrations as those do not follow the same URL construction # patterns as other integrations. if want("realm_incoming_webhook_bots"): state["realm_incoming_webhook_bots"] = [ { "name": integration.name, "display_name": integration.display_name, "all_event_types": get_all_event_types_for_integration(integration), "config": {c[1]: c[0] for c in integration.config_options}, } for integration in WEBHOOK_INTEGRATIONS if integration.legacy is False ] if want("recent_private_conversations"): # A data structure containing records of this form: # # [{'max_message_id': 700175, 'user_ids': [801]}] # # for all recent direct message conversations, ordered by the # highest message ID in the conversation. The user_ids list # is the list of users other than the current user in the # direct message conversation (so it is [] for direct messages # to self). # # Note that raw_recent_private_conversations is an # intermediate form as a dictionary keyed by recipient_id, # which is more efficient to update, and is rewritten to the # final format in post_process_state. state["raw_recent_private_conversations"] = ( {} if user_profile is None else get_recent_private_conversations(user_profile) ) if want("subscription"): if user_profile is not None: sub_info = gather_subscriptions_helper( user_profile, include_subscribers=include_subscribers, ) else: sub_info = get_web_public_subs(realm) state["subscriptions"] = sub_info.subscriptions state["unsubscribed"] = sub_info.unsubscribed state["never_subscribed"] = sub_info.never_subscribed if want("update_message_flags") and want("message"): # Keeping unread_msgs updated requires both message flag updates and # message updates. This is due to the fact that new messages will not # generate a flag update so we need to use the flags field in the # message event. if user_profile is not None: state["raw_unread_msgs"] = get_raw_unread_data(user_profile) else: # For logged-out visitors, we treat all messages as read; # calling this helper lets us return empty objects in the # appropriate format. 
state["raw_unread_msgs"] = extract_unread_data_from_um_rows([], user_profile) if want("starred_messages"): state["starred_messages"] = ( [] if user_profile is None else get_starred_message_ids(user_profile) ) if want("stream") and include_streams: # The web app doesn't use the data from here; instead, # it uses data from state["subscriptions"] and other # places. if user_profile is not None: state["streams"] = do_get_streams( user_profile, include_web_public=True, include_all_active=user_profile.is_realm_admin, ) else: # TODO: This line isn't used by the web app because it # gets these data via the `subscriptions` key; it will # be used when the mobile apps support logged-out # access. state["streams"] = get_web_public_streams(realm) # nocoverage if want("default_streams"): if settings_user.is_guest: # Guest users and logged-out users don't have access to # all default streams, so we pretend the organization # doesn't have any. state["realm_default_streams"] = [] else: state["realm_default_streams"] = get_default_streams_for_realm_as_dicts(realm.id) if want("default_stream_groups"): if settings_user.is_guest: state["realm_default_stream_groups"] = [] else: state["realm_default_stream_groups"] = default_stream_groups_to_dicts_sorted( get_default_stream_groups(realm) ) if want("stop_words"): state["stop_words"] = read_stop_words() if want("update_display_settings") and not user_settings_object: for prop in UserProfile.display_settings_legacy: state[prop] = getattr(settings_user, prop) state["emojiset_choices"] = UserProfile.emojiset_choices() state["timezone"] = canonicalize_timezone(settings_user.timezone) if want("update_global_notifications") and not user_settings_object: for notification in UserProfile.notification_settings_legacy: state[notification] = getattr(settings_user, notification) state["available_notification_sounds"] = get_available_notification_sounds() if want("user_settings"): state["user_settings"] = {} for prop in UserProfile.property_types: state["user_settings"][prop] = getattr(settings_user, prop) state["user_settings"]["emojiset_choices"] = UserProfile.emojiset_choices() state["user_settings"]["timezone"] = canonicalize_timezone(settings_user.timezone) state["user_settings"]["available_notification_sounds"] = ( get_available_notification_sounds() ) if want("user_status"): # We require creating an account to access statuses. state["user_status"] = ( {} if user_profile is None else get_user_status_dict(realm=realm, user_profile=user_profile) ) if want("user_topic"): state["user_topics"] = [] if user_profile is None else get_user_topics(user_profile) if want("video_calls"): state["has_zoom_token"] = settings_user.zoom_token is not None if want("giphy"): # Normally, it would be a nasty security bug to send a # server's API key to end users. However, GIPHY's API key # security model is precisely to do that; every service # publishes its API key (and GIPHY's client-side JS libraries # require the API key to work). This security model makes # sense because GIPHY API keys are all essentially equivalent # in letting one search for GIFs; GIPHY only requires API keys # to exist at all so that they can deactivate them in cases of # abuse. state["giphy_api_key"] = settings.GIPHY_API_KEY if settings.GIPHY_API_KEY else "" if user_profile is None: # To ensure we have the correct user state set. assert state["is_admin"] is False assert state["is_owner"] is False assert state["is_guest"] is True return state
NOTE: Below is an example of post-processing initial state data AFTER we apply events. For large payloads like `unread_msgs`, it's helpful to have an intermediate data structure that is easy to manipulate with O(1)-type operations as we apply events. Then, only at the end, we put it in the form that's more appropriate for clients.
def post_process_state( user_profile: Optional[UserProfile], ret: Dict[str, Any], notification_settings_null: bool ) -> None: """ NOTE: Below is an example of post-processing initial state data AFTER we apply events. For large payloads like `unread_msgs`, it's helpful to have an intermediate data structure that is easy to manipulate with O(1)-type operations as we apply events. Then, only at the end, we put it in the form that's more appropriate for client. """ if "raw_unread_msgs" in ret: ret["unread_msgs"] = aggregate_unread_data(ret["raw_unread_msgs"]) del ret["raw_unread_msgs"] """ See the note above; the same technique applies below. """ if "raw_users" in ret: user_dicts = sorted(ret["raw_users"].values(), key=lambda x: x["user_id"]) ret["realm_users"] = [d for d in user_dicts if d["is_active"]] ret["realm_non_active_users"] = [d for d in user_dicts if not d["is_active"]] """ Be aware that we do intentional aliasing in the below code. We can now safely remove the `is_active` field from all the dicts that got partitioned into the two lists above. We remove the field because it's already implied, and sending it to clients makes clients prone to bugs where they "trust" the field but don't actually update in live updates. It also wastes bandwidth. """ for d in user_dicts: d.pop("is_active") del ret["raw_users"] if "raw_recent_private_conversations" in ret: # Reformat recent_private_conversations to be a list of dictionaries, rather than a dict. ret["recent_private_conversations"] = sorted( ( dict( **value, ) for (recipient_id, value) in ret["raw_recent_private_conversations"].items() ), key=lambda x: -x["max_message_id"], ) del ret["raw_recent_private_conversations"] if not notification_settings_null and "subscriptions" in ret: for stream_dict in ret["subscriptions"] + ret["unsubscribed"]: handle_stream_notifications_compatibility( user_profile, stream_dict, notification_settings_null )
The way we send realm emojis is kinda clumsy--we send a dict mapping the emoji id to a sub_dict with the fields (including the id). Ideally we can streamline this and just send a list of dicts. The clients can make a Map as needed.
def check_realm_emoji_update(var_name: str, event: Dict[str, object]) -> None: """ The way we send realm emojis is kinda clumsy--we send a dict mapping the emoji id to a sub_dict with the fields (including the id). Ideally we can streamline this and just send a list of dicts. The clients can make a Map as needed. """ _check_realm_emoji_update(var_name, event) assert isinstance(event["realm_emoji"], dict) for k, v in event["realm_emoji"].items(): assert v["id"] == k
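A hypothetical event payload satisfying the invariant checked above (the emoji name and IDs are invented):

event = {
    "type": "realm_emoji",
    "op": "update",
    "realm_emoji": {
        # Each key must match the "id" field of its sub-dict.
        "3": {"id": "3", "name": "party_parrot", "deactivated": False},
    },
}
assert all(v["id"] == k for k, v in event["realm_emoji"].items())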
Realm updates have these two fields: property value We check not only the basic schema, but also that the value actually matches the type from Realm.property_types that we have configured for the property.
def check_realm_update(
    var_name: str,
    event: Dict[str, object],
    prop: str,
) -> None:
    """
    Realm updates have these two fields:

        property
        value

    We check not only the basic schema, but also that the value
    actually matches the type from Realm.property_types that we
    have configured for the property.
    """
    _check_realm_update(var_name, event)

    assert prop == event["property"]

    value = event["value"]

    if prop == "plan_type":
        assert isinstance(value, int)
        assert "extra_data" in event
        return

    assert "extra_data" not in event

    if prop in [
        "new_stream_announcements_stream_id",
        "signup_announcements_stream_id",
        "zulip_update_announcements_stream_id",
        "org_type",
    ]:
        assert isinstance(value, int)
        return

    property_type = Realm.property_types[prop]

    if property_type in (bool, int, str):
        assert isinstance(value, property_type)
    elif property_type == (int, type(None)):
        assert isinstance(value, int)
    elif property_type == (str, type(None)):
        assert isinstance(value, str)
    else:
        raise AssertionError(f"Unexpected property type {property_type}")
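As a sketch, a realm update event that should pass these checks, assuming `name` is one of the plain `str` properties in Realm.property_types:

event = {
    "type": "realm",
    "op": "update",
    "property": "name",
    "value": "Renamed Organization",
}
# check_realm_update("events[0]", event, "name") would then verify both
# the basic schema and that the value is a str, per Realm.property_types.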
Display setting events have a "setting" field that is more specifically typed according to the UserProfile.property_types dictionary.
def check_update_display_settings( var_name: str, event: Dict[str, object], ) -> None: """ Display setting events have a "setting" field that is more specifically typed according to the UserProfile.property_types dictionary. """ _check_update_display_settings(var_name, event) setting_name = event["setting_name"] setting = event["setting"] assert isinstance(setting_name, str) if setting_name == "timezone": assert isinstance(setting, str) else: setting_type = UserProfile.property_types[setting_name] assert isinstance(setting, setting_type) if setting_name == "default_language": assert "language_name" in event else: assert "language_name" not in event
See UserProfile.notification_settings_legacy for more details.
def check_update_global_notifications( var_name: str, event: Dict[str, object], desired_val: Union[bool, int, str], ) -> None: """ See UserProfile.notification_settings_legacy for more details. """ _check_update_global_notifications(var_name, event) setting_name = event["notification_name"] setting = event["setting"] assert setting == desired_val assert isinstance(setting_name, str) setting_type = UserProfile.notification_settings_legacy[setting_name] assert isinstance(setting, setting_type)
IMPORTANT: You generally don't want to call this directly. Instead use one of the higher level helpers: write_table_data write_records_json_file The one place we call this directly is for message partials.
def write_data_to_file(output_file: Path, data: Any) -> None: """ IMPORTANT: You generally don't want to call this directly. Instead use one of the higher level helpers: write_table_data write_records_json_file The one place we call this directly is for message partials. """ with open(output_file, "wb") as f: # Because we don't pass a default handler, OPT_PASSTHROUGH_DATETIME # actually causes orjson to raise a TypeError on datetime objects. This # is what we want, because it helps us check that we correctly # post-processed them to serialize to UNIX timestamps rather than ISO # 8601 strings for historical reasons. f.write(orjson.dumps(data, option=orjson.OPT_INDENT_2 | orjson.OPT_PASSTHROUGH_DATETIME)) logging.info("Finished writing %s", output_file)
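The orjson behavior this relies on is easy to demonstrate: without a `default=` handler, OPT_PASSTHROUGH_DATETIME makes serializing a raw datetime fail loudly rather than silently emitting an ISO 8601 string:

import datetime

import orjson

try:
    orjson.dumps(
        {"when": datetime.datetime.now()},
        option=orjson.OPT_PASSTHROUGH_DATETIME,
    )
except TypeError:
    # orjson's JSONEncodeError subclasses TypeError; hitting this branch
    # means a datetime was not post-processed to a UNIX timestamp.
    print("unserialized datetime detected")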
Takes a Django query and returns a JSONable list of dictionaries corresponding to the database rows.
def make_raw(query: Any, exclude: Optional[List[Field]] = None) -> List[Record]: """ Takes a Django query and returns a JSONable list of dictionaries corresponding to the database rows. """ rows = [] for instance in query: data = model_to_dict(instance, exclude=exclude) """ In Django 1.11.5, model_to_dict evaluates the QuerySet of many-to-many field to give us a list of instances. We require a list of primary keys, so we get the primary keys from the instances below. """ for field in instance._meta.many_to_many: if exclude is not None and field.name in exclude: continue value = data[field.name] data[field.name] = [row.id for row in value] rows.append(data) return rows
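The many-to-many flattening is the only subtle step; here is a pure-Python sketch of what it does (the class below is a stand-in for a related model instance, not a real model):

class FakeRelated:
    def __init__(self, id: int) -> None:
        self.id = id

data = {"users": [FakeRelated(17), FakeRelated(42)]}
# model_to_dict gave us instances; keep only their primary keys:
data["users"] = [row.id for row in data["users"]]
assert data["users"] == [17, 42]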
We add tables here that are keyed by user, and for which we fetch rows using the same scheme whether we are exporting a realm or a single user. For any table where there is nuance between how you fetch for realms vs. single users, it's best to just keep things simple and have each caller maintain its own slightly different four-or-five-line Config (while still possibly calling common code deeper in the stack). As of now, we do NOT include bot tables like Service.
def add_user_profile_child_configs(user_profile_config: Config) -> None: """ We add tables here that are keyed by user, and for which we fetch rows using the same scheme whether we are exporting a realm or a single user. For any table where there is nuance between how you fetch for realms vs. single users, it's best to just keep things simple and have each caller maintain its own slightly different 4/5 line Config (while still possibly calling common code deeper in the stack). As of now, we do NOT include bot tables like Service. """ Config( table="zerver_alertword", model=AlertWord, normal_parent=user_profile_config, include_rows="user_profile_id__in", ) Config( table="zerver_customprofilefieldvalue", model=CustomProfileFieldValue, normal_parent=user_profile_config, include_rows="user_profile_id__in", ) Config( table="zerver_muteduser", model=MutedUser, normal_parent=user_profile_config, include_rows="user_profile_id__in", ) Config( table="zerver_onboardingstep", model=OnboardingStep, normal_parent=user_profile_config, include_rows="user_id__in", ) Config( table="zerver_useractivity", model=UserActivity, normal_parent=user_profile_config, include_rows="user_profile_id__in", ) Config( table="zerver_useractivityinterval", model=UserActivityInterval, normal_parent=user_profile_config, include_rows="user_profile_id__in", ) Config( table="zerver_userpresence", model=UserPresence, normal_parent=user_profile_config, include_rows="user_profile_id__in", ) Config( table="zerver_userstatus", model=UserStatus, normal_parent=user_profile_config, include_rows="user_profile_id__in", ) Config( table="zerver_usertopic", model=UserTopic, normal_parent=user_profile_config, include_rows="user_profile_id__in", )