Merge branch 'master' into feature-limited-visibility-bearcaps

Takeshi Umeda 2021-01-10 11:17:55 +09:00 committed by GitHub
commit 98a2603dc1
855 changed files with 32564 additions and 10102 deletions


app/lib/access_token_extension.rb (new file, +17)
@@ -0,0 +1,17 @@
# frozen_string_literal: true
module AccessTokenExtension
extend ActiveSupport::Concern
included do
after_commit :push_to_streaming_api
end
def revoke(clock = Time)
update(revoked_at: clock.now.utc)
end
def push_to_streaming_api
Redis.current.publish("timeline:access_token:#{id}", Oj.dump(event: :kill)) if revoked? || destroyed?
end
end

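A usage sketch of the concern above, assuming (as the Doorkeeper-flavoured naming suggests) that it is included into Doorkeeper::AccessToken elsewhere in the app; the token value is made up:

    token = Doorkeeper::AccessToken.find_by(token: 'abc123')  # hypothetical token
    token.revoke  # sets revoked_at to Time.now.utc; the clock argument is injectable for tests
    # after_commit then fires push_to_streaming_api, publishing {"event":"kill"}
    # on "timeline:access_token:#{token.id}" so the streaming server can drop
    # any connection authenticated with this token.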

@@ -74,7 +74,7 @@ class ActivityPub::Activity
@object_uri ||= begin
str = value_or_id(@object)
if str.start_with?('bear:')
if str&.start_with?('bear:')
Addressable::URI.parse(str).query_values['u']
else
str
@@ -118,13 +118,13 @@ class ActivityPub::Activity
end
def notify_about_reblog(status)
NotifyService.new.call(status.reblog.account, status)
NotifyService.new.call(status.reblog.account, :reblog, status)
end
def notify_about_mentions(status)
status.active_mentions.includes(:account).each do |mention|
next unless mention.account.local? && audience_includes?(mention.account)
NotifyService.new.call(mention.account, mention)
NotifyService.new.call(mention.account, :mention, mention)
end
end

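Note the new calling convention for NotifyService introduced by this merge, applied in the hunks above and again in the Follow and Like handlers below: the notification type becomes an explicit second argument instead of, apparently, being inferred from the activity object's class. The shape, taken from the diff itself:

    # recipient account first, then the explicit type, then the activity record
    NotifyService.new.call(original_status.account, :favourite, favourite)
    NotifyService.new.call(target_account, :follow_request, follow_request)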

@@ -112,7 +112,7 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
created_at: @object['published'],
override_timestamps: @options[:override_timestamps],
reply: @object['inReplyTo'].present?,
sensitive: @object['sensitive'] || false,
sensitive: @account.sensitized? || @object['sensitive'] || false,
visibility: visibility_from_audience,
thread: replied_to_status,
conversation: conversation_from_context,
@@ -231,6 +231,8 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
emoji ||= CustomEmoji.new(domain: @account.domain, shortcode: shortcode, uri: uri)
emoji.image_remote_url = image_url
emoji.save
rescue Seahorse::Client::NetworkingError
nil
end
def process_attachments
@@ -253,6 +255,8 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
media_attachment.save
rescue Mastodon::UnexpectedResponseError, HTTP::TimeoutError, HTTP::ConnectionError, OpenSSL::SSL::SSLError
RedownloadMediaWorker.perform_in(rand(30..600).seconds, media_attachment.id)
rescue Seahorse::Client::NetworkingError
nil
end
end


@@ -13,7 +13,7 @@ class ActivityPub::Activity::Delete < ActivityPub::Activity
def delete_person
lock_or_return("delete_in_progress:#{@account.id}") do
SuspendAccountService.new.call(@account, reserve_username: false)
DeleteAccountService.new.call(@account, reserve_username: false, skip_activitypub: true)
end
end


@@ -4,8 +4,8 @@ class ActivityPub::Activity::Flag < ActivityPub::Activity
def perform
return if skip_reports?
target_accounts = object_uris.map { |uri| account_from_uri(uri) }.compact.select(&:local?)
target_statuses_by_account = object_uris.map { |uri| status_from_uri(uri) }.compact.select(&:local?).group_by(&:account_id)
target_accounts = object_uris.filter_map { |uri| account_from_uri(uri) }.select(&:local?)
target_statuses_by_account = object_uris.filter_map { |uri| status_from_uri(uri) }.select(&:local?).group_by(&:account_id)
target_accounts.each do |target_account|
target_statuses = target_statuses_by_account[target_account.id]

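This hunk, like the EntityCache and SpamCheck ones further down, swaps map { ... }.compact for Ruby 2.7's Enumerable#filter_map, which drops nil block results in a single pass. A minimal illustration, using a lambda as a stand-in for account_from_uri:

    lookup = ->(uri) { uri.start_with?('https://') ? uri : nil }  # stand-in resolver
    uris   = ['https://a.example/1', 'bogus', 'https://a.example/2']

    uris.map { |u| lookup.call(u) }.compact  # => ["https://a.example/1", "https://a.example/2"]
    uris.filter_map { |u| lookup.call(u) }   # same result, without the intermediate nils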

@@ -22,10 +22,10 @@ class ActivityPub::Activity::Follow < ActivityPub::Activity
follow_request = FollowRequest.create!(account: @account, target_account: target_account, uri: @json['id'])
if target_account.locked? || @account.silenced?
NotifyService.new.call(target_account, follow_request)
NotifyService.new.call(target_account, :follow_request, follow_request)
else
AuthorizeFollowService.new.call(@account, target_account)
NotifyService.new.call(target_account, ::Follow.find_by(account: @account, target_account: target_account))
NotifyService.new.call(target_account, :follow, ::Follow.find_by(account: @account, target_account: target_account))
end
end


@@ -7,6 +7,6 @@ class ActivityPub::Activity::Like < ActivityPub::Activity
return if original_status.nil? || !original_status.account.local? || delete_arrived_first?(@json['id']) || @account.favourited?(original_status)
favourite = original_status.favourites.create!(account: @account)
NotifyService.new.call(original_status.account, favourite)
NotifyService.new.call(original_status.account, :favourite, favourite)
end
end


@@ -20,6 +20,9 @@ class ActivityPub::Activity::Move < ActivityPub::Activity
# Initiate a re-follow for each follower
MoveWorker.perform_async(origin_account.id, target_account.id)
rescue
unmark_as_processing!
raise
end
private


@@ -13,7 +13,7 @@ class ActivityPub::Adapter < ActiveModelSerializers::Adapter::Base
moved_to: { 'movedTo' => { '@id' => 'as:movedTo', '@type' => '@id' } },
also_known_as: { 'alsoKnownAs' => { '@id' => 'as:alsoKnownAs', '@type' => '@id' } },
emoji: { 'toot' => 'http://joinmastodon.org/ns#', 'Emoji' => 'toot:Emoji' },
featured: { 'toot' => 'http://joinmastodon.org/ns#', 'featured' => { '@id' => 'toot:featured', '@type' => '@id' } },
featured: { 'toot' => 'http://joinmastodon.org/ns#', 'featured' => { '@id' => 'toot:featured', '@type' => '@id' }, 'featuredTags' => { '@id' => 'toot:featuredTags', '@type' => '@id' } },
property_value: { 'schema' => 'http://schema.org#', 'PropertyValue' => 'schema:PropertyValue', 'value' => 'schema:value' },
atom_uri: { 'ostatus' => 'http://ostatus.org#', 'atomUri' => 'ostatus:atomUri' },
conversation: { 'ostatus' => 'http://ostatus.org#', 'inReplyToAtomUri' => 'ostatus:inReplyToAtomUri', 'conversation' => 'ostatus:conversation' },
@@ -23,6 +23,7 @@ class ActivityPub::Adapter < ActiveModelSerializers::Adapter::Base
discoverable: { 'toot' => 'http://joinmastodon.org/ns#', 'discoverable' => 'toot:discoverable' },
voters_count: { 'toot' => 'http://joinmastodon.org/ns#', 'votersCount' => 'toot:votersCount' },
olm: { 'toot' => 'http://joinmastodon.org/ns#', 'Device' => 'toot:Device', 'Ed25519Signature' => 'toot:Ed25519Signature', 'Ed25519Key' => 'toot:Ed25519Key', 'Curve25519Key' => 'toot:Curve25519Key', 'EncryptedMessage' => 'toot:EncryptedMessage', 'publicKeyBase64' => 'toot:publicKeyBase64', 'deviceId' => 'toot:deviceId', 'claim' => { '@type' => '@id', '@id' => 'toot:claim' }, 'fingerprintKey' => { '@type' => '@id', '@id' => 'toot:fingerprintKey' }, 'identityKey' => { '@type' => '@id', '@id' => 'toot:identityKey' }, 'devices' => { '@type' => '@id', '@id' => 'toot:devices' }, 'messageFranking' => 'toot:messageFranking', 'messageType' => 'toot:messageType', 'cipherText' => 'toot:cipherText' },
suspended: { 'toot' => 'http://joinmastodon.org/ns#', 'suspended' => 'toot:suspended' },
}.freeze
def self.default_key_transform


@@ -27,7 +27,7 @@ class ActivityPub::LinkedDataSignature
document_hash = hash(@json.without('signature'))
to_be_verified = options_hash + document_hash
if creator.keypair.public_key.verify(OpenSSL::Digest::SHA256.new, Base64.decode64(signature), to_be_verified)
if creator.keypair.public_key.verify(OpenSSL::Digest.new('SHA256'), Base64.decode64(signature), to_be_verified)
creator
end
end
@@ -44,7 +44,7 @@ class ActivityPub::LinkedDataSignature
to_be_signed = options_hash + document_hash
keypair = sign_with.present? ? OpenSSL::PKey::RSA.new(sign_with) : creator.keypair
signature = Base64.strict_encode64(keypair.sign(OpenSSL::Digest::SHA256.new, to_be_signed))
signature = Base64.strict_encode64(keypair.sign(OpenSSL::Digest.new('SHA256'), to_be_signed))
@json.merge('signature' => options.merge('signatureValue' => signature))
end

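The two hunks above (and the matching Request hunk below) replace OpenSSL::Digest::SHA256.new with the string-based constructor. Both yield a SHA-256 digest object; the likely motivation, not stated in the merge itself, is that newer versions of the openssl gem deprecate the per-algorithm constants:

    require 'openssl'

    OpenSSL::Digest::SHA256.new.name    # => "SHA256" (old spelling)
    OpenSSL::Digest.new('SHA256').name  # => "SHA256" (new spelling, same algorithm)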

@@ -48,6 +48,10 @@ class ActivityPub::TagManager
end
end
def uri_for_username(username)
account_url(username: username)
end
def generate_uri_for(_target)
URI.join(root_url, 'payloads', SecureRandom.uuid)
end

app/lib/cache_buster.rb (new file, +28)

@@ -0,0 +1,28 @@
# frozen_string_literal: true
class CacheBuster
def initialize(options = {})
@secret_header = options[:secret_header] || 'Secret-Header'
@secret = options[:secret] || 'True'
end
def bust(url)
site = Addressable::URI.parse(url).normalized_site
request_pool.with(site) do |http_client|
build_request(url, http_client).perform
end
end
private
def request_pool
RequestPool.current
end
def build_request(url, http_client)
Request.new(:get, url, http_client: http_client).tap do |request|
request.add_headers(@secret_header => @secret)
end
end
end

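A usage sketch for the new class. The header name and secret are whatever the reverse proxy in front of file storage has been configured to accept, so the values below are illustrative only:

    buster = CacheBuster.new(secret_header: 'X-Cache-Purge', secret: ENV.fetch('CACHE_BUSTER_SECRET'))
    buster.bust('https://files.example.com/media_attachments/123/original.png')
    # Issues a GET for the URL with "X-Cache-Purge: <secret>" through the shared
    # RequestPool; a proxy that recognises the header can evict its cached copy.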

@@ -16,7 +16,7 @@ class EntityCache
end
def emoji(shortcodes, domain)
shortcodes = [shortcodes] unless shortcodes.is_a?(Array)
shortcodes = Array(shortcodes)
cached = Rails.cache.read_multi(*shortcodes.map { |shortcode| to_key(:emoji, shortcode, domain) })
uncached_ids = []
@@ -29,7 +29,7 @@ class EntityCache
uncached.each_value { |item| Rails.cache.write(to_key(:emoji, item.shortcode, domain), item, expires_in: MAX_EXPIRATION) }
end
shortcodes.map { |shortcode| cached[to_key(:emoji, shortcode, domain)] || uncached[shortcode] }.compact
shortcodes.filter_map { |shortcode| cached[to_key(:emoji, shortcode, domain)] || uncached[shortcode] }
end
def to_key(type, *ids)

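The Kernel#Array conversion adopted here matches the manual is_a?(Array) wrap for strings and arrays, and is also safe for nil:

    Array('blobcat')             # => ["blobcat"]
    Array(%w(blobcat ablobcat))  # => ["blobcat", "ablobcat"]
    Array(nil)                   # => []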
app/lib/fast_ip_map.rb (new file, +32)

@@ -0,0 +1,32 @@
# frozen_string_literal: true
class FastIpMap
MAX_IPV4_PREFIX = 32
MAX_IPV6_PREFIX = 128
# @param [Enumerable<IPAddr>] addresses
def initialize(addresses)
@fast_lookup = {}
@ranges = []
# Hash look-up is faster but only works for exact matches, so we split
# exact addresses from non-exact ones
addresses.each do |address|
if (address.ipv4? && address.prefix == MAX_IPV4_PREFIX) || (address.ipv6? && address.prefix == MAX_IPV6_PREFIX)
@fast_lookup[address.to_s] = true
else
@ranges << address
end
end
# We're more likely to hit wider-reaching ranges when checking for
# inclusion, so make sure they're sorted first
@ranges.sort_by!(&:prefix)
end
# @param [IPAddr] address
# @return [Boolean]
def include?(address)
@fast_lookup[address.to_s] || @ranges.any? { |cidr| cidr.include?(address) }
end
end

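A usage sketch of the class above, with made-up addresses:

    require 'ipaddr'

    map = FastIpMap.new([
      IPAddr.new('203.0.113.0/24'),  # a range: matched by the sorted linear scan
      IPAddr.new('198.51.100.7'),    # a /32: matched by the exact-match hash
    ])

    map.include?(IPAddr.new('198.51.100.7'))  # => true  (hash fast path)
    map.include?(IPAddr.new('203.0.113.99'))  # => true  (range scan)
    map.include?(IPAddr.new('192.0.2.1'))     # => false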

@@ -6,31 +6,54 @@ class FeedManager
include Singleton
include Redisable
# Maximum number of items stored in a single feed
MAX_ITEMS = 400
# Must be <= MAX_ITEMS or the tracking sets will grow forever
# Number of items in the feed since last reblog of status
# before the new reblog will be inserted. Must be <= MAX_ITEMS
# or the tracking sets will grow forever
REBLOG_FALLOFF = 40
# Execute block for every active account
# @yield [Account]
# @return [void]
def with_active_accounts(&block)
Account.joins(:user).where('users.current_sign_in_at > ?', User::ACTIVE_DURATION.ago).find_each(&block)
end
# Redis key of a feed
# @param [Symbol] type
# @param [Integer] id
# @param [Symbol] subtype
# @return [String]
def key(type, id, subtype = nil)
return "feed:#{type}:#{id}" unless subtype
"feed:#{type}:#{id}:#{subtype}"
end
def filter?(timeline_type, status, receiver_id)
if timeline_type == :home
filter_from_home?(status, receiver_id, build_crutches(receiver_id, [status]))
elsif timeline_type == :mentions
filter_from_mentions?(status, receiver_id)
# Check if the status should not be added to a feed
# @param [Symbol] timeline_type
# @param [Status] status
# @param [Account|List] receiver
# @return [Boolean]
def filter?(timeline_type, status, receiver)
case timeline_type
when :home
filter_from_home?(status, receiver.id, build_crutches(receiver.id, [status]))
when :list
filter_from_list?(status, receiver) || filter_from_home?(status, receiver.account_id, build_crutches(receiver.account_id, [status]))
when :mentions
filter_from_mentions?(status, receiver.id)
else
false
end
end
# Add a status to a home feed and send a streaming API update
# @param [Account] account
# @param [Status] status
# @return [Boolean]
def push_to_home(account, status)
return false unless add_to_feed(:home, account.id, status, account.user&.aggregates_reblogs?)
@@ -39,6 +62,10 @@ class FeedManager
true
end
# Remove a status from a home feed and send a streaming API update
# @param [Account] account
# @param [Status] status
# @return [Boolean]
def unpush_from_home(account, status)
return false unless remove_from_feed(:home, account.id, status, account.user&.aggregates_reblogs?)
@@ -46,20 +73,22 @@ class FeedManager
true
end
# Add a status to a list feed and send a streaming API update
# @param [List] list
# @param [Status] status
# @return [Boolean]
def push_to_list(list, status)
if status.reply? && status.in_reply_to_account_id != status.account_id
should_filter = status.in_reply_to_account_id != list.account_id
should_filter &&= !ListAccount.where(list_id: list.id, account_id: status.in_reply_to_account_id).exists?
return false if should_filter
end
return false unless add_to_feed(:list, list.id, status, list.account.user&.aggregates_reblogs?)
return false if filter_from_list?(status, list) || !add_to_feed(:list, list.id, status, list.account.user&.aggregates_reblogs?)
trim(:list, list.id)
PushUpdateWorker.perform_async(list.account_id, status.id, "timeline:list:#{list.id}") if push_update_required?("timeline:list:#{list.id}")
true
end
# Remove a status from a list feed and send a streaming API update
# @param [List] list
# @param [Status] status
# @return [Boolean]
def unpush_from_list(list, status)
return false unless remove_from_feed(:list, list.id, status, list.account.user&.aggregates_reblogs?)
@@ -67,32 +96,11 @@ class FeedManager
true
end
def trim(type, account_id)
timeline_key = key(type, account_id)
reblog_key = key(type, account_id, 'reblogs')
# Remove any items past the MAX_ITEMS'th entry in our feed
redis.zremrangebyrank(timeline_key, 0, -(FeedManager::MAX_ITEMS + 1))
# Get the score of the REBLOG_FALLOFF'th item in our feed, and stop
# tracking anything after it for deduplication purposes.
falloff_rank = FeedManager::REBLOG_FALLOFF - 1
falloff_range = redis.zrevrange(timeline_key, falloff_rank, falloff_rank, with_scores: true)
falloff_score = falloff_range&.first&.last&.to_i || 0
# Get any reblogs we might have to clean up after.
redis.zrangebyscore(reblog_key, 0, falloff_score).each do |reblogged_id|
# Remove it from the set of reblogs we're tracking *first* to avoid races.
redis.zrem(reblog_key, reblogged_id)
# Just drop any set we might have created to track additional reblogs.
# This means that if this reblog is deleted, we won't automatically insert
# another reblog, but also that any new reblog can be inserted into the
# feed.
redis.del(key(type, account_id, "reblogs:#{reblogged_id}"))
end
end
def merge_into_timeline(from_account, into_account)
# Fill a home feed with an account's statuses
# @param [Account] from_account
# @param [Account] into_account
# @return [void]
def merge_into_home(from_account, into_account)
timeline_key = key(:home, into_account.id)
aggregate = into_account.user&.aggregates_reblogs?
query = from_account.statuses.where(visibility: [:public, :unlisted, :private]).includes(:preloadable_poll, reblog: :account).limit(FeedManager::MAX_ITEMS / 4)
@@ -114,7 +122,37 @@ class FeedManager
trim(:home, into_account.id)
end
def unmerge_from_timeline(from_account, into_account)
# Fill a list feed with an account's statuses
# @param [Account] from_account
# @param [List] list
# @return [void]
def merge_into_list(from_account, list)
timeline_key = key(:list, list.id)
aggregate = list.account.user&.aggregates_reblogs?
query = from_account.statuses.where(visibility: [:public, :unlisted, :private]).includes(:preloadable_poll, reblog: :account).limit(FeedManager::MAX_ITEMS / 4)
if redis.zcard(timeline_key) >= FeedManager::MAX_ITEMS / 4
oldest_home_score = redis.zrange(timeline_key, 0, 0, with_scores: true).first.last.to_i
query = query.where('id > ?', oldest_home_score)
end
statuses = query.to_a
crutches = build_crutches(list.account_id, statuses)
statuses.each do |status|
next if filter_from_home?(status, list.account_id, crutches) || filter_from_list?(status, list)
add_to_feed(:list, list.id, status, aggregate)
end
trim(:list, list.id)
end
# Remove an account's statuses from a home feed
# @param [Account] from_account
# @param [Account] into_account
# @return [void]
def unmerge_from_home(from_account, into_account)
timeline_key = key(:home, into_account.id)
oldest_home_score = redis.zrange(timeline_key, 0, 0, with_scores: true)&.first&.last&.to_i || 0
@@ -123,14 +161,31 @@ class FeedManager
end
end
def clear_from_timeline(account, target_account)
# Clear from timeline all statuses from or mentionning target_account
# Remove an account's statuses from a list feed
# @param [Account] from_account
# @param [List] list
# @return [void]
def unmerge_from_list(from_account, list)
timeline_key = key(:list, list.id)
oldest_list_score = redis.zrange(timeline_key, 0, 0, with_scores: true)&.first&.last&.to_i || 0
from_account.statuses.select('id, reblog_of_id').where('id > ?', oldest_list_score).reorder(nil).find_each do |status|
remove_from_feed(:list, list.id, status, list.account.user&.aggregates_reblogs?)
end
end
# Clear all statuses from or mentioning target_account from a home feed
# @param [Account] account
# @param [Account] target_account
# @return [void]
def clear_from_home(account, target_account)
timeline_key = key(:home, account.id)
timeline_status_ids = redis.zrange(timeline_key, 0, -1)
statuses = Status.where(id: timeline_status_ids).select(:id, :reblog_of_id, :account_id).to_a
reblogged_ids = Status.where(id: statuses.map(&:reblog_of_id).compact, account: target_account).pluck(:id)
with_mentions_ids = Mention.active.where(status_id: statuses.flat_map { |s| [s.id, s.reblog_of_id] }.compact, account: target_account).pluck(:status_id)
target_statuses = statuses.filter do |status|
target_statuses = statuses.select do |status|
status.account_id == target_account.id || reblogged_ids.include?(status.reblog_of_id) || with_mentions_ids.include?(status.id) || with_mentions_ids.include?(status.reblog_of_id)
end
@@ -139,12 +194,15 @@ class FeedManager
end
end
def populate_feed(account)
# Populate home feed of account from scratch
# @param [Account] account
# @return [void]
def populate_home(account)
limit = FeedManager::MAX_ITEMS / 2
aggregate = account.user&.aggregates_reblogs?
timeline_key = key(:home, account.id)
account.statuses.where.not(visibility: :direct).limit(limit).each do |status|
account.statuses.limit(limit).each do |status|
add_to_feed(:home, account.id, status, aggregate)
end
@@ -172,17 +230,91 @@ class FeedManager
end
end
private
# Completely clear multiple feeds at once
# @param [Symbol] type
# @param [Array<Integer>] ids
# @return [void]
def clean_feeds!(type, ids)
reblogged_id_sets = {}
def push_update_required?(timeline_id)
redis.exists?("subscribed:#{timeline_id}")
redis.pipelined do
ids.each do |feed_id|
redis.del(key(type, feed_id))
reblog_key = key(type, feed_id, 'reblogs')
# We collect a future for this: we don't block while getting
# it, but we can iterate over it later.
reblogged_id_sets[feed_id] = redis.zrange(reblog_key, 0, -1)
redis.del(reblog_key)
end
end
# Remove all of the reblog tracking keys we just removed the
# references to.
redis.pipelined do
reblogged_id_sets.each do |feed_id, future|
future.value.each do |reblogged_id|
reblog_set_key = key(type, feed_id, "reblogs:#{reblogged_id}")
redis.del(reblog_set_key)
end
end
end
end
private
# Trim a feed to maximum size by removing older items
# @param [Symbol] type
# @param [Integer] timeline_id
# @return [void]
def trim(type, timeline_id)
timeline_key = key(type, timeline_id)
reblog_key = key(type, timeline_id, 'reblogs')
# Remove any items past the MAX_ITEMS'th entry in our feed
redis.zremrangebyrank(timeline_key, 0, -(FeedManager::MAX_ITEMS + 1))
# Get the score of the REBLOG_FALLOFF'th item in our feed, and stop
# tracking anything after it for deduplication purposes.
falloff_rank = FeedManager::REBLOG_FALLOFF
falloff_range = redis.zrevrange(timeline_key, falloff_rank, falloff_rank, with_scores: true)
falloff_score = falloff_range&.first&.last&.to_i
return if falloff_score.nil?
# Get any reblogs we might have to clean up after.
redis.zrangebyscore(reblog_key, 0, falloff_score).each do |reblogged_id|
# Remove it from the set of reblogs we're tracking *first* to avoid races.
redis.zrem(reblog_key, reblogged_id)
# Just drop any set we might have created to track additional reblogs.
# This means that if this reblog is deleted, we won't automatically insert
# another reblog, but also that any new reblog can be inserted into the
# feed.
redis.del(key(type, timeline_id, "reblogs:#{reblogged_id}"))
end
end
# Check if there is a streaming API client connected
# for the given feed
# @param [String] timeline_key
# @return [Boolean]
def push_update_required?(timeline_key)
redis.exists?("subscribed:#{timeline_key}")
end
# Check if the account is blocking or muting any of the given accounts
# @param [Integer] receiver_id
# @param [Array<Integer>] account_ids
# @param [Symbol] context
def blocks_or_mutes?(receiver_id, account_ids, context)
Block.where(account_id: receiver_id, target_account_id: account_ids).any? ||
(context == :home ? Mute.where(account_id: receiver_id, target_account_id: account_ids).any? : Mute.where(account_id: receiver_id, target_account_id: account_ids, hide_notifications: true).any?)
end
# Check if status should not be added to the home feed
# @param [Status] status
# @param [Integer] receiver_id
# @param [Hash] crutches
# @return [Boolean]
def filter_from_home?(status, receiver_id, crutches)
return false if receiver_id == status.account_id
return true if status.reply? && (status.in_reply_to_id.nil? || status.in_reply_to_account_id.nil?)
@@ -215,6 +347,11 @@ class FeedManager
false
end
# Check if status should not be added to the mentions feed
# @see NotifyService
# @param [Status] status
# @param [Integer] receiver_id
# @return [Boolean]
def filter_from_mentions?(status, receiver_id)
return true if receiver_id == status.account_id
return true if phrase_filtered?(status, receiver_id, :notifications)
@@ -231,6 +368,27 @@ class FeedManager
should_filter
end
# Check if status should not be added to the list feed
# @param [Status] status
# @param [List] list
# @return [Boolean]
def filter_from_list?(status, list)
if status.reply? && status.in_reply_to_account_id != status.account_id
should_filter = status.in_reply_to_account_id != list.account_id
should_filter &&= !list.show_followed?
should_filter &&= !(list.show_list? && ListAccount.where(list_id: list.id, account_id: status.in_reply_to_account_id).exists?)
return !!should_filter
end
false
end
# Check if the status hits a phrase filter
# @param [Status] status
# @param [Integer] receiver_id
# @param [Symbol] context
# @return [Boolean]
def phrase_filtered?(status, receiver_id, context)
active_filters = Rails.cache.fetch("filters:#{receiver_id}") { CustomFilter.where(account_id: receiver_id).active_irreversible.to_a }.to_a
@@ -266,6 +424,11 @@ class FeedManager
# added, and false if it was not added to the feed. Note that this is
# an internal helper: callers must call trim or push updates if
# either action is appropriate.
# @param [Symbol] timeline_type
# @param [Integer] account_id
# @param [Status] status
# @param [Boolean] aggregate_reblogs
# @return [Boolean]
def add_to_feed(timeline_type, account_id, status, aggregate_reblogs = true)
timeline_key = key(timeline_type, account_id)
reblog_key = key(timeline_type, account_id, 'reblogs')
@@ -278,14 +441,12 @@ class FeedManager
return false if !rank.nil? && rank < FeedManager::REBLOG_FALLOFF
reblog_rank = redis.zrevrank(reblog_key, status.reblog_of_id)
if reblog_rank.nil?
# The ordered set at `reblog_key` holds statuses which have a reblog
# in the top `REBLOG_FALLOFF` statuses of the timeline
if redis.zadd(reblog_key, status.id, status.reblog_of_id, nx: true)
# This is not something we've already seen reblogged, so we
# can just add it to the feed (and note that we're
# reblogging it).
# can just add it to the feed (and note that we're reblogging it).
redis.zadd(timeline_key, status.id, status.id)
redis.zadd(reblog_key, status.id, status.reblog_of_id)
else
# Another reblog of the same status was already in the
# REBLOG_FALLOFF most recent statuses, so we note that this
@@ -299,9 +460,7 @@ class FeedManager
# delay of the worker delivering the original status, the late addition
# by merging timelines, and other reasons.
# If such a reblog already exists, just do not re-insert it into the feed.
rank = redis.zrevrank(reblog_key, status.id)
return false unless rank.nil?
return false unless redis.zscore(reblog_key, status.id).nil?
redis.zadd(timeline_key, status.id, status.id)
end
@@ -313,6 +472,11 @@ class FeedManager
# with reblogs, and returning true if a status was removed. As with
# `add_to_feed`, this does not trigger push updates, so callers must
# do so if appropriate.
# @param [Symbol] timeline_type
# @param [Integer] account_id
# @param [Status] status
# @param [Boolean] aggregate_reblogs
# @return [Boolean]
def remove_from_feed(timeline_type, account_id, status, aggregate_reblogs = true)
timeline_key = key(timeline_type, account_id)
reblog_key = key(timeline_type, account_id, 'reblogs')
@@ -347,6 +511,11 @@ class FeedManager
redis.zrem(timeline_key, status.id)
end
# Pre-fetch various objects and relationships for given statuses that
# are going to be checked by the filtering methods
# @param [Integer] receiver_id
# @param [Array<Status>] statuses
# @return [Hash]
def build_crutches(receiver_id, statuses)
crutches = {}

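Underneath all of the methods above, a feed is two Redis sorted sets. A rough sketch with a bare redis-rb client (key names follow FeedManager#key; the IDs are made up):

    redis = Redis.new

    timeline_key = 'feed:home:1'          # members and scores are status IDs
    reblog_key   = 'feed:home:1:reblogs'  # member: reblogged ID, score: reblogging status ID

    # A plain status is inserted under its own ID (IDs sort chronologically).
    redis.zadd(timeline_key, 101, 101)

    # Status 205 reblogs status 101: insert the reblog, and track 101 so a
    # second reblog arriving shortly after can be skipped (nx: only-if-absent).
    redis.zadd(timeline_key, 205, 205)
    redis.zadd(reblog_key, 205, 101, nx: true)  # => false if 101 was already tracked

    # trim keeps at most MAX_ITEMS entries, dropping the oldest ranks.
    redis.zremrangebyrank(timeline_key, 0, -(400 + 1))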

@@ -135,6 +135,7 @@ class Formatter
end
end
# rubocop:disable Metrics/BlockNesting
def encode_custom_emojis(html, emojis, animate = false)
return html if emojis.empty?
@ -189,6 +190,7 @@ class Formatter
html
end
# rubocop:enable Metrics/BlockNesting
def rewrite(text, entities)
text = text.to_s


@@ -114,7 +114,7 @@ class Request
def signature
algorithm = 'rsa-sha256'
signature = Base64.strict_encode64(@keypair.sign(OpenSSL::Digest::SHA256.new, signed_string))
signature = Base64.strict_encode64(@keypair.sign(OpenSSL::Digest.new('SHA256'), signed_string))
"keyId=\"#{key_id}\",algorithm=\"#{algorithm}\",headers=\"#{signed_headers.keys.join(' ').downcase}\",signature=\"#{signature}\""
end
@@ -253,7 +253,15 @@ class Request
alias new open
def check_private_address(address)
raise Mastodon::HostValidationError if PrivateAddressCheck.private_address?(IPAddr.new(address.to_s))
addr = IPAddr.new(address.to_s)
return if private_address_exceptions.any? { |range| range.include?(addr) }
raise Mastodon::HostValidationError if PrivateAddressCheck.private_address?(addr)
end
def private_address_exceptions
@private_address_exceptions = begin
(ENV['ALLOWED_PRIVATE_ADDRESSES'] || '').split(',').map { |addr| IPAddr.new(addr) }
end
end
end
end

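The new escape hatch reads a comma-separated list from the ALLOWED_PRIVATE_ADDRESSES environment variable. A sketch of what the parsing above accepts (the addresses are examples):

    require 'ipaddr'

    # e.g. ALLOWED_PRIVATE_ADDRESSES=10.0.1.5,192.168.10.0/24
    exceptions = '10.0.1.5,192.168.10.0/24'.split(',').map { |a| IPAddr.new(a) }

    exceptions.any? { |r| r.include?(IPAddr.new('192.168.10.20')) }  # => true, request allowed
    exceptions.any? { |r| r.include?(IPAddr.new('192.168.11.1')) }   # => false, still raises
                                                                     #    Mastodon::HostValidationError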

@@ -18,6 +18,7 @@ class Sanitize
gopher
xmpp
magnet
gemini
).freeze
CLASS_WHITELIST_TRANSFORMER = lambda do |env|


@@ -11,7 +11,6 @@ module Settings
@object = object
end
# rubocop:disable Style/MethodMissingSuper
def method_missing(method, *args)
method_name = method.to_s
# set a value for a variable
@@ -24,7 +23,6 @@ module Settings
self[method_name]
end
end
# rubocop:enable Style/MethodMissingSuper
def respond_to_missing?(*)
true


@@ -17,8 +17,10 @@ class SidekiqErrorHandler
private
# rubocop:disable Naming/MethodParameterName
def limit_backtrace_and_raise(e)
e.set_backtrace(e.backtrace.first(BACKTRACE_LIMIT))
raise e
end
# rubocop:enable Naming/MethodParameterName
end


@@ -186,9 +186,9 @@ class SpamCheck
def matching_status_ids
if nilsimsa?
other_digests.select { |record| record.start_with?('nilsimsa') && nilsimsa_compare_value(digest, record.split(':')[1]) >= NILSIMSA_COMPARE_THRESHOLD }.map { |record| record.split(':')[2] }.compact
other_digests.select { |record| record.start_with?('nilsimsa') && nilsimsa_compare_value(digest, record.split(':')[1]) >= NILSIMSA_COMPARE_THRESHOLD }.filter_map { |record| record.split(':')[2] }
else
other_digests.select { |record| record.start_with?('md5') && record.split(':')[1] == digest }.map { |record| record.split(':')[2] }.compact
other_digests.select { |record| record.start_with?('md5') && record.split(':')[1] == digest }.filter_map { |record| record.split(':')[2] }
end
end


@@ -0,0 +1,52 @@
# frozen_string_literal: true
class StatusReachFinder
def initialize(status)
@status = status
end
def inboxes
Account.where(id: reached_account_ids).inboxes
end
private
def reached_account_ids
[
replied_to_account_id,
reblog_of_account_id,
mentioned_account_ids,
reblogs_account_ids,
favourites_account_ids,
replies_account_ids,
].tap do |arr|
arr.flatten!
arr.compact!
arr.uniq!
end
end
def replied_to_account_id
@status.in_reply_to_account_id
end
def reblog_of_account_id
@status.reblog.account_id if @status.reblog?
end
def mentioned_account_ids
@status.mentions.pluck(:account_id)
end
def reblogs_account_ids
@status.reblogs.pluck(:account_id)
end
def favourites_account_ids
@status.favourites.pluck(:account_id)
end
def replies_account_ids
@status.replies.pluck(:account_id)
end
end

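A usage sketch of the new class (the ID is made up); it approximates every remote inbox that should hear about a change to the status:

    status = Status.find(123)
    StatusReachFinder.new(status).inboxes
    # => ["https://remote.example/inbox", ...]
    # Unique inbox URLs for the accounts that were mentioned in, replied to,
    # reblogged or favourited the status, plus the accounts it replied to or
    # reblogged — e.g. the fan-out targets when the status is deleted.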

@@ -27,6 +27,7 @@ class UserSettingsDecorator
user.settings['display_media'] = display_media_preference if change?('setting_display_media')
user.settings['expand_spoilers'] = expand_spoilers_preference if change?('setting_expand_spoilers')
user.settings['reduce_motion'] = reduce_motion_preference if change?('setting_reduce_motion')
user.settings['disable_swiping'] = disable_swiping_preference if change?('setting_disable_swiping')
user.settings['system_font_ui'] = system_font_ui_preference if change?('setting_system_font_ui')
user.settings['noindex'] = noindex_preference if change?('setting_noindex')
user.settings['theme'] = theme_preference if change?('setting_theme')
@@ -88,6 +89,10 @@ class UserSettingsDecorator
boolean_cast_setting 'setting_reduce_motion'
end
def disable_swiping_preference
boolean_cast_setting 'setting_disable_swiping'
end
def noindex_preference
boolean_cast_setting 'setting_noindex'
end

app/lib/webfinger.rb (new file, +97)

@@ -0,0 +1,97 @@
# frozen_string_literal: true
class Webfinger
class Error < StandardError; end
class GoneError < Error; end
class RedirectError < StandardError; end
class Response
def initialize(body)
@json = Oj.load(body, mode: :strict)
end
def subject
@json['subject']
end
def link(rel, attribute)
links.dig(rel, attribute)
end
private
def links
@links ||= @json['links'].map { |link| [link['rel'], link] }.to_h
end
end
def initialize(uri)
_, @domain = uri.split('@')
raise ArgumentError, 'Webfinger requested for local account' if @domain.nil?
@uri = uri
end
def perform
Response.new(body_from_webfinger)
rescue Oj::ParseError
raise Webfinger::Error, "Invalid JSON in response for #{@uri}"
rescue Addressable::URI::InvalidURIError
raise Webfinger::Error, "Invalid URI for #{@uri}"
end
private
def body_from_webfinger(url = standard_url, use_fallback = true)
webfinger_request(url).perform do |res|
if res.code == 200
res.body_with_limit
elsif res.code == 404 && use_fallback
body_from_host_meta
elsif res.code == 410
raise Webfinger::GoneError, "#{@uri} is gone from the server"
else
raise Webfinger::Error, "Request for #{@uri} returned HTTP #{res.code}"
end
end
end
def body_from_host_meta
host_meta_request.perform do |res|
if res.code == 200
body_from_webfinger(url_from_template(res.body_with_limit), false)
else
raise Webfinger::Error, "Request for #{@uri} returned HTTP #{res.code}"
end
end
end
def url_from_template(str)
link = Nokogiri::XML(str).at_xpath('//xmlns:Link[@rel="lrdd"]')
if link.present?
link['template'].gsub('{uri}', @uri)
else
raise Webfinger::Error, "Request for #{@uri} returned host-meta without link to Webfinger"
end
rescue Nokogiri::XML::XPath::SyntaxError
raise Webfinger::Error, "Invalid XML encountered in host-meta for #{@uri}"
end
def host_meta_request
Request.new(:get, host_meta_url).add_headers('Accept' => 'application/xrd+xml, application/xml, text/xml')
end
def webfinger_request(url)
Request.new(:get, url).add_headers('Accept' => 'application/jrd+json, application/json')
end
def standard_url
"https://#{@domain}/.well-known/webfinger?resource=#{@uri}"
end
def host_meta_url
"https://#{@domain}/.well-known/host-meta"
end
end
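
A usage sketch of the new client with a hypothetical address. perform hits the standard RFC 7033 endpoint and, per body_from_webfinger above, falls back to host-meta discovery on a 404:

    webfinger = Webfinger.new('acct:alice@remote.example')
    response  = webfinger.perform
    # GET https://remote.example/.well-known/webfinger?resource=acct:alice@remote.example

    response.subject               # => "acct:alice@remote.example"
    response.link('self', 'href')  # => the actor document URL, when the server lists one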