Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class TagFollowingService
def initialize(user=nil)
@user = user
end
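# Normalizes the tag name, finds or creates the tag, and adds a TagFollowing for @user.
# Raises ArgumentError for blank names and DuplicateTag if the tag is already followed.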
def create(name)
name_normalized = ActsAsTaggableOn::Tag.normalize(name)
raise ArgumentError, "Name field null or empty" if name_normalized.blank?
tag = ActsAsTaggableOn::Tag.find_or_create_by(name: name_normalized)
raise DuplicateTag if @user.tag_followings.exists?(tag_id: tag.id)
tag_following = @user.tag_followings.new(tag_id: tag.id)
raise "Can't process tag entity" unless tag_following.save
tag
end
def find(name)
name_normalized = ActsAsTaggableOn::Tag.normalize(name)
ActsAsTaggableOn::Tag.find_or_create_by(name: name_normalized)
end
def destroy(id)
tag_following = @user.tag_followings.find_by!(tag_id: id)
tag_following.destroy
end
def destroy_by_name(name)
name_normalized = ActsAsTaggableOn::Tag.normalize(name)
followed_tag = @user.followed_tags.find_by!(name: name_normalized)
destroy(followed_tag.id)
end
def index
@user.followed_tags
end
class DuplicateTag < RuntimeError; end
end
```

```ruby
# frozen_string_literal: true
describe TagFollowingService do
before do
add_tag("tag1", alice)
add_tag("tag2", alice)
end
describe "#create" do
it "Creates new tag with valid name" do
name = SecureRandom.uuid
expect(alice.followed_tags.find_by(name: name)).to be_nil
tag_data = tag_following_service(alice).create(name)
expect(alice.followed_tags.find_by(name: name).name).to eq(name)
expect(tag_data["name"]).to eq(name)
expect(tag_data["id"]).to be_truthy
expect(tag_data["taggings_count"]).to eq(0)
end
it "Throws error with empty tag" do
expect { tag_following_service(alice).create(nil) }.to raise_error(ArgumentError)
expect { tag_following_service(alice).create("") }.to raise_error(ArgumentError)
expect { tag_following_service(alice).create("#") }.to raise_error(ArgumentError)
expect { tag_following_service(alice).create(" ") }.to raise_error(ArgumentError)
end
it "throws an error when trying to follow an already followed tag" do
name = SecureRandom.uuid
tag_following_service.create(name)
expect {
tag_following_service.create(name)
}.to raise_error TagFollowingService::DuplicateTag
end
end
describe "#destroy" do
it "Deletes tag with valid name" do
name = SecureRandom.uuid
add_tag(name, alice)
expect(alice.followed_tags.find_by(name: name).name).to eq(name)
expect(tag_following_service(alice).destroy_by_name(name)).to be_truthy
expect(alice.followed_tags.find_by(name: name)).to be_nil
end
it "Deletes tag with id" do
name = SecureRandom.uuid
new_tag = add_tag(name, alice)
expect(alice.followed_tags.find_by(name: name).name).to eq(name)
expect(tag_following_service(alice).destroy(new_tag.tag_id)).to be_truthy
expect(alice.followed_tags.find_by(name: name)).to be_nil
end
it "Does nothing with tag that isn't already followed" do
original_length = alice.followed_tags.length
expect {
tag_following_service(alice).destroy_by_name(SecureRandom.uuid)
}.to raise_error ActiveRecord::RecordNotFound
expect {
tag_following_service(alice).destroy(-1)
}.to raise_error ActiveRecord::RecordNotFound
expect(alice.followed_tags.length).to eq(original_length)
end
it "Does nothing with empty tag name" do
original_length = alice.followed_tags.length
expect {
tag_following_service(alice).destroy_by_name("")
}.to raise_error ActiveRecord::RecordNotFound
expect(alice.followed_tags.length).to eq(original_length)
end
end
describe "#index" do
it "Returns user's list of tags" do
tags = tag_following_service(alice).index
expect(tags.length).to eq(alice.followed_tags.length)
end
end
private
def tag_following_service(user=alice)
TagFollowingService.new(user)
end
def add_tag(name, user)
tag = ActsAsTaggableOn::Tag.find_or_create_by(name: name)
tag_following = user.tag_followings.new(tag_id: tag.id)
tag_following.save
tag_following
end
end
```

Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class AspectsMembershipService
def initialize(user=nil)
@user = user
end
def create(aspect_id, person_id)
person = Person.find(person_id)
aspect = @user.aspects.where(id: aspect_id).first
raise ActiveRecord::RecordNotFound unless person.present? && aspect.present?
contact = @user.share_with(person, aspect)
raise I18n.t("aspects.add_to_aspect.failure") if contact.blank?
AspectMembership.where(contact_id: contact.id, aspect_id: aspect.id).first
end
def destroy_by_ids(aspect_id, contact_id)
aspect = @user.aspects.where(id: aspect_id).first
contact = @user.contacts.where(person_id: contact_id).first
destroy(aspect, contact)
end
def destroy_by_membership_id(membership_id)
aspect = @user.aspects.joins(:aspect_memberships).where(aspect_memberships: {id: membership_id}).first
contact = @user.contacts.joins(:aspect_memberships).where(aspect_memberships: {id: membership_id}).first
destroy(aspect, contact)
end
def contacts_in_aspect(aspect_id)
order = [Arel.sql("contact_id IS NOT NULL DESC"), "profiles.first_name ASC", "profiles.last_name ASC",
"profiles.diaspora_handle ASC"]
@user.aspects.find(aspect_id) # to provide better error code if aspect isn't correct
contacts = @user.contacts.arel_table
aspect_memberships = AspectMembership.arel_table
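# Manual Arel join: restrict the user's contacts to those that have an
# aspect membership in the requested aspect.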
@user.contacts.joins(
contacts.join(aspect_memberships).on(
aspect_memberships[:aspect_id].eq(aspect_id).and(
aspect_memberships[:contact_id].eq(contacts[:id])
)
).join_sources
).includes(person: :profile).order(order)
end
def all_contacts
order = ["profiles.first_name ASC", "profiles.last_name ASC",
"profiles.diaspora_handle ASC"]
@user.contacts.includes(person: :profile).order(order)
end
private
def destroy(aspect, contact)
raise ActiveRecord::RecordNotFound unless aspect.present? && contact.present?
raise Diaspora::NotMine unless @user.mine?(aspect) && @user.mine?(contact)
membership = contact.aspect_memberships.where(aspect_id: aspect.id).first
raise ActiveRecord::RecordNotFound if membership.blank?
success = membership.destroy
{success: success, membership: membership}
end
end
```

```ruby
# frozen_string_literal: true
describe AspectsMembershipService do
before do
@alice_aspect1 = alice.aspects.first
@alice_aspect2 = alice.aspects.create(name: "another aspect")
@bob_aspect1 = bob.aspects.first
end
describe "#create" do
context "with valid IDs" do
it "succeeds" do
membership = aspects_membership_service.create(@alice_aspect2.id, bob.person.id)
expect(membership[:aspect_id]).to eq(@alice_aspect2.id)
expect(@alice_aspect2.contacts.find_by(person_id: bob.person.id)).not_to be_nil
end
it "fails if already in aspect" do
aspects_membership_service.create(@alice_aspect2.id, bob.person.id)
expect {
aspects_membership_service.create(@alice_aspect2.id, bob.person.id)
}.to raise_error ActiveRecord::RecordNotUnique
end
end
context "with invalid IDs" do
it "fails with invalid User ID" do
expect {
aspects_membership_service.create(@alice_aspect2.id, -1)
}.to raise_error ActiveRecord::RecordNotFound
end
it "fails with invalid Aspect ID" do
expect {
aspects_membership_service.create(-1, bob.person.id)
}.to raise_error ActiveRecord::RecordNotFound
end
it "fails with aspect ID that isn't user's" do
expect {
aspects_membership_service.create(@bob_aspect1.id, eve.person.id)
}.to raise_error ActiveRecord::RecordNotFound
end
end
end
describe "#destroy" do
before do
@membership = aspects_membership_service.create(@alice_aspect2.id, bob.person.id)
end
context "with aspect/user valid IDs" do
it "succeeds if in aspect" do
aspects_membership_service.destroy_by_ids(@alice_aspect2.id, bob.person.id)
expect(@alice_aspect2.contacts.find_by(person_id: bob.person.id)).to be_nil
end
it "fails if not in aspect" do
expect {
aspects_membership_service.destroy_by_ids(@alice_aspect2.id, eve.person.id)
}.to raise_error ActiveRecord::RecordNotFound
end
end
context "with a membership ID" do
it "succeeds if their membership" do
aspects_membership_service.destroy_by_membership_id(@membership.id)
expect(@alice_aspect2.contacts.find_by(person_id: bob.person.id)).to be_nil
end
it "fails if not their membership" do
expect {
aspects_membership_service(eve).destroy_by_membership_id(@membership.id)
}.to raise_error ActiveRecord::RecordNotFound
end
it "fails if invalid membership ID" do
expect {
aspects_membership_service(eve).destroy_by_membership_id(-1)
}.to raise_error ActiveRecord::RecordNotFound
end
end
context "with invalid IDs" do
it "fails with invalid User ID" do
expect {
aspects_membership_service.destroy_by_ids(@alice_aspect2.id, -1)
}.to raise_error ActiveRecord::RecordNotFound
end
it "fails with invalid Aspect ID" do
expect {
aspects_membership_service.destroy_by_ids(-1, eve.person.id)
}.to raise_error ActiveRecord::RecordNotFound
end
it "fails with aspect ID that isn't user's" do
expect {
aspects_membership_service(eve).destroy_by_ids(@alice_aspect2.id, bob.person.id)
}.to raise_error ActiveRecord::RecordNotFound
end
end
end
describe "#list" do
before do
aspects_membership_service.create(@alice_aspect2.id, bob.person.id)
aspects_membership_service.create(@alice_aspect2.id, eve.person.id)
@alice_aspect3 = alice.aspects.create(name: "empty aspect")
end
context "with valid aspect ID" do
it "returns users in full aspect" do
contacts = aspects_membership_service.contacts_in_aspect(@alice_aspect2.id)
expect(contacts.length).to eq(2)
expect(contacts.map {|c| c.person.guid }.sort).to eq([bob.person.guid, eve.person.guid].sort)
end
it "returns empty array in empty aspect" do
contacts = aspects_membership_service.contacts_in_aspect(@alice_aspect3.id)
expect(contacts.length).to eq(0)
end
end
context "with invalid aspect ID" do
it "fails" do
expect {
aspects_membership_service.contacts_in_aspect(-1)
}.to raise_error ActiveRecord::RecordNotFound
end
end
end
describe "#all_contacts" do
before do
aspects_membership_service.create(@alice_aspect2.id, bob.person.id)
aspects_membership_service.create(@alice_aspect2.id, eve.person.id)
@alice_aspect3 = alice.aspects.create(name: "empty aspect")
end
it "returns all user's contacts" do
contacts = aspects_membership_service.all_contacts
expect(contacts.length).to eq(2)
end
end
def aspects_membership_service(user=alice)
AspectsMembershipService.new(user)
end
end
```

Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class LikeService
def initialize(user=nil)
@user = user
end
def create_for_post(post_id)
post = post_service.find!(post_id)
user.like!(post)
end
def create_for_comment(comment_id)
comment = comment_service.find!(comment_id)
post_service.find!(comment.commentable_id) # checks implicit for visible posts
user.like_comment!(comment)
end
def destroy(like_id)
like = Like.find(like_id)
if user.owns?(like)
user.retract(like)
true
else
false
end
end
def find_for_post(post_id)
likes = post_service.find!(post_id).likes
user ? likes.order(Arel.sql("author_id = #{user.person.id} DESC")) : likes
end
def find_for_comment(comment_id)
comment = comment_service.find!(comment_id)
post_service.find!(comment.post.id) # checks implicit for visible posts
likes = comment.likes
user ? likes.order(Arel.sql("author_id = #{user.person.id} DESC")) : likes
end
def unlike_post(post_id)
likes = post_service.find!(post_id).likes
likes = likes.order(Arel.sql("author_id = #{user.person.id} DESC"))
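# The user's own like (if any) sorts first, so likes[0] is theirs when present.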
if !likes.empty? && user.owns?(likes[0])
user.retract(likes[0])
true
else
false
end
end
def unlike_comment(comment_id)
likes = comment_service.find!(comment_id).likes
likes = likes.order(Arel.sql("author_id = #{user.person.id} DESC"))
if !likes.empty? && user.owns?(likes[0])
user.retract(likes[0])
true
else
false
end
end
private
attr_reader :user
def post_service
@post_service ||= PostService.new(user)
end
def comment_service
@comment_service ||= CommentService.new(user)
end
end
```

```ruby
# frozen_string_literal: true
describe LikeService do
let(:post) { alice.post(:status_message, text: "hello", to: alice.aspects.first) }
let(:alice_comment) { CommentService.new(alice).create(post.id, "This is a wonderful post") }
let(:bobs_comment) { CommentService.new(bob).create(post.id, "My post was better than yours") }
describe "#create_for_post" do
it "creates a like on my own post" do
expect {
LikeService.new(alice).create_for_post(post.id)
}.not_to raise_error
end
it "creates a like on a post of a contact" do
expect {
LikeService.new(bob).create_for_post(post.id)
}.not_to raise_error
end
it "attaches the like to the post" do
like = LikeService.new(alice).create_for_post(post.id)
expect(post.likes.first.id).to eq(like.id)
end
it "fails if the post does not exist" do
expect {
LikeService.new(bob).create_for_post("unknown id")
}.to raise_error ActiveRecord::RecordNotFound
end
it "fails if the user can't see the post" do
expect {
LikeService.new(eve).create_for_post(post.id)
}.to raise_error ActiveRecord::RecordNotFound
end
it "fails if the user already liked the post" do
LikeService.new(alice).create_for_post(post.id)
expect {
LikeService.new(alice).create_for_post(post.id)
}.to raise_error ActiveRecord::RecordInvalid
end
end
describe "#create_for_comment" do
it "creates a like on a posts comment" do
expect {
LikeService.new(alice).create_for_comment(alice_comment.id)
}.not_to raise_error
end
it "creates a like on someone else comment" do
expect {
LikeService.new(alice).create_for_comment(bobs_comment.id)
}.not_to raise_error
end
it "attaches the like to the comment" do
like = LikeService.new(alice).create_for_comment(bobs_comment.id)
expect(bobs_comment.likes.first.id).to eq(like.id)
end
it "fails if comment does not exist" do
expect {
LikeService.new(alice).create_for_comment("unknown_id")
}.to raise_error ActiveRecord::RecordNotFound
end
it "fails if user cant see post and its comments" do
expect {
LikeService.new(eve).create_for_comment(bobs_comment.id)
}.to raise_error ActiveRecord::RecordNotFound
end
it "fails if user already liked the comment" do
LikeService.new(alice).create_for_comment(bobs_comment.id)
expect {
LikeService.new(alice).create_for_comment(bobs_comment.id)
}.to raise_error ActiveRecord::RecordInvalid
end
end
describe "#destroy" do
context "for post like" do
let(:like) { LikeService.new(bob).create_for_post(post.id) }
it "lets the user destroy their own like" do
result = LikeService.new(bob).destroy(like.id)
expect(result).to be_truthy
end
it "doesn't let the parent author destroy others likes" do
result = LikeService.new(alice).destroy(like.id)
expect(result).to be_falsey
end
it "doesn't let someone destroy others likes" do
result = LikeService.new(eve).destroy(like.id)
expect(result).to be_falsey
end
it "fails if the like doesn't exist" do
expect {
LikeService.new(bob).destroy("unknown id")
}.to raise_error ActiveRecord::RecordNotFound
end
end
context "for comment like" do
let(:like) { LikeService.new(bob).create_for_comment(alice_comment.id) }
it "let the user destroy its own comment like" do
result = LikeService.new(bob).destroy(like.id)
expect(result).to be_truthy
end
it "doesn't let the parent author destroy other comment likes" do
result = LikeService.new(alice).destroy(like.id)
expect(result).to be_falsey
end
it "fails if the like doesn't exist" do
expect {
LikeService.new(alice).destroy("unknown id")
}.to raise_error ActiveRecord::RecordNotFound
end
end
end
describe "#find_for_post" do
context "with user" do
it "returns likes for a public post" do
post = alice.post(:status_message, text: "hello", public: true)
like = LikeService.new(alice).create_for_post(post.id)
expect(LikeService.new(eve).find_for_post(post.id)).to include(like)
end
it "returns likes for a visible private post" do
like = LikeService.new(alice).create_for_post(post.id)
expect(LikeService.new(bob).find_for_post(post.id)).to include(like)
end
it "doesn't return likes for a private post the user can not see" do
LikeService.new(alice).create_for_post(post.id)
expect {
LikeService.new(eve).find_for_post(post.id)
}.to raise_error ActiveRecord::RecordNotFound
end
it "returns the user's like first" do
post = alice.post(:status_message, text: "hello", public: true)
[alice, bob, eve].map {|user| LikeService.new(user).create_for_post(post.id) }
[alice, bob, eve].each do |user|
expect(
LikeService.new(user).find_for_post(post.id).first.author.id
).to be user.person.id
end
end
end
context "without user" do
it "returns likes for a public post" do
post = alice.post(:status_message, text: "hello", public: true)
like = LikeService.new(alice).create_for_post(post.id)
expect(LikeService.new.find_for_post(post.id)).to include(like)
end
it "doesn't return likes a for private post" do
LikeService.new(alice).create_for_post(post.id)
expect {
LikeService.new.find_for_post(post.id)
}.to raise_error Diaspora::NonPublic
end
end
it "returns all likes of a post" do
post = alice.post(:status_message, text: "hello", public: true)
likes = [alice, bob, eve].map {|user| LikeService.new(user).create_for_post(post.id) }
expect(LikeService.new.find_for_post(post.id)).to match_array(likes)
end
end
describe "#find_for_comment" do
context "with user" do
it "returns likes for a public post comment" do
post = alice.post(:status_message, text: "hello", public: true)
comment = CommentService.new(bob).create(post.id, "Hello comment")
like = LikeService.new(alice).create_for_comment(comment.id)
expect(LikeService.new(eve).find_for_comment(comment.id)).to include(like)
end
it "returns likes for visible private post comments" do
comment = CommentService.new(bob).create(post.id, "Hello comment")
like = LikeService.new(alice).create_for_comment(comment.id)
expect(LikeService.new(bob).find_for_comment(comment.id)).to include(like)
end
it "doesn't return likes for a posts comment the user can not see" do
expect {
LikeService.new(eve).find_for_comment(alice_comment.id)
}.to raise_error ActiveRecord::RecordNotFound
end
it "returns the user's like first" do
post = alice.post(:status_message, text: "hello", public: true)
comment = CommentService.new(alice).create(post.id, "I like my own post")
[alice, bob, eve].map {|user| LikeService.new(user).create_for_comment(comment.id) }
[alice, bob, eve].each do |user|
expect(
LikeService.new(user).find_for_comment(comment.id).first.author.id
).to be user.person.id
end
end
end
context "without user" do
it "returns likes for a comment on a public post" do
post = alice.post(:status_message, text: "hello", public: true)
comment = CommentService.new(bob).create(post.id, "I like my own post")
like = LikeService.new(alice).create_for_comment(comment.id)
expect(
LikeService.new.find_for_comment(comment.id)
).to include(like)
end
it "doesn't return likes for a private post comment" do
LikeService.new(alice).create_for_comment(alice_comment.id)
expect {
LikeService.new.find_for_comment(alice_comment.id)
}.to raise_error Diaspora::NonPublic
end
end
end
describe "#unlike_post" do
before do
LikeService.new(alice).create_for_post(post.id)
end
it "removes the like to the post" do
LikeService.new(alice).unlike_post(post.id)
expect(post.likes.length).to eq(0)
end
end
describe "#unlike_comment" do
it "removes the like for a comment" do
comment = CommentService.new(alice).create(post.id, "I like my own post")
LikeService.new(alice).create_for_comment(comment.id)
expect(comment.likes.length).to eq(1)
LikeService.new(alice).unlike_comment(comment.id)
comment = CommentService.new(alice).find!(comment.id)
expect(comment.likes.length).to eq(0)
end
end
end
```

Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class StatusMessageCreationService
include Rails.application.routes.url_helpers
def initialize(user)
@user = user
end
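# Builds the status message, attaches location/poll/photos, saves it,
# adds it to the author's streams (unless public) and dispatches it.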
def create(params)
validate_content(params)
build_status_message(params).tap do |status_message|
load_aspects(params[:aspect_ids]) unless status_message.public?
add_attachments(status_message, params)
status_message.save
process(status_message, params[:services])
end
end
private
attr_reader :user, :aspects
def validate_content(params)
raise MissingContent unless params[:status_message][:text].present? || params[:photos].present?
end
def build_status_message(params)
public = params[:public] || false
user.build_post(:status_message, params[:status_message].merge(public: public))
end
def add_attachments(status_message, params)
add_location(status_message, params[:location_address], params[:location_coords])
add_poll(status_message, params)
add_photos(status_message, params[:photos])
end
def add_location(status_message, address, coordinates)
status_message.build_location(address: address, coordinates: coordinates) if address.present?
end
def add_poll(status_message, params)
if params[:poll_question].present?
status_message.build_poll(question: params[:poll_question])
[*params[:poll_answers]].each do |poll_answer|
answer = status_message.poll.poll_answers.build(answer: poll_answer)
answer.poll = status_message.poll
end
end
end
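# Attached photos take on the post's visibility and are no longer pending.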
def add_photos(status_message, photos)
if photos.present?
status_message.photos << Photo.where(id: photos, author_id: status_message.author_id)
status_message.photos.each do |photo|
photo.public = status_message.public
photo.pending = false
end
end
end
def load_aspects(aspect_ids)
@aspects = user.aspects_from_ids(aspect_ids)
raise BadAspectsIDs if aspects.empty?
end
def process(status_message, services)
add_to_streams(status_message) unless status_message.public?
dispatch(status_message, services)
end
def add_to_streams(status_message)
user.add_to_streams(status_message, aspects)
status_message.photos.each {|photo| user.add_to_streams(photo, aspects) }
end
def dispatch(status_message, services)
receiving_services = services ? Service.titles(services) : []
status_message.filter_mentions # this is only required until changes from #6818 are deployed on every pod
user.dispatch_post(status_message,
url: short_post_url(status_message.guid, host: AppConfig.environment.url),
service_types: receiving_services)
end
class BadAspectsIDs < RuntimeError
end
class MissingContent < RuntimeError
end
end
```

```ruby
# frozen_string_literal: true
describe StatusMessageCreationService do
describe "#create" do
let(:aspect) { alice.aspects.first }
let(:text) { "I'm writing tests" }
let(:params) {
{
status_message: {text: text},
aspect_ids: [aspect.id.to_s]
}
}
it "returns the created StatusMessage" do
status_message = StatusMessageCreationService.new(alice).create(params)
expect(status_message).to_not be_nil
expect(status_message.text).to eq(text)
end
context "with aspect_ids" do
it "creates aspect_visibilities for the StatusMessages" do
alice.aspects.create(name: "another aspect")
status_message = StatusMessageCreationService.new(alice).create(params)
expect(status_message.aspect_visibilities.map(&:aspect)).to eq([aspect])
end
it "does not create aspect_visibilities if the post is public" do
status_message = StatusMessageCreationService.new(alice).create(params.merge(public: true))
expect(status_message.aspect_visibilities).to be_empty
end
it "raises exception if aspects_ids don't contain any applicable aspect identifiers" do
bad_ids = [Aspect.ids.max.next, bob.aspects.first.id].map(&:to_s)
expect {
StatusMessageCreationService.new(alice).create(params.merge(aspect_ids: bad_ids))
}.to remain(StatusMessage, :count).and raise_error(StatusMessageCreationService::BadAspectsIDs)
end
end
context "with public" do
it "it creates a private StatusMessage by default" do
status_message = StatusMessageCreationService.new(alice).create(params)
expect(status_message.public).to be_falsey
end
it "it creates a private StatusMessage" do
status_message = StatusMessageCreationService.new(alice).create(params.merge(public: false))
expect(status_message.public).to be_falsey
end
it "it creates a public StatusMessage" do
status_message = StatusMessageCreationService.new(alice).create(params.merge(public: true))
expect(status_message.public).to be_truthy
end
end
context "with location" do
it "it creates a location" do
location_params = {location_address: "somewhere", location_coords: "1,2"}
status_message = StatusMessageCreationService.new(alice).create(params.merge(location_params))
location = status_message.location
expect(location.address).to eq("somewhere")
expect(location.lat).to eq("1")
expect(location.lng).to eq("2")
end
it "does not add a location without location params" do
status_message = StatusMessageCreationService.new(alice).create(params)
expect(status_message.location).to be_nil
end
end
context "with poll" do
it "it creates a poll" do
poll_params = {poll_question: "something?", poll_answers: %w(yes no maybe)}
status_message = StatusMessageCreationService.new(alice).create(params.merge(poll_params))
poll = status_message.poll
expect(poll.question).to eq("something?")
expect(poll.poll_answers.size).to eq(3)
poll_answers = poll.poll_answers.map(&:answer)
expect(poll_answers).to include("yes")
expect(poll_answers).to include("no")
expect(poll_answers).to include("maybe")
end
it "does not add a poll without poll params" do
status_message = StatusMessageCreationService.new(alice).create(params)
expect(status_message.poll).to be_nil
end
end
context "with photos" do
let(:photo1) {
alice.build_post(:photo, pending: true, user_file: File.open(photo_fixture_name), to: aspect.id).tap(&:save!)
}
let(:photo2) {
alice.build_post(:photo, pending: true, user_file: File.open(photo_fixture_name), to: aspect.id).tap(&:save!)
}
let(:photo_ids) { [photo1.id.to_s, photo2.id.to_s] }
it "it attaches all photos" do
status_message = StatusMessageCreationService.new(alice).create(params.merge(photos: photo_ids))
photos = status_message.photos
expect(photos.size).to eq(2)
expect(photos.map(&:id).map(&:to_s)).to match_array(photo_ids)
end
it "does not attach photos without photos param" do
status_message = StatusMessageCreationService.new(alice).create(params)
expect(status_message.photos).to be_empty
end
context "with aspect_ids" do
it "it marks the photos as non-public if the post is non-public" do
status_message = StatusMessageCreationService.new(alice).create(params.merge(photos: photo_ids))
status_message.photos.each do |photo|
expect(photo.public).to be_falsey
end
end
it "creates aspect_visibilities for the Photo" do
alice.aspects.create(name: "another aspect")
status_message = StatusMessageCreationService.new(alice).create(params.merge(photos: photo_ids))
status_message.photos.each do |photo|
expect(photo.aspect_visibilities.map(&:aspect)).to eq([aspect])
end
end
it "does not create aspect_visibilities if the post is public" do
status_message = StatusMessageCreationService.new(alice).create(params.merge(photos: photo_ids, public: true))
status_message.photos.each do |photo|
expect(photo.aspect_visibilities).to be_empty
end
end
it "sets pending to false on any attached photos" do
status_message = StatusMessageCreationService.new(alice).create(params.merge(photos: photo_ids))
status_message.photos.each do |photo|
expect(photo.reload.pending).to be_falsey
end
end
end
context "with public" do
it "it marks the photos as public if the post is public" do
status_message = StatusMessageCreationService.new(alice).create(params.merge(photos: photo_ids, public: true))
status_message.photos.each do |photo|
expect(photo.public).to be_truthy
end
end
it "sets pending to false on any attached photos" do
status_message = StatusMessageCreationService.new(alice).create(params.merge(photos: photo_ids, public: true))
status_message.photos.each do |photo|
expect(photo.reload.pending).to be_falsey
end
end
end
end
context "dispatch" do
it "dispatches the StatusMessage" do
expect(alice).to receive(:dispatch_post).with(instance_of(StatusMessage), hash_including(service_types: []))
StatusMessageCreationService.new(alice).create(params)
end
it "dispatches the StatusMessage to services" do
expect(alice).to receive(:dispatch_post)
.with(instance_of(StatusMessage),
hash_including(service_types: array_including(%w[Services::Tumblr Services::Twitter])))
StatusMessageCreationService.new(alice).create(params.merge(services: %w[twitter tumblr]))
end
context "with mention" do
let(:text) { text_mentioning(eve) }
# this is only required until changes from #6818 are deployed on every pod
it "filters out mentions from text attribute" do
status_message = StatusMessageCreationService.new(alice).create(params)
expect(status_message.text).not_to include(eve.diaspora_handle)
end
end
end
end
end
```

Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class PostService
def initialize(user=nil)
@user = user
end
def find(id)
if user
user.find_visible_shareable_by_id(Post, id)
else
Post.find_by_id_and_public(id, true)
end
end
def find!(id_or_guid)
if user
find_non_public_by_guid_or_id_with_user!(id_or_guid)
else
find_public!(id_or_guid)
end
end
def present_json
PostPresenter.new(post, user)
end
def present_interactions_json
PostInteractionPresenter.new(post, user)
end
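# Marks comment/reshare/like and mention notifications for this post as read; no-op without a user.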
def mark_user_notifications(post_id)
return unless user
mark_comment_reshare_like_notifications_read(post_id)
mark_mention_notifications_read(post_id)
end
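# Retracts the post. With private_allowed=false only public posts can be looked up,
# and only the post's author may delete it.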
def destroy(post_id, private_allowed=true)
post = if private_allowed
find_non_public_by_guid_or_id_with_user!(post_id)
else
find_public!(post_id)
end
raise Diaspora::NotMine unless post.author == user.person
user.retract(post)
end
def mentionable_in_comment(post_id, query)
post = find!(post_id)
Person
.allowed_to_be_mentioned_in_a_comment_to(post)
.where.not(id: user.person_id)
.find_by_substring(query)
.sort_for_mention_suggestion(post, user)
.for_json
.limit(15)
end
private
attr_reader :user
def find_public!(id_or_guid)
Post.where(post_key(id_or_guid) => id_or_guid).first.tap do |post|
raise ActiveRecord::RecordNotFound, "could not find a post with id #{id_or_guid}" unless post
raise Diaspora::NonPublic unless post.public?
end
end
def find_non_public_by_guid_or_id_with_user!(id_or_guid)
user.find_visible_shareable_by_id(Post, id_or_guid, key: post_key(id_or_guid)).tap do |post|
raise ActiveRecord::RecordNotFound, "could not find a post with id #{id_or_guid} for user #{user.id}" unless post
end
end
# We can assume a guid is at least 16 characters long as we have guids set to hex(8) since we started using them.
def post_key(id_or_guid)
id_or_guid.to_s.length < 16 ? :id : :guid
end
def mark_comment_reshare_like_notifications_read(post_id)
Notification.where(recipient_id: user.id, target_type: "Post", target_id: post_id, unread: true)
.update_all(unread: false)
end
def mark_mention_notifications_read(post_id)
mention_ids = Mention.where(
mentions_container_id: post_id,
mentions_container_type: "Post",
person_id: user.person_id
).ids
mention_ids.concat(mentions_in_comments_for_post(post_id).pluck(:id))
Notification.where(recipient_id: user.id, target_type: "Mention", target_id: mention_ids, unread: true)
.update_all(unread: false) if mention_ids.any?
end
def mentions_in_comments_for_post(post_id)
Mention
.joins("INNER JOIN comments ON mentions_container_id = comments.id AND mentions_container_type = 'Comment'")
.where(comments: {commentable_id: post_id, commentable_type: "Post"})
end
end
```

```ruby
# frozen_string_literal: true
describe PostService do
let(:post) { alice.post(:status_message, text: "ohai", to: alice.aspects.first) }
let(:public) { alice.post(:status_message, text: "hey", public: true) }
describe "#find" do
context "with user" do
it "returns the post, if it is the users post" do
expect(PostService.new(alice).find(post.id)).to eq(post)
end
it "returns the post, if the user can see the it" do
expect(PostService.new(bob).find(post.id)).to eq(post)
end
it "returns the post, if it is public" do
expect(PostService.new(eve).find(public.id)).to eq(public)
end
it "does not return the post, if the post cannot be found" do
expect(PostService.new(alice).find("unknown")).to be_nil
end
it "does not return the post, if user cannot see the post" do
expect(PostService.new(eve).find(post.id)).to be_nil
end
end
context "without user" do
it "returns the post, if it is public" do
expect(PostService.new.find(public.id)).to eq(public)
end
it "does not return the post, if the post is private" do
expect(PostService.new.find(post.id)).to be_nil
end
it "does not return the post, if the post cannot be found" do
expect(PostService.new.find("unknown")).to be_nil
end
end
end
describe "#find!" do
context "with user" do
it "returns the post, if it is the users post" do
expect(PostService.new(alice).find!(post.id)).to eq(post)
end
it "works with guid" do
expect(PostService.new(alice).find!(post.guid)).to eq(post)
end
it "returns the post, if the user can see the it" do
expect(PostService.new(bob).find!(post.id)).to eq(post)
end
it "returns the post, if it is public" do
expect(PostService.new(eve).find!(public.id)).to eq(public)
end
it "RecordNotFound if the post cannot be found" do
expect {
PostService.new(alice).find!("unknown")
}.to raise_error ActiveRecord::RecordNotFound, "could not find a post with id unknown for user #{alice.id}"
end
it "RecordNotFound if user cannot see the post" do
expect {
PostService.new(eve).find!(post.id)
}.to raise_error ActiveRecord::RecordNotFound, "could not find a post with id #{post.id} for user #{eve.id}"
end
end
context "without user" do
it "returns the post, if it is public" do
expect(PostService.new.find!(public.id)).to eq(public)
end
it "works with guid" do
expect(PostService.new.find!(public.guid)).to eq(public)
end
it "NonPublic if the post is private" do
expect {
PostService.new.find!(post.id)
}.to raise_error Diaspora::NonPublic
end
it "RecordNotFound if the post cannot be found" do
expect {
PostService.new.find!("unknown")
}.to raise_error ActiveRecord::RecordNotFound, "could not find a post with id unknown"
end
end
context "id/guid switch" do
let(:public) { alice.post(:status_message, text: "ohai", public: true) }
it "assumes ids less than 16 chars are ids and not guids" do
post = Post.where(id: public.id)
expect(Post).to receive(:where).with(hash_including(id: "123456789012345")).and_return(post).at_least(:once)
PostService.new(alice).find!("123456789012345")
end
it "assumes ids more than (or equal to) 16 chars are actually guids" do
post = Post.where(guid: public.guid)
expect(Post).to receive(:where).with(hash_including(guid: "1234567890123456")).and_return(post).at_least(:once)
PostService.new(alice).find!("1234567890123456")
end
end
end
describe "#mark_user_notifications" do
let(:status_text) { text_mentioning(alice) }
it "marks a corresponding notifications as read" do
FactoryBot.create(:notification, recipient: alice, target: post, unread: true)
FactoryBot.create(:notification, recipient: alice, target: post, unread: true)
expect {
PostService.new(alice).mark_user_notifications(post.id)
}.to change(Notification.where(unread: true), :count).by(-2)
end
it "marks a corresponding mention notification as read" do
mention_post = bob.post(:status_message, text: status_text, public: true)
expect {
PostService.new(alice).mark_user_notifications(mention_post.id)
}.to change(Notification.where(unread: true), :count).by(-1)
end
it "marks a corresponding mention in comment notification as read" do
notification = FactoryBot.create(:notification_mentioned_in_comment)
status_message = notification.target.mentions_container.parent
user = notification.recipient
expect {
PostService.new(user).mark_user_notifications(status_message.id)
}.to change(Notification.where(unread: true), :count).by(-1)
end
it "does not change the update_at date/time for post notifications" do
notification = Timecop.travel(1.minute.ago) do
FactoryBot.create(:notification, recipient: alice, target: post, unread: true)
end
expect {
PostService.new(alice).mark_user_notifications(post.id)
}.not_to change { Notification.where(id: notification.id).pluck(:updated_at) }
end
it "does not change the update_at date/time for mention notifications" do
mention_post = Timecop.travel(1.minute.ago) do
bob.post(:status_message, text: status_text, public: true)
end
mention = mention_post.mentions.where(person_id: alice.person.id).first
expect {
PostService.new(alice).mark_user_notifications(post.id)
}.not_to change { Notification.where(target_type: "Mention", target_id: mention.id).pluck(:updated_at) }
end
it "does nothing without a user" do
expect_any_instance_of(PostService).not_to receive(:mark_comment_reshare_like_notifications_read).with(post.id)
expect_any_instance_of(PostService).not_to receive(:mark_mention_notifications_read).with(post.id)
PostService.new.mark_user_notifications(post.id)
end
end
describe "#destroy" do
it "let a user delete his message" do
PostService.new(alice).destroy(post.id)
expect(StatusMessage.find_by_id(post.id)).to be_nil
end
it "sends a retraction on delete" do
expect(alice).to receive(:retract).with(post)
PostService.new(alice).destroy(post.id)
end
it "won't delete private post if explicitly unallowed" do
expect {
PostService.new(alice).destroy(post.id, false)
}.to raise_error Diaspora::NonPublic
expect(StatusMessage.find_by(id: post.id)).not_to be_nil
end
it "will not let you destroy posts visible to you but that you do not own" do
expect {
PostService.new(bob).destroy(post.id)
}.to raise_error Diaspora::NotMine
expect(StatusMessage.find_by_id(post.id)).not_to be_nil
end
it "will not let you destroy posts that are not visible to you" do
expect {
PostService.new(eve).destroy(post.id)
}.to raise_error(ActiveRecord::RecordNotFound)
expect(StatusMessage.find_by_id(post.id)).not_to be_nil
end
end
describe "#mentionable_in_comment" do
describe "semi-integration test" do
let(:post_author_attributes) { {first_name: "Ro#{r_str}"} }
let(:post_author) { FactoryBot.create(:person, post_author_attributes) }
let(:current_user) { FactoryBot.create(:user_with_aspect) }
let(:post_service) { PostService.new(current_user) }
shared_context "with commenters and likers" do
# randomize ids of the created people so that the test doesn't pass just because of
# the id sequence matched against the expected ordering
let(:ids) { (1..4).map {|i| Person.maximum(:id) + i }.shuffle }
before do
# in case post_author hasn't been instantiated before this context, specify the id
# in order to avoid id conflict with the people generated here
post_author_attributes.merge!(id: ids.max + 1)
end
let!(:commenter1) {
FactoryBot.create(:person, id: ids.shift, first_name: "Ro1#{r_str}").tap {|person|
FactoryBot.create(:comment, author: person, post: post)
}
}
let!(:commenter2) {
FactoryBot.create(:person, id: ids.shift, first_name: "Ro2#{r_str}").tap {|person|
FactoryBot.create(:comment, author: person, post: post)
}
}
let!(:liker1) {
FactoryBot.create(:person, id: ids.shift, first_name: "Ro1#{r_str}").tap {|person|
FactoryBot.create(:like, author: person, target: post)
}
}
let!(:liker2) {
FactoryBot.create(:person, id: ids.shift, first_name: "Ro2#{r_str}").tap {|person|
FactoryBot.create(:like, author: person, target: post)
}
}
end
shared_context "with a current user's friend" do
let!(:current_users_friend) {
FactoryBot.create(:person).tap {|friend|
current_user.contacts.create!(
person: friend,
aspects: [current_user.aspects.first],
sharing: true,
receiving: true
)
}
}
end
context "with private post" do
let(:post) { FactoryBot.create(:status_message, text: "ohai", author: post_author) }
context "when the post doesn't have a visibility for the current user" do
it "doesn't find a post and raises an exception" do
expect {
post_service.mentionable_in_comment(post.id, "Ro")
}.to raise_error(ActiveRecord::RecordNotFound)
end
end
context "when the post has a visibility for the current user" do
before do
ShareVisibility.batch_import([current_user.id], post)
end
context "with commenters and likers" do
include_context "with commenters and likers"
it "returns mention suggestions in the correct order" do
expected_suggestions = [
post_author, commenter1, commenter2, liker1, liker2
]
expect(post_service.mentionable_in_comment(post.id, "Ro")).to eq(expected_suggestions)
end
end
context "with a current user's friend" do
include_context "with a current user's friend"
it "doesn't include a contact" do
expect(post_service.mentionable_in_comment(post.id, current_users_friend.first_name)).to be_empty
end
end
it "doesn't include a non contact" do
expect(post_service.mentionable_in_comment(post.id, eve.person.first_name)).to be_empty
end
end
end
context "with public post" do
let(:post) { FactoryBot.create(:status_message, text: "ohai", public: true, author: post_author) }
context "with commenters and likers and with a current user's friend" do
include_context "with commenters and likers"
include_context "with a current user's friend"
it "returns mention suggestions in the correct order" do
result = post_service.mentionable_in_comment(post.id, "Ro").to_a
expect(result.size).to be > 7
# participants: post author, comments, likers
expect(result[0..4]).to eq([post_author, commenter1, commenter2, liker1, liker2])
# contacts
expect(result[5]).to eq(current_users_friend)
# non-contacts
result[6..-1].each {|person|
expect(person.contacts.where(user_id: current_user.id)).to be_empty
expect(person.profile.first_name).to include("Ro")
}
end
it "doesn't include people with non-matching names" do
commenter = FactoryBot.create(:person, first_name: "RRR#{r_str}")
FactoryBot.create(:comment, author: commenter)
liker = FactoryBot.create(:person, first_name: "RRR#{r_str}")
FactoryBot.create(:like, author: liker)
friend = FactoryBot.create(:person, first_name: "RRR#{r_str}")
current_user.contacts.create!(
person: friend,
aspects: [current_user.aspects.first],
sharing: true,
receiving: true
)
result = post_service.mentionable_in_comment(post.id, "Ro")
expect(result).not_to include(commenter)
expect(result).not_to include(liker)
expect(result).not_to include(friend)
end
end
shared_examples "current user can't mention themself" do
before do
current_user.profile.update(first_name: "Ro#{r_str}")
end
it "doesn't include current user" do
expect(post_service.mentionable_in_comment(post.id, "Ro")).not_to include(current_user.person)
end
end
context "when current user is a post author" do
let(:post_author) { current_user.person }
include_examples "current user can't mention themself"
end
context "current user is a participant" do
before do
current_user.like!(post)
current_user.comment!(post, "hello")
end
include_examples "current user can't mention themself"
end
context "current user is a stranger matching a search pattern" do
include_examples "current user can't mention themself"
end
it "doesn't fail when the post author doesn't match the requested pattern" do
expect(post_service.mentionable_in_comment(post.id, "#{r_str}#{r_str}#{r_str}")).to be_empty
end
it "renders a commenter with multiple comments only once" do
person = FactoryBot.create(:person, first_name: "Ro2#{r_str}")
2.times { FactoryBot.create(:comment, author: person, post: post) }
expect(post_service.mentionable_in_comment(post.id, person.first_name).length).to eq(1)
end
end
end
describe "unit test" do
let(:post_service) { PostService.new(alice) }
before do
expect(post_service).to receive(:find!).and_return(post)
end
it "calls Person.allowed_to_be_mentioned_in_a_comment_to" do
expect(Person).to receive(:allowed_to_be_mentioned_in_a_comment_to).with(post).and_call_original
post_service.mentionable_in_comment(post.id, "whatever")
end
it "calls Person.find_by_substring" do
expect(Person).to receive(:find_by_substring).with("whatever").and_call_original
post_service.mentionable_in_comment(post.id, "whatever")
end
it "calls Person.sort_for_mention_suggestion" do
expect(Person).to receive(:sort_for_mention_suggestion).with(post, alice).and_call_original
post_service.mentionable_in_comment(post.id, "whatever")
end
it "calls Person.limit" do
16.times {
FactoryBot.create(:comment, author: FactoryBot.create(:person, first_name: "Ro#{r_str}"), post: post)
}
expect(post_service.mentionable_in_comment(post.id, "Ro").length).to eq(15)
end
it "contains a constraint on a current user" do
expect(Person).to receive(:allowed_to_be_mentioned_in_a_comment_to) { Person.all }
expect(Person).to receive(:find_by_substring) { Person.all }
expect(Person).to receive(:sort_for_mention_suggestion) { Person.all }
expect(post_service.mentionable_in_comment(post.id, alice.person.first_name))
.not_to include(alice.person)
end
end
end
end
```

Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class NotificationService
NOTIFICATION_TYPES = {
Comment => [Notifications::MentionedInComment, Notifications::CommentOnPost, Notifications::AlsoCommented],
Like => [Notifications::Liked, Notifications::LikedComment],
StatusMessage => [Notifications::MentionedInPost],
Conversation => [Notifications::PrivateMessage],
Message => [Notifications::PrivateMessage],
Reshare => [Notifications::Reshared],
Contact => [Notifications::StartedSharing]
}.freeze
NOTIFICATIONS_JSON_TYPES = {
"also_commented" => "Notifications::AlsoCommented",
"comment_on_post" => "Notifications::CommentOnPost",
"liked" => "Notifications::Liked",
"liked_comment" => "Notifications::LikedComment",
"mentioned" => "Notifications::MentionedInPost",
"mentioned_in_comment" => "Notifications::MentionedInComment",
"reshared" => "Notifications::Reshared",
"started_sharing" => "Notifications::StartedSharing",
"contacts_birthday" => "Notifications::ContactsBirthday"
}.freeze
NOTIFICATIONS_REVERSE_JSON_TYPES = NOTIFICATIONS_JSON_TYPES.invert.freeze
def initialize(user=nil)
@user = user
end
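# Returns the user's notifications, optionally only unread ones and/or only those
# created at or after only_after.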
def index(unread_only=nil, only_after=nil)
query_string = "recipient_id = ? "
query_string += "AND unread = true " if unread_only
where_clause = [query_string, @user.id]
if only_after
query_string += " AND created_at >= ?"
where_clause = [query_string, @user.id, only_after]
end
Notification.where(where_clause).includes(:target, actors: :profile)
end
def get_by_guid(guid)
Notification.where(recipient_id: @user.id, guid: guid).first
end
def update_status_by_guid(guid, is_read_status)
notification = get_by_guid(guid)
raise ActiveRecord::RecordNotFound unless notification
notification.set_read_state(is_read_status)
true
end
def notify(object, recipient_user_ids)
notification_types(object).each {|type| type.notify(object, recipient_user_ids) }
end
private
def notification_types(object)
NOTIFICATION_TYPES.fetch(object.class, [])
end
end
```

```ruby
# frozen_string_literal: true
describe NotificationService do
describe "notification interrelation" do
context "with mention in comment" do
let(:status_message) {
FactoryBot.create(:status_message, public: true, author: alice.person).tap {|status_message|
eve.comment!(status_message, "whatever")
}
}
let(:comment) {
FactoryBot.create(
:comment,
author: bob.person,
text: text_mentioning(alice, eve),
post: status_message
)
}
it "sends only mention notification" do
[alice, eve].each do |user|
expect(Workers::Mail::MentionedInComment).to receive(:perform_async).with(
user.id,
bob.person.id,
*comment.mentions.where(person: user.person).ids
)
end
expect {
NotificationService.new.notify(comment, [])
}.to change { Notification.where(recipient_id: alice).count }.by(1)
.and change { Notification.where(recipient_id: eve).count }.by(1)
[alice, eve].each do |user|
expect(
Notifications::MentionedInComment.where(target: comment.mentions, recipient_id: user.id)
).to exist
expect(
Notifications::CommentOnPost.where(target: comment.parent, recipient_id: user.id)
).not_to exist
expect(
Notifications::AlsoCommented.where(target: comment.parent, recipient_id: user.id)
).not_to exist
end
end
context "with \"mentioned in comment\" email turned off" do
before do
alice.user_preferences.create(email_type: "mentioned_in_comment")
eve.user_preferences.create(email_type: "mentioned_in_comment")
end
it "calls appropriate mail worker instead" do
expect(Workers::Mail::MentionedInComment).not_to receive(:perform_async)
expect(Workers::Mail::CommentOnPost).to receive(:perform_async).with(
alice.id,
bob.person.id,
*comment.mentions.where(person: alice.person).ids
)
expect(Workers::Mail::AlsoCommented).to receive(:perform_async).with(
eve.id,
bob.person.id,
*comment.mentions.where(person: eve.person).ids
)
NotificationService.new.notify(comment, [])
end
end
end
end
describe "query methods" do
before do
@post = alice.post(
:status_message,
text: "This is a status message",
public: true,
to: "all"
)
@notification = FactoryBot.create(:notification, recipient: alice, target: @post)
@service = NotificationService.new(alice)
end
describe "#index" do
it "gets all" do
notifications = @service.index
expect(notifications.length).to eq(1)
end
it "gets unread only" do
notifications = @service.index(true)
expect(notifications.length).to eq(1)
@notification.set_read_state(true)
notifications = @service.index(true)
expect(notifications.length).to eq(0)
end
it "gets only after" do
notifications = @service.index(nil, (Time.current - 1.day))
expect(notifications.length).to eq(1)
@notification.set_read_state(true)
notifications = @service.index(nil, (Time.current + 1.day))
expect(notifications.length).to eq(0)
end
it "combined filtering" do
notifications = @service.index(true, (Time.current - 1.day))
expect(notifications.length).to eq(1)
end
end
describe "#show" do
it "succeeds with valid GUID" do
notification = @service.get_by_guid(@notification.guid)
expect(notification).not_to be_nil
end
end
describe "#update" do
it "succeeds with valid GUID" do
expect(@service.update_status_by_guid(@notification.guid, true)).to be_truthy
expect(@notification.reload.unread).to eq(false)
expect(@service.update_status_by_guid(@notification.guid, false)).to be_truthy
expect(@notification.reload.unread).to eq(true)
end
end
end
end
```

Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Encapsulates logic of processing diaspora:// links
class DiasporaLinkService
attr_reader :type, :author, :guid
def initialize(link)
@link = link.dup
parse
end
def find_or_fetch_entity
if type && guid
entity_finder.find || fetch_entity
elsif author
find_or_fetch_person
end
end
private
attr_accessor :link
def fetch_entity
DiasporaFederation::Federation::Fetcher.fetch_public(author, type, guid)
entity_finder.find
rescue DiasporaFederation::Federation::Fetcher::NotFetchable
nil
end
def entity_finder
@entity_finder ||= Diaspora::EntityFinder.new(type, guid)
end
def find_or_fetch_person
Person.find_or_fetch_by_identifier(author)
rescue DiasporaFederation::Discovery::DiscoveryError
nil
end
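# gsub! returns nil when nothing was replaced, so each clause falls through via ||
# until the link is guaranteed to carry the diaspora:// scheme.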
def normalize
link.gsub!(%r{^web\+diaspora://}, "diaspora://") ||
link.gsub!(%r{^//}, "diaspora://") ||
%r{^diaspora://}.match(link) ||
self.link = "diaspora://#{link}"
end
def parse
normalize
match = DiasporaFederation::Federation::DiasporaUrlParser::DIASPORA_URL_REGEX.match(link)
if match
@author, @type, @guid = match.captures
else
@author = %r{^diaspora://(#{Validation::Rule::DiasporaId::DIASPORA_ID_REGEX})$}u.match(link)&.captures&.first
end
end
end
```

```ruby
# frozen_string_literal: true
describe DiasporaLinkService do
let(:service) { described_class.new(link) }
describe "#find_or_fetch_entity" do
context "when entity is known" do
let(:post) { FactoryBot.create(:status_message) }
let(:link) { "diaspora://#{post.author.diaspora_handle}/post/#{post.guid}" }
it "returns the entity" do
expect(service.find_or_fetch_entity).to eq(post)
end
end
context "when entity is unknown" do
let(:remote_person) { FactoryBot.create(:person) }
let(:guid) { "1234567890abcdef" }
let(:link) { "diaspora://#{remote_person.diaspora_handle}/post/#{guid}" }
it "fetches entity" do
expect(DiasporaFederation::Federation::Fetcher)
.to receive(:fetch_public)
.with(remote_person.diaspora_handle, "post", guid) {
FactoryBot.create(:status_message, author: remote_person, guid: guid)
}
entity = service.find_or_fetch_entity
expect(entity).to be_a(StatusMessage)
expect(entity.guid).to eq(guid)
expect(entity.author).to eq(remote_person)
end
it "returns nil when entity is non fetchable" do
expect(DiasporaFederation::Federation::Fetcher)
.to receive(:fetch_public)
.with(remote_person.diaspora_handle, "post", guid)
.and_raise(DiasporaFederation::Federation::Fetcher::NotFetchable)
expect(service.find_or_fetch_entity).to be_nil
end
end
context "with invalid links" do
it "returns nil when the link is invalid" do
service = described_class.new("web+diaspora://something_invalid")
expect(service.find_or_fetch_entity).to be_nil
end
it "returns nil when the author is valid, but rest of the link is invalid" do
service = described_class.new("web+diaspora://#{alice.diaspora_handle}/foo/bar")
expect(service.find_or_fetch_entity).to be_nil
end
end
context "with only a diaspora ID" do
let(:person) { FactoryBot.create(:person) }
let(:link) { "diaspora://#{person.diaspora_handle}" }
it "returns the person" do
expect(service.find_or_fetch_entity).to eq(person)
end
it "returns nil when person is non fetchable" do
expect(Person).to receive(:find_or_fetch_by_identifier)
.with(person.diaspora_handle).and_raise(DiasporaFederation::Discovery::DiscoveryError)
expect(service.find_or_fetch_entity).to be_nil
end
end
end
end
```

Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class PollParticipationService
def initialize(user)
@user = user
end
def vote(post_id, answer_id)
answer = PollAnswer.find(answer_id)
@user.participate_in_poll!(target(post_id), answer) if target(post_id)
end
private
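# Memoizes the post visible to @user; raises ActiveRecord::RecordNotFound otherwise.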
def target(post_id)
@target ||= @user.find_visible_shareable_by_id(Post, post_id) || raise(ActiveRecord::RecordNotFound.new)
end
end
```

```ruby
# frozen_string_literal: true
describe PollParticipationService do
let(:poll_post) { FactoryBot.create(:status_message_with_poll, public: true) }
let(:poll_answer) { poll_post.poll.poll_answers.first }
describe "voting on poll" do
it "succeeds" do
expect(poll_service.vote(poll_post.id, poll_answer.id)).not_to be_nil
end
it "fails to vote twice" do
expect(poll_service.vote(poll_post.id, poll_answer.id)).not_to be_nil
expect { poll_service.vote(poll_post.id, poll_answer.id) }.to raise_error(ActiveRecord::RecordInvalid)
end
it "fails with bad answer id" do
expect { poll_service.vote(poll_post.id, -2) }.to raise_error(ActiveRecord::RecordNotFound)
end
it "fails with bad post id" do
expect { poll_service.vote(-1, poll_answer.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
end
def poll_service(user=alice)
PollParticipationService.new(user)
end
end
```

Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ReshareService
def initialize(user=nil)
@user = user
end
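# Reshares the post; if the target is itself a reshare, the original root post is reshared instead.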
def create(post_id)
post = post_service.find!(post_id)
post = post.absolute_root if post.is_a? Reshare
user.reshare!(post)
end
def find_for_post(post_id)
reshares = post_service.find!(post_id).reshares
user ? reshares.order(Arel.sql("author_id = #{user.person.id} DESC")) : reshares
end
private
attr_reader :user
def post_service
@post_service ||= PostService.new(user)
end
end
```

```ruby
# frozen_string_literal: true
describe ReshareService do
let(:post) { alice.post(:status_message, text: "hello", public: true) }
describe "#create" do
it "doesn't create a reshare of my own post" do
expect {
ReshareService.new(alice).create(post.id)
}.to raise_error RuntimeError
end
it "creates a reshare of a post of a contact" do
expect {
ReshareService.new(bob).create(post.id)
}.not_to raise_error
end
it "attaches the reshare to the post" do
reshare = ReshareService.new(bob).create(post.id)
expect(post.reshares.first.id).to eq(reshare.id)
end
it "reshares the original post when called with a reshare" do
reshare = ReshareService.new(bob).create(post.id)
reshare2 = ReshareService.new(eve).create(reshare.id)
expect(post.reshares.map(&:id)).to include(reshare2.id)
end
it "fails if the post does not exist" do
expect {
ReshareService.new(bob).create("unknown id")
}.to raise_error ActiveRecord::RecordNotFound
end
it "fails if the post is not public" do
post = alice.post(:status_message, text: "hello", to: alice.aspects.first)
expect {
ReshareService.new(bob).create(post.id)
}.to raise_error ActiveRecord::RecordInvalid
end
it "fails if the user already reshared the post" do
ReshareService.new(bob).create(post.id)
expect {
ReshareService.new(bob).create(post.id)
}.to raise_error ActiveRecord::RecordInvalid
end
it "fails if the user already reshared the original post" do
reshare = ReshareService.new(bob).create(post.id)
expect {
ReshareService.new(bob).create(reshare.id)
}.to raise_error ActiveRecord::RecordInvalid
end
end
describe "#find_for_post" do
context "with user" do
it "returns reshares for a public post" do
reshare = ReshareService.new(bob).create(post.id)
expect(ReshareService.new(eve).find_for_post(post.id)).to include(reshare)
end
it "returns reshares for a visible private post" do
post = alice.post(:status_message, text: "hello", to: alice.aspects.first)
expect(ReshareService.new(bob).find_for_post(post.id)).to be_empty
end
it "doesn't return reshares for a private post the user can not see" do
post = alice.post(:status_message, text: "hello", to: alice.aspects.first)
expect {
ReshareService.new(eve).find_for_post(post.id)
}.to raise_error ActiveRecord::RecordNotFound
end
it "returns the user's reshare first" do
[bob, eve].map {|user| ReshareService.new(user).create(post.id) }
[bob, eve].each do |user|
expect(
ReshareService.new(user).find_for_post(post.id).first.author.id
).to be user.person.id
end
end
end
context "without user" do
it "returns reshares for a public post" do
reshare = ReshareService.new(bob).create(post.id)
expect(ReshareService.new.find_for_post(post.id)).to include(reshare)
end
it "doesn't return reshares a for private post" do
post = alice.post(:status_message, text: "hello", to: alice.aspects.first)
expect {
ReshareService.new.find_for_post(post.id)
}.to raise_error Diaspora::NonPublic
end
end
it "returns all reshares of a post" do
reshares = [bob, eve].map {|user| ReshareService.new(user).create(post.id) }
expect(ReshareService.new.find_for_post(post.id)).to match_array(reshares)
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class PhotoService
def initialize(user=nil, deny_raw_files=true)
@user = user
@deny_raw_files = deny_raw_files
end
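# Returns the photo with the given guid if it is owned by or visible to the user, otherwise nil.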
def visible_photo(photo_guid)
Photo.owned_or_visible_by_user(@user).where(guid: photo_guid).first
end
def create_from_params_and_file(base_params, uploaded_file)
photo_params = build_params(base_params)
raise RuntimeError if @deny_raw_files && !confirm_uploaded_file_settings(uploaded_file)
photo_params[:user_file] = uploaded_file
photo = @user.build_post(:photo, photo_params)
raise RuntimeError unless photo.save
send_messages(photo, photo_params)
update_profile_photo(photo) if photo_params[:set_profile_photo]
photo
end
private
def build_params(base_params)
photo_params = base_params.permit(:pending, :set_profile_photo, aspect_ids: [])
if base_params.permit(:aspect_ids)[:aspect_ids] == "all"
photo_params[:aspect_ids] = @user.aspects.map(&:id)
elsif photo_params[:aspect_ids].is_a?(Hash)
photo_params[:aspect_ids] = params[:photo][:aspect_ids].values
end
photo_params
end
def confirm_uploaded_file_settings(uploaded_file)
unless uploaded_file.is_a?(ActionDispatch::Http::UploadedFile) || uploaded_file.is_a?(Rack::Test::UploadedFile)
return false
end
return false if uploaded_file.original_filename.empty?
return false if uploaded_file.content_type.empty?
true
end
def send_messages(photo, photo_params)
send_to_streams(photo, photo_params) unless photo.pending && photo.public?
@user.dispatch_post(photo, to: photo_params[:aspect_ids]) unless photo.pending
end
def update_profile_photo(photo)
@user.update_profile(photo: photo)
end
def send_to_streams(photo, photo_params)
aspects = @user.aspects_from_ids(photo_params[:aspect_ids])
@user.add_to_streams(photo, aspects)
end
end
``` | # frozen_string_literal: true
describe PhotoService do
before do
alice_eve_spec = alice.aspects.create(name: "eve aspect")
alice.share_with(eve.person, alice_eve_spec)
alice_bob_spec = alice.aspects.create(name: "bob aspect")
alice.share_with(bob.person, alice_bob_spec)
@alice_eve_photo = alice.post(:photo, pending: false, user_file: File.open(photo_fixture_name),
to: alice_eve_spec.id)
@alice_bob_photo = alice.post(:photo, pending: false, user_file: File.open(photo_fixture_name),
to: alice_bob_spec.id)
@alice_public_photo = alice.post(:photo, pending: false, user_file: File.open(photo_fixture_name), public: true)
@bob_photo1 = bob.post(:photo, pending: true, user_file: File.open(photo_fixture_name), public: true)
end
describe "visible_photo" do
it "returns a user's photo" do
photo = photo_service.visible_photo(@bob_photo1.guid)
expect(photo.guid).to eq(@bob_photo1.guid)
end
it "returns another user's public photo" do
photo = photo_service.visible_photo(@alice_public_photo.guid)
expect(photo.guid).to eq(@alice_public_photo.guid)
end
it "returns another user's shared photo" do
photo = photo_service.visible_photo(@alice_bob_photo.guid)
expect(photo.guid).to eq(@alice_bob_photo.guid)
end
it "returns nil for other user's private photo" do
photo = photo_service.visible_photo(@alice_eve_photo.guid)
expect(photo).to be_nil
end
end
describe "create" do
before do
@image_file = Rack::Test::UploadedFile.new(Rails.root.join("spec", "fixtures", "button.png").to_s, "image/png")
end
context "succeeds" do
it "accepts a photo from a regular form uploaded file no parameters" do
params = ActionController::Parameters.new
photo = photo_service.create_from_params_and_file(params, @image_file)
expect(photo).not_to be_nil
expect(photo.pending?).to be_falsey
expect(photo.public?).to be_falsey
end
it "honors pending" do
params = ActionController::Parameters.new(pending: true)
photo = photo_service.create_from_params_and_file(params, @image_file)
expect(photo).not_to be_nil
expect(photo.pending?).to be_truthy
expect(photo.public?).to be_falsey
end
it "sets a user profile when requested" do
original_profile_pic = bob.person.profile.image_url
params = ActionController::Parameters.new(set_profile_photo: true)
photo = photo_service.create_from_params_and_file(params, @image_file)
expect(photo).not_to be_nil
expect(bob.reload.person.profile.image_url).not_to eq(original_profile_pic)
end
it "has correct aspects settings for limited shared" do
params = ActionController::Parameters.new(pending: false, aspect_ids: [bob.aspects.first.id])
photo = photo_service.create_from_params_and_file(params, @image_file)
expect(photo).not_to be_nil
expect(photo.pending?).to be_falsey
expect(photo.public?).to be_falsey
end
it "allow raw file if explicitly allowing" do
params = ActionController::Parameters.new
photo = photo_service(bob, false).create_from_params_and_file(params, uploaded_photo)
expect(photo).not_to be_nil
end
end
context "fails" do
before do
@params = ActionController::Parameters.new
end
it "fails if given a raw file" do
expect {
photo_service.create_from_params_and_file(@params, uploaded_photo)
}.to raise_error RuntimeError
end
it "file type isn't an image" do
text_file = Rack::Test::UploadedFile.new(Rails.root.join("README.md").to_s, "text/plain")
expect {
photo_service.create_from_params_and_file(@params, text_file)
}.to raise_error CarrierWave::IntegrityError
text_file = Rack::Test::UploadedFile.new(Rails.root.join("README.md").to_s, "image/png")
expect {
photo_service.create_from_params_and_file(@params, text_file)
}.to raise_error CarrierWave::IntegrityError
end
end
end
def photo_service(user=bob, deny_raw_files=true)
PhotoService.new(user, deny_raw_files)
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ConversationService
def initialize(user=nil)
@user = user
end
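# Illustrative calls based on the filters handled below, e.g. all_for_user(unread: true) or all_for_user(only_after: some_timestamp).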
def all_for_user(filter={})
conversation_filter = {}
unless filter[:only_after].nil?
conversation_filter = \
"conversations.created_at >= ?", filter[:only_after]
end
visibility_filter = if filter[:unread]
{
person_id: @user.person_id,
unread: 1
}
else
{person_id: @user.person_id}
end
Conversation.where(conversation_filter)
.joins(:conversation_visibilities)
.where(conversation_visibilities: visibility_filter)
.all
end
def build(subject, text, recipients)
person_ids = @user.contacts
.mutual
.where(person_id: recipients)
.pluck(:person_id)
opts = {
subject: subject,
message: {text: text},
participant_ids: person_ids
}
@user.build_conversation(opts)
end
def find!(conversation_guid)
conversation = Conversation.find_by!(guid: conversation_guid)
@user.conversations
.joins(:conversation_visibilities)
.where(conversation_visibilities: {
person_id: @user.person_id,
conversation_id: conversation.id
}).first!
end
def destroy!(conversation_guid)
conversation = find!(conversation_guid)
conversation.destroy!
end
def get_visibility(conversation_guid)
conversation = find!(conversation_guid)
ConversationVisibility.where(
person_id: @user.person.id,
conversation_id: conversation.id
).first!
end
end
``` | # frozen_string_literal: true
describe ConversationService do
before do
opts = {
subject: "conversation subject",
message: {text: "conversation text"},
participant_ids: [bob.person.id]
}
@conversation = alice.build_conversation(opts)
@conversation.created_at = 2.hours.ago
@conversation.save!
end
describe "#all_for_user" do
before do
opts = {
subject: "conversation subject 2",
message: {text: "conversation text 2"},
participant_ids: [bob.person.id]
}
@conversation = alice.build_conversation(opts)
@conversation.created_at = 1.hour.ago
@conversation.save!
@date = @conversation.created_at
opts = {
subject: "conversation subject 3",
message: {text: "conversation text 3"},
participant_ids: []
}
@conversation = bob.build_conversation(opts)
@conversation.save!
end
it "returns all conversations" do
expect(alice_conversation_service.all_for_user.length).to eq(2)
expect(bob_conversation_service.all_for_user.length).to eq(3)
end
it "returns all unread conversations" do
@conversation.conversation_visibilities[0].unread = true
@conversation.conversation_visibilities[0].save!
conversations = bob_conversation_service.all_for_user(unread: true)
expect(conversations.length).to eq(1)
end
it "returns conversation after a given date" do
conversations = bob_conversation_service.all_for_user(only_after: @date)
expect(conversations.length).to eq(2)
end
end
describe "#find!" do
it "returns the conversation, if it is the user's conversation" do
expect(alice_conversation_service.find!(@conversation.guid)).to eq(
@conversation
)
end
it "returns the conversation, if the user is recipient" do
expect(bob_conversation_service.find!(@conversation.guid)).to eq(
@conversation
)
end
it "raises RecordNotFound if the conversation cannot be found" do
expect {
alice_conversation_service.find!("unknown")
}.to raise_error ActiveRecord::RecordNotFound
end
it "raises RecordNotFound if the user is not recipient" do
expect {
eve_conversation_service.find!(@conversation.guid)
}.to raise_error ActiveRecord::RecordNotFound
end
end
describe "#build" do
it "creates the conversation for given user and recipients" do
new_conversation = alice_conversation_service.build(
"subject test",
"message test",
[bob.person.id]
)
expect(new_conversation.subject).to eq("subject test")
expect(new_conversation.author_id).to eq(alice.person.id)
expect(new_conversation.messages[0].text).to eq("message test")
expect(new_conversation.messages[0].author_id).to eq(alice.person.id)
expect(new_conversation.participants.length).to eq(2)
end
it "doesn't add recipients if they are not user contacts" do
new_conversation = alice_conversation_service.build(
"subject test",
"message test",
[bob.person.id, eve.person.id]
)
expect(new_conversation.participants.length).to eq(2)
expect(new_conversation.messages[0].text).to eq("message test")
expect(new_conversation.messages[0].author_id).to eq(alice.person.id)
end
end
describe "#get_visibility" do
it "returns visibility for current user" do
visibility = alice_conversation_service.get_visibility(
@conversation.guid
)
expect(visibility).to_not be_nil
end
it "raises RecordNotFound if the user has no visibility" do
expect {
eve_conversation_service.get_visibility(@conversation.id)
}.to raise_error ActiveRecord::RecordNotFound
end
end
describe "#destroy!" do
it "deletes the conversation, when it is the user conversation" do
alice_conversation_service.destroy!(@conversation.guid)
expect {
alice_conversation_service.find!(@conversation.guid)
}.to raise_error ActiveRecord::RecordNotFound
end
it "raises RecordNotFound if the conversation cannot be found" do
expect {
alice_conversation_service.destroy!("unknown")
}.to raise_error ActiveRecord::RecordNotFound
end
it "raises RecordNotFound if the user is not part of the conversation" do
expect {
eve_conversation_service.destroy!(@conversation.guid)
}.to raise_error ActiveRecord::RecordNotFound
end
end
def alice_conversation_service
ConversationService.new(alice)
end
def bob_conversation_service
ConversationService.new(bob)
end
def eve_conversation_service
ConversationService.new(eve)
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class CommentService
def initialize(user=nil)
@user = user
end
def create(post_id, text)
post = post_service.find!(post_id)
user.comment!(post, text)
end
def find_for_post(post_id)
post_service.find!(post_id).comments.for_a_stream
end
def find!(id_or_guid)
Comment.find_by!(comment_key(id_or_guid) => id_or_guid)
end
def destroy(comment_id)
comment = Comment.find(comment_id)
if user.owns?(comment) || user.owns?(comment.parent)
user.retract(comment)
true
else
false
end
end
def destroy!(comment_guid)
comment = find!(comment_guid)
if user.owns?(comment)
user.retract(comment)
elsif user.owns?(comment.parent)
user.retract(comment)
elsif comment
raise ActiveRecord::RecordInvalid
else
raise ActiveRecord::RecordNotFound
end
end
private
attr_reader :user
# We can assume a guid is at least 16 characters long as we have guids set to hex(8) since we started using them.
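# e.g. comment_key(123) => :id, while comment_key("abcdef0123456789") => :guid (illustrative values).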
def comment_key(id_or_guid)
id_or_guid.to_s.length < 16 ? :id : :guid
end
def post_service
@post_service ||= PostService.new(user)
end
end
``` | # frozen_string_literal: true
describe CommentService do
let(:post) { alice.post(:status_message, text: "hello", to: alice.aspects.first) }
describe "#create" do
it "creates a comment on my own post" do
comment = CommentService.new(alice).create(post.id, "hi")
expect(comment.text).to eq("hi")
end
it "creates a comment on post of a contact" do
comment = CommentService.new(bob).create(post.id, "hi")
expect(comment.text).to eq("hi")
end
it "attaches the comment to the post" do
comment = CommentService.new(alice).create(post.id, "hi")
expect(post.comments.first.text).to eq("hi")
expect(post.comments.first.id).to eq(comment.id)
end
it "fail if the post does not exist" do
expect {
CommentService.new(alice).create("unknown id", "hi")
}.to raise_error ActiveRecord::RecordNotFound
end
it "fail if the user can not see the post" do
expect {
CommentService.new(eve).create(post.id, "hi")
}.to raise_error ActiveRecord::RecordNotFound
end
end
describe "#find!" do
let(:comment) { CommentService.new(bob).create(post.id, "hi") }
it "returns comment" do
result = CommentService.new(bob).find!(comment.guid)
expect(result.id).to eq(comment.id)
end
it "raises exception the comment does not exist" do
expect {
CommentService.new(bob).find!("unknown id")
}.to raise_error ActiveRecord::RecordNotFound
end
end
describe "#destroy" do
let(:comment) { CommentService.new(bob).create(post.id, "hi") }
it "lets the user destroy his own comment" do
result = CommentService.new(bob).destroy(comment.id)
expect(result).to be_truthy
end
it "lets the parent author destroy others comment" do
result = CommentService.new(alice).destroy(comment.id)
expect(result).to be_truthy
end
it "does not let someone destroy others comment" do
result = CommentService.new(eve).destroy(comment.id)
expect(result).to be_falsey
end
it "fails if the comment does not exist" do
expect {
CommentService.new(bob).destroy("unknown id")
}.to raise_error ActiveRecord::RecordNotFound
end
end
describe "#destroy!" do
let(:comment) { CommentService.new(bob).create(post.id, "hi") }
it "lets the user destroy his own comment" do
result = CommentService.new(bob).destroy!(comment.guid)
expect(result).to be_truthy
end
it "lets the parent author destroy others comment" do
result = CommentService.new(alice).destroy!(comment.guid)
expect(result).to be_truthy
end
it "does not let someone destroy others comment" do
expect {
CommentService.new(eve).destroy!(comment.guid)
}.to raise_error ActiveRecord::RecordInvalid
end
it "raises exception the comment does not exist" do
expect {
CommentService.new(bob).destroy!("unknown id")
}.to raise_error ActiveRecord::RecordNotFound
end
end
describe "#find_for_post" do
context "with user" do
it "returns comments for a public post" do
post = alice.post(:status_message, text: "hello", public: true)
comment = CommentService.new(alice).create(post.id, "hi")
expect(CommentService.new(eve).find_for_post(post.id)).to include(comment)
end
it "returns comments for a visible private post" do
comment = CommentService.new(alice).create(post.id, "hi")
expect(CommentService.new(bob).find_for_post(post.id)).to include(comment)
end
it "does not return comments for private post the user can not see" do
expect {
CommentService.new(eve).find_for_post(post.id)
}.to raise_error ActiveRecord::RecordNotFound
end
end
context "without user" do
it "returns comments for a public post" do
post = alice.post(:status_message, text: "hello", public: true)
comment = CommentService.new(alice).create(post.id, "hi")
expect(CommentService.new.find_for_post(post.id)).to include(comment)
end
it "does not return comments for private post" do
expect {
CommentService.new.find_for_post(post.id)
}.to raise_error Diaspora::NonPublic
end
end
it "returns all comments of a post" do
post = alice.post(:status_message, text: "hello", public: true)
comments = [alice, bob, eve].map {|user| CommentService.new(user).create(post.id, "hi") }
expect(CommentService.new.find_for_post(post.id)).to match_array(comments)
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
require 'csv'
# NOTE: This is a deprecated service, only kept to not break ongoing imports
# on upgrade. See `BulkImportService` for its replacement.
class ImportService < BaseService
ROWS_PROCESSING_LIMIT = 20_000
def call(import)
@import = import
@account = @import.account
case @import.type
when 'following'
import_follows!
when 'blocking'
import_blocks!
when 'muting'
import_mutes!
when 'domain_blocking'
import_domain_blocks!
when 'bookmarks'
import_bookmarks!
end
end
private
def import_follows!
parse_import_data!(['Account address'])
import_relationships!('follow', 'unfollow', @account.following, ROWS_PROCESSING_LIMIT, reblogs: { header: 'Show boosts', default: true }, notify: { header: 'Notify on new posts', default: false }, languages: { header: 'Languages', default: nil })
end
def import_blocks!
parse_import_data!(['Account address'])
import_relationships!('block', 'unblock', @account.blocking, ROWS_PROCESSING_LIMIT)
end
def import_mutes!
parse_import_data!(['Account address'])
import_relationships!('mute', 'unmute', @account.muting, ROWS_PROCESSING_LIMIT, notifications: { header: 'Hide notifications', default: true })
end
def import_domain_blocks!
parse_import_data!(['#domain'])
items = @data.take(ROWS_PROCESSING_LIMIT).map { |row| row['#domain'].strip }
if @import.overwrite?
presence_hash = items.index_with(true)
@account.domain_blocks.find_each do |domain_block|
if presence_hash[domain_block.domain]
items.delete(domain_block.domain)
else
@account.unblock_domain!(domain_block.domain)
end
end
end
items.each do |domain|
@account.block_domain!(domain)
end
AfterAccountDomainBlockWorker.push_bulk(items) do |domain|
[@account.id, domain]
end
end
def import_relationships!(action, undo_action, overwrite_scope, limit, extra_fields = {})
local_domain_suffix = "@#{Rails.configuration.x.local_domain}"
items = @data.take(limit).map { |row| [row['Account address']&.strip&.delete_suffix(local_domain_suffix), extra_fields.to_h { |key, field_settings| [key, row[field_settings[:header]]&.strip || field_settings[:default]] }] }.reject { |(id, _)| id.blank? }
if @import.overwrite?
presence_hash = items.each_with_object({}) { |(id, extra), mapping| mapping[id] = [true, extra] }
overwrite_scope.reorder(nil).find_each do |target_account|
if presence_hash[target_account.acct]
items.delete(target_account.acct)
extra = presence_hash[target_account.acct][1]
Import::RelationshipWorker.perform_async(@account.id, target_account.acct, action, extra.stringify_keys)
else
Import::RelationshipWorker.perform_async(@account.id, target_account.acct, undo_action)
end
end
end
head_items = items.uniq { |acct, _| acct.split('@')[1] }
tail_items = items - head_items
Import::RelationshipWorker.push_bulk(head_items + tail_items) do |acct, extra|
[@account.id, acct, action, extra.stringify_keys]
end
end
def import_bookmarks!
parse_import_data!(['#uri'])
items = @data.take(ROWS_PROCESSING_LIMIT).map { |row| row['#uri'].strip }
if @import.overwrite?
presence_hash = items.index_with(true)
@account.bookmarks.find_each do |bookmark|
if presence_hash[bookmark.status.uri]
items.delete(bookmark.status.uri)
else
bookmark.destroy!
end
end
end
statuses = items.filter_map do |uri|
status = ActivityPub::TagManager.instance.uri_to_resource(uri, Status)
next if status.nil? && ActivityPub::TagManager.instance.local_uri?(uri)
status || ActivityPub::FetchRemoteStatusService.new.call(uri)
rescue HTTP::Error, OpenSSL::SSL::SSLError, Mastodon::UnexpectedResponseError
nil
rescue => e
Rails.logger.warn "Unexpected error when importing bookmark: #{e}"
nil
end
account_ids = statuses.map(&:account_id)
preloaded_relations = @account.relations_map(account_ids, skip_blocking_and_muting: true)
statuses.keep_if { |status| StatusPolicy.new(@account, status, preloaded_relations).show? }
statuses.each do |status|
@account.bookmarks.find_or_create_by!(account: @account, status: status)
end
end
def parse_import_data!(default_headers)
data = CSV.parse(import_data, headers: true)
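# Fall back to the default headers when the first row does not look like a header row (old-style exports without headers).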
data = CSV.parse(import_data, headers: default_headers) unless data.headers&.first&.strip&.include?(' ')
@data = data.compact_blank
end
def import_data
Paperclip.io_adapters.for(@import.data).read.force_encoding(Encoding::UTF_8)
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ImportService, type: :service do
include RoutingHelper
let!(:account) { Fabricate(:account, locked: false) }
let!(:bob) { Fabricate(:account, username: 'bob', locked: false) }
let!(:eve) { Fabricate(:account, username: 'eve', domain: 'example.com', locked: false, protocol: :activitypub, inbox_url: 'https://example.com/inbox') }
before do
stub_request(:post, 'https://example.com/inbox').to_return(status: 200)
end
context 'when importing old-style list of muted users' do
subject { described_class.new }
let(:csv) { attachment_fixture('mute-imports.txt') }
describe 'when no accounts are muted' do
let(:import) { Import.create(account: account, type: 'muting', data: csv) }
it 'mutes the listed accounts, including notifications' do
subject.call(import)
expect(account.muting.count).to eq 2
expect(Mute.find_by(account: account, target_account: bob).hide_notifications).to be true
end
end
describe 'when some accounts are muted and overwrite is not set' do
let(:import) { Import.create(account: account, type: 'muting', data: csv) }
it 'mutes the listed accounts, including notifications' do
account.mute!(bob, notifications: false)
subject.call(import)
expect(account.muting.count).to eq 2
expect(Mute.find_by(account: account, target_account: bob).hide_notifications).to be true
end
end
describe 'when some accounts are muted and overwrite is set' do
let(:import) { Import.create(account: account, type: 'muting', data: csv, overwrite: true) }
it 'mutes the listed accounts, including notifications' do
account.mute!(bob, notifications: false)
subject.call(import)
expect(account.muting.count).to eq 2
expect(Mute.find_by(account: account, target_account: bob).hide_notifications).to be true
end
end
end
context 'when importing new-style list of muted users' do
subject { described_class.new }
let(:csv) { attachment_fixture('new-mute-imports.txt') }
describe 'when no accounts are muted' do
let(:import) { Import.create(account: account, type: 'muting', data: csv) }
it 'mutes the listed accounts, respecting notifications' do
subject.call(import)
expect(account.muting.count).to eq 2
expect(Mute.find_by(account: account, target_account: bob).hide_notifications).to be true
expect(Mute.find_by(account: account, target_account: eve).hide_notifications).to be false
end
end
describe 'when some accounts are muted and overwrite is not set' do
let(:import) { Import.create(account: account, type: 'muting', data: csv) }
it 'mutes the listed accounts, respecting notifications' do
account.mute!(bob, notifications: true)
subject.call(import)
expect(account.muting.count).to eq 2
expect(Mute.find_by(account: account, target_account: bob).hide_notifications).to be true
expect(Mute.find_by(account: account, target_account: eve).hide_notifications).to be false
end
end
describe 'when some accounts are muted and overwrite is set' do
let(:import) { Import.create(account: account, type: 'muting', data: csv, overwrite: true) }
it 'mutes the listed accounts, respecting notifications' do
account.mute!(bob, notifications: true)
subject.call(import)
expect(account.muting.count).to eq 2
expect(Mute.find_by(account: account, target_account: bob).hide_notifications).to be true
expect(Mute.find_by(account: account, target_account: eve).hide_notifications).to be false
end
end
end
context 'when importing old-style list of followed users' do
subject { described_class.new }
let(:csv) { attachment_fixture('mute-imports.txt') }
describe 'when no accounts are followed' do
let(:import) { Import.create(account: account, type: 'following', data: csv) }
it 'follows the listed accounts, including boosts' do
subject.call(import)
expect(account.following.count).to eq 1
expect(account.follow_requests.count).to eq 1
expect(Follow.find_by(account: account, target_account: bob).show_reblogs).to be true
end
end
describe 'when some accounts are already followed and overwrite is not set' do
let(:import) { Import.create(account: account, type: 'following', data: csv) }
it 'follows the listed accounts, including boosts' do
account.follow!(bob, reblogs: false)
subject.call(import)
expect(account.following.count).to eq 1
expect(account.follow_requests.count).to eq 1
expect(Follow.find_by(account: account, target_account: bob).show_reblogs).to be true
end
end
describe 'when some accounts are already followed and overwrite is set' do
let(:import) { Import.create(account: account, type: 'following', data: csv, overwrite: true) }
it 'follows the listed accounts, including boosts' do
account.follow!(bob, reblogs: false)
subject.call(import)
expect(account.following.count).to eq 1
expect(account.follow_requests.count).to eq 1
expect(Follow.find_by(account: account, target_account: bob).show_reblogs).to be true
end
end
end
context 'when importing new-style list of followed users' do
subject { described_class.new }
let(:csv) { attachment_fixture('new-following-imports.txt') }
describe 'when no accounts are followed' do
let(:import) { Import.create(account: account, type: 'following', data: csv) }
it 'follows the listed accounts, respecting boosts' do
subject.call(import)
expect(account.following.count).to eq 1
expect(account.follow_requests.count).to eq 1
expect(Follow.find_by(account: account, target_account: bob).show_reblogs).to be true
expect(FollowRequest.find_by(account: account, target_account: eve).show_reblogs).to be false
end
end
describe 'when some accounts are already followed and overwrite is not set' do
let(:import) { Import.create(account: account, type: 'following', data: csv) }
it 'follows the listed accounts, respecting boosts' do
account.follow!(bob, reblogs: true)
subject.call(import)
expect(account.following.count).to eq 1
expect(account.follow_requests.count).to eq 1
expect(Follow.find_by(account: account, target_account: bob).show_reblogs).to be true
expect(FollowRequest.find_by(account: account, target_account: eve).show_reblogs).to be false
end
end
describe 'when some accounts are already followed and overwrite is set' do
let(:import) { Import.create(account: account, type: 'following', data: csv, overwrite: true) }
it 'follows the listed accounts, respecting boosts' do
account.follow!(bob, reblogs: true)
subject.call(import)
expect(account.following.count).to eq 1
expect(account.follow_requests.count).to eq 1
expect(Follow.find_by(account: account, target_account: bob).show_reblogs).to be true
expect(FollowRequest.find_by(account: account, target_account: eve).show_reblogs).to be false
end
end
end
# Based on the bug report 20571 where UTF-8 encoded domains were rejecting import of their users
#
# https://github.com/mastodon/mastodon/issues/20571
context 'with a utf-8 encoded domain' do
subject { described_class.new }
let!(:nare) { Fabricate(:account, username: 'nare', domain: 'թութ.հայ', locked: false, protocol: :activitypub, inbox_url: 'https://թութ.հայ/inbox') }
let(:csv) { attachment_fixture('utf8-followers.txt') }
let(:import) { Import.create(account: account, type: 'following', data: csv) }
# Make sure to not actually go to the remote server
before do
stub_request(:post, 'https://թութ.հայ/inbox').to_return(status: 200)
end
it 'follows the listed account' do
expect(account.follow_requests.count).to eq 0
subject.call(import)
expect(account.follow_requests.count).to eq 1
end
end
context 'when importing bookmarks' do
subject { described_class.new }
let(:csv) { attachment_fixture('bookmark-imports.txt') }
let(:local_account) { Fabricate(:account, username: 'foo', domain: '') }
let!(:remote_status) { Fabricate(:status, uri: 'https://example.com/statuses/1312') }
let!(:direct_status) { Fabricate(:status, uri: 'https://example.com/statuses/direct', visibility: :direct) }
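# Pin the local domain for each example so the local status URI below resolves, restoring the original configuration afterwards.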
around do |example|
local_before = Rails.configuration.x.local_domain
web_before = Rails.configuration.x.web_domain
Rails.configuration.x.local_domain = 'local.com'
Rails.configuration.x.web_domain = 'local.com'
example.run
Rails.configuration.x.web_domain = web_before
Rails.configuration.x.local_domain = local_before
end
before do
service = instance_double(ActivityPub::FetchRemoteStatusService)
allow(ActivityPub::FetchRemoteStatusService).to receive(:new).and_return(service)
allow(service).to receive(:call).with('https://unknown-remote.com/users/bar/statuses/1') do
Fabricate(:status, uri: 'https://unknown-remote.com/users/bar/statuses/1')
end
end
describe 'when no bookmarks are set' do
let(:import) { Import.create(account: account, type: 'bookmarks', data: csv) }
it 'adds the toots the user has access to to bookmarks' do
local_status = Fabricate(:status, account: local_account, uri: 'https://local.com/users/foo/statuses/42', id: 42, local: true)
subject.call(import)
expect(account.bookmarks.map { |bookmark| bookmark.status.id }).to include(local_status.id)
expect(account.bookmarks.map { |bookmark| bookmark.status.id }).to include(remote_status.id)
expect(account.bookmarks.map { |bookmark| bookmark.status.id }).to_not include(direct_status.id)
expect(account.bookmarks.count).to eq 3
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class AuthorizeFollowService < BaseService
include Payloadable
def call(source_account, target_account, **options)
if options[:skip_follow_request]
follow_request = FollowRequest.new(account: source_account, target_account: target_account, uri: options[:follow_request_uri])
else
follow_request = FollowRequest.find_by!(account: source_account, target_account: target_account)
follow_request.authorize!
end
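# Remote ActivityPub followers are additionally sent an Accept activity.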
create_notification(follow_request) if !source_account.local? && source_account.activitypub?
follow_request
end
private
def create_notification(follow_request)
ActivityPub::DeliveryWorker.perform_async(build_json(follow_request), follow_request.target_account_id, follow_request.account.inbox_url)
end
def build_json(follow_request)
Oj.dump(serialize_payload(follow_request, ActivityPub::AcceptFollowSerializer))
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe AuthorizeFollowService, type: :service do
subject { described_class.new }
let(:sender) { Fabricate(:account, username: 'alice') }
describe 'local' do
let(:bob) { Fabricate(:account, username: 'bob') }
before do
FollowRequest.create(account: bob, target_account: sender)
subject.call(bob, sender)
end
it 'removes follow request' do
expect(bob.requested?(sender)).to be false
end
it 'creates follow relation' do
expect(bob.following?(sender)).to be true
end
end
describe 'remote ActivityPub' do
let(:bob) { Fabricate(:account, username: 'bob', domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
before do
FollowRequest.create(account: bob, target_account: sender)
stub_request(:post, bob.inbox_url).to_return(status: 200)
subject.call(bob, sender)
end
it 'removes follow request' do
expect(bob.requested?(sender)).to be false
end
it 'creates follow relation' do
expect(bob.following?(sender)).to be true
end
it 'sends an accept activity' do
expect(a_request(:post, bob.inbox_url)).to have_been_made.once
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class PrecomputeFeedService < BaseService
include Redisable
def call(account)
FeedManager.instance.populate_home(account)
ensure
redis.del("account:#{account.id}:regeneration")
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe PrecomputeFeedService, type: :service do
subject { described_class.new }
describe 'call' do
let(:account) { Fabricate(:account) }
it 'fills a user timeline with statuses' do
account = Fabricate(:account)
status = Fabricate(:status, account: account)
subject.call(account)
expect(redis.zscore(FeedManager.instance.key(:home, account.id), status.id)).to be_within(0.1).of(status.id.to_f)
end
it 'does not raise an error even if it could not find any status' do
account = Fabricate(:account)
expect { subject.call(account) }.to_not raise_error
end
it 'filters statuses' do
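# A reblog of a muted account's status must not end up in the regenerated home feed.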
account = Fabricate(:account)
muted_account = Fabricate(:account)
Fabricate(:mute, account: account, target_account: muted_account)
reblog = Fabricate(:status, account: muted_account)
Fabricate(:status, account: account, reblog: reblog)
subject.call(account)
expect(redis.zscore(FeedManager.instance.key(:home, account.id), reblog.id)).to be_nil
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class PostStatusService < BaseService
include Redisable
include LanguagesHelper
MIN_SCHEDULE_OFFSET = 5.minutes.freeze
class UnexpectedMentionsError < StandardError
attr_reader :accounts
def initialize(message, accounts)
super(message)
@accounts = accounts
end
end
# Post a text status update, fetch and notify remote users mentioned
# @param [Account] account Account from which to post
# @param [Hash] options
# @option [String] :text Message
# @option [Status] :thread Optional status to reply to
# @option [Boolean] :sensitive
# @option [String] :visibility
# @option [String] :spoiler_text
# @option [String] :language
# @option [String] :scheduled_at
# @option [Hash] :poll Optional poll to attach
# @option [Enumerable] :media_ids Optional array of media IDs to attach
# @option [Doorkeeper::Application] :application
# @option [String] :idempotency Optional idempotency key
# @option [Boolean] :with_rate_limit
# @option [Enumerable] :allowed_mentions Optional array of expected mentioned account IDs, raises `UnexpectedMentionsError` if unexpected accounts end up in mentions
# @return [Status]
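# Illustrative call using the options documented above, e.g.: PostStatusService.new.call(account, text: 'Hello world', visibility: 'unlisted')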
def call(account, options = {})
@account = account
@options = options
@text = @options[:text] || ''
@in_reply_to = @options[:thread]
return idempotency_duplicate if idempotency_given? && idempotency_duplicate?
validate_media!
preprocess_attributes!
if scheduled?
schedule_status!
else
process_status!
end
redis.setex(idempotency_key, 3_600, @status.id) if idempotency_given?
unless scheduled?
postprocess_status!
bump_potential_friendship!
end
@status
end
private
def preprocess_attributes!
@sensitive = (@options[:sensitive].nil? ? @account.user&.setting_default_sensitive : @options[:sensitive]) || @options[:spoiler_text].present?
@text = @options.delete(:spoiler_text) if @text.blank? && @options[:spoiler_text].present?
@visibility = @options[:visibility] || @account.user&.setting_default_privacy
@visibility = :unlisted if @visibility&.to_sym == :public && @account.silenced?
@scheduled_at = @options[:scheduled_at]&.to_datetime
@scheduled_at = nil if scheduled_in_the_past?
rescue ArgumentError
raise ActiveRecord::RecordInvalid
end
def process_status!
@status = @account.statuses.new(status_attributes)
process_mentions_service.call(@status, save_records: false)
safeguard_mentions!(@status)
# The following transaction block is needed to wrap the UPDATEs to
# the media attachments when the status is created
ApplicationRecord.transaction do
@status.save!
end
end
def safeguard_mentions!(status)
return if @options[:allowed_mentions].nil?
expected_account_ids = @options[:allowed_mentions].map(&:to_i)
unexpected_accounts = status.mentions.map(&:account).to_a.reject { |mentioned_account| expected_account_ids.include?(mentioned_account.id) }
return if unexpected_accounts.empty?
raise UnexpectedMentionsError.new('Post would be sent to unexpected accounts', unexpected_accounts)
end
def schedule_status!
status_for_validation = @account.statuses.build(status_attributes)
if status_for_validation.valid?
# Marking the status as destroyed is necessary to prevent the status from being
# persisted when the associated media attachments get updated when creating the
# scheduled status.
status_for_validation.destroy
# The following transaction block is needed to wrap the UPDATEs to
# the media attachments when the scheduled status is created
ApplicationRecord.transaction do
@status = @account.scheduled_statuses.create!(scheduled_status_attributes)
end
else
raise ActiveRecord::RecordInvalid
end
end
def postprocess_status!
process_hashtags_service.call(@status)
Trends.tags.register(@status)
LinkCrawlWorker.perform_async(@status.id)
DistributionWorker.perform_async(@status.id)
ActivityPub::DistributionWorker.perform_async(@status.id)
PollExpirationNotifyWorker.perform_at(@status.poll.expires_at, @status.poll.id) if @status.poll
end
def validate_media!
if @options[:media_ids].blank? || !@options[:media_ids].is_a?(Enumerable)
@media = []
return
end
raise Mastodon::ValidationError, I18n.t('media_attachments.validations.too_many') if @options[:media_ids].size > 4 || @options[:poll].present?
@media = @account.media_attachments.where(status_id: nil).where(id: @options[:media_ids].take(4).map(&:to_i))
raise Mastodon::ValidationError, I18n.t('media_attachments.validations.images_and_video') if @media.size > 1 && @media.find(&:audio_or_video?)
raise Mastodon::ValidationError, I18n.t('media_attachments.validations.not_ready') if @media.any?(&:not_processed?)
end
def process_mentions_service
ProcessMentionsService.new
end
def process_hashtags_service
ProcessHashtagsService.new
end
def scheduled?
@scheduled_at.present?
end
def idempotency_key
"idempotency:status:#{@account.id}:#{@options[:idempotency]}"
end
def idempotency_given?
@options[:idempotency].present?
end
def idempotency_duplicate
if scheduled?
@account.schedule_statuses.find(@idempotency_duplicate)
else
@account.statuses.find(@idempotency_duplicate)
end
end
def idempotency_duplicate?
@idempotency_duplicate = redis.get(idempotency_key)
end
def scheduled_in_the_past?
@scheduled_at.present? && @scheduled_at <= Time.now.utc + MIN_SCHEDULE_OFFSET
end
def bump_potential_friendship!
return if [email protected]? || @account.id == @status.in_reply_to_account_id
ActivityTracker.increment('activity:interactions')
return if @account.following?(@status.in_reply_to_account_id)
PotentialFriendshipTracker.record(@account.id, @status.in_reply_to_account_id, :reply)
end
def status_attributes
{
text: @text,
media_attachments: @media || [],
ordered_media_attachment_ids: (@options[:media_ids] || []).map(&:to_i) & @media.map(&:id),
thread: @in_reply_to,
poll_attributes: poll_attributes,
sensitive: @sensitive,
spoiler_text: @options[:spoiler_text] || '',
visibility: @visibility,
language: valid_locale_cascade(@options[:language], @account.user&.preferred_posting_language, I18n.default_locale),
application: @options[:application],
rate_limit: @options[:with_rate_limit],
}.compact
end
def scheduled_status_attributes
{
scheduled_at: @scheduled_at,
media_attachments: @media || [],
params: scheduled_options,
}
end
def poll_attributes
return if @options[:poll].blank?
@options[:poll].merge(account: @account, voters_count: 0)
end
def scheduled_options
@options.tap do |options_hash|
options_hash[:in_reply_to_id] = options_hash.delete(:thread)&.id
options_hash[:application_id] = options_hash.delete(:application)&.id
options_hash[:scheduled_at] = nil
options_hash[:idempotency] = nil
options_hash[:with_rate_limit] = false
end
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe PostStatusService, type: :service do
subject { described_class.new }
it 'creates a new status' do
account = Fabricate(:account)
text = 'test status update'
status = subject.call(account, text: text)
expect(status).to be_persisted
expect(status.text).to eq text
end
it 'creates a new response status' do
in_reply_to_status = Fabricate(:status)
account = Fabricate(:account)
text = 'test status update'
status = subject.call(account, text: text, thread: in_reply_to_status)
expect(status).to be_persisted
expect(status.text).to eq text
expect(status.thread).to eq in_reply_to_status
end
context 'when scheduling a status' do
let!(:account) { Fabricate(:account) }
let!(:future) { Time.now.utc + 2.hours }
let!(:previous_status) { Fabricate(:status, account: account) }
it 'schedules a status' do
status = subject.call(account, text: 'Hi future!', scheduled_at: future)
expect(status).to be_a ScheduledStatus
expect(status.scheduled_at).to eq future
expect(status.params['text']).to eq 'Hi future!'
end
it 'does not immediately create a status' do
media = Fabricate(:media_attachment, account: account)
status = subject.call(account, text: 'Hi future!', media_ids: [media.id], scheduled_at: future)
expect(status).to be_a ScheduledStatus
expect(status.scheduled_at).to eq future
expect(status.params['text']).to eq 'Hi future!'
expect(status.params['media_ids']).to eq [media.id]
expect(media.reload.status).to be_nil
expect(Status.where(text: 'Hi future!')).to_not exist
end
it 'does not change statuses count' do
expect { subject.call(account, text: 'Hi future!', scheduled_at: future, thread: previous_status) }.to_not(change { [account.statuses_count, previous_status.replies_count] })
end
end
it 'creates a response to the original status of a boost' do
boosted_status = Fabricate(:status)
in_reply_to_status = Fabricate(:status, reblog: boosted_status)
account = Fabricate(:account)
text = 'test status update'
status = subject.call(account, text: text, thread: in_reply_to_status)
expect(status).to be_persisted
expect(status.text).to eq text
expect(status.thread).to eq boosted_status
end
it 'creates a sensitive status' do
status = create_status_with_options(sensitive: true)
expect(status).to be_persisted
expect(status).to be_sensitive
end
it 'creates a status with spoiler text' do
spoiler_text = 'spoiler text'
status = create_status_with_options(spoiler_text: spoiler_text)
expect(status).to be_persisted
expect(status.spoiler_text).to eq spoiler_text
end
it 'creates a sensitive status when there is a CW but no text' do
status = subject.call(Fabricate(:account), text: '', spoiler_text: 'foo')
expect(status).to be_persisted
expect(status).to be_sensitive
end
it 'creates a status with empty default spoiler text' do
status = create_status_with_options(spoiler_text: nil)
expect(status).to be_persisted
expect(status.spoiler_text).to eq ''
end
it 'creates a status with the given visibility' do
status = create_status_with_options(visibility: :private)
expect(status).to be_persisted
expect(status.visibility).to eq 'private'
end
it 'creates a status with limited visibility for silenced users' do
status = subject.call(Fabricate(:account, silenced: true), text: 'test', visibility: :public)
expect(status).to be_persisted
expect(status.visibility).to eq 'unlisted'
end
it 'creates a status for the given application' do
application = Fabricate(:application)
status = create_status_with_options(application: application)
expect(status).to be_persisted
expect(status.application).to eq application
end
it 'creates a status with a language set' do
account = Fabricate(:account)
text = 'This is an English text.'
status = subject.call(account, text: text)
expect(status.language).to eq 'en'
end
it 'processes mentions' do
mention_service = instance_double(ProcessMentionsService)
allow(mention_service).to receive(:call)
allow(ProcessMentionsService).to receive(:new).and_return(mention_service)
account = Fabricate(:account)
status = subject.call(account, text: 'test status update')
expect(ProcessMentionsService).to have_received(:new)
expect(mention_service).to have_received(:call).with(status, save_records: false)
end
it 'safeguards mentions' do
account = Fabricate(:account)
mentioned_account = Fabricate(:account, username: 'alice')
unexpected_mentioned_account = Fabricate(:account, username: 'bob')
expect do
subject.call(account, text: '@alice hm, @bob is really annoying lately', allowed_mentions: [mentioned_account.id])
end.to raise_error(an_instance_of(PostStatusService::UnexpectedMentionsError).and(having_attributes(accounts: [unexpected_mentioned_account])))
end
it 'processes duplicate mentions correctly' do
account = Fabricate(:account)
Fabricate(:account, username: 'alice')
expect do
subject.call(account, text: '@alice @alice @alice hey @alice')
end.to_not raise_error
end
it 'processes hashtags' do
hashtags_service = instance_double(ProcessHashtagsService)
allow(hashtags_service).to receive(:call)
allow(ProcessHashtagsService).to receive(:new).and_return(hashtags_service)
account = Fabricate(:account)
status = subject.call(account, text: 'test status update')
expect(ProcessHashtagsService).to have_received(:new)
expect(hashtags_service).to have_received(:call).with(status)
end
it 'gets distributed' do
allow(DistributionWorker).to receive(:perform_async)
allow(ActivityPub::DistributionWorker).to receive(:perform_async)
account = Fabricate(:account)
status = subject.call(account, text: 'test status update')
expect(DistributionWorker).to have_received(:perform_async).with(status.id)
expect(ActivityPub::DistributionWorker).to have_received(:perform_async).with(status.id)
end
it 'crawls links' do
allow(LinkCrawlWorker).to receive(:perform_async)
account = Fabricate(:account)
status = subject.call(account, text: 'test status update')
expect(LinkCrawlWorker).to have_received(:perform_async).with(status.id)
end
it 'attaches the given media to the created status' do
account = Fabricate(:account)
media = Fabricate(:media_attachment, account: account)
status = subject.call(
account,
text: 'test status update',
media_ids: [media.id]
)
expect(media.reload.status).to eq status
end
it 'does not attach media from another account to the created status' do
account = Fabricate(:account)
media = Fabricate(:media_attachment, account: Fabricate(:account))
subject.call(
account,
text: 'test status update',
media_ids: [media.id]
)
expect(media.reload.status).to be_nil
end
it 'does not allow attaching more than 4 files' do
account = Fabricate(:account)
expect do
subject.call(
account,
text: 'test status update',
media_ids: [
Fabricate(:media_attachment, account: account),
Fabricate(:media_attachment, account: account),
Fabricate(:media_attachment, account: account),
Fabricate(:media_attachment, account: account),
Fabricate(:media_attachment, account: account),
].map(&:id)
)
end.to raise_error(
Mastodon::ValidationError,
I18n.t('media_attachments.validations.too_many')
)
end
it 'does not allow attaching both videos and images' do
account = Fabricate(:account)
video = Fabricate(:media_attachment, type: :video, account: account)
image = Fabricate(:media_attachment, type: :image, account: account)
video.update(type: :video)
expect do
subject.call(
account,
text: 'test status update',
media_ids: [
video,
image,
].map(&:id)
)
end.to raise_error(
Mastodon::ValidationError,
I18n.t('media_attachments.validations.images_and_video')
)
end
it 'returns existing status when used twice with idempotency key' do
account = Fabricate(:account)
status1 = subject.call(account, text: 'test', idempotency: 'meepmeep')
status2 = subject.call(account, text: 'test', idempotency: 'meepmeep')
expect(status2.id).to eq status1.id
end
def create_status_with_options(**options)
subject.call(Fabricate(:account), options.merge(text: 'test'))
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class AccountSearchService < BaseService
attr_reader :query, :limit, :offset, :options, :account
MENTION_ONLY_RE = /\A#{Account::MENTION_RE}\z/i
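# Matches a query that is a single complete mention, e.g. "@alice" or "@[email protected]" (illustrative examples).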
# Min. number of characters to look for non-exact matches
MIN_QUERY_LENGTH = 5
class QueryBuilder
def initialize(query, account, options = {})
@query = query
@account = account
@options = options
end
def build
AccountsIndex.query(
bool: {
must: {
function_score: {
query: {
bool: {
must: must_clauses,
},
},
functions: [
reputation_score_function,
followers_score_function,
time_distance_function,
],
},
},
should: should_clauses,
}
)
end
private
def must_clauses
if @account && @options[:following]
[core_query, only_following_query]
else
[core_query]
end
end
def should_clauses
if @account && !@options[:following]
[boost_following_query]
else
[]
end
end
# This function limits results to only the accounts the user is following
def only_following_query
{
terms: {
id: following_ids,
},
}
end
# This function promotes accounts the user is following
def boost_following_query
{
terms: {
id: following_ids,
boost: 100,
},
}
end
# This function deranks accounts that follow more people than follow them
def reputation_score_function
{
script_score: {
script: {
source: "(Math.max(doc['followers_count'].value, 0) + 0.0) / (Math.max(doc['followers_count'].value, 0) + Math.max(doc['following_count'].value, 0) + 1)",
},
},
}
end
# This function promotes accounts that have more followers
def followers_score_function
{
script_score: {
script: {
source: "(Math.max(doc['followers_count'].value, 0) / (Math.max(doc['followers_count'].value, 0) + 1))",
},
},
}
end
# This function deranks accounts that haven't posted in a long time
def time_distance_function
{
gauss: {
last_status_at: {
scale: '30d',
offset: '30d',
decay: 0.3,
},
},
}
end
def following_ids
@following_ids ||= @account.active_relationships.pluck(:target_account_id) + [@account.id]
end
end
class AutocompleteQueryBuilder < QueryBuilder
private
def core_query
{
multi_match: {
query: @query,
type: 'bool_prefix',
fields: %w(username^2 username.*^2 display_name display_name.*),
},
}
end
end
class FullQueryBuilder < QueryBuilder
private
def core_query
{
multi_match: {
query: @query,
type: 'most_fields',
fields: %w(username^2 display_name^2 text text.*),
operator: 'and',
},
}
end
end
def call(query, account = nil, options = {})
@query = query&.strip&.gsub(/\A@/, '')
@limit = options[:limit].to_i
@offset = options[:offset].to_i
@options = options
@account = account
search_service_results.compact.uniq
end
private
def search_service_results
return [] if query.blank? || limit < 1
[exact_match] + search_results
end
def exact_match
return unless offset.zero? && username_complete?
return @exact_match if defined?(@exact_match)
match = if options[:resolve]
ResolveAccountService.new.call(query)
elsif domain_is_local?
Account.find_local(query_username)
else
Account.find_remote(query_username, query_domain)
end
match = nil if !match.nil? && !account.nil? && options[:following] && !account.following?(match)
@exact_match = match
end
def search_results
return [] if limit_for_non_exact_results.zero?
@search_results ||= begin
results = from_elasticsearch if Chewy.enabled?
results ||= from_database
results
end
end
def from_database
if account
advanced_search_results
else
simple_search_results
end
end
def advanced_search_results
Account.advanced_search_for(terms_for_query, account, limit: limit_for_non_exact_results, following: options[:following], offset: offset)
end
def simple_search_results
Account.search_for(terms_for_query, limit: limit_for_non_exact_results, offset: offset)
end
def from_elasticsearch
query_builder = begin
if options[:use_searchable_text]
FullQueryBuilder.new(terms_for_query, account, options.slice(:following))
else
AutocompleteQueryBuilder.new(terms_for_query, account, options.slice(:following))
end
end
records = query_builder.build.limit(limit_for_non_exact_results).offset(offset).objects.compact
ActiveRecord::Associations::Preloader.new(records: records, associations: :account_stat)
records
rescue Faraday::ConnectionFailed, Parslet::ParseFailed
nil
end
def limit_for_non_exact_results
return 0 if @account.nil? && query.size < MIN_QUERY_LENGTH
if exact_match?
limit - 1
else
limit
end
end
def terms_for_query
if domain_is_local?
query_username
else
query
end
end
def split_query_string
@split_query_string ||= query.split('@')
end
def query_username
@query_username ||= split_query_string.first || ''
end
def query_domain
@query_domain ||= query_without_split? ? nil : split_query_string.last
end
def query_without_split?
split_query_string.size == 1
end
def domain_is_local?
@domain_is_local ||= TagManager.instance.local_domain?(query_domain)
end
def exact_match?
exact_match.present?
end
def username_complete?
query.include?('@') && "@#{query}".match?(MENTION_ONLY_RE)
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
describe AccountSearchService, type: :service do
describe '#call' do
context 'with a query to ignore' do
it 'returns empty array for missing query' do
results = subject.call('', nil, limit: 10)
expect(results).to eq []
end
it 'returns empty array for limit zero' do
Fabricate(:account, username: 'match')
results = subject.call('match', nil, limit: 0)
expect(results).to eq []
end
end
context 'when searching for a simple term that is not an exact match' do
it 'does not return a nil entry in the array for the exact match' do
account = Fabricate(:account, username: 'matchingusername')
results = subject.call('match', nil, limit: 5)
expect(results).to eq [account]
end
end
context 'when there is a local domain' do
around do |example|
before = Rails.configuration.x.local_domain
example.run
Rails.configuration.x.local_domain = before
end
it 'returns exact match first' do
remote = Fabricate(:account, username: 'a', domain: 'remote', display_name: 'e')
remote_too = Fabricate(:account, username: 'b', domain: 'remote', display_name: 'e')
exact = Fabricate(:account, username: 'e')
Rails.configuration.x.local_domain = 'example.com'
results = subject.call('[email protected]', nil, limit: 2)
expect(results).to eq([exact, remote]).or eq([exact, remote_too])
end
end
context 'when there is a domain but no exact match' do
it 'follows the remote account when resolve is true' do
service = instance_double(ResolveAccountService, call: nil)
allow(ResolveAccountService).to receive(:new).and_return(service)
subject.call('[email protected]', nil, limit: 10, resolve: true)
expect(service).to have_received(:call).with('[email protected]')
end
it 'does not follow the remote account when resolve is false' do
service = instance_double(ResolveAccountService, call: nil)
allow(ResolveAccountService).to receive(:new).and_return(service)
subject.call('[email protected]', nil, limit: 10, resolve: false)
expect(service).to_not have_received(:call)
end
end
it 'returns the fuzzy match first, and does not return suspended exacts' do
partial = Fabricate(:account, username: 'exactness')
Fabricate(:account, username: 'exact', suspended: true)
results = subject.call('exact', nil, limit: 10)
expect(results.size).to eq 1
expect(results).to eq [partial]
end
it 'does not return suspended remote accounts' do
Fabricate(:account, username: 'a', domain: 'remote', display_name: 'e', suspended: true)
results = subject.call('[email protected]', nil, limit: 2)
expect(results.size).to eq 0
expect(results).to eq []
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class SoftwareUpdateCheckService < BaseService
def call
clean_outdated_updates!
return unless SoftwareUpdate.check_enabled?
process_update_notices!(fetch_update_notices)
end
private
def clean_outdated_updates!
SoftwareUpdate.find_each do |software_update|
software_update.delete if Mastodon::Version.gem_version >= software_update.gem_version
rescue ArgumentError
software_update.delete
end
end
def fetch_update_notices
Request.new(:get, "#{api_url}?version=#{version}").add_headers('Accept' => 'application/json', 'User-Agent' => 'Mastodon update checker').perform do |res|
return Oj.load(res.body_with_limit, mode: :strict) if res.code == 200
end
rescue HTTP::Error, OpenSSL::SSL::SSLError, Oj::ParseError
nil
end
def api_url
ENV.fetch('UPDATE_CHECK_URL', 'https://api.joinmastodon.org/update-check')
end
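# Strips build metadata from the running version, e.g. '4.3.0+pr-123' becomes '4.3.0' (illustrative value).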
def version
@version ||= Mastodon::Version.to_s.split('+')[0]
end
def process_update_notices!(update_notices)
return if update_notices.blank? || update_notices['updatesAvailable'].nil?
# Clear notices that are not listed by the update server anymore
SoftwareUpdate.where.not(version: update_notices['updatesAvailable'].pluck('version')).delete_all
return if update_notices['updatesAvailable'].blank?
# Check if any of the notices is new, and issue notifications
known_versions = SoftwareUpdate.where(version: update_notices['updatesAvailable'].pluck('version')).pluck(:version)
new_update_notices = update_notices['updatesAvailable'].filter { |notice| known_versions.exclude?(notice['version']) }
return if new_update_notices.blank?
new_updates = new_update_notices.map do |notice|
SoftwareUpdate.create!(version: notice['version'], urgent: notice['urgent'], type: notice['type'], release_notes: notice['releaseNotes'])
end
notify_devops!(new_updates)
end
def should_notify_user?(user, urgent_version, patch_version)
case user.settings['notification_emails.software_updates']
when 'none'
false
when 'critical'
urgent_version
when 'patch'
urgent_version || patch_version
when 'all'
true
end
end
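  # Illustrative mapping of the setting to the outcome above (comment added for
  # clarity, not part of the original logic):
  #   'none'     -> never notified
  #   'critical' -> notified only when an urgent update arrived
  #   'patch'    -> notified for urgent updates or new patch releases
  #   'all'      -> notified for every new update
  # Any other value falls through the case expression and returns nil (falsy).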
def notify_devops!(new_updates)
has_new_urgent_version = new_updates.any?(&:urgent?)
has_new_patch_version = new_updates.any?(&:patch_type?)
User.those_who_can(:view_devops).includes(:account).find_each do |user|
next unless should_notify_user?(user, has_new_urgent_version, has_new_patch_version)
if has_new_urgent_version
AdminMailer.with(recipient: user.account).new_critical_software_updates.deliver_later
else
AdminMailer.with(recipient: user.account).new_software_updates.deliver_later
end
end
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe SoftwareUpdateCheckService, type: :service do
subject { described_class.new }
shared_examples 'when the feature is enabled' do
let(:full_update_check_url) { "#{update_check_url}?version=#{Mastodon::Version.to_s.split('+')[0]}" }
let(:devops_role) { Fabricate(:user_role, name: 'DevOps', permissions: UserRole::FLAGS[:view_devops]) }
let(:owner_user) { Fabricate(:user, role: UserRole.find_by(name: 'Owner')) }
let(:old_devops_user) { Fabricate(:user) }
let(:none_user) { Fabricate(:user, role: devops_role) }
let(:patch_user) { Fabricate(:user, role: devops_role) }
let(:critical_user) { Fabricate(:user, role: devops_role) }
around do |example|
queue_adapter = ActiveJob::Base.queue_adapter
ActiveJob::Base.queue_adapter = :test
example.run
ActiveJob::Base.queue_adapter = queue_adapter
end
before do
Fabricate(:software_update, version: '3.5.0', type: 'major', urgent: false)
Fabricate(:software_update, version: '42.13.12', type: 'major', urgent: false)
owner_user.settings.update('notification_emails.software_updates': 'all')
owner_user.save!
old_devops_user.settings.update('notification_emails.software_updates': 'all')
old_devops_user.save!
none_user.settings.update('notification_emails.software_updates': 'none')
none_user.save!
patch_user.settings.update('notification_emails.software_updates': 'patch')
patch_user.save!
critical_user.settings.update('notification_emails.software_updates': 'critical')
critical_user.save!
end
context 'when the update server errors out' do
before do
stub_request(:get, full_update_check_url).to_return(status: 404)
end
it 'deletes outdated update records but keeps valid update records' do
expect { subject.call }.to change { SoftwareUpdate.pluck(:version).sort }.from(['3.5.0', '42.13.12']).to(['42.13.12'])
end
end
context 'when the server returns new versions' do
let(:server_json) do
{
updatesAvailable: [
{
version: '4.2.1',
urgent: false,
type: 'patch',
releaseNotes: 'https://github.com/mastodon/mastodon/releases/v4.2.1',
},
{
version: '4.3.0',
urgent: false,
type: 'minor',
releaseNotes: 'https://github.com/mastodon/mastodon/releases/v4.3.0',
},
{
version: '5.0.0',
urgent: false,
type: 'minor',
releaseNotes: 'https://github.com/mastodon/mastodon/releases/v5.0.0',
},
],
}
end
before do
stub_request(:get, full_update_check_url).to_return(body: Oj.dump(server_json))
end
it 'updates the list of known updates' do
expect { subject.call }.to change { SoftwareUpdate.pluck(:version).sort }.from(['3.5.0', '42.13.12']).to(['4.2.1', '4.3.0', '5.0.0'])
end
context 'when no update is urgent' do
it 'sends e-mail notifications according to settings', :aggregate_failures do
expect { subject.call }.to have_enqueued_mail(AdminMailer, :new_software_updates)
.with(hash_including(params: { recipient: owner_user.account })).once
.and(have_enqueued_mail(AdminMailer, :new_software_updates).with(hash_including(params: { recipient: patch_user.account })).once)
.and(have_enqueued_mail.at_most(2))
end
end
context 'when an update is urgent' do
let(:server_json) do
{
updatesAvailable: [
{
version: '5.0.0',
urgent: true,
type: 'minor',
releaseNotes: 'https://github.com/mastodon/mastodon/releases/v5.0.0',
},
],
}
end
it 'sends e-mail notifications according to settings', :aggregate_failures do
expect { subject.call }.to have_enqueued_mail(AdminMailer, :new_critical_software_updates)
.with(hash_including(params: { recipient: owner_user.account })).once
.and(have_enqueued_mail(AdminMailer, :new_critical_software_updates).with(hash_including(params: { recipient: patch_user.account })).once)
.and(have_enqueued_mail(AdminMailer, :new_critical_software_updates).with(hash_including(params: { recipient: critical_user.account })).once)
.and(have_enqueued_mail.at_most(3))
end
end
end
end
context 'when update checking is disabled' do
around do |example|
ClimateControl.modify UPDATE_CHECK_URL: '' do
example.run
end
end
before do
Fabricate(:software_update, version: '3.5.0', type: 'major', urgent: false)
end
it 'deletes outdated update records' do
expect { subject.call }.to change(SoftwareUpdate, :count).from(1).to(0)
end
end
context 'when using the default update checking API' do
let(:update_check_url) { 'https://api.joinmastodon.org/update-check' }
it_behaves_like 'when the feature is enabled'
end
context 'when using a custom update check URL' do
let(:update_check_url) { 'https://api.example.com/update_check' }
around do |example|
ClimateControl.modify UPDATE_CHECK_URL: 'https://api.example.com/update_check' do
example.run
end
end
it_behaves_like 'when the feature is enabled'
end
end
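For reference, a minimal sketch of exercising the service directly against a stubbed endpoint, assuming update checking is enabled (i.e. UPDATE_CHECK_URL has not been blanked out); the payload and version number are illustrative:
```ruby
# Illustrative only: stub the default update-check endpoint and run the service.
stub_request(:get, %r{https://api\.joinmastodon\.org/update-check})
  .to_return(body: Oj.dump({ updatesAvailable: [{ version: '99.9.9', urgent: true, type: 'patch', releaseNotes: 'https://example.com/notes' }] }))

SoftwareUpdateCheckService.new.call

SoftwareUpdate.pluck(:version) # expected to include '99.9.9'
```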
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class UnsuspendAccountService < BaseService
include Payloadable
# Restores a recently-unsuspended account
# @param [Account] account Account to restore
def call(account)
@account = account
refresh_remote_account!
return if @account.nil? || @account.suspended?
merge_into_home_timelines!
merge_into_list_timelines!
publish_media_attachments!
distribute_update_actor!
end
private
def refresh_remote_account!
return if @account.local?
# While we had the remote account suspended, it could be that
# it got suspended on its origin, too. So, we need to refresh
# it straight away so it gets marked as remotely suspended in
# that case.
@account.update!(last_webfingered_at: nil)
@account = ResolveAccountService.new.call(@account)
# Worth noting that it is possible that the remote has not only
# been suspended, but deleted permanently, in which case
# @account would now be nil.
end
def distribute_update_actor!
return unless @account.local?
account_reach_finder = AccountReachFinder.new(@account)
ActivityPub::DeliveryWorker.push_bulk(account_reach_finder.inboxes, limit: 1_000) do |inbox_url|
[signed_activity_json, @account.id, inbox_url]
end
end
def merge_into_home_timelines!
@account.followers_for_local_distribution.reorder(nil).find_each do |follower|
FeedManager.instance.merge_into_home(@account, follower)
end
end
def merge_into_list_timelines!
@account.lists_for_local_distribution.reorder(nil).find_each do |list|
FeedManager.instance.merge_into_list(@account, list)
end
end
def publish_media_attachments!
attachment_names = MediaAttachment.attachment_definitions.keys
@account.media_attachments.find_each do |media_attachment|
attachment_names.each do |attachment_name|
attachment = media_attachment.public_send(attachment_name)
styles = MediaAttachment::DEFAULT_STYLES | attachment.styles.keys
next if attachment.blank?
styles.each do |style|
case Paperclip::Attachment.default_options[:storage]
when :s3
# Prevent useless S3 calls if ACLs are disabled
next if ENV['S3_PERMISSION'] == ''
begin
attachment.s3_object(style).acl.put(acl: Paperclip::Attachment.default_options[:s3_permissions])
rescue Aws::S3::Errors::NoSuchKey
Rails.logger.warn "Tried to change acl on non-existent key #{attachment.s3_object(style).key}"
rescue Aws::S3::Errors::NotImplemented => e
Rails.logger.error "Error trying to change ACL on #{attachment.s3_object(style).key}: #{e.message}"
end
when :fog, :azure
# Not supported
when :filesystem
begin
FileUtils.chmod(0o666 & ~File.umask, attachment.path(style)) unless attachment.path(style).nil?
rescue Errno::ENOENT
Rails.logger.warn "Tried to change permission on non-existent file #{attachment.path(style)}"
end
end
CacheBusterWorker.perform_async(attachment.path(style)) if Rails.configuration.x.cache_buster_enabled
end
end
end
end
def signed_activity_json
@signed_activity_json ||= Oj.dump(serialize_payload(@account, ActivityPub::UpdateSerializer, signer: @account))
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe UnsuspendAccountService, type: :service do
shared_context 'with common context' do
subject { described_class.new.call(account) }
let!(:local_follower) { Fabricate(:user, current_sign_in_at: 1.hour.ago).account }
let!(:list) { Fabricate(:list, account: local_follower) }
before do
allow(FeedManager.instance).to receive_messages(merge_into_home: nil, merge_into_list: nil)
local_follower.follow!(account)
list.accounts << account
account.unsuspend!
end
end
describe 'unsuspending a local account' do
def match_update_actor_request(req, account)
json = JSON.parse(req.body)
actor_id = ActivityPub::TagManager.instance.uri_for(account)
json['type'] == 'Update' && json['actor'] == actor_id && json['object']['id'] == actor_id && !json['object']['suspended']
end
before do
stub_request(:post, 'https://alice.com/inbox').to_return(status: 201)
stub_request(:post, 'https://bob.com/inbox').to_return(status: 201)
end
it 'does not change the “suspended” flag' do
expect { subject }.to_not change(account, :suspended?)
end
include_examples 'with common context' do
let!(:account) { Fabricate(:account) }
let!(:remote_follower) { Fabricate(:account, uri: 'https://alice.com', inbox_url: 'https://alice.com/inbox', protocol: :activitypub, domain: 'alice.com') }
let!(:remote_reporter) { Fabricate(:account, uri: 'https://bob.com', inbox_url: 'https://bob.com/inbox', protocol: :activitypub, domain: 'bob.com') }
let!(:report) { Fabricate(:report, account: remote_reporter, target_account: account) }
before do
remote_follower.follow!(account)
end
it "merges back into local followers' feeds" do
subject
expect(FeedManager.instance).to have_received(:merge_into_home).with(account, local_follower)
expect(FeedManager.instance).to have_received(:merge_into_list).with(account, list)
end
it 'sends an update actor to followers and reporters' do
subject
expect(a_request(:post, remote_follower.inbox_url).with { |req| match_update_actor_request(req, account) }).to have_been_made.once
expect(a_request(:post, remote_reporter.inbox_url).with { |req| match_update_actor_request(req, account) }).to have_been_made.once
end
end
end
describe 'unsuspending a remote account' do
include_examples 'with common context' do
let!(:account) { Fabricate(:account, domain: 'bob.com', uri: 'https://bob.com', inbox_url: 'https://bob.com/inbox', protocol: :activitypub) }
let!(:resolve_account_service) { instance_double(ResolveAccountService) }
before do
allow(ResolveAccountService).to receive(:new).and_return(resolve_account_service)
end
context 'when the account is not remotely suspended' do
before do
allow(resolve_account_service).to receive(:call).with(account).and_return(account)
end
it 're-fetches the account' do
subject
expect(resolve_account_service).to have_received(:call).with(account)
end
it "merges back into local followers' feeds" do
subject
expect(FeedManager.instance).to have_received(:merge_into_home).with(account, local_follower)
expect(FeedManager.instance).to have_received(:merge_into_list).with(account, list)
end
it 'does not change the “suspended” flag' do
expect { subject }.to_not change(account, :suspended?)
end
end
context 'when the account is remotely suspended' do
before do
allow(resolve_account_service).to receive(:call).with(account) do |account|
account.suspend!(origin: :remote)
account
end
end
it 're-fetches the account' do
subject
expect(resolve_account_service).to have_received(:call).with(account)
end
it "does not merge back into local followers' feeds" do
subject
expect(FeedManager.instance).to_not have_received(:merge_into_home).with(account, local_follower)
expect(FeedManager.instance).to_not have_received(:merge_into_list).with(account, list)
end
it 'marks account as suspended' do
expect { subject }.to change(account, :suspended?).from(false).to(true)
end
end
context 'when the account is remotely deleted' do
before do
allow(resolve_account_service).to receive(:call).with(account).and_return(nil)
end
it 're-fetches the account' do
subject
expect(resolve_account_service).to have_received(:call).with(account)
end
it "does not merge back into local followers' feeds" do
subject
expect(FeedManager.instance).to_not have_received(:merge_into_home).with(account, local_follower)
expect(FeedManager.instance).to_not have_received(:merge_into_list).with(account, list)
end
end
end
end
end
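As a usage note, the call order the shared context above relies on can be summarised in a two-step sketch (`account` is assumed to be an Account that has just been unsuspended):
```ruby
account.unsuspend!                         # clear the flag first, as in the specs' before block
UnsuspendAccountService.new.call(account)  # then merge timelines, republish media, distribute the Update actor
```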
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class AfterBlockDomainFromAccountService < BaseService
include Payloadable
# This service does not create an AccountDomainBlock record,
# it's meant to be called after such a record has been created
# synchronously, to "clean up"
def call(account, domain)
@account = account
@domain = domain
clear_notifications!
remove_follows!
reject_existing_followers!
reject_pending_follow_requests!
end
private
def remove_follows!
@account.active_relationships.where(target_account: Account.where(domain: @domain)).includes(:target_account).reorder(nil).find_each do |follow|
UnfollowService.new.call(@account, follow.target_account)
end
end
def clear_notifications!
Notification.where(account: @account).where(from_account: Account.where(domain: @domain)).in_batches.delete_all
end
def reject_existing_followers!
@account.passive_relationships.where(account: Account.where(domain: @domain)).includes(:account).reorder(nil).find_each do |follow|
reject_follow!(follow)
end
end
def reject_pending_follow_requests!
FollowRequest.where(target_account: @account).where(account: Account.where(domain: @domain)).includes(:account).reorder(nil).find_each do |follow_request|
reject_follow!(follow_request)
end
end
def reject_follow!(follow)
follow.destroy
return unless follow.account.activitypub?
ActivityPub::DeliveryWorker.perform_async(Oj.dump(serialize_payload(follow, ActivityPub::RejectFollowSerializer)), @account.id, follow.account.inbox_url)
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe AfterBlockDomainFromAccountService, type: :service do
subject { described_class.new }
let!(:wolf) { Fabricate(:account, username: 'wolf', domain: 'evil.org', inbox_url: 'https://evil.org/inbox', protocol: :activitypub) }
let!(:alice) { Fabricate(:account, username: 'alice') }
before do
allow(ActivityPub::DeliveryWorker).to receive(:perform_async)
end
it 'purges followers from the blocked domain' do
wolf.follow!(alice)
subject.call(alice, 'evil.org')
expect(wolf.following?(alice)).to be false
end
it 'sends Reject->Follow to followers from the blocked domain' do
wolf.follow!(alice)
subject.call(alice, 'evil.org')
expect(ActivityPub::DeliveryWorker).to have_received(:perform_async).once
end
end
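A hedged usage sketch of the intended call sequence, following the class comment that the AccountDomainBlock record is created first and this service only cleans up afterwards:
```ruby
alice.block_domain!('evil.org')                                # create the AccountDomainBlock record
AfterBlockDomainFromAccountService.new.call(alice, 'evil.org') # then clear notifications, follows and follow requests
```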
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class BlockDomainService < BaseService
attr_reader :domain_block
def call(domain_block, update = false)
@domain_block = domain_block
process_domain_block!
process_retroactive_updates! if update
end
private
def process_retroactive_updates!
# If the domain block severity has been changed, undo the appropriate limitations
scope = Account.by_domain_and_subdomains(domain_block.domain)
scope.where(silenced_at: domain_block.created_at).in_batches.update_all(silenced_at: nil) unless domain_block.silence?
scope.where(suspended_at: domain_block.created_at).in_batches.update_all(suspended_at: nil, suspension_origin: nil) unless domain_block.suspend?
end
def process_domain_block!
if domain_block.silence?
silence_accounts!
elsif domain_block.suspend?
suspend_accounts!
end
DomainClearMediaWorker.perform_async(domain_block.id) if domain_block.reject_media?
end
def silence_accounts!
blocked_domain_accounts.without_silenced.in_batches.update_all(silenced_at: @domain_block.created_at)
end
def suspend_accounts!
blocked_domain_accounts.without_suspended.in_batches.update_all(suspended_at: @domain_block.created_at, suspension_origin: :local)
blocked_domain_accounts.where(suspended_at: @domain_block.created_at).reorder(nil).find_each do |account|
DeleteAccountService.new.call(account, reserve_username: true, suspended_at: @domain_block.created_at)
end
end
def blocked_domain
domain_block.domain
end
def blocked_domain_accounts
Account.by_domain_and_subdomains(blocked_domain)
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe BlockDomainService, type: :service do
subject { described_class.new }
let!(:bad_account) { Fabricate(:account, username: 'badguy666', domain: 'evil.org') }
let!(:bad_status_plain) { Fabricate(:status, account: bad_account, text: 'You suck') }
let!(:bad_status_with_attachment) { Fabricate(:status, account: bad_account, text: 'Hahaha') }
let!(:bad_attachment) { Fabricate(:media_attachment, account: bad_account, status: bad_status_with_attachment, file: attachment_fixture('attachment.jpg')) }
let!(:already_banned_account) { Fabricate(:account, username: 'badguy', domain: 'evil.org', suspended: true, silenced: true) }
describe 'for a suspension' do
before do
subject.call(DomainBlock.create!(domain: 'evil.org', severity: :suspend))
end
it 'creates a domain block' do
expect(DomainBlock.blocked?('evil.org')).to be true
end
it 'removes remote accounts from that domain' do
expect(Account.find_remote('badguy666', 'evil.org').suspended?).to be true
end
it 'records suspension date appropriately' do
expect(Account.find_remote('badguy666', 'evil.org').suspended_at).to eq DomainBlock.find_by(domain: 'evil.org').created_at
end
it 'keeps already-banned accounts banned' do
expect(Account.find_remote('badguy', 'evil.org').suspended?).to be true
end
it 'does not overwrite suspension date of already-banned accounts' do
expect(Account.find_remote('badguy', 'evil.org').suspended_at).to_not eq DomainBlock.find_by(domain: 'evil.org').created_at
end
it 'removes the remote account\'s statuses and media attachments' do
expect { bad_status_plain.reload }.to raise_exception ActiveRecord::RecordNotFound
expect { bad_status_with_attachment.reload }.to raise_exception ActiveRecord::RecordNotFound
expect { bad_attachment.reload }.to raise_exception ActiveRecord::RecordNotFound
end
end
describe 'for a silence with reject media' do
before do
subject.call(DomainBlock.create!(domain: 'evil.org', severity: :silence, reject_media: true))
end
it 'does not consider the domain blocked (suspended)' do
expect(DomainBlock.blocked?('evil.org')).to be false
end
it 'silences remote accounts from that domain' do
expect(Account.find_remote('badguy666', 'evil.org').silenced?).to be true
end
it 'records silencing date appropriately' do
expect(Account.find_remote('badguy666', 'evil.org').silenced_at).to eq DomainBlock.find_by(domain: 'evil.org').created_at
end
it 'keeps already-banned accounts banned' do
expect(Account.find_remote('badguy', 'evil.org').silenced?).to be true
end
it 'does not overwrite silencing date of already-banned accounts' do
expect(Account.find_remote('badguy', 'evil.org').silenced_at).to_not eq DomainBlock.find_by(domain: 'evil.org').created_at
end
it 'leaves the domain\'s statuses and attachments, but clears media' do
expect { bad_status_plain.reload }.to_not raise_error
expect { bad_status_with_attachment.reload }.to_not raise_error
expect { bad_attachment.reload }.to_not raise_error
expect(bad_attachment.file.exists?).to be false
end
end
end
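For context, a minimal sketch of how the optional `update` flag is meant to be used when an existing block's severity changes (domain name is illustrative):
```ruby
domain_block = DomainBlock.create!(domain: 'evil.org', severity: :suspend)
BlockDomainService.new.call(domain_block)       # apply the block

domain_block.update!(severity: :silence)
BlockDomainService.new.call(domain_block, true) # re-run with update to undo limitations that no longer apply
```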
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class BlockService < BaseService
include Payloadable
def call(account, target_account)
return if account.id == target_account.id
UnfollowService.new.call(account, target_account) if account.following?(target_account)
UnfollowService.new.call(target_account, account) if target_account.following?(account)
RejectFollowService.new.call(target_account, account) if target_account.requested?(account)
block = account.block!(target_account)
BlockWorker.perform_async(account.id, target_account.id)
create_notification(block) if !target_account.local? && target_account.activitypub?
block
end
private
def create_notification(block)
ActivityPub::DeliveryWorker.perform_async(build_json(block), block.account_id, block.target_account.inbox_url)
end
def build_json(block)
Oj.dump(serialize_payload(block, ActivityPub::BlockSerializer))
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe BlockService, type: :service do
subject { described_class.new }
let(:sender) { Fabricate(:account, username: 'alice') }
describe 'local' do
let(:bob) { Fabricate(:account, username: 'bob') }
before do
subject.call(sender, bob)
end
it 'creates a blocking relation' do
expect(sender.blocking?(bob)).to be true
end
end
describe 'remote ActivityPub' do
let(:bob) { Fabricate(:account, username: 'bob', protocol: :activitypub, domain: 'example.com', inbox_url: 'http://example.com/inbox') }
before do
stub_request(:post, 'http://example.com/inbox').to_return(status: 200)
subject.call(sender, bob)
end
it 'creates a blocking relation' do
expect(sender.blocking?(bob)).to be true
end
it 'sends a block activity' do
expect(a_request(:post, 'http://example.com/inbox')).to have_been_made.once
end
end
end
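A minimal usage sketch; per the class, the created Block record is returned, so callers can inspect it directly (account names reuse the ones from the examples above):
```ruby
block = BlockService.new.call(sender, bob)
block.target_account == bob # => true
sender.blocking?(bob)       # => true
```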
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class UnfollowService < BaseService
include Payloadable
include Redisable
include Lockable
# Unfollow and notify the remote user
# @param [Account] source_account Where to unfollow from
# @param [Account] target_account Which to unfollow
# @param [Hash] options
# @option [Boolean] :skip_unmerge
def call(source_account, target_account, options = {})
@source_account = source_account
@target_account = target_account
@options = options
with_redis_lock("relationship:#{[source_account.id, target_account.id].sort.join(':')}") do
unfollow! || undo_follow_request!
end
end
private
def unfollow!
follow = Follow.find_by(account: @source_account, target_account: @target_account)
return unless follow
follow.destroy!
create_notification(follow) if !@target_account.local? && @target_account.activitypub?
create_reject_notification(follow) if @target_account.local? && !@source_account.local? && @source_account.activitypub?
UnmergeWorker.perform_async(@target_account.id, @source_account.id) unless @options[:skip_unmerge]
follow
end
def undo_follow_request!
follow_request = FollowRequest.find_by(account: @source_account, target_account: @target_account)
return unless follow_request
follow_request.destroy!
create_notification(follow_request) unless @target_account.local?
follow_request
end
def create_notification(follow)
ActivityPub::DeliveryWorker.perform_async(build_json(follow), follow.account_id, follow.target_account.inbox_url)
end
def create_reject_notification(follow)
ActivityPub::DeliveryWorker.perform_async(build_reject_json(follow), follow.target_account_id, follow.account.inbox_url)
end
def build_json(follow)
Oj.dump(serialize_payload(follow, ActivityPub::UndoFollowSerializer))
end
def build_reject_json(follow)
Oj.dump(serialize_payload(follow, ActivityPub::RejectFollowSerializer))
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe UnfollowService, type: :service do
subject { described_class.new }
let(:sender) { Fabricate(:account, username: 'alice') }
describe 'local' do
let(:bob) { Fabricate(:account, username: 'bob') }
before do
sender.follow!(bob)
subject.call(sender, bob)
end
it 'destroys the following relation' do
expect(sender.following?(bob)).to be false
end
end
describe 'remote ActivityPub' do
let(:bob) { Fabricate(:account, username: 'bob', protocol: :activitypub, domain: 'example.com', inbox_url: 'http://example.com/inbox') }
before do
sender.follow!(bob)
stub_request(:post, 'http://example.com/inbox').to_return(status: 200)
subject.call(sender, bob)
end
it 'destroys the following relation' do
expect(sender.following?(bob)).to be false
end
it 'sends an unfollow activity' do
expect(a_request(:post, 'http://example.com/inbox')).to have_been_made.once
end
end
describe 'remote ActivityPub (reverse)' do
let(:bob) { Fabricate(:account, username: 'bob', protocol: :activitypub, domain: 'example.com', inbox_url: 'http://example.com/inbox') }
before do
bob.follow!(sender)
stub_request(:post, 'http://example.com/inbox').to_return(status: 200)
subject.call(bob, sender)
end
it 'destroys the following relation' do
expect(bob.following?(sender)).to be false
end
it 'sends a reject activity' do
expect(a_request(:post, 'http://example.com/inbox')).to have_been_made.once
end
end
end
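As a usage note, the documented `:skip_unmerge` option can be passed through the options hash; a sketch using the accounts from the examples above:
```ruby
# Skips enqueueing UnmergeWorker after destroying the follow.
UnfollowService.new.call(sender, bob, skip_unmerge: true)
```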
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class SuspendAccountService < BaseService
include Payloadable
# Carry out the suspension of a recently-suspended account
# @param [Account] account Account to suspend
def call(account)
return unless account.suspended?
@account = account
reject_remote_follows!
distribute_update_actor!
unmerge_from_home_timelines!
unmerge_from_list_timelines!
privatize_media_attachments!
end
private
def reject_remote_follows!
return if @account.local? || [email protected]?
# When suspending a remote account, the account obviously doesn't
# actually become suspended on its origin server, i.e. unlike a
# locally suspended account it continues to have access to its home
# feed and other content. To prevent it from being able to continue
# to access toots it would receive because it follows local accounts,
# we have to force it to unfollow them. Unfortunately, there is no
# counterpart to this operation, i.e. you can't then force a remote
# account to re-follow you, so this part is not reversible.
Follow.where(account: @account).find_in_batches do |follows|
ActivityPub::DeliveryWorker.push_bulk(follows) do |follow|
[Oj.dump(serialize_payload(follow, ActivityPub::RejectFollowSerializer)), follow.target_account_id, @account.inbox_url]
end
follows.each(&:destroy)
end
end
def distribute_update_actor!
return unless @account.local?
account_reach_finder = AccountReachFinder.new(@account)
ActivityPub::DeliveryWorker.push_bulk(account_reach_finder.inboxes, limit: 1_000) do |inbox_url|
[signed_activity_json, @account.id, inbox_url]
end
end
def unmerge_from_home_timelines!
@account.followers_for_local_distribution.reorder(nil).find_each do |follower|
FeedManager.instance.unmerge_from_home(@account, follower)
end
end
def unmerge_from_list_timelines!
@account.lists_for_local_distribution.reorder(nil).find_each do |list|
FeedManager.instance.unmerge_from_list(@account, list)
end
end
def privatize_media_attachments!
attachment_names = MediaAttachment.attachment_definitions.keys
@account.media_attachments.find_each do |media_attachment|
attachment_names.each do |attachment_name|
attachment = media_attachment.public_send(attachment_name)
styles = MediaAttachment::DEFAULT_STYLES | attachment.styles.keys
next if attachment.blank?
styles.each do |style|
case Paperclip::Attachment.default_options[:storage]
when :s3
# Prevent useless S3 calls if ACLs are disabled
next if ENV['S3_PERMISSION'] == ''
begin
attachment.s3_object(style).acl.put(acl: 'private')
rescue Aws::S3::Errors::NoSuchKey
Rails.logger.warn "Tried to change acl on non-existent key #{attachment.s3_object(style).key}"
rescue Aws::S3::Errors::NotImplemented => e
Rails.logger.error "Error trying to change ACL on #{attachment.s3_object(style).key}: #{e.message}"
end
when :fog, :azure
# Not supported
when :filesystem
begin
FileUtils.chmod(0o600 & ~File.umask, attachment.path(style)) unless attachment.path(style).nil?
rescue Errno::ENOENT
Rails.logger.warn "Tried to change permission on non-existent file #{attachment.path(style)}"
end
end
CacheBusterWorker.perform_async(attachment.path(style)) if Rails.configuration.x.cache_buster_enabled
end
end
end
end
def signed_activity_json
@signed_activity_json ||= Oj.dump(serialize_payload(@account, ActivityPub::UpdateSerializer, signer: @account))
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe SuspendAccountService, type: :service do
shared_examples 'common behavior' do
subject { described_class.new.call(account) }
let!(:local_follower) { Fabricate(:user, current_sign_in_at: 1.hour.ago).account }
let!(:list) { Fabricate(:list, account: local_follower) }
before do
allow(FeedManager.instance).to receive_messages(unmerge_from_home: nil, unmerge_from_list: nil)
local_follower.follow!(account)
list.accounts << account
account.suspend!
end
it "unmerges from local followers' feeds" do
subject
expect(FeedManager.instance).to have_received(:unmerge_from_home).with(account, local_follower)
expect(FeedManager.instance).to have_received(:unmerge_from_list).with(account, list)
end
it 'does not change the “suspended” flag' do
expect { subject }.to_not change(account, :suspended?)
end
end
describe 'suspending a local account' do
def match_update_actor_request(req, account)
json = JSON.parse(req.body)
actor_id = ActivityPub::TagManager.instance.uri_for(account)
json['type'] == 'Update' && json['actor'] == actor_id && json['object']['id'] == actor_id && json['object']['suspended']
end
before do
stub_request(:post, 'https://alice.com/inbox').to_return(status: 201)
stub_request(:post, 'https://bob.com/inbox').to_return(status: 201)
end
include_examples 'common behavior' do
let!(:account) { Fabricate(:account) }
let!(:remote_follower) { Fabricate(:account, uri: 'https://alice.com', inbox_url: 'https://alice.com/inbox', protocol: :activitypub, domain: 'alice.com') }
let!(:remote_reporter) { Fabricate(:account, uri: 'https://bob.com', inbox_url: 'https://bob.com/inbox', protocol: :activitypub, domain: 'bob.com') }
let!(:report) { Fabricate(:report, account: remote_reporter, target_account: account) }
before do
remote_follower.follow!(account)
end
it 'sends an update actor to followers and reporters' do
subject
expect(a_request(:post, remote_follower.inbox_url).with { |req| match_update_actor_request(req, account) }).to have_been_made.once
expect(a_request(:post, remote_reporter.inbox_url).with { |req| match_update_actor_request(req, account) }).to have_been_made.once
end
end
end
describe 'suspending a remote account' do
def match_reject_follow_request(req, account, followee)
json = JSON.parse(req.body)
json['type'] == 'Reject' && json['actor'] == ActivityPub::TagManager.instance.uri_for(followee) && json['object']['actor'] == account.uri
end
before do
stub_request(:post, 'https://bob.com/inbox').to_return(status: 201)
end
include_examples 'common behavior' do
let!(:account) { Fabricate(:account, domain: 'bob.com', uri: 'https://bob.com', inbox_url: 'https://bob.com/inbox', protocol: :activitypub) }
let!(:local_followee) { Fabricate(:account) }
before do
account.follow!(local_followee)
end
it 'sends a reject follow' do
subject
expect(a_request(:post, account.inbox_url).with { |req| match_reject_follow_request(req, account, local_followee) }).to have_been_made.once
end
end
end
end
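For reference, the call order the shared examples rely on, since the service is a no-op unless the account is already marked as suspended:
```ruby
account.suspend!                        # set the flag first, as in the specs' before block
SuspendAccountService.new.call(account) # then unmerge timelines, privatize media, notify remotes
```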
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class BulkImportService < BaseService
def call(import)
@import = import
@account = @import.account
case @import.type.to_sym
when :following
import_follows!
when :blocking
import_blocks!
when :muting
import_mutes!
when :domain_blocking
import_domain_blocks!
when :bookmarks
import_bookmarks!
when :lists
import_lists!
end
@import.update!(state: :finished, finished_at: Time.now.utc) if @import.processed_items == @import.total_items
rescue
@import.update!(state: :finished, finished_at: Time.now.utc)
raise
end
private
def extract_rows_by_acct
local_domain_suffix = "@#{Rails.configuration.x.local_domain}"
@import.rows.to_a.index_by { |row| row.data['acct'].delete_suffix(local_domain_suffix) }
end
def import_follows!
rows_by_acct = extract_rows_by_acct
if @import.overwrite?
@account.following.reorder(nil).find_each do |followee|
row = rows_by_acct.delete(followee.acct)
if row.nil?
UnfollowService.new.call(@account, followee)
else
row.destroy
@import.processed_items += 1
@import.imported_items += 1
# Since we're updating the settings of an existing relationship, we can safely call
# FollowService directly
FollowService.new.call(@account, followee, reblogs: row.data['show_reblogs'], notify: row.data['notify'], languages: row.data['languages'])
end
end
# Save pending infos due to `overwrite?` handling
@import.save!
end
Import::RowWorker.push_bulk(rows_by_acct.values) do |row|
[row.id]
end
end
def import_blocks!
rows_by_acct = extract_rows_by_acct
if @import.overwrite?
@account.blocking.reorder(nil).find_each do |blocked_account|
row = rows_by_acct.delete(blocked_account.acct)
if row.nil?
UnblockService.new.call(@account, blocked_account)
else
row.destroy
@import.processed_items += 1
@import.imported_items += 1
BlockService.new.call(@account, blocked_account)
end
end
# Save pending infos due to `overwrite?` handling
@import.save!
end
Import::RowWorker.push_bulk(rows_by_acct.values) do |row|
[row.id]
end
end
def import_mutes!
rows_by_acct = extract_rows_by_acct
if @import.overwrite?
@account.muting.reorder(nil).find_each do |muted_account|
row = rows_by_acct.delete(muted_account.acct)
if row.nil?
UnmuteService.new.call(@account, muted_account)
else
row.destroy
@import.processed_items += 1
@import.imported_items += 1
MuteService.new.call(@account, muted_account, notifications: row.data['hide_notifications'])
end
end
# Save pending infos due to `overwrite?` handling
@import.save!
end
Import::RowWorker.push_bulk(rows_by_acct.values) do |row|
[row.id]
end
end
def import_domain_blocks!
domains = @import.rows.map { |row| row.data['domain'] }
if @import.overwrite?
@account.domain_blocks.find_each do |domain_block|
domain = domains.delete(domain_block)
@account.unblock_domain!(domain_block.domain) if domain.nil?
end
end
@import.rows.delete_all
domains.each { |domain| @account.block_domain!(domain) }
@import.update!(processed_items: @import.total_items, imported_items: @import.total_items)
AfterAccountDomainBlockWorker.push_bulk(domains) do |domain|
[@account.id, domain]
end
end
def import_bookmarks!
rows_by_uri = @import.rows.index_by { |row| row.data['uri'] }
if @import.overwrite?
@account.bookmarks.includes(:status).find_each do |bookmark|
row = rows_by_uri.delete(ActivityPub::TagManager.instance.uri_for(bookmark.status))
if row.nil?
bookmark.destroy!
else
row.destroy
@import.processed_items += 1
@import.imported_items += 1
end
end
# Save pending infos due to `overwrite?` handling
@import.save!
end
Import::RowWorker.push_bulk(rows_by_uri.values) do |row|
[row.id]
end
end
def import_lists!
rows = @import.rows.to_a
included_lists = rows.map { |row| row.data['list_name'] }.uniq
if @import.overwrite?
@account.owned_lists.where.not(title: included_lists).destroy_all
# As list membership changes do not retroactively change timeline
# contents, simplify things by just clearing everything
@account.owned_lists.find_each do |list|
list.list_accounts.destroy_all
end
end
included_lists.each do |title|
@account.owned_lists.find_or_create_by!(title: title)
end
Import::RowWorker.push_bulk(rows) do |row|
[row.id]
end
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe BulkImportService do
subject { described_class.new }
let(:account) { Fabricate(:account) }
let(:import) { Fabricate(:bulk_import, account: account, type: import_type, overwrite: overwrite, state: :in_progress, imported_items: 0, processed_items: 0) }
before do
import.update(total_items: import.rows.count)
end
describe '#call', :sidekiq_fake do
context 'when importing follows' do
let(:import_type) { 'following' }
let(:overwrite) { false }
let!(:rows) do
[
{ 'acct' => '[email protected]' },
{ 'acct' => '[email protected]' },
].map { |data| import.rows.create!(data: data) }
end
before do
account.follow!(Fabricate(:account))
end
it 'does not immediately change who the account follows' do
expect { subject.call(import) }.to_not(change { account.reload.active_relationships.to_a })
end
it 'enqueues workers for the expected rows' do
subject.call(import)
expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows.map(&:id))
end
it 'requests to follow all the listed users once the workers have run' do
subject.call(import)
resolve_account_service_double = instance_double(ResolveAccountService)
allow(ResolveAccountService).to receive(:new).and_return(resolve_account_service_double)
allow(resolve_account_service_double).to receive(:call).with('[email protected]', any_args) { Fabricate(:account, username: 'user', domain: 'foo.bar', protocol: :activitypub) }
allow(resolve_account_service_double).to receive(:call).with('[email protected]', any_args) { Fabricate(:account, username: 'unknown', domain: 'unknown.bar', protocol: :activitypub) }
Import::RowWorker.drain
expect(FollowRequest.includes(:target_account).where(account: account).map { |follow_request| follow_request.target_account.acct }).to contain_exactly('[email protected]', '[email protected]')
end
end
context 'when importing follows with overwrite' do
let(:import_type) { 'following' }
let(:overwrite) { true }
let!(:followed) { Fabricate(:account, username: 'followed', domain: 'foo.bar', protocol: :activitypub) }
let!(:to_be_unfollowed) { Fabricate(:account, username: 'to_be_unfollowed', domain: 'foo.bar', protocol: :activitypub) }
let!(:rows) do
[
{ 'acct' => '[email protected]', 'show_reblogs' => false, 'notify' => true, 'languages' => ['en'] },
{ 'acct' => '[email protected]' },
{ 'acct' => '[email protected]' },
].map { |data| import.rows.create!(data: data) }
end
before do
account.follow!(followed, reblogs: true, notify: false)
account.follow!(to_be_unfollowed)
end
it 'unfollows users not present on the list' do
subject.call(import)
expect(account.following?(to_be_unfollowed)).to be false
end
it 'updates the existing follow relationship as expected' do
expect { subject.call(import) }.to change { Follow.where(account: account, target_account: followed).pick(:show_reblogs, :notify, :languages) }.from([true, false, nil]).to([false, true, ['en']])
end
it 'enqueues workers for the expected rows' do
subject.call(import)
expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows[1..].map(&:id))
end
it 'requests to follow all the expected users once the workers have run' do
subject.call(import)
resolve_account_service_double = instance_double(ResolveAccountService)
allow(ResolveAccountService).to receive(:new).and_return(resolve_account_service_double)
allow(resolve_account_service_double).to receive(:call).with('[email protected]', any_args) { Fabricate(:account, username: 'user', domain: 'foo.bar', protocol: :activitypub) }
allow(resolve_account_service_double).to receive(:call).with('[email protected]', any_args) { Fabricate(:account, username: 'unknown', domain: 'unknown.bar', protocol: :activitypub) }
Import::RowWorker.drain
expect(FollowRequest.includes(:target_account).where(account: account).map { |follow_request| follow_request.target_account.acct }).to contain_exactly('[email protected]', '[email protected]')
end
end
context 'when importing blocks' do
let(:import_type) { 'blocking' }
let(:overwrite) { false }
let!(:rows) do
[
{ 'acct' => '[email protected]' },
{ 'acct' => '[email protected]' },
].map { |data| import.rows.create!(data: data) }
end
before do
account.block!(Fabricate(:account, username: 'already_blocked', domain: 'remote.org'))
end
it 'does not immediately change who the account blocks' do
expect { subject.call(import) }.to_not(change { account.reload.blocking.to_a })
end
it 'enqueues workers for the expected rows' do
subject.call(import)
expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows.map(&:id))
end
it 'blocks all the listed users once the workers have run' do
subject.call(import)
resolve_account_service_double = instance_double(ResolveAccountService)
allow(ResolveAccountService).to receive(:new).and_return(resolve_account_service_double)
allow(resolve_account_service_double).to receive(:call).with('[email protected]', any_args) { Fabricate(:account, username: 'user', domain: 'foo.bar', protocol: :activitypub) }
allow(resolve_account_service_double).to receive(:call).with('[email protected]', any_args) { Fabricate(:account, username: 'unknown', domain: 'unknown.bar', protocol: :activitypub) }
Import::RowWorker.drain
expect(account.blocking.map(&:acct)).to contain_exactly('[email protected]', '[email protected]', '[email protected]')
end
end
context 'when importing blocks with overwrite' do
let(:import_type) { 'blocking' }
let(:overwrite) { true }
let!(:blocked) { Fabricate(:account, username: 'blocked', domain: 'foo.bar', protocol: :activitypub) }
let!(:to_be_unblocked) { Fabricate(:account, username: 'to_be_unblocked', domain: 'foo.bar', protocol: :activitypub) }
let!(:rows) do
[
{ 'acct' => '[email protected]' },
{ 'acct' => '[email protected]' },
{ 'acct' => '[email protected]' },
].map { |data| import.rows.create!(data: data) }
end
before do
account.block!(blocked)
account.block!(to_be_unblocked)
end
it 'unblocks users not present on the list' do
subject.call(import)
expect(account.blocking?(to_be_unblocked)).to be false
end
it 'enqueues workers for the expected rows' do
subject.call(import)
expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows[1..].map(&:id))
end
it 'blocks all the expected users once the workers have run' do
subject.call(import)
resolve_account_service_double = instance_double(ResolveAccountService)
allow(ResolveAccountService).to receive(:new).and_return(resolve_account_service_double)
allow(resolve_account_service_double).to receive(:call).with('[email protected]', any_args) { Fabricate(:account, username: 'user', domain: 'foo.bar', protocol: :activitypub) }
allow(resolve_account_service_double).to receive(:call).with('[email protected]', any_args) { Fabricate(:account, username: 'unknown', domain: 'unknown.bar', protocol: :activitypub) }
Import::RowWorker.drain
expect(account.blocking.map(&:acct)).to contain_exactly('[email protected]', '[email protected]', '[email protected]')
end
end
context 'when importing mutes' do
let(:import_type) { 'muting' }
let(:overwrite) { false }
let!(:rows) do
[
{ 'acct' => '[email protected]' },
{ 'acct' => '[email protected]' },
].map { |data| import.rows.create!(data: data) }
end
before do
account.mute!(Fabricate(:account, username: 'already_muted', domain: 'remote.org'))
end
it 'does not immediately change who the account mutes' do
expect { subject.call(import) }.to_not(change { account.reload.muting.to_a })
end
it 'enqueues workers for the expected rows' do
subject.call(import)
expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows.map(&:id))
end
it 'mutes all the listed users once the workers have run' do
subject.call(import)
resolve_account_service_double = instance_double(ResolveAccountService)
allow(ResolveAccountService).to receive(:new).and_return(resolve_account_service_double)
allow(resolve_account_service_double).to receive(:call).with('[email protected]', any_args) { Fabricate(:account, username: 'user', domain: 'foo.bar', protocol: :activitypub) }
allow(resolve_account_service_double).to receive(:call).with('[email protected]', any_args) { Fabricate(:account, username: 'unknown', domain: 'unknown.bar', protocol: :activitypub) }
Import::RowWorker.drain
expect(account.muting.map(&:acct)).to contain_exactly('[email protected]', '[email protected]', '[email protected]')
end
end
context 'when importing mutes with overwrite' do
let(:import_type) { 'muting' }
let(:overwrite) { true }
let!(:muted) { Fabricate(:account, username: 'muted', domain: 'foo.bar', protocol: :activitypub) }
let!(:to_be_unmuted) { Fabricate(:account, username: 'to_be_unmuted', domain: 'foo.bar', protocol: :activitypub) }
let!(:rows) do
[
{ 'acct' => '[email protected]', 'hide_notifications' => true },
{ 'acct' => '[email protected]' },
{ 'acct' => '[email protected]' },
].map { |data| import.rows.create!(data: data) }
end
before do
account.mute!(muted, notifications: false)
account.mute!(to_be_unmuted)
end
it 'updates the existing mute as expected' do
expect { subject.call(import) }.to change { Mute.where(account: account, target_account: muted).pick(:hide_notifications) }.from(false).to(true)
end
it 'unmutes users not present on the list' do
subject.call(import)
expect(account.muting?(to_be_unmuted)).to be false
end
it 'enqueues workers for the expected rows' do
subject.call(import)
expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows[1..].map(&:id))
end
it 'mutes all the expected users once the workers have run' do
subject.call(import)
resolve_account_service_double = instance_double(ResolveAccountService)
allow(ResolveAccountService).to receive(:new).and_return(resolve_account_service_double)
allow(resolve_account_service_double).to receive(:call).with('[email protected]', any_args) { Fabricate(:account, username: 'user', domain: 'foo.bar', protocol: :activitypub) }
allow(resolve_account_service_double).to receive(:call).with('[email protected]', any_args) { Fabricate(:account, username: 'unknown', domain: 'unknown.bar', protocol: :activitypub) }
Import::RowWorker.drain
expect(account.muting.map(&:acct)).to contain_exactly('[email protected]', '[email protected]', '[email protected]')
end
end
context 'when importing domain blocks' do
let(:import_type) { 'domain_blocking' }
let(:overwrite) { false }
let!(:rows) do
[
{ 'domain' => 'blocked.com' },
{ 'domain' => 'to_block.com' },
].map { |data| import.rows.create!(data: data) }
end
before do
account.block_domain!('alreadyblocked.com')
account.block_domain!('blocked.com')
end
it 'blocks all the new domains' do
subject.call(import)
expect(account.domain_blocks.pluck(:domain)).to contain_exactly('alreadyblocked.com', 'blocked.com', 'to_block.com')
end
it 'marks the import as finished' do
subject.call(import)
expect(import.reload.finished?).to be true
end
end
context 'when importing domain blocks with overwrite' do
let(:import_type) { 'domain_blocking' }
let(:overwrite) { true }
let!(:rows) do
[
{ 'domain' => 'blocked.com' },
{ 'domain' => 'to_block.com' },
].map { |data| import.rows.create!(data: data) }
end
before do
account.block_domain!('alreadyblocked.com')
account.block_domain!('blocked.com')
end
it 'blocks all the new domains' do
subject.call(import)
expect(account.domain_blocks.pluck(:domain)).to contain_exactly('blocked.com', 'to_block.com')
end
it 'marks the import as finished' do
subject.call(import)
expect(import.reload.finished?).to be true
end
end
context 'when importing bookmarks' do
let(:import_type) { 'bookmarks' }
let(:overwrite) { false }
let!(:already_bookmarked) { Fabricate(:status, uri: 'https://already.bookmarked/1') }
let!(:status) { Fabricate(:status, uri: 'https://foo.bar/posts/1') }
let!(:inaccessible_status) { Fabricate(:status, uri: 'https://foo.bar/posts/inaccessible', visibility: :direct) }
let!(:bookmarked) { Fabricate(:status, uri: 'https://foo.bar/posts/already-bookmarked') }
let!(:rows) do
[
{ 'uri' => status.uri },
{ 'uri' => inaccessible_status.uri },
{ 'uri' => bookmarked.uri },
{ 'uri' => 'https://domain.unknown/foo' },
{ 'uri' => 'https://domain.unknown/private' },
].map { |data| import.rows.create!(data: data) }
end
before do
account.bookmarks.create!(status: already_bookmarked)
account.bookmarks.create!(status: bookmarked)
end
it 'enqueues workers for the expected rows' do
subject.call(import)
expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows.map(&:id))
end
it 'updates the bookmarks as expected once the workers have run' do
subject.call(import)
service_double = instance_double(ActivityPub::FetchRemoteStatusService)
allow(ActivityPub::FetchRemoteStatusService).to receive(:new).and_return(service_double)
allow(service_double).to receive(:call).with('https://domain.unknown/foo') { Fabricate(:status, uri: 'https://domain.unknown/foo') }
allow(service_double).to receive(:call).with('https://domain.unknown/private') { Fabricate(:status, uri: 'https://domain.unknown/private', visibility: :direct) }
Import::RowWorker.drain
expect(account.bookmarks.map { |bookmark| bookmark.status.uri }).to contain_exactly(already_bookmarked.uri, status.uri, bookmarked.uri, 'https://domain.unknown/foo')
end
end
context 'when importing bookmarks with overwrite' do
let(:import_type) { 'bookmarks' }
let(:overwrite) { true }
let!(:already_bookmarked) { Fabricate(:status, uri: 'https://already.bookmarked/1') }
let!(:status) { Fabricate(:status, uri: 'https://foo.bar/posts/1') }
let!(:inaccessible_status) { Fabricate(:status, uri: 'https://foo.bar/posts/inaccessible', visibility: :direct) }
let!(:bookmarked) { Fabricate(:status, uri: 'https://foo.bar/posts/already-bookmarked') }
let!(:rows) do
[
{ 'uri' => status.uri },
{ 'uri' => inaccessible_status.uri },
{ 'uri' => bookmarked.uri },
{ 'uri' => 'https://domain.unknown/foo' },
{ 'uri' => 'https://domain.unknown/private' },
].map { |data| import.rows.create!(data: data) }
end
before do
account.bookmarks.create!(status: already_bookmarked)
account.bookmarks.create!(status: bookmarked)
end
it 'enqueues workers for the expected rows' do
subject.call(import)
expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows.map(&:id))
end
it 'updates the bookmarks as expected once the workers have run' do
subject.call(import)
service_double = instance_double(ActivityPub::FetchRemoteStatusService)
allow(ActivityPub::FetchRemoteStatusService).to receive(:new).and_return(service_double)
allow(service_double).to receive(:call).with('https://domain.unknown/foo') { Fabricate(:status, uri: 'https://domain.unknown/foo') }
allow(service_double).to receive(:call).with('https://domain.unknown/private') { Fabricate(:status, uri: 'https://domain.unknown/private', visibility: :direct) }
Import::RowWorker.drain
expect(account.bookmarks.map { |bookmark| bookmark.status.uri }).to contain_exactly(status.uri, bookmarked.uri, 'https://domain.unknown/foo')
end
end
end
end
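A condensed sketch of the setup shape used throughout these examples, in case it helps when adding new import types (attribute values are illustrative):
```ruby
import = Fabricate(:bulk_import, account: account, type: 'following', overwrite: false, state: :in_progress)
import.rows.create!(data: { 'acct' => '[email protected]' })
import.update(total_items: import.rows.count)

BulkImportService.new.call(import) # enqueues one Import::RowWorker job per remaining row
```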
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class AfterBlockService < BaseService
def call(account, target_account)
@account = account
@target_account = target_account
clear_home_feed!
clear_list_feeds!
clear_notifications!
clear_conversations!
end
private
def clear_home_feed!
FeedManager.instance.clear_from_home(@account, @target_account)
end
def clear_list_feeds!
FeedManager.instance.clear_from_lists(@account, @target_account)
end
def clear_conversations!
AccountConversation.where(account: @account).where('? = ANY(participant_account_ids)', @target_account.id).in_batches.destroy_all
end
def clear_notifications!
Notification.where(account: @account).where(from_account: @target_account).in_batches.delete_all
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe AfterBlockService, type: :service do
subject { described_class.new.call(account, target_account) }
let(:account) { Fabricate(:account) }
let(:target_account) { Fabricate(:account) }
let(:status) { Fabricate(:status, account: target_account) }
let(:other_status) { Fabricate(:status, account: target_account) }
let(:other_account_status) { Fabricate(:status) }
let(:other_account_reblog) { Fabricate(:status, reblog_of_id: other_status.id) }
describe 'home timeline' do
let(:home_timeline_key) { FeedManager.instance.key(:home, account.id) }
before do
redis.del(home_timeline_key)
end
it "clears account's statuses" do
FeedManager.instance.push_to_home(account, status)
FeedManager.instance.push_to_home(account, other_account_status)
FeedManager.instance.push_to_home(account, other_account_reblog)
expect { subject }.to change {
redis.zrange(home_timeline_key, 0, -1)
}.from([status.id.to_s, other_account_status.id.to_s, other_account_reblog.id.to_s]).to([other_account_status.id.to_s])
end
end
describe 'lists' do
let(:list) { Fabricate(:list, account: account) }
let(:list_timeline_key) { FeedManager.instance.key(:list, list.id) }
before do
redis.del(list_timeline_key)
end
it "clears account's statuses" do
FeedManager.instance.push_to_list(list, status)
FeedManager.instance.push_to_list(list, other_account_status)
FeedManager.instance.push_to_list(list, other_account_reblog)
expect { subject }.to change {
redis.zrange(list_timeline_key, 0, -1)
}.from([status.id.to_s, other_account_status.id.to_s, other_account_reblog.id.to_s]).to([other_account_status.id.to_s])
end
end
end
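The feed assertions above read the raw Redis sorted set behind each timeline; a short sketch of that inspection pattern (key construction follows FeedManager#key as used in the spec):
```ruby
home_key = FeedManager.instance.key(:home, account.id)
redis.zrange(home_key, 0, -1) # status IDs as strings, in ascending score order
```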
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class MuteService < BaseService
def call(account, target_account, notifications: nil, duration: 0)
return if account.id == target_account.id
mute = account.mute!(target_account, notifications: notifications, duration: duration)
if mute.hide_notifications?
BlockWorker.perform_async(account.id, target_account.id)
else
MuteWorker.perform_async(account.id, target_account.id)
end
DeleteMuteWorker.perform_at(duration.seconds, mute.id) if duration != 0
mute
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe MuteService, type: :service do
subject { described_class.new.call(account, target_account) }
let(:account) { Fabricate(:account) }
let(:target_account) { Fabricate(:account) }
describe 'home timeline' do
let(:status) { Fabricate(:status, account: target_account) }
let(:other_account_status) { Fabricate(:status) }
let(:home_timeline_key) { FeedManager.instance.key(:home, account.id) }
before do
redis.del(home_timeline_key)
end
it "clears account's statuses" do
FeedManager.instance.push_to_home(account, status)
FeedManager.instance.push_to_home(account, other_account_status)
expect { subject }.to change {
redis.zrange(home_timeline_key, 0, -1)
}.from([status.id.to_s, other_account_status.id.to_s]).to([other_account_status.id.to_s])
end
end
it 'mutes account' do
expect { subject }.to change {
account.muting?(target_account)
}.from(false).to(true)
end
context 'without specifying a notifications parameter' do
it 'mutes notifications from the account' do
expect { subject }.to change {
account.muting_notifications?(target_account)
}.from(false).to(true)
end
end
context 'with a true notifications parameter' do
subject { described_class.new.call(account, target_account, notifications: true) }
it 'mutes notifications from the account' do
expect { subject }.to change {
account.muting_notifications?(target_account)
}.from(false).to(true)
end
end
context 'with a false notifications parameter' do
subject { described_class.new.call(account, target_account, notifications: false) }
it 'does not mute notifications from the account' do
expect { subject }.to_not change {
account.muting_notifications?(target_account)
}.from(false)
end
end
end
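As a usage note, the class also accepts a `duration` in seconds; a hedged sketch, since none of the examples above exercise it:
```ruby
# A non-zero duration additionally schedules DeleteMuteWorker for the created mute.
MuteService.new.call(account, target_account, notifications: false, duration: 3600)
```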
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class RemoveFromFollowersService < BaseService
include Payloadable
def call(source_account, target_accounts)
source_account.passive_relationships.where(account_id: target_accounts).find_each do |follow|
follow.destroy
create_notification(follow) if source_account.local? && !follow.account.local? && follow.account.activitypub?
end
end
private
def create_notification(follow)
ActivityPub::DeliveryWorker.perform_async(build_json(follow), follow.target_account_id, follow.account.inbox_url)
end
def build_json(follow)
Oj.dump(serialize_payload(follow, ActivityPub::RejectFollowSerializer))
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe RemoveFromFollowersService, type: :service do
subject { described_class.new }
let(:bob) { Fabricate(:account, username: 'bob') }
describe 'local' do
let(:sender) { Fabricate(:account, username: 'alice') }
before do
Follow.create(account: sender, target_account: bob)
subject.call(bob, sender)
end
it 'removes the follow relation' do
expect(bob.followed_by?(sender)).to be false
end
end
describe 'remote ActivityPub' do
let(:sender) { Fabricate(:account, username: 'alice', domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
before do
Follow.create(account: sender, target_account: bob)
stub_request(:post, sender.inbox_url).to_return(status: 200)
subject.call(bob, sender)
end
it 'removes the follow relation' do
expect(bob.followed_by?(sender)).to be false
end
it 'sends a reject activity' do
expect(a_request(:post, sender.inbox_url)).to have_been_made.once
end
end
end
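A small usage sketch; because the second argument only feeds a `where(account_id: ...)` clause, a single account, an id, or a collection of ids all work:
```ruby
RemoveFromFollowersService.new.call(bob, [sender.id])
```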
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class FetchLinkCardService < BaseService
include Redisable
include Lockable
URL_PATTERN = %r{
(#{Twitter::TwitterText::Regex[:valid_url_preceding_chars]}) # $1 preceding chars
( # $2 URL
(https?://) # $3 Protocol (required)
(#{Twitter::TwitterText::Regex[:valid_domain]}) # $4 Domain(s)
(?::(#{Twitter::TwitterText::Regex[:valid_port_number]}))? # $5 Port number (optional)
(/#{Twitter::TwitterText::Regex[:valid_url_path]}*)? # $6 URL Path and anchor
(\?#{Twitter::TwitterText::Regex[:valid_url_query_chars]}*#{Twitter::TwitterText::Regex[:valid_url_query_ending_chars]})? # $7 Query String
)
}iox
def call(status)
@status = status
@original_url = parse_urls
return if @original_url.nil? || @status.with_preview_card?
@url = @original_url.to_s
with_redis_lock("fetch:#{@original_url}") do
@card = PreviewCard.find_by(url: @url)
process_url if @card.nil? || @card.updated_at <= 2.weeks.ago || @card.missing_image?
end
attach_card if @card&.persisted?
rescue HTTP::Error, OpenSSL::SSL::SSLError, Addressable::URI::InvalidURIError, Mastodon::HostValidationError, Mastodon::LengthValidationError => e
Rails.logger.debug { "Error fetching link #{@original_url}: #{e}" }
nil
end
private
def process_url
@card ||= PreviewCard.new(url: @url)
attempt_oembed || attempt_opengraph
end
def html
return @html if defined?(@html)
@html = Request.new(:get, @url).add_headers('Accept' => 'text/html', 'User-Agent' => "#{Mastodon::Version.user_agent} Bot").perform do |res|
next unless res.code == 200 && res.mime_type == 'text/html'
# We follow redirects, and ideally we want to save the preview card for
# the destination URL and not any link shortener in-between, so here
# we set the URL to the one of the last response in the redirect chain
@url = res.request.uri.to_s
@card = PreviewCard.find_or_initialize_by(url: @url) if @card.url != @url
@html_charset = res.charset
res.body_with_limit
end
end
def attach_card
with_redis_lock("attach_card:#{@status.id}") do
return if @status.with_preview_card?
PreviewCardsStatus.create(status: @status, preview_card: @card, url: @original_url)
Rails.cache.delete(@status)
Trends.links.register(@status)
end
end
def parse_urls
urls = if @status.local?
@status.text.scan(URL_PATTERN).map { |array| Addressable::URI.parse(array[1]).normalize }
else
document = Nokogiri::HTML(@status.text)
links = document.css('a')
links.filter_map { |a| Addressable::URI.parse(a['href']) unless skip_link?(a) }.filter_map(&:normalize)
end
urls.reject { |uri| bad_url?(uri) }.first
end
def bad_url?(uri)
# Avoid local instance URLs and invalid URLs
uri.host.blank? || TagManager.instance.local_url?(uri.to_s) || !%w(http https).include?(uri.scheme)
end
def mention_link?(anchor)
@status.mentions.any? do |mention|
anchor['href'] == ActivityPub::TagManager.instance.url_for(mention.account)
end
end
def skip_link?(anchor)
# Avoid links for hashtags and mentions (microformats)
anchor['rel']&.include?('tag') || anchor['class']&.match?(/u-url|h-card/) || mention_link?(anchor)
end
def attempt_oembed
service = FetchOEmbedService.new
url_domain = Addressable::URI.parse(@url).normalized_host
cached_endpoint = Rails.cache.read("oembed_endpoint:#{url_domain}")
embed = service.call(@url, cached_endpoint: cached_endpoint) unless cached_endpoint.nil?
embed ||= service.call(@url, html: html) unless html.nil?
return false if embed.nil?
url = Addressable::URI.parse(service.endpoint_url)
@card.type = embed[:type]
@card.title = embed[:title] || ''
@card.author_name = embed[:author_name] || ''
@card.author_url = embed[:author_url].present? ? (url + embed[:author_url]).to_s : ''
@card.provider_name = embed[:provider_name] || ''
@card.provider_url = embed[:provider_url].present? ? (url + embed[:provider_url]).to_s : ''
@card.width = 0
@card.height = 0
case @card.type
when 'link'
@card.image_remote_url = (url + embed[:thumbnail_url]).to_s if embed[:thumbnail_url].present?
when 'photo'
return false if embed[:url].blank?
@card.embed_url = (url + embed[:url]).to_s
@card.image_remote_url = (url + embed[:url]).to_s
@card.width = embed[:width].presence || 0
@card.height = embed[:height].presence || 0
when 'video'
@card.width = embed[:width].presence || 0
@card.height = embed[:height].presence || 0
@card.html = Sanitize.fragment(embed[:html], Sanitize::Config::MASTODON_OEMBED)
@card.image_remote_url = (url + embed[:thumbnail_url]).to_s if embed[:thumbnail_url].present?
when 'rich'
# Most providers rely on <script> tags, which is a no-no
return false
end
@card.save_with_optional_image!
end
def attempt_opengraph
return if html.nil?
link_details_extractor = LinkDetailsExtractor.new(@url, @html, @html_charset)
@card = PreviewCard.find_or_initialize_by(url: link_details_extractor.canonical_url) if link_details_extractor.canonical_url != @card.url
@card.assign_attributes(link_details_extractor.to_preview_card_attributes)
@card.save_with_optional_image! unless @card.title.blank? && @card.html.blank?
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe FetchLinkCardService, type: :service do
subject { described_class.new }
let(:html) { '<!doctype html><title>Hello world</title>' }
let(:oembed_cache) { nil }
before do
stub_request(:get, 'http://example.com/html').to_return(headers: { 'Content-Type' => 'text/html' }, body: html)
stub_request(:get, 'http://example.com/not-found').to_return(status: 404, headers: { 'Content-Type' => 'text/html' }, body: html)
stub_request(:get, 'http://example.com/text').to_return(status: 404, headers: { 'Content-Type' => 'text/plain' }, body: 'Hello')
stub_request(:get, 'http://example.com/redirect').to_return(status: 302, headers: { 'Location' => 'http://example.com/html' })
stub_request(:get, 'http://example.com/redirect-to-404').to_return(status: 302, headers: { 'Location' => 'http://example.com/not-found' })
stub_request(:get, 'http://example.com/oembed?url=http://example.com/html').to_return(headers: { 'Content-Type' => 'application/json' }, body: '{ "version": "1.0", "type": "link", "title": "oEmbed title" }')
stub_request(:get, 'http://example.com/oembed?format=json&url=http://example.com/html').to_return(headers: { 'Content-Type' => 'application/json' }, body: '{ "version": "1.0", "type": "link", "title": "oEmbed title" }')
stub_request(:get, 'http://example.xn--fiqs8s')
stub_request(:get, 'http://example.com/日本語')
stub_request(:get, 'http://example.com/test?data=file.gpx%5E1')
stub_request(:get, 'http://example.com/test-')
stub_request(:get, 'http://example.com/sjis').to_return(request_fixture('sjis.txt'))
stub_request(:get, 'http://example.com/sjis_with_wrong_charset').to_return(request_fixture('sjis_with_wrong_charset.txt'))
stub_request(:get, 'http://example.com/koi8-r').to_return(request_fixture('koi8-r.txt'))
stub_request(:get, 'http://example.com/windows-1251').to_return(request_fixture('windows-1251.txt'))
Rails.cache.write('oembed_endpoint:example.com', oembed_cache) if oembed_cache
subject.call(status)
end
context 'with a local status' do
context 'with URL of a regular HTML page' do
let(:status) { Fabricate(:status, text: 'http://example.com/html') }
it 'creates preview card' do
expect(status.preview_card).to_not be_nil
expect(status.preview_card.url).to eq 'http://example.com/html'
expect(status.preview_card.title).to eq 'Hello world'
end
end
context 'with URL of a page with no title' do
let(:status) { Fabricate(:status, text: 'http://example.com/html') }
let(:html) { '<!doctype html><title></title>' }
it 'does not create a preview card' do
expect(status.preview_card).to be_nil
end
end
context 'with a URL of a plain-text page' do
let(:status) { Fabricate(:status, text: 'http://example.com/text') }
it 'does not create a preview card' do
expect(status.preview_card).to be_nil
end
end
context 'with multiple URLs' do
let(:status) { Fabricate(:status, text: 'ftp://example.com http://example.com/html http://example.com/text') }
it 'fetches the first valid URL' do
expect(a_request(:get, 'http://example.com/html')).to have_been_made
end
it 'does not fetch the second valid URL' do
expect(a_request(:get, 'http://example.com/text/')).to_not have_been_made
end
end
context 'with a redirect URL' do
let(:status) { Fabricate(:status, text: 'http://example.com/redirect') }
it 'follows redirect' do
expect(a_request(:get, 'http://example.com/redirect')).to have_been_made.once
expect(a_request(:get, 'http://example.com/html')).to have_been_made.once
end
it 'creates preview card' do
expect(status.preview_card).to_not be_nil
expect(status.preview_card.url).to eq 'http://example.com/html'
expect(status.preview_card.title).to eq 'Hello world'
end
end
context 'with a broken redirect URL' do
let(:status) { Fabricate(:status, text: 'http://example.com/redirect-to-404') }
it 'follows redirect' do
expect(a_request(:get, 'http://example.com/redirect-to-404')).to have_been_made.once
expect(a_request(:get, 'http://example.com/not-found')).to have_been_made.once
end
it 'does not create a preview card' do
expect(status.preview_card).to be_nil
end
end
context 'with a 404 URL' do
let(:status) { Fabricate(:status, text: 'http://example.com/not-found') }
it 'does not create a preview card' do
expect(status.preview_card).to be_nil
end
end
context 'with an IDN URL' do
let(:status) { Fabricate(:status, text: 'Check out http://example.中国') }
it 'fetches the URL' do
expect(a_request(:get, 'http://example.xn--fiqs8s/')).to have_been_made.once
end
end
context 'with a URL of a page in Shift JIS encoding' do
let(:status) { Fabricate(:status, text: 'Check out http://example.com/sjis') }
it 'decodes the HTML' do
expect(status.preview_card.title).to eq('SJISのページ')
end
end
context 'with a URL of a page in Shift JIS encoding labeled as UTF-8' do
let(:status) { Fabricate(:status, text: 'Check out http://example.com/sjis_with_wrong_charset') }
it 'decodes the HTML despite the wrong charset header' do
expect(status.preview_card.title).to eq('SJISのページ')
end
end
context 'with a URL of a page in KOI8-R encoding' do
let(:status) { Fabricate(:status, text: 'Check out http://example.com/koi8-r') }
it 'decodes the HTML' do
expect(status.preview_card.title).to eq('Московя начинаетъ только въ XVI ст. привлекать внимане иностранцевъ.')
end
end
context 'with a URL of a page in Windows-1251 encoding' do
let(:status) { Fabricate(:status, text: 'Check out http://example.com/windows-1251') }
it 'decodes the HTML' do
expect(status.preview_card.title).to eq('сэмпл текст')
end
end
context 'with a Japanese path URL' do
let(:status) { Fabricate(:status, text: 'テストhttp://example.com/日本語') }
it 'fetches the URL' do
expect(a_request(:get, 'http://example.com/日本語')).to have_been_made.once
end
end
context 'with a hyphen-suffixed URL' do
let(:status) { Fabricate(:status, text: 'test http://example.com/test-') }
it 'fetches the URL' do
expect(a_request(:get, 'http://example.com/test-')).to have_been_made.once
end
end
context 'with a caret-suffixed URL' do
let(:status) { Fabricate(:status, text: 'test http://example.com/test?data=file.gpx^1') }
it 'fetches the URL' do
expect(a_request(:get, 'http://example.com/test?data=file.gpx%5E1')).to have_been_made.once
end
it 'does not strip the caret before fetching' do
expect(a_request(:get, 'http://example.com/test?data=file.gpx')).to_not have_been_made
end
end
context 'with a non-isolated URL' do
let(:status) { Fabricate(:status, text: 'testhttp://example.com/sjis') }
it 'does not fetch URLs not isolated from their surroundings' do
expect(a_request(:get, 'http://example.com/sjis')).to_not have_been_made
end
end
context 'with a URL of a page with oEmbed support' do
let(:html) { '<!doctype html><title>Hello world</title><link rel="alternate" type="application/json+oembed" href="http://example.com/oembed?url=http://example.com/html">' }
let(:status) { Fabricate(:status, text: 'http://example.com/html') }
it 'fetches the oEmbed URL' do
expect(a_request(:get, 'http://example.com/oembed?url=http://example.com/html')).to have_been_made.once
end
it 'creates preview card' do
expect(status.preview_card).to_not be_nil
expect(status.preview_card.url).to eq 'http://example.com/html'
expect(status.preview_card.title).to eq 'oEmbed title'
end
context 'when oEmbed endpoint cache populated' do
let(:oembed_cache) { { endpoint: 'http://example.com/oembed?format=json&url={url}', format: :json } }
it 'uses the cached oEmbed response' do
expect(a_request(:get, 'http://example.com/oembed?url=http://example.com/html')).to_not have_been_made
expect(a_request(:get, 'http://example.com/oembed?format=json&url=http://example.com/html')).to have_been_made
end
it 'creates preview card' do
expect(status.preview_card).to_not be_nil
expect(status.preview_card.url).to eq 'http://example.com/html'
expect(status.preview_card.title).to eq 'oEmbed title'
end
end
      # If the original HTML URL redirects to an error page for whatever reason
      # (e.g. DoS protection), we can still use the cached oEmbed, but we should
      # not use the redirect URL on the card.
context 'when oEmbed endpoint cache populated but page returns 404' do
let(:status) { Fabricate(:status, text: 'http://example.com/redirect-to-404') }
let(:oembed_cache) { { endpoint: 'http://example.com/oembed?url=http://example.com/html', format: :json } }
it 'uses the cached oEmbed response' do
expect(a_request(:get, 'http://example.com/oembed?url=http://example.com/html')).to have_been_made
end
it 'creates preview card' do
expect(status.preview_card).to_not be_nil
expect(status.preview_card.title).to eq 'oEmbed title'
end
it 'uses the original URL' do
expect(status.preview_card&.url).to eq 'http://example.com/redirect-to-404'
end
end
end
end
context 'with a remote status' do
let(:status) do
Fabricate(:status, account: Fabricate(:account, domain: 'example.com'), text: <<-TEXT)
Habt ihr ein paar gute Links zu <a>foo</a>
#<span class="tag"><a href="https://quitter.se/tag/wannacry" target="_blank" rel="tag noopener noreferrer" title="https://quitter.se/tag/wannacry">Wannacry</a></span> herumfliegen?
Ich will mal unter <br> <a href="http://example.com/not-found" target="_blank" rel="noopener noreferrer" title="http://example.com/not-found">http://example.com/not-found</a> was sammeln. !
<a href="http://sn.jonkman.ca/group/416/id" target="_blank" rel="noopener noreferrer" title="http://sn.jonkman.ca/group/416/id">security</a>
TEXT
end
it 'parses out URLs' do
expect(a_request(:get, 'http://example.com/not-found')).to have_been_made.once
end
it 'ignores URLs to hashtags' do
expect(a_request(:get, 'https://quitter.se/tag/wannacry')).to_not have_been_made
end
end
end
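Most of the spec above is structured around overriding an outer `let` (`html`, `status`, `oembed_cache`) in nested contexts while a shared `before` block performs the call. A stripped-down sketch of that structure, using hypothetical names and nothing beyond plain RSpec:
```ruby
RSpec.describe 'let overrides in nested contexts' do
  let(:body) { 'default body' }

  # The shared setup sees whichever `body` definition is innermost
  # for the example currently running.
  before { @fetched = body.upcase }

  it 'uses the outer definition by default' do
    expect(@fetched).to eq 'DEFAULT BODY'
  end

  context 'with a custom body' do
    let(:body) { 'custom body' }

    it 'uses the innermost definition' do
      expect(@fetched).to eq 'CUSTOM BODY'
    end
  end
end
```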
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class UpdateAccountService < BaseService
def call(account, params, raise_error: false)
was_locked = account.locked
update_method = raise_error ? :update! : :update
account.send(update_method, params).tap do |ret|
next unless ret
authorize_all_follow_requests(account) if was_locked && !account.locked
check_links(account)
process_hashtags(account)
end
rescue Mastodon::DimensionsValidationError, Mastodon::StreamValidationError => e
account.errors.add(:avatar, e.message)
false
end
private
def authorize_all_follow_requests(account)
follow_requests = FollowRequest.where(target_account: account)
follow_requests = follow_requests.preload(:account).select { |req| !req.account.silenced? }
AuthorizeFollowWorker.push_bulk(follow_requests, limit: 1_000) do |req|
[req.account_id, req.target_account_id]
end
end
def check_links(account)
return unless account.fields.any?(&:requires_verification?)
VerifyAccountLinksWorker.perform_async(account.id)
end
def process_hashtags(account)
account.tags_as_strings = Extractor.extract_hashtags(account.note)
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe UpdateAccountService, type: :service do
subject { described_class.new }
  describe 'switching from locked to unlocked accounts' do
let(:account) { Fabricate(:account, locked: true) }
let(:alice) { Fabricate(:account) }
let(:bob) { Fabricate(:account) }
let(:eve) { Fabricate(:account) }
before do
bob.touch(:silenced_at)
account.mute!(eve)
FollowService.new.call(alice, account)
FollowService.new.call(bob, account)
FollowService.new.call(eve, account)
subject.call(account, { locked: false })
end
it 'auto-accepts pending follow requests' do
expect(alice.following?(account)).to be true
expect(alice.requested?(account)).to be false
end
it 'does not auto-accept pending follow requests from silenced users' do
expect(bob.following?(account)).to be false
expect(bob.requested?(account)).to be true
end
    it 'auto-accepts pending follow requests from muted users so as not to leak the mute' do
expect(eve.following?(account)).to be true
expect(eve.requested?(account)).to be false
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ReblogService < BaseService
include Authorization
include Payloadable
# Reblog a status and notify its remote author
# @param [Account] account Account to reblog from
# @param [Status] reblogged_status Status to be reblogged
# @param [Hash] options
# @option [String] :visibility
# @option [Boolean] :with_rate_limit
# @return [Status]
def call(account, reblogged_status, options = {})
reblogged_status = reblogged_status.reblog if reblogged_status.reblog?
authorize_with account, reblogged_status, :reblog?
reblog = account.statuses.find_by(reblog: reblogged_status)
return reblog unless reblog.nil?
visibility = if reblogged_status.hidden?
reblogged_status.visibility
else
options[:visibility] || account.user&.setting_default_privacy
end
reblog = account.statuses.create!(reblog: reblogged_status, text: '', visibility: visibility, rate_limit: options[:with_rate_limit])
Trends.register!(reblog)
DistributionWorker.perform_async(reblog.id)
ActivityPub::DistributionWorker.perform_async(reblog.id)
create_notification(reblog)
bump_potential_friendship(account, reblog)
reblog
end
private
def create_notification(reblog)
reblogged_status = reblog.reblog
if reblogged_status.account.local?
LocalNotificationWorker.perform_async(reblogged_status.account_id, reblog.id, reblog.class.name, 'reblog')
elsif reblogged_status.account.activitypub? && !reblogged_status.account.following?(reblog.account)
ActivityPub::DeliveryWorker.perform_async(build_json(reblog), reblog.account_id, reblogged_status.account.inbox_url)
end
end
def bump_potential_friendship(account, reblog)
ActivityTracker.increment('activity:interactions')
return if account.following?(reblog.reblog.account_id)
PotentialFriendshipTracker.record(account.id, reblog.reblog.account_id, :reblog)
end
def build_json(reblog)
Oj.dump(serialize_payload(ActivityPub::ActivityPresenter.from_status(reblog), ActivityPub::ActivitySerializer, signer: reblog.account))
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ReblogService, type: :service do
let(:alice) { Fabricate(:account, username: 'alice') }
  context 'when creating a reblog with appropriate visibility' do
subject { described_class.new }
let(:visibility) { :public }
let(:reblog_visibility) { :public }
let(:status) { Fabricate(:status, account: alice, visibility: visibility) }
before do
subject.call(alice, status, visibility: reblog_visibility)
end
describe 'boosting privately' do
let(:reblog_visibility) { :private }
it 'reblogs privately' do
expect(status.reblogs.first.visibility).to eq 'private'
end
end
describe 'public reblogs of private toots should remain private' do
let(:visibility) { :private }
let(:reblog_visibility) { :public }
it 'reblogs privately' do
expect(status.reblogs.first.visibility).to eq 'private'
end
end
end
context 'when the reblogged status is discarded in the meantime' do
let(:status) { Fabricate(:status, account: alice, visibility: :public, text: 'discard-status-text') }
# Add a callback to discard the status being reblogged after the
# validations pass but before the database commit is executed.
before do
Status.class_eval do
before_save :discard_status
def discard_status
Status
.where(id: reblog_of_id)
.where(text: 'discard-status-text')
.update_all(deleted_at: Time.now.utc) # rubocop:disable Rails/SkipsModelValidations
end
end
end
# Remove race condition simulating `discard_status` callback.
after do
Status._save_callbacks.delete(:discard_status)
end
it 'raises an exception' do
expect { subject.call(alice, status) }.to raise_error ActiveRecord::ActiveRecordError
end
end
context 'with ActivityPub' do
subject { described_class.new }
let(:bob) { Fabricate(:account, username: 'bob', protocol: :activitypub, domain: 'example.com', inbox_url: 'http://example.com/inbox') }
let(:status) { Fabricate(:status, account: bob) }
before do
stub_request(:post, bob.inbox_url)
allow(ActivityPub::DistributionWorker).to receive(:perform_async)
subject.call(alice, status)
end
it 'creates a reblog' do
expect(status.reblogs.count).to eq 1
end
describe 'after_create_commit :store_uri' do
it 'keeps consistent reblog count' do
expect(status.reblogs.count).to eq 1
end
end
it 'distributes to followers' do
expect(ActivityPub::DistributionWorker).to have_received(:perform_async)
end
it 'sends an announce activity to the author' do
expect(a_request(:post, bob.inbox_url)).to have_been_made.once
end
end
end
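The discarded-status example above uses the block form of `expect` together with `raise_error`. A tiny self-contained illustration of that matcher (plain RSpec, not tied to ReblogService):
```ruby
RSpec.describe 'the raise_error matcher' do
  it 'passes when the block raises the expected error class' do
    expect { Integer('not a number') }.to raise_error(ArgumentError)
  end

  it 'can also match the error message' do
    expect { raise RuntimeError, 'boom' }.to raise_error(RuntimeError, 'boom')
  end
end
```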
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ProcessMentionsService < BaseService
include Payloadable
# Scan status for mentions and fetch remote mentioned users,
# and create local mention pointers
# @param [Status] status
# @param [Boolean] save_records Whether to save records in database
def call(status, save_records: true)
@status = status
@save_records = save_records
return unless @status.local?
@previous_mentions = @status.active_mentions.includes(:account).to_a
@current_mentions = []
Status.transaction do
scan_text!
assign_mentions!
end
end
private
def scan_text!
@status.text = @status.text.gsub(Account::MENTION_RE) do |match|
username, domain = Regexp.last_match(1).split('@')
domain = if TagManager.instance.local_domain?(domain)
nil
else
TagManager.instance.normalize_domain(domain)
end
mentioned_account = Account.find_remote(username, domain)
# Unapproved and unconfirmed accounts should not be mentionable
next match if mentioned_account&.local? && !(mentioned_account.user_confirmed? && mentioned_account.user_approved?)
# If the account cannot be found or isn't the right protocol,
# first try to resolve it
if mention_undeliverable?(mentioned_account)
begin
mentioned_account = ResolveAccountService.new.call(Regexp.last_match(1))
rescue Webfinger::Error, HTTP::Error, OpenSSL::SSL::SSLError, Mastodon::UnexpectedResponseError
mentioned_account = nil
end
end
# If after resolving it still isn't found or isn't the right
# protocol, then give up
next match if mention_undeliverable?(mentioned_account) || mentioned_account&.unavailable?
mention = @previous_mentions.find { |x| x.account_id == mentioned_account.id }
mention ||= @current_mentions.find { |x| x.account_id == mentioned_account.id }
mention ||= @status.mentions.new(account: mentioned_account)
@current_mentions << mention
"@#{mentioned_account.acct}"
end
@status.save! if @save_records
end
def assign_mentions!
# Make sure we never mention blocked accounts
unless @current_mentions.empty?
mentioned_domains = @current_mentions.filter_map { |m| m.account.domain }.uniq
blocked_domains = Set.new(mentioned_domains.empty? ? [] : AccountDomainBlock.where(account_id: @status.account_id, domain: mentioned_domains))
mentioned_account_ids = @current_mentions.map(&:account_id)
blocked_account_ids = Set.new(@status.account.block_relationships.where(target_account_id: mentioned_account_ids).pluck(:target_account_id))
dropped_mentions, @current_mentions = @current_mentions.partition { |mention| blocked_account_ids.include?(mention.account_id) || blocked_domains.include?(mention.account.domain) }
dropped_mentions.each(&:destroy)
end
@current_mentions.each do |mention|
mention.save if mention.new_record? && @save_records
end
# If previous mentions are no longer contained in the text, convert them
# to silent mentions, since withdrawing access from someone who already
# received a notification might be more confusing
removed_mentions = @previous_mentions - @current_mentions
Mention.where(id: removed_mentions.map(&:id)).update_all(silent: true) unless removed_mentions.empty?
end
def mention_undeliverable?(mentioned_account)
mentioned_account.nil? || (!mentioned_account.local? && !mentioned_account.activitypub?)
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ProcessMentionsService, type: :service do
subject { described_class.new }
let(:account) { Fabricate(:account, username: 'alice') }
context 'when mentions contain blocked accounts' do
let(:non_blocked_account) { Fabricate(:account) }
let(:individually_blocked_account) { Fabricate(:account) }
let(:domain_blocked_account) { Fabricate(:account, domain: 'evil.com') }
let(:status) { Fabricate(:status, account: account, text: "Hello @#{non_blocked_account.acct} @#{individually_blocked_account.acct} @#{domain_blocked_account.acct}", visibility: :public) }
before do
account.block!(individually_blocked_account)
account.domain_blocks.create!(domain: domain_blocked_account.domain)
subject.call(status)
end
it 'creates a mention to the non-blocked account' do
expect(non_blocked_account.mentions.where(status: status).count).to eq 1
end
it 'does not create a mention to the individually blocked account' do
expect(individually_blocked_account.mentions.where(status: status).count).to eq 0
end
it 'does not create a mention to the domain-blocked account' do
expect(domain_blocked_account.mentions.where(status: status).count).to eq 0
end
end
context 'with resolving a mention to a remote account' do
let(:status) { Fabricate(:status, account: account, text: "Hello @#{remote_user.acct}", visibility: :public) }
context 'with ActivityPub' do
context 'with a valid remote user' do
let!(:remote_user) { Fabricate(:account, username: 'remote_user', protocol: :activitypub, domain: 'example.com', inbox_url: 'http://example.com/inbox') }
before do
subject.call(status)
end
it 'creates a mention' do
expect(remote_user.mentions.where(status: status).count).to eq 1
end
end
context 'when mentioning a user several times when not saving records' do
let!(:remote_user) { Fabricate(:account, username: 'remote_user', protocol: :activitypub, domain: 'example.com', inbox_url: 'http://example.com/inbox') }
let(:status) { Fabricate(:status, account: account, text: "Hello @#{remote_user.acct} @#{remote_user.acct} @#{remote_user.acct}", visibility: :public) }
before do
subject.call(status, save_records: false)
end
it 'creates exactly one mention' do
expect(status.mentions.size).to eq 1
end
end
context 'with an IDN domain' do
let!(:remote_user) { Fabricate(:account, username: 'sneak', protocol: :activitypub, domain: 'xn--hresiar-mxa.ch', inbox_url: 'http://example.com/inbox') }
let!(:status) { Fabricate(:status, account: account, text: 'Hello @sneak@hæresiar.ch') }
before do
subject.call(status)
end
it 'creates a mention' do
expect(remote_user.mentions.where(status: status).count).to eq 1
end
end
context 'with an IDN TLD' do
let!(:remote_user) { Fabricate(:account, username: 'foo', protocol: :activitypub, domain: 'xn--y9a3aq.xn--y9a3aq', inbox_url: 'http://example.com/inbox') }
let!(:status) { Fabricate(:status, account: account, text: 'Hello @foo@հայ.հայ') }
before do
subject.call(status)
end
it 'creates a mention' do
expect(remote_user.mentions.where(status: status).count).to eq 1
end
end
end
    context 'with a temporarily unreachable ActivityPub user' do
let!(:remote_user) { Fabricate(:account, username: 'remote_user', protocol: :activitypub, domain: 'example.com', inbox_url: 'http://example.com/inbox', last_webfingered_at: nil) }
before do
stub_request(:get, 'https://example.com/.well-known/host-meta').to_return(status: 404)
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:[email protected]').to_return(status: 500)
subject.call(status)
end
it 'creates a mention' do
expect(remote_user.mentions.where(status: status).count).to eq 1
end
end
end
end
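The spec above declares its remote accounts with `let!` so that they already exist when `subject.call` runs inside the `before` hook. A small sketch of the difference between `let` and `let!`, assuming plain RSpec:
```ruby
RSpec.describe 'let versus let!' do
  before { @evaluations = [] }

  let(:lazy)   { @evaluations << :lazy;  :lazy_value }  # memoized on first reference
  let!(:eager) { @evaluations << :eager; :eager_value } # forced by an implicit before hook

  it 'evaluates let! before the example body and let only on first use' do
    expect(@evaluations).to eq [:eager]

    lazy
    expect(@evaluations).to eq [:eager, :lazy]
  end
end
```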
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class BatchedRemoveStatusService < BaseService
include Redisable
# Delete multiple statuses and reblogs of them as efficiently as possible
# @param [Enumerable<Status>] statuses An array of statuses
# @param [Hash] options
# @option [Boolean] :skip_side_effects Do not modify feeds and send updates to streaming API
def call(statuses, **options)
ActiveRecord::Associations::Preloader.new(
records: statuses,
associations: options[:skip_side_effects] ? :reblogs : [:account, :tags, reblogs: :account]
)
statuses_and_reblogs = statuses.flat_map { |status| [status] + status.reblogs }
# The conversations for direct visibility statuses also need
# to be manually updated. This part is not efficient but we
# rely on direct visibility statuses being relatively rare.
statuses_with_account_conversations = statuses.select(&:direct_visibility?)
ActiveRecord::Associations::Preloader.new(
records: statuses_with_account_conversations,
associations: [mentions: :account]
)
statuses_with_account_conversations.each(&:unlink_from_conversations!)
# We do not batch all deletes into one to avoid having a long-running
# transaction lock the database, but we use the delete method instead
# of destroy to avoid all callbacks. We rely on foreign keys to
# cascade the delete faster without loading the associations.
statuses_and_reblogs.each_slice(50) { |slice| Status.where(id: slice.map(&:id)).delete_all }
# Since we skipped all callbacks, we also need to manually
# deindex the statuses
if Chewy.enabled?
Chewy.strategy.current.update(StatusesIndex, statuses_and_reblogs)
Chewy.strategy.current.update(PublicStatusesIndex, statuses_and_reblogs)
end
return if options[:skip_side_effects]
# Batch by source account
statuses_and_reblogs.group_by(&:account_id).each_value do |account_statuses|
account = account_statuses.first.account
next unless account
unpush_from_home_timelines(account, account_statuses)
unpush_from_list_timelines(account, account_statuses)
end
# Cannot be batched
@status_id_cutoff = Mastodon::Snowflake.id_at(2.weeks.ago)
redis.pipelined do |pipeline|
statuses.each do |status|
unpush_from_public_timelines(status, pipeline)
end
end
end
private
def unpush_from_home_timelines(account, statuses)
account.followers_for_local_distribution.includes(:user).reorder(nil).find_each do |follower|
statuses.each do |status|
FeedManager.instance.unpush_from_home(follower, status)
end
end
end
def unpush_from_list_timelines(account, statuses)
account.lists_for_local_distribution.select(:id, :account_id).includes(account: :user).reorder(nil).find_each do |list|
statuses.each do |status|
FeedManager.instance.unpush_from_list(list, status)
end
end
end
def unpush_from_public_timelines(status, pipeline)
return unless status.public_visibility? && status.id > @status_id_cutoff
payload = Oj.dump(event: :delete, payload: status.id.to_s)
pipeline.publish('timeline:public', payload)
pipeline.publish(status.local? ? 'timeline:public:local' : 'timeline:public:remote', payload)
if status.media_attachments.any?
pipeline.publish('timeline:public:media', payload)
pipeline.publish(status.local? ? 'timeline:public:local:media' : 'timeline:public:remote:media', payload)
end
status.tags.map { |tag| tag.name.mb_chars.downcase }.each do |hashtag|
pipeline.publish("timeline:hashtag:#{hashtag}", payload)
pipeline.publish("timeline:hashtag:#{hashtag}:local", payload) if status.local?
end
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe BatchedRemoveStatusService, type: :service do
subject { described_class.new }
let!(:alice) { Fabricate(:account) }
let!(:bob) { Fabricate(:account, username: 'bob', domain: 'example.com') }
let!(:jeff) { Fabricate(:account) }
let!(:hank) { Fabricate(:account, username: 'hank', protocol: :activitypub, domain: 'example.com', inbox_url: 'http://example.com/inbox') }
let(:status_alice_hello) { PostStatusService.new.call(alice, text: 'Hello @[email protected]') }
let(:status_alice_other) { PostStatusService.new.call(alice, text: 'Another status') }
before do
allow(redis).to receive_messages(publish: nil)
stub_request(:post, 'http://example.com/inbox').to_return(status: 200)
jeff.user.update(current_sign_in_at: Time.zone.now)
jeff.follow!(alice)
hank.follow!(alice)
status_alice_hello
status_alice_other
subject.call([status_alice_hello, status_alice_other])
end
it 'removes statuses' do
expect { Status.find(status_alice_hello.id) }.to raise_error ActiveRecord::RecordNotFound
expect { Status.find(status_alice_other.id) }.to raise_error ActiveRecord::RecordNotFound
end
it 'removes statuses from author\'s home feed' do
expect(HomeFeed.new(alice).get(10).pluck(:id)).to_not include(status_alice_hello.id, status_alice_other.id)
end
it 'removes statuses from local follower\'s home feed' do
expect(HomeFeed.new(jeff).get(10).pluck(:id)).to_not include(status_alice_hello.id, status_alice_other.id)
end
it 'notifies streaming API of followers' do
expect(redis).to have_received(:publish).with("timeline:#{jeff.id}", any_args).at_least(:once)
end
it 'notifies streaming API of public timeline' do
expect(redis).to have_received(:publish).with('timeline:public', any_args).at_least(:once)
end
it 'sends delete activity to followers' do
expect(a_request(:post, 'http://example.com/inbox')).to have_been_made.at_least_once
end
end
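The streaming-API assertions above follow the spy pattern: stub `publish` with `allow` before the call, run the service, then verify with `have_received`. A minimal sketch of that pattern on a plain test double, assuming only rspec-mocks:
```ruby
RSpec.describe 'spying with have_received' do
  it 'records calls made to a stubbed method' do
    fake_redis = double('redis')
    allow(fake_redis).to receive(:publish)

    # The code under test would normally do this internally.
    fake_redis.publish('timeline:public', '{"event":"delete","payload":"1"}')

    expect(fake_redis).to have_received(:publish).with('timeline:public', any_args)
  end
end
```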
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class FetchRemoteStatusService < BaseService
def call(url, prefetched_body: nil, request_id: nil)
if prefetched_body.nil?
resource_url, resource_options = FetchResourceService.new.call(url)
else
resource_url = url
resource_options = { prefetched_body: prefetched_body }
end
ActivityPub::FetchRemoteStatusService.new.call(resource_url, **resource_options.merge(request_id: request_id)) unless resource_url.nil?
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe FetchRemoteStatusService, type: :service do
let(:account) { Fabricate(:account, domain: 'example.org', uri: 'https://example.org/foo') }
let(:prefetched_body) { nil }
let(:note) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://example.org/@foo/1234',
type: 'Note',
content: 'Lorem ipsum',
attributedTo: ActivityPub::TagManager.instance.uri_for(account),
}
end
context 'when protocol is :activitypub' do
subject { described_class.new.call(note[:id], prefetched_body: prefetched_body) }
let(:prefetched_body) { Oj.dump(note) }
before do
subject
end
it 'creates status' do
status = account.statuses.first
expect(status).to_not be_nil
expect(status.text).to eq 'Lorem ipsum'
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class SearchService < BaseService
QUOTE_EQUIVALENT_CHARACTERS = /[“”„«»「」『』《》]/
def call(query, account, limit, options = {})
@query = query&.strip&.gsub(QUOTE_EQUIVALENT_CHARACTERS, '"')
@account = account
@options = options
@limit = limit.to_i
@offset = options[:type].blank? ? 0 : options[:offset].to_i
@resolve = options[:resolve] || false
@following = options[:following] || false
default_results.tap do |results|
next if @query.blank? || @limit.zero?
if url_query?
results.merge!(url_resource_results) unless url_resource.nil? || @offset.positive? || (@options[:type].present? && url_resource_symbol != @options[:type].to_sym)
elsif @query.present?
results[:accounts] = perform_accounts_search! if account_searchable?
results[:statuses] = perform_statuses_search! if status_searchable?
results[:hashtags] = perform_hashtags_search! if hashtag_searchable?
end
end
end
private
def perform_accounts_search!
AccountSearchService.new.call(
@query,
@account,
limit: @limit,
resolve: @resolve,
offset: @offset,
use_searchable_text: true,
following: @following,
start_with_hashtag: @query.start_with?('#')
)
end
def perform_statuses_search!
StatusesSearchService.new.call(
@query,
@account,
limit: @limit,
offset: @offset,
account_id: @options[:account_id],
min_id: @options[:min_id],
max_id: @options[:max_id]
)
end
def perform_hashtags_search!
TagSearchService.new.call(
@query,
limit: @limit,
offset: @offset,
exclude_unreviewed: @options[:exclude_unreviewed]
)
end
def default_results
{ accounts: [], hashtags: [], statuses: [] }
end
def url_query?
@resolve && %r{\Ahttps?://}.match?(@query)
end
def url_resource_results
{ url_resource_symbol => [url_resource] }
end
def url_resource
@url_resource ||= ResolveURLService.new.call(@query, on_behalf_of: @account)
end
def url_resource_symbol
url_resource.class.name.downcase.pluralize.to_sym
end
def status_searchable?
Chewy.enabled? && status_search? && @account.present?
end
def account_searchable?
account_search?
end
def hashtag_searchable?
hashtag_search?
end
def account_search?
@options[:type].blank? || @options[:type] == 'accounts'
end
def hashtag_search?
@options[:type].blank? || @options[:type] == 'hashtags'
end
def status_search?
@options[:type].blank? || @options[:type] == 'statuses'
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
describe SearchService, type: :service do
subject { described_class.new }
describe '#call' do
describe 'with a blank query' do
it 'returns empty results without searching' do
allow(AccountSearchService).to receive(:new)
allow(Tag).to receive(:search_for)
results = subject.call('', nil, 10)
expect(results).to eq(empty_results)
expect(AccountSearchService).to_not have_received(:new)
expect(Tag).to_not have_received(:search_for)
end
end
describe 'with an url query' do
let(:query) { 'http://test.host/query' }
context 'when it does not find anything' do
it 'returns the empty results' do
service = instance_double(ResolveURLService, call: nil)
allow(ResolveURLService).to receive(:new).and_return(service)
results = subject.call(query, nil, 10, resolve: true)
expect(service).to have_received(:call).with(query, on_behalf_of: nil)
expect(results).to eq empty_results
end
end
context 'when it finds an account' do
it 'includes the account in the results' do
account = Account.new
service = instance_double(ResolveURLService, call: account)
allow(ResolveURLService).to receive(:new).and_return(service)
results = subject.call(query, nil, 10, resolve: true)
expect(service).to have_received(:call).with(query, on_behalf_of: nil)
expect(results).to eq empty_results.merge(accounts: [account])
end
end
context 'when it finds a status' do
it 'includes the status in the results' do
status = Status.new
service = instance_double(ResolveURLService, call: status)
allow(ResolveURLService).to receive(:new).and_return(service)
results = subject.call(query, nil, 10, resolve: true)
expect(service).to have_received(:call).with(query, on_behalf_of: nil)
expect(results).to eq empty_results.merge(statuses: [status])
end
end
end
describe 'with a non-url query' do
context 'when it matches an account' do
it 'includes the account in the results' do
query = 'username'
account = Account.new
service = instance_double(AccountSearchService, call: [account])
allow(AccountSearchService).to receive(:new).and_return(service)
results = subject.call(query, nil, 10)
expect(service).to have_received(:call).with(query, nil, limit: 10, offset: 0, resolve: false, start_with_hashtag: false, use_searchable_text: true, following: false)
expect(results).to eq empty_results.merge(accounts: [account])
end
end
context 'when it matches a tag' do
it 'includes the tag in the results' do
query = '#tag'
tag = Tag.new
allow(Tag).to receive(:search_for).with('tag', 10, 0, { exclude_unreviewed: nil }).and_return([tag])
results = subject.call(query, nil, 10)
expect(Tag).to have_received(:search_for).with('tag', 10, 0, exclude_unreviewed: nil)
expect(results).to eq empty_results.merge(hashtags: [tag])
end
end
end
end
def empty_results
{ accounts: [], hashtags: [], statuses: [] }
end
end
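The stubbed collaborators above (`instance_double(ResolveURLService, call: nil)`) are verifying doubles: they only accept messages the real class actually defines. A short sketch of that idea with a hypothetical `UrlResolver` class, assuming plain rspec-mocks:
```ruby
# Hypothetical collaborator used only for this sketch.
class UrlResolver
  def call(query, on_behalf_of: nil); end
end

RSpec.describe 'verifying doubles' do
  it 'only allows methods (and signatures) the real class defines' do
    resolver = instance_double(UrlResolver, call: :resolved)

    expect(resolver.call('https://example.com', on_behalf_of: nil)).to eq :resolved
    # resolver.lookup('x') would raise, because UrlResolver has no #lookup.
  end
end
```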
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class VerifyLinkService < BaseService
def call(field)
@link_back = ActivityPub::TagManager.instance.url_for(field.account)
@url = field.value_for_verification
perform_request!
return unless link_back_present?
field.mark_verified!
rescue OpenSSL::SSL::SSLError, HTTP::Error, Addressable::URI::InvalidURIError, Mastodon::HostValidationError, Mastodon::LengthValidationError, IPAddr::AddressFamilyError => e
Rails.logger.debug { "Error fetching link #{@url}: #{e}" }
nil
end
private
def perform_request!
@body = Request.new(:get, @url).add_headers('Accept' => 'text/html').perform do |res|
res.code == 200 ? res.body_with_limit : nil
end
end
def link_back_present?
return false if @body.blank?
links = Nokogiri::HTML5(@body).xpath('//a[contains(concat(" ", normalize-space(@rel), " "), " me ")]|//link[contains(concat(" ", normalize-space(@rel), " "), " me ")]')
if links.any? { |link| link['href']&.downcase == @link_back.downcase }
true
elsif links.empty?
false
else
link_redirects_back?(links.first['href'])
end
end
def link_redirects_back?(test_url)
return false if test_url.blank?
redirect_to_url = Request.new(:head, test_url, follow: false).perform do |res|
res.headers['Location']
end
redirect_to_url == @link_back
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe VerifyLinkService, type: :service do
subject { described_class.new }
context 'when given a local account' do
let(:account) { Fabricate(:account, username: 'alice') }
let(:field) { Account::Field.new(account, 'name' => 'Website', 'value' => 'http://example.com') }
before do
stub_request(:head, 'https://redirect.me/abc').to_return(status: 301, headers: { 'Location' => ActivityPub::TagManager.instance.url_for(account) })
stub_request(:get, 'http://example.com').to_return(status: 200, body: html)
subject.call(field)
end
context 'when a link contains an <a> back' do
let(:html) do
<<-HTML
<!doctype html>
<body>
<a href="#{ActivityPub::TagManager.instance.url_for(account)}" rel="me">Follow me on Mastodon</a>
</body>
HTML
end
it 'marks the field as verified' do
expect(field.verified?).to be true
end
end
context 'when a link contains an <a rel="noopener noreferrer"> back' do
let(:html) do
<<-HTML
<!doctype html>
<body>
<a href="#{ActivityPub::TagManager.instance.url_for(account)}" rel="me noopener noreferrer" target="_blank">Follow me on Mastodon</a>
</body>
HTML
end
it 'marks the field as verified' do
expect(field.verified?).to be true
end
end
context 'when a link contains a <link> back' do
let(:html) do
<<-HTML
<!doctype html>
<head>
<link type="text/html" href="#{ActivityPub::TagManager.instance.url_for(account)}" rel="me" />
</head>
HTML
end
it 'marks the field as verified' do
expect(field.verified?).to be true
end
end
context 'when a link goes through a redirect back' do
let(:html) do
<<-HTML
<!doctype html>
<head>
<link type="text/html" href="https://redirect.me/abc" rel="me" />
</head>
HTML
end
it 'marks the field as verified' do
expect(field.verified?).to be true
end
end
context 'when a document is truncated but the link back is valid' do
let(:html) do
"
<!doctype html>
<body>
<a rel=\"me\" href=\"#{ActivityPub::TagManager.instance.url_for(account)}\"
"
end
it 'marks the field as not verified' do
expect(field.verified?).to be false
end
end
context 'when a link back might be truncated' do
let(:html) do
"
<!doctype html>
<body>
<a rel=\"me\" href=\"#{ActivityPub::TagManager.instance.url_for(account)}"
end
it 'does not mark the field as verified' do
expect(field.verified?).to be false
end
end
context 'when a link does not contain a link back' do
let(:html) { '' }
it 'does not mark the field as verified' do
expect(field.verified?).to be false
end
end
context 'when link has no `href` attribute' do
let(:html) do
<<-HTML
<!doctype html>
<head>
<link type="text/html" rel="me" />
</head>
<body>
<a rel="me" target="_blank">Follow me on Mastodon</a>
</body>
HTML
end
it 'does not mark the field as verified' do
expect(field.verified?).to be false
end
end
end
context 'when given a remote account' do
let(:account) { Fabricate(:account, username: 'alice', domain: 'example.com', url: 'https://profile.example.com/alice') }
let(:field) { Account::Field.new(account, 'name' => 'Website', 'value' => '<a href="http://example.com" rel="me"><span class="invisible">http://</span><span class="">example.com</span><span class="invisible"></span></a>') }
before do
stub_request(:get, 'http://example.com').to_return(status: 200, body: html)
subject.call(field)
end
context 'when a link contains an <a> back' do
let(:html) do
<<-HTML
<!doctype html>
<body>
<a href="https://profile.example.com/alice" rel="me">Follow me on Mastodon</a>
</body>
HTML
end
it 'marks the field as verified' do
expect(field.verified?).to be true
end
end
context 'when the link contains a link with a missing protocol slash' do
      # This was seen in the wild where a user had three pages:
      # 1. their Mastodon profile, which linked to GitHub and the personal website
      # 2. their personal website correctly linking back to Mastodon
      # 3. a GitHub profile that was linking to the personal website, but with
      #    a malformed protocol of http:/
      #
      # This caused link verification between the Mastodon profile and the
      # website to fail.
      #
      # Apparently GitHub allows the user to enter website URLs with a single
      # slash and makes no attempt to correct that.
let(:html) { '<a href="http:/unrelated.example">Hello</a>' }
it 'does not crash' do
        # We could probably put more effort into auto-correcting the link and
        # following it anyway, but at the very least we shouldn't let
        # exceptions bubble up
expect(field.verified?).to be false
end
end
end
end
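The service under test finds rel="me" links with a Nokogiri XPath query over the fetched HTML. A quick standalone sketch of the same idea using a CSS selector, assuming a Nokogiri version that ships the HTML5 parser (`Nokogiri::HTML5`):
```ruby
require 'nokogiri'

html = <<~HTML
  <!doctype html>
  <body>
    <a href="https://mastodon.example/@alice" rel="me noopener">Mastodon</a>
    <a href="https://other.example/">Unrelated</a>
  </body>
HTML

# `~=` matches a whitespace-separated token, so rel="me noopener" still qualifies.
links = Nokogiri::HTML5(html).css('a[rel~="me"]')
puts links.map { |a| a['href'] } # => https://mastodon.example/@alice
```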
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class FetchResourceService < BaseService
include JsonLdHelper
ACCEPT_HEADER = 'application/activity+json, application/ld+json; profile="https://www.w3.org/ns/activitystreams", text/html;q=0.1'
ACTIVITY_STREAM_LINK_TYPES = ['application/activity+json', 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"'].freeze
attr_reader :response_code
def call(url)
return if url.blank?
process(url)
rescue HTTP::Error, OpenSSL::SSL::SSLError, Addressable::URI::InvalidURIError, Mastodon::HostValidationError, Mastodon::LengthValidationError => e
Rails.logger.debug { "Error fetching resource #{@url}: #{e}" }
nil
end
private
def process(url, terminal: false)
@url = url
perform_request { |response| process_response(response, terminal) }
end
def perform_request(&block)
Request.new(:get, @url).tap do |request|
request.add_headers('Accept' => ACCEPT_HEADER)
# In a real setting we want to sign all outgoing requests,
# in case the remote server has secure mode enabled and requires
# authentication on all resources. However, during development,
# sending request signatures with an inaccessible host is useless
# and prevents even public resources from being fetched, so
# don't do it
request.on_behalf_of(Account.representative) unless Rails.env.development?
end.perform(&block)
end
def process_response(response, terminal = false)
@response_code = response.code
return nil if response.code != 200
if ['application/activity+json', 'application/ld+json'].include?(response.mime_type)
body = response.body_with_limit
json = body_to_json(body)
[json['id'], { prefetched_body: body, id: true }] if supported_context?(json) && (equals_or_includes_any?(json['type'], ActivityPub::FetchRemoteActorService::SUPPORTED_TYPES) || expected_type?(json))
elsif !terminal
link_header = response['Link'] && parse_link_header(response)
if link_header&.find_link(%w(rel alternate))
process_link_headers(link_header)
elsif response.mime_type == 'text/html'
process_html(response)
end
end
end
def expected_type?(json)
equals_or_includes_any?(json['type'], ActivityPub::Activity::Create::SUPPORTED_TYPES + ActivityPub::Activity::Create::CONVERTED_TYPES)
end
def process_html(response)
page = Nokogiri::HTML(response.body_with_limit)
json_link = page.xpath('//link[@rel="alternate"]').find { |link| ACTIVITY_STREAM_LINK_TYPES.include?(link['type']) }
process(json_link['href'], terminal: true) unless json_link.nil?
end
def process_link_headers(link_header)
json_link = link_header.find_link(%w(rel alternate), %w(type application/activity+json)) || link_header.find_link(%w(rel alternate), ['type', 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"'])
process(json_link.href, terminal: true) unless json_link.nil?
end
def parse_link_header(response)
LinkHeader.parse(response['Link'].is_a?(Array) ? response['Link'].first : response['Link'])
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe FetchResourceService, type: :service do
describe '#call' do
subject { described_class.new.call(url) }
let(:url) { 'http://example.com' }
context 'with blank url' do
let(:url) { '' }
it { is_expected.to be_nil }
end
context 'when request fails' do
before do
stub_request(:get, url).to_return(status: 500, body: '', headers: {})
end
it { is_expected.to be_nil }
end
context 'when OpenSSL::SSL::SSLError is raised' do
before do
request = instance_double(Request)
allow(Request).to receive(:new).and_return(request)
allow(request).to receive(:add_headers)
allow(request).to receive(:on_behalf_of)
allow(request).to receive(:perform).and_raise(OpenSSL::SSL::SSLError)
end
it { is_expected.to be_nil }
end
context 'when HTTP::ConnectionError is raised' do
before do
request = instance_double(Request)
allow(Request).to receive(:new).and_return(request)
allow(request).to receive(:add_headers)
allow(request).to receive(:on_behalf_of)
allow(request).to receive(:perform).and_raise(HTTP::ConnectionError)
end
it { is_expected.to be_nil }
end
context 'when request succeeds' do
let(:body) { '' }
let(:content_type) { 'application/json' }
let(:headers) do
{ 'Content-Type' => content_type }
end
let(:json) do
{
id: 1,
'@context': ActivityPub::TagManager::CONTEXT,
type: 'Note',
}.to_json
end
before do
stub_request(:get, url).to_return(status: 200, body: body, headers: headers)
stub_request(:get, 'http://example.com/foo').to_return(status: 200, body: json, headers: { 'Content-Type' => 'application/activity+json' })
end
it 'signs request' do
subject
expect(a_request(:get, url).with(headers: { 'Signature' => /keyId="#{Regexp.escape(ActivityPub::TagManager.instance.key_uri_for(Account.representative))}"/ })).to have_been_made
end
context 'when content type is application/atom+xml' do
let(:content_type) { 'application/atom+xml' }
it { is_expected.to be_nil }
end
context 'when content type is activity+json' do
let(:content_type) { 'application/activity+json; charset=utf-8' }
let(:body) { json }
it { is_expected.to eq [1, { prefetched_body: body, id: true }] }
end
context 'when content type is ld+json with profile' do
let(:content_type) { 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"' }
let(:body) { json }
it { is_expected.to eq [1, { prefetched_body: body, id: true }] }
end
context 'when link header is present' do
let(:headers) { { 'Link' => '<http://example.com/foo>; rel="alternate"; type="application/activity+json"' } }
it { is_expected.to eq [1, { prefetched_body: json, id: true }] }
end
context 'when content type is text/html' do
let(:content_type) { 'text/html' }
let(:body) { '<html><head><link rel="alternate" href="http://example.com/foo" type="application/activity+json"/></head></html>' }
it { is_expected.to eq [1, { prefetched_body: json, id: true }] }
end
end
end
end
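The examples above make heavy use of the `subject` + `is_expected` one-liner style (`it { is_expected.to be_nil }`). A tiny sketch of that idiom with plain Ruby values, assuming nothing beyond RSpec:
```ruby
RSpec.describe 'one-liner syntax' do
  subject { [1, 2, 3] }

  # `is_expected.to ...` is shorthand for `expect(subject).to ...`,
  # and the example description is generated from the matcher.
  it { is_expected.to include(2) }
  it { is_expected.to_not be_empty }
end
```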
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class AppSignUpService < BaseService
include RegistrationHelper
def call(app, remote_ip, params)
@app = app
@remote_ip = remote_ip
@params = params
raise Mastodon::NotPermittedError unless allowed_registration?(remote_ip, invite)
ApplicationRecord.transaction do
create_user!
create_access_token!
end
@access_token
end
private
def create_user!
@user = User.create!(
user_params.merge(created_by_application: @app, sign_up_ip: @remote_ip, password_confirmation: user_params[:password], account_attributes: account_params, invite_request_attributes: invite_request_params)
)
end
def create_access_token!
@access_token = Doorkeeper::AccessToken.create!(
application: @app,
resource_owner_id: @user.id,
scopes: @app.scopes,
expires_in: Doorkeeper.configuration.access_token_expires_in,
use_refresh_token: Doorkeeper.configuration.refresh_token_enabled?
)
end
def invite
Invite.find_by(code: @params[:invite_code]) if @params[:invite_code].present?
end
def user_params
@params.slice(:email, :password, :agreement, :locale, :time_zone, :invite_code)
end
def account_params
@params.slice(:username)
end
def invite_request_params
{ text: @params[:reason] }
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe AppSignUpService, type: :service do
subject { described_class.new }
let(:app) { Fabricate(:application, scopes: 'read write') }
let(:good_params) { { username: 'alice', password: '12345678', email: '[email protected]', agreement: true } }
let(:remote_ip) { IPAddr.new('198.0.2.1') }
describe '#call' do
let(:params) { good_params }
shared_examples 'successful registration' do
it 'creates an unconfirmed user with access token and the app\'s scope', :aggregate_failures do
access_token = subject.call(app, remote_ip, params)
expect(access_token).to_not be_nil
expect(access_token.scopes.to_s).to eq 'read write'
user = User.find_by(id: access_token.resource_owner_id)
expect(user).to_not be_nil
expect(user.confirmed?).to be false
expect(user.account).to_not be_nil
expect(user.invite_request).to be_nil
end
end
context 'when registrations are closed' do
around do |example|
tmp = Setting.registrations_mode
Setting.registrations_mode = 'none'
example.run
Setting.registrations_mode = tmp
end
it 'raises an error', :aggregate_failures do
expect { subject.call(app, remote_ip, good_params) }.to raise_error Mastodon::NotPermittedError
end
context 'when using a valid invite' do
let(:params) { good_params.merge({ invite_code: invite.code }) }
let(:invite) { Fabricate(:invite) }
before do
invite.user.approve!
end
it_behaves_like 'successful registration'
end
context 'when using an invalid invite' do
let(:params) { good_params.merge({ invite_code: invite.code }) }
let(:invite) { Fabricate(:invite, uses: 1, max_uses: 1) }
it 'raises an error', :aggregate_failures do
expect { subject.call(app, remote_ip, params) }.to raise_error Mastodon::NotPermittedError
end
end
end
it 'raises an error when params are missing' do
expect { subject.call(app, remote_ip, {}) }.to raise_error ActiveRecord::RecordInvalid
end
it_behaves_like 'successful registration'
context 'when given an invite request text' do
it 'creates an account with invite request text' do
access_token = subject.call(app, remote_ip, good_params.merge(reason: 'Foo bar'))
expect(access_token).to_not be_nil
user = User.find_by(id: access_token.resource_owner_id)
expect(user).to_not be_nil
expect(user.invite_request&.text).to eq 'Foo bar'
end
end
end
end
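The spec above extracts its happy path into `shared_examples 'successful registration'` and re-includes it with `it_behaves_like` under different contexts. A minimal sketch of that mechanism with made-up names, assuming plain RSpec:
```ruby
RSpec.describe 'shared examples' do
  shared_examples 'a non-empty result' do
    it 'returns at least one element' do
      expect(result).to_not be_empty
    end
  end

  context 'with a single element' do
    let(:result) { [1] }

    it_behaves_like 'a non-empty result'
  end

  context 'with several elements' do
    let(:result) { [1, 2, 3] }

    it_behaves_like 'a non-empty result'
  end
end
```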
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class UnblockDomainService < BaseService
attr_accessor :domain_block
def call(domain_block)
@domain_block = domain_block
process_retroactive_updates
domain_block.destroy
end
def process_retroactive_updates
scope = Account.by_domain_and_subdomains(domain_block.domain)
scope.where(silenced_at: domain_block.created_at).in_batches.update_all(silenced_at: nil) unless domain_block.noop?
scope.where(suspended_at: domain_block.created_at).in_batches.update_all(suspended_at: nil, suspension_origin: nil) if domain_block.suspend?
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
describe UnblockDomainService, type: :service do
subject { described_class.new }
describe 'call' do
let!(:independently_suspended) { Fabricate(:account, domain: 'example.com', suspended_at: 1.hour.ago) }
let!(:independently_silenced) { Fabricate(:account, domain: 'example.com', silenced_at: 1.hour.ago) }
let!(:domain_block) { Fabricate(:domain_block, domain: 'example.com') }
let!(:silenced) { Fabricate(:account, domain: 'example.com', silenced_at: domain_block.created_at) }
let!(:suspended) { Fabricate(:account, domain: 'example.com', suspended_at: domain_block.created_at) }
it 'unsilences accounts and removes block' do
domain_block.update(severity: :silence)
subject.call(domain_block)
expect_deleted_domain_block
expect(silenced.reload.silenced?).to be false
expect(suspended.reload.suspended?).to be true
expect(independently_suspended.reload.suspended?).to be true
expect(independently_silenced.reload.silenced?).to be true
end
it 'unsuspends accounts and removes block' do
domain_block.update(severity: :suspend)
subject.call(domain_block)
expect_deleted_domain_block
expect(suspended.reload.suspended?).to be false
expect(silenced.reload.silenced?).to be false
expect(independently_suspended.reload.suspended?).to be true
expect(independently_silenced.reload.silenced?).to be true
end
end
def expect_deleted_domain_block
expect { domain_block.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
|
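A note on the fixtures above: UnblockDomainService only reverts moderation that the block itself applied, by matching `silenced_at`/`suspended_at` against the block's `created_at`, which is why the spec keeps the `independently_suspended` and `independently_silenced` accounts untouched. A minimal plain-Ruby sketch of that selection rule, with hashes standing in for Account rows (an assumption for illustration):
```ruby
require 'time'

# Sketch of the selection rule in UnblockDomainService#process_retroactive_updates:
# only accounts stamped with the block's own created_at are reverted.
block_created_at = Time.parse('2024-01-01 12:00:00 UTC')

accounts = [
  { username: 'silenced_by_block',   silenced_at: block_created_at },
  { username: 'silenced_on_her_own', silenced_at: block_created_at - 3600 },
]

reverted, kept = accounts.partition { |account| account[:silenced_at] == block_created_at }

puts reverted.map { |account| account[:username] }.inspect # => ["silenced_by_block"]
puts kept.map { |account| account[:username] }.inspect     # => ["silenced_on_her_own"]
```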
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class PurgeDomainService < BaseService
def call(domain)
Account.remote.where(domain: domain).reorder(nil).find_each do |account|
DeleteAccountService.new.call(account, reserve_username: false, skip_side_effects: true)
end
CustomEmoji.remote.where(domain: domain).reorder(nil).find_each(&:destroy)
Instance.refresh
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe PurgeDomainService, type: :service do
subject { described_class.new }
let!(:old_account) { Fabricate(:account, domain: 'obsolete.org') }
let!(:old_status_plain) { Fabricate(:status, account: old_account) }
let!(:old_status_with_attachment) { Fabricate(:status, account: old_account) }
let!(:old_attachment) { Fabricate(:media_attachment, account: old_account, status: old_status_with_attachment, file: attachment_fixture('attachment.jpg')) }
describe 'for a suspension' do
before do
subject.call('obsolete.org')
end
it 'removes the remote account\'s statuses and media attachments' do
expect { old_account.reload }.to raise_exception ActiveRecord::RecordNotFound
expect { old_status_plain.reload }.to raise_exception ActiveRecord::RecordNotFound
expect { old_status_with_attachment.reload }.to raise_exception ActiveRecord::RecordNotFound
expect { old_attachment.reload }.to raise_exception ActiveRecord::RecordNotFound
end
it 'refreshes instances view' do
expect(Instance.where(domain: 'obsolete.org').exists?).to be false
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class RejectFollowService < BaseService
include Payloadable
def call(source_account, target_account)
follow_request = FollowRequest.find_by!(account: source_account, target_account: target_account)
follow_request.reject!
create_notification(follow_request) if !source_account.local? && source_account.activitypub?
follow_request
end
private
def create_notification(follow_request)
ActivityPub::DeliveryWorker.perform_async(build_json(follow_request), follow_request.target_account_id, follow_request.account.inbox_url)
end
def build_json(follow_request)
Oj.dump(serialize_payload(follow_request, ActivityPub::RejectFollowSerializer))
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe RejectFollowService, type: :service do
subject { described_class.new }
let(:sender) { Fabricate(:account, username: 'alice') }
describe 'local' do
let(:bob) { Fabricate(:account) }
before do
FollowRequest.create(account: bob, target_account: sender)
subject.call(bob, sender)
end
it 'removes follow request' do
expect(bob.requested?(sender)).to be false
end
it 'does not create follow relation' do
expect(bob.following?(sender)).to be false
end
end
describe 'remote ActivityPub' do
let(:bob) { Fabricate(:account, username: 'bob', domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
before do
FollowRequest.create(account: bob, target_account: sender)
stub_request(:post, bob.inbox_url).to_return(status: 200)
subject.call(bob, sender)
end
it 'removes follow request' do
expect(bob.requested?(sender)).to be false
end
it 'does not create follow relation' do
expect(bob.following?(sender)).to be false
end
it 'sends a reject activity' do
expect(a_request(:post, bob.inbox_url)).to have_been_made.once
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class UnallowDomainService < BaseService
include DomainControlHelper
def call(domain_allow)
suspend_accounts!(domain_allow.domain) if limited_federation_mode?
domain_allow.destroy
end
private
def suspend_accounts!(domain)
Account.where(domain: domain).in_batches.update_all(suspended_at: Time.now.utc)
AfterUnallowDomainWorker.perform_async(domain)
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe UnallowDomainService, type: :service do
subject { described_class.new }
let!(:bad_account) { Fabricate(:account, username: 'badguy666', domain: 'evil.org') }
let!(:bad_status_harassment) { Fabricate(:status, account: bad_account, text: 'You suck') }
let!(:bad_status_mean) { Fabricate(:status, account: bad_account, text: 'Hahaha') }
let!(:bad_attachment) { Fabricate(:media_attachment, account: bad_account, status: bad_status_mean, file: attachment_fixture('attachment.jpg')) }
let!(:already_banned_account) { Fabricate(:account, username: 'badguy', domain: 'evil.org', suspended: true, silenced: true) }
let!(:domain_allow) { Fabricate(:domain_allow, domain: 'evil.org') }
context 'with limited federation mode' do
before do
allow(Rails.configuration.x).to receive(:limited_federation_mode).and_return(true)
end
describe '#call' do
before do
subject.call(domain_allow)
end
it 'removes the allowed domain' do
expect(DomainAllow.allowed?('evil.org')).to be false
end
it 'removes remote accounts from that domain' do
expect(Account.where(domain: 'evil.org').exists?).to be false
end
it 'removes the remote account\'s statuses and media attachments' do
expect { bad_status_harassment.reload }.to raise_exception ActiveRecord::RecordNotFound
expect { bad_status_mean.reload }.to raise_exception ActiveRecord::RecordNotFound
expect { bad_attachment.reload }.to raise_exception ActiveRecord::RecordNotFound
end
end
end
context 'without limited federation mode' do
before do
allow(Rails.configuration.x).to receive(:limited_federation_mode).and_return(false)
end
describe '#call' do
before do
subject.call(domain_allow)
end
it 'removes the allowed domain' do
expect(DomainAllow.allowed?('evil.org')).to be false
end
it 'does not remove accounts from that domain' do
expect(Account.where(domain: 'evil.org').exists?).to be true
end
it 'removes the remote account\'s statuses and media attachments' do
expect { bad_status_harassment.reload }.to_not raise_error
expect { bad_status_mean.reload }.to_not raise_error
expect { bad_attachment.reload }.to_not raise_error
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ReportService < BaseService
include Payloadable
def call(source_account, target_account, options = {})
@source_account = source_account
@target_account = target_account
@status_ids = options.delete(:status_ids).presence || []
@comment = options.delete(:comment).presence || ''
@category = options[:rule_ids].present? ? 'violation' : (options.delete(:category).presence || 'other')
@rule_ids = options.delete(:rule_ids).presence
@options = options
raise ActiveRecord::RecordNotFound if @target_account.unavailable?
create_report!
notify_staff!
if forward?
forward_to_origin!
forward_to_replied_to!
end
@report
end
private
def create_report!
@report = @source_account.reports.create!(
target_account: @target_account,
status_ids: reported_status_ids,
comment: @comment,
uri: @options[:uri],
forwarded: forward_to_origin?,
category: @category,
rule_ids: @rule_ids
)
end
def notify_staff!
return if @report.unresolved_siblings?
User.those_who_can(:manage_reports).includes(:account).find_each do |u|
LocalNotificationWorker.perform_async(u.account_id, @report.id, 'Report', 'admin.report')
AdminMailer.with(recipient: u.account).new_report(@report).deliver_later if u.allows_report_emails?
end
end
def forward_to_origin!
return unless forward_to_origin?
# Send report to the server where the account originates from
ActivityPub::DeliveryWorker.perform_async(payload, some_local_account.id, @target_account.inbox_url)
end
def forward_to_replied_to!
# Send report to servers to which the account was replying to, so they also have a chance to act
inbox_urls = Account.remote.where(domain: forward_to_domains).where(id: Status.where(id: reported_status_ids).where.not(in_reply_to_account_id: nil).select(:in_reply_to_account_id)).inboxes - [@target_account.inbox_url, @target_account.shared_inbox_url]
inbox_urls.each do |inbox_url|
ActivityPub::DeliveryWorker.perform_async(payload, some_local_account.id, inbox_url)
end
end
def forward?
!@target_account.local? && ActiveModel::Type::Boolean.new.cast(@options[:forward])
end
def forward_to_origin?
forward? && forward_to_domains.include?(@target_account.domain)
end
def forward_to_domains
@forward_to_domains ||= (@options[:forward_to_domains] || [@target_account.domain]).filter_map { |domain| TagManager.instance.normalize_domain(domain&.strip) }.uniq
end
def reported_status_ids
return AccountStatusesFilter.new(@target_account, @source_account).results.with_discarded.find(Array(@status_ids)).pluck(:id) if @source_account.local?
# If the account making reports is remote, it is likely anonymized so we have to relax the requirements for attaching statuses.
domain = @source_account.domain.to_s.downcase
has_followers = @target_account.followers.where(Account.arel_table[:domain].lower.eq(domain)).exists?
visibility = has_followers ? %i(public unlisted private) : %i(public unlisted)
scope = @target_account.statuses.with_discarded
scope.merge!(scope.where(visibility: visibility).or(scope.where('EXISTS (SELECT 1 FROM mentions m JOIN accounts a ON m.account_id = a.id WHERE lower(a.domain) = ?)', domain)))
# Allow missing posts to not drop reports that include e.g. a deleted post
scope.where(id: Array(@status_ids)).pluck(:id)
end
def payload
Oj.dump(serialize_payload(@report, ActivityPub::FlagSerializer, account: some_local_account))
end
def some_local_account
@some_local_account ||= Account.representative
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ReportService, type: :service do
subject { described_class.new }
let(:source_account) { Fabricate(:account) }
let(:target_account) { Fabricate(:account) }
context 'with a local account' do
it 'has a uri' do
report = subject.call(source_account, target_account)
expect(report.uri).to_not be_nil
end
end
context 'with a remote account' do
let(:remote_account) { Fabricate(:account, domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
let(:forward) { false }
before do
stub_request(:post, 'http://example.com/inbox').to_return(status: 200)
end
context 'when forward is true' do
let(:forward) { true }
it 'sends ActivityPub payload when forward is true' do
subject.call(source_account, remote_account, forward: forward)
expect(a_request(:post, 'http://example.com/inbox')).to have_been_made
end
it 'has an uri' do
report = subject.call(source_account, remote_account, forward: forward)
expect(report.uri).to_not be_nil
end
context 'when reporting a reply on a different remote server' do
let(:remote_thread_account) { Fabricate(:account, domain: 'foo.com', protocol: :activitypub, inbox_url: 'http://foo.com/inbox') }
let(:reported_status) { Fabricate(:status, account: remote_account, thread: Fabricate(:status, account: remote_thread_account)) }
before do
stub_request(:post, 'http://foo.com/inbox').to_return(status: 200)
end
context 'when forward_to_domains includes both the replied-to domain and the origin domain' do
it 'sends ActivityPub payload to both the author of the replied-to post and the reported user' do
subject.call(source_account, remote_account, status_ids: [reported_status.id], forward: forward, forward_to_domains: [remote_account.domain, remote_thread_account.domain])
expect(a_request(:post, 'http://foo.com/inbox')).to have_been_made
expect(a_request(:post, 'http://example.com/inbox')).to have_been_made
end
end
context 'when forward_to_domains includes only the replied-to domain' do
it 'sends ActivityPub payload only to the author of the replied-to post' do
subject.call(source_account, remote_account, status_ids: [reported_status.id], forward: forward, forward_to_domains: [remote_thread_account.domain])
expect(a_request(:post, 'http://foo.com/inbox')).to have_been_made
expect(a_request(:post, 'http://example.com/inbox')).to_not have_been_made
end
end
context 'when forward_to_domains does not include the replied-to domain' do
it 'does not send ActivityPub payload to the author of the replied-to post' do
subject.call(source_account, remote_account, status_ids: [reported_status.id], forward: forward)
expect(a_request(:post, 'http://foo.com/inbox')).to_not have_been_made
end
end
end
context 'when reporting a reply on the same remote server as the person being replied-to' do
let(:remote_thread_account) { Fabricate(:account, domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
let(:reported_status) { Fabricate(:status, account: remote_account, thread: Fabricate(:status, account: remote_thread_account)) }
context 'when forward_to_domains includes both the replied-to domain and the origin domain' do
it 'sends ActivityPub payload only once' do
subject.call(source_account, remote_account, status_ids: [reported_status.id], forward: forward, forward_to_domains: [remote_account.domain])
expect(a_request(:post, 'http://example.com/inbox')).to have_been_made.once
end
end
context 'when forward_to_domains does not include the replied-to domain' do
it 'sends ActivityPub payload only once' do
subject.call(source_account, remote_account, status_ids: [reported_status.id], forward: forward)
expect(a_request(:post, 'http://example.com/inbox')).to have_been_made.once
end
end
end
end
context 'when forward is false' do
it 'does not send anything' do
subject.call(source_account, remote_account, forward: forward)
expect(a_request(:post, 'http://example.com/inbox')).to_not have_been_made
end
end
end
context 'when the reported status is a DM' do
subject do
-> { described_class.new.call(source_account, target_account, status_ids: [status.id]) }
end
let(:status) { Fabricate(:status, account: target_account, visibility: :direct) }
context 'when it is addressed to the reporter' do
before do
status.mentions.create(account: source_account)
end
it 'creates a report' do
expect { subject.call }.to change { target_account.targeted_reports.count }.from(0).to(1)
end
it 'attaches the DM to the report' do
subject.call
expect(target_account.targeted_reports.pluck(:status_ids)).to eq [[status.id]]
end
end
context 'when it is not addressed to the reporter' do
it 'errors out' do
expect { subject.call }.to raise_error(ActiveRecord::RecordNotFound)
end
end
context 'when the reporter is remote' do
let(:source_account) { Fabricate(:account, domain: 'example.com', uri: 'https://example.com/users/1') }
context 'when it is addressed to the reporter' do
before do
status.mentions.create(account: source_account)
end
it 'creates a report' do
expect { subject.call }.to change { target_account.targeted_reports.count }.from(0).to(1)
end
it 'attaches the DM to the report' do
subject.call
expect(target_account.targeted_reports.pluck(:status_ids)).to eq [[status.id]]
end
end
context 'when it is not addressed to the reporter' do
it 'does not add the DM to the report' do
subject.call
expect(target_account.targeted_reports.pluck(:status_ids)).to eq [[]]
end
end
end
end
context 'when other reports already exist for the same target' do
subject do
-> { described_class.new.call(source_account, target_account) }
end
let!(:other_report) { Fabricate(:report, target_account: target_account) }
before do
ActionMailer::Base.deliveries.clear
source_account.user.settings['notification_emails.report'] = true
source_account.user.save
end
it 'does not send an e-mail' do
expect { subject.call }.to_not change(ActionMailer::Base.deliveries, :count).from(0)
end
end
end
|
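The forwarding behaviour tested above hinges on `ReportService#forward_to_domains`: the caller-supplied list is stripped, normalized and de-duplicated, and the origin server only gets a copy of the Flag activity if its own domain survives that filtering. A minimal sketch of the filtering step in plain Ruby, using a simplified downcase-and-strip stand-in for `TagManager#normalize_domain` (an assumption for illustration):
```ruby
# Illustrative sketch only: a simplified stand-in for the domain filtering in
# ReportService#forward_to_domains. The real service delegates normalization to
# TagManager#normalize_domain; downcasing and stripping here is an assumption.
def forward_to_domains(requested_domains, target_domain)
  (requested_domains || [target_domain])
    .filter_map { |domain| domain&.strip&.downcase }
    .reject(&:empty?)
    .uniq
end

domains = forward_to_domains([' Example.com ', 'foo.com', 'example.com', nil], 'example.com')
puts domains.inspect                 # => ["example.com", "foo.com"]
puts domains.include?('example.com') # forward_to_origin? would be true in this case
```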
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class FavouriteService < BaseService
include Authorization
include Payloadable
# Favourite a status and notify remote user
# @param [Account] account
# @param [Status] status
# @return [Favourite]
def call(account, status)
authorize_with account, status, :favourite?
favourite = Favourite.find_by(account: account, status: status)
return favourite unless favourite.nil?
favourite = Favourite.create!(account: account, status: status)
Trends.statuses.register(status)
create_notification(favourite)
bump_potential_friendship(account, status)
favourite
end
private
def create_notification(favourite)
status = favourite.status
if status.account.local?
LocalNotificationWorker.perform_async(status.account_id, favourite.id, 'Favourite', 'favourite')
elsif status.account.activitypub?
ActivityPub::DeliveryWorker.perform_async(build_json(favourite), favourite.account_id, status.account.inbox_url)
end
end
def bump_potential_friendship(account, status)
ActivityTracker.increment('activity:interactions')
return if account.following?(status.account_id)
PotentialFriendshipTracker.record(account.id, status.account_id, :favourite)
end
def build_json(favourite)
Oj.dump(serialize_payload(favourite, ActivityPub::LikeSerializer))
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe FavouriteService, type: :service do
subject { described_class.new }
let(:sender) { Fabricate(:account, username: 'alice') }
describe 'local' do
let(:bob) { Fabricate(:account) }
let(:status) { Fabricate(:status, account: bob) }
before do
subject.call(sender, status)
end
it 'creates a favourite' do
expect(status.favourites.first).to_not be_nil
end
end
describe 'remote ActivityPub' do
let(:bob) { Fabricate(:account, protocol: :activitypub, username: 'bob', domain: 'example.com', inbox_url: 'http://example.com/inbox') }
let(:status) { Fabricate(:status, account: bob) }
before do
stub_request(:post, 'http://example.com/inbox').to_return(status: 200, body: '', headers: {})
subject.call(sender, status)
end
it 'creates a favourite' do
expect(status.favourites.first).to_not be_nil
end
it 'sends a like activity' do
expect(a_request(:post, 'http://example.com/inbox')).to have_been_made.once
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ClearDomainMediaService < BaseService
attr_reader :domain_block
def call(domain_block)
@domain_block = domain_block
clear_media! if domain_block.reject_media?
end
private
def clear_media!
clear_account_images!
clear_account_attachments!
clear_emojos!
end
def clear_account_images!
blocked_domain_accounts.reorder(nil).find_in_batches do |accounts|
AttachmentBatch.new(Account, accounts).clear
end
end
def clear_account_attachments!
media_from_blocked_domain.reorder(nil).find_in_batches do |attachments|
AttachmentBatch.new(MediaAttachment, attachments).clear
end
end
def clear_emojos!
emojis_from_blocked_domains.find_in_batches do |custom_emojis|
AttachmentBatch.new(CustomEmoji, custom_emojis).delete
end
end
def blocked_domain
domain_block.domain
end
def blocked_domain_accounts
Account.by_domain_and_subdomains(blocked_domain)
end
def media_from_blocked_domain
MediaAttachment.joins(:account).merge(blocked_domain_accounts)
end
def emojis_from_blocked_domains
CustomEmoji.by_domain_and_subdomains(blocked_domain)
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ClearDomainMediaService, type: :service do
subject { described_class.new }
let!(:bad_account) { Fabricate(:account, username: 'badguy666', domain: 'evil.org') }
let!(:bad_status_plain) { Fabricate(:status, account: bad_account, text: 'You suck') }
let!(:bad_status_with_attachment) { Fabricate(:status, account: bad_account, text: 'Hahaha') }
let!(:bad_attachment) { Fabricate(:media_attachment, account: bad_account, status: bad_status_with_attachment, file: attachment_fixture('attachment.jpg')) }
describe 'for a silence with reject media' do
before do
subject.call(DomainBlock.create!(domain: 'evil.org', severity: :silence, reject_media: true))
end
it 'leaves the domain\'s statuses and attachments, but clears media' do
expect { bad_status_plain.reload }.to_not raise_error
expect { bad_status_with_attachment.reload }.to_not raise_error
expect { bad_attachment.reload }.to_not raise_error
expect(bad_attachment.file.exists?).to be false
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class UnblockService < BaseService
include Payloadable
def call(account, target_account)
return unless account.blocking?(target_account)
unblock = account.unblock!(target_account)
create_notification(unblock) if !target_account.local? && target_account.activitypub?
unblock
end
private
def create_notification(unblock)
ActivityPub::DeliveryWorker.perform_async(build_json(unblock), unblock.account_id, unblock.target_account.inbox_url)
end
def build_json(unblock)
Oj.dump(serialize_payload(unblock, ActivityPub::UndoBlockSerializer))
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe UnblockService, type: :service do
subject { described_class.new }
let(:sender) { Fabricate(:account, username: 'alice') }
describe 'local' do
let(:bob) { Fabricate(:account) }
before do
sender.block!(bob)
subject.call(sender, bob)
end
it 'destroys the blocking relation' do
expect(sender.blocking?(bob)).to be false
end
end
describe 'remote ActivityPub' do
let(:bob) { Fabricate(:account, username: 'bob', protocol: :activitypub, domain: 'example.com', inbox_url: 'http://example.com/inbox') }
before do
sender.block!(bob)
stub_request(:post, 'http://example.com/inbox').to_return(status: 200)
subject.call(sender, bob)
end
it 'destroys the blocking relation' do
expect(sender.blocking?(bob)).to be false
end
it 'sends an unblock activity' do
expect(a_request(:post, 'http://example.com/inbox')).to have_been_made.once
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class NotifyService < BaseService
include Redisable
NON_EMAIL_TYPES = %i(
admin.report
admin.sign_up
update
poll
status
).freeze
def call(recipient, type, activity)
@recipient = recipient
@activity = activity
@notification = Notification.new(account: @recipient, type: type, activity: @activity)
return if recipient.user.nil? || blocked?
@notification.save!
# It's possible the underlying activity has been deleted
# between the save call and now
return if @notification.activity.nil?
push_notification!
push_to_conversation! if direct_message?
send_email! if email_needed?
rescue ActiveRecord::RecordInvalid
nil
end
private
def blocked_mention?
FeedManager.instance.filter?(:mentions, @notification.mention.status, @recipient)
end
def following_sender?
return @following_sender if defined?(@following_sender)
@following_sender = @recipient.following?(@notification.from_account) || @recipient.requested?(@notification.from_account)
end
def optional_non_follower?
@recipient.user.settings['interactions.must_be_follower'] && [email protected]_account.following?(@recipient)
end
def optional_non_following?
@recipient.user.settings['interactions.must_be_following'] && !following_sender?
end
def message?
@notification.type == :mention
end
def direct_message?
message? && @notification.target_status.direct_visibility?
end
# Returns true if the sender has been mentioned by the recipient up the thread
def response_to_recipient?
return false if @notification.target_status.in_reply_to_id.nil?
# Using an SQL CTE to avoid unneeded back-and-forth with SQL server in case of long threads
!Status.count_by_sql([<<-SQL.squish, id: @notification.target_status.in_reply_to_id, recipient_id: @recipient.id, sender_id: @notification.from_account.id, depth_limit: 100]).zero?
WITH RECURSIVE ancestors(id, in_reply_to_id, mention_id, path, depth) AS (
SELECT s.id, s.in_reply_to_id, m.id, ARRAY[s.id], 0
FROM statuses s
LEFT JOIN mentions m ON m.silent = FALSE AND m.account_id = :sender_id AND m.status_id = s.id
WHERE s.id = :id
UNION ALL
SELECT s.id, s.in_reply_to_id, m.id, st.path || s.id, st.depth + 1
FROM ancestors st
JOIN statuses s ON s.id = st.in_reply_to_id
LEFT JOIN mentions m ON m.silent = FALSE AND m.account_id = :sender_id AND m.status_id = s.id
WHERE st.mention_id IS NULL AND NOT s.id = ANY(path) AND st.depth < :depth_limit
)
SELECT COUNT(*)
FROM ancestors st
JOIN statuses s ON s.id = st.id
WHERE st.mention_id IS NOT NULL AND s.visibility = 3
SQL
end
def from_staff?
@notification.from_account.local? && @notification.from_account.user.present? && @notification.from_account.user_role&.overrides?(@recipient.user_role)
end
def optional_non_following_and_direct?
direct_message? &&
@recipient.user.settings['interactions.must_be_following_dm'] &&
!following_sender? &&
!response_to_recipient?
end
def hellbanned?
@notification.from_account.silenced? && !following_sender?
end
def from_self?
@recipient.id == @notification.from_account.id
end
def domain_blocking?
@recipient.domain_blocking?(@notification.from_account.domain) && !following_sender?
end
def blocked?
blocked = @recipient.unavailable?
blocked ||= from_self? && @notification.type != :poll
return blocked if message? && from_staff?
blocked ||= domain_blocking?
blocked ||= @recipient.blocking?(@notification.from_account)
blocked ||= @recipient.muting_notifications?(@notification.from_account)
blocked ||= hellbanned?
blocked ||= optional_non_follower?
blocked ||= optional_non_following?
blocked ||= optional_non_following_and_direct?
blocked ||= conversation_muted?
blocked ||= blocked_mention? if @notification.type == :mention
blocked
end
def conversation_muted?
if @notification.target_status
@recipient.muting_conversation?(@notification.target_status.conversation)
else
false
end
end
def push_notification!
push_to_streaming_api! if subscribed_to_streaming_api?
push_to_web_push_subscriptions!
end
def push_to_streaming_api!
redis.publish("timeline:#{@recipient.id}:notifications", Oj.dump(event: :notification, payload: InlineRenderer.render(@notification, @recipient, :notification)))
end
def subscribed_to_streaming_api?
redis.exists?("subscribed:timeline:#{@recipient.id}") || redis.exists?("subscribed:timeline:#{@recipient.id}:notifications")
end
def push_to_conversation!
AccountConversation.add_status(@recipient, @notification.target_status)
end
def push_to_web_push_subscriptions!
::Web::PushNotificationWorker.push_bulk(web_push_subscriptions.select { |subscription| subscription.pushable?(@notification) }) { |subscription| [subscription.id, @notification.id] }
end
def web_push_subscriptions
@web_push_subscriptions ||= ::Web::PushSubscription.where(user_id: @recipient.user.id).to_a
end
def subscribed_to_web_push?
web_push_subscriptions.any?
end
def send_email!
return unless NotificationMailer.respond_to?(@notification.type)
NotificationMailer
.with(recipient: @recipient, notification: @notification)
.public_send(@notification.type)
.deliver_later(wait: 2.minutes)
end
def email_needed?
(!recipient_online? || always_send_emails?) && send_email_for_notification_type?
end
def recipient_online?
subscribed_to_streaming_api? || subscribed_to_web_push?
end
def always_send_emails?
@recipient.user.settings.always_send_emails
end
def send_email_for_notification_type?
NON_EMAIL_TYPES.exclude?(@notification.type) && @recipient.user.settings["notification_emails.#{@notification.type}"]
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe NotifyService, type: :service do
subject { described_class.new.call(recipient, type, activity) }
let(:user) { Fabricate(:user) }
let(:recipient) { user.account }
let(:sender) { Fabricate(:account, domain: 'example.com') }
let(:activity) { Fabricate(:follow, account: sender, target_account: recipient) }
let(:type) { :follow }
it { expect { subject }.to change(Notification, :count).by(1) }
it 'does not notify when sender is blocked' do
recipient.block!(sender)
expect { subject }.to_not change(Notification, :count)
end
it 'does not notify when sender is muted with hide_notifications' do
recipient.mute!(sender, notifications: true)
expect { subject }.to_not change(Notification, :count)
end
it 'does notify when sender is muted without hide_notifications' do
recipient.mute!(sender, notifications: false)
expect { subject }.to change(Notification, :count)
end
it 'does not notify when sender\'s domain is blocked' do
recipient.block_domain!(sender.domain)
expect { subject }.to_not change(Notification, :count)
end
it 'does still notify when sender\'s domain is blocked but sender is followed' do
recipient.block_domain!(sender.domain)
recipient.follow!(sender)
expect { subject }.to change(Notification, :count)
end
it 'does not notify when sender is silenced and not followed' do
sender.silence!
expect { subject }.to_not change(Notification, :count)
end
it 'does not notify when recipient is suspended' do
recipient.suspend!
expect { subject }.to_not change(Notification, :count)
end
context 'with direct messages' do
let(:activity) { Fabricate(:mention, account: recipient, status: Fabricate(:status, account: sender, visibility: :direct)) }
let(:type) { :mention }
before do
user.settings.update('interactions.must_be_following_dm': enabled)
user.save
end
context 'when recipient is supposed to be following sender' do
let(:enabled) { true }
it 'does not notify' do
expect { subject }.to_not change(Notification, :count)
end
context 'when the message chain is initiated by the recipient, but is not a direct message' do
let(:reply_to) { Fabricate(:status, account: recipient) }
let!(:mention) { Fabricate(:mention, account: sender, status: reply_to) }
let(:activity) { Fabricate(:mention, account: recipient, status: Fabricate(:status, account: sender, visibility: :direct, thread: reply_to)) }
it 'does not notify' do
expect { subject }.to_not change(Notification, :count)
end
end
context 'when the message chain is initiated by the recipient, but without a mention to the sender, even if the sender sends multiple messages in a row' do
let(:reply_to) { Fabricate(:status, account: recipient) }
let!(:mention) { Fabricate(:mention, account: sender, status: reply_to) }
let(:dummy_reply) { Fabricate(:status, account: sender, visibility: :direct, thread: reply_to) }
let(:activity) { Fabricate(:mention, account: recipient, status: Fabricate(:status, account: sender, visibility: :direct, thread: dummy_reply)) }
it 'does not notify' do
expect { subject }.to_not change(Notification, :count)
end
end
context 'when the message chain is initiated by the recipient with a mention to the sender' do
let(:reply_to) { Fabricate(:status, account: recipient, visibility: :direct) }
let!(:mention) { Fabricate(:mention, account: sender, status: reply_to) }
let(:activity) { Fabricate(:mention, account: recipient, status: Fabricate(:status, account: sender, visibility: :direct, thread: reply_to)) }
it 'does notify' do
expect { subject }.to change(Notification, :count)
end
end
end
context 'when recipient is NOT supposed to be following sender' do
let(:enabled) { false }
it 'does notify' do
expect { subject }.to change(Notification, :count)
end
end
end
describe 'reblogs' do
let(:status) { Fabricate(:status, account: Fabricate(:account)) }
let(:activity) { Fabricate(:status, account: sender, reblog: status) }
let(:type) { :reblog }
it 'shows reblogs by default' do
recipient.follow!(sender)
expect { subject }.to change(Notification, :count)
end
it 'shows reblogs when explicitly enabled' do
recipient.follow!(sender, reblogs: true)
expect { subject }.to change(Notification, :count)
end
it 'shows reblogs when disabled' do
recipient.follow!(sender, reblogs: false)
expect { subject }.to change(Notification, :count)
end
end
context 'with muted and blocked users' do
let(:asshole) { Fabricate(:account, username: 'asshole') }
let(:reply_to) { Fabricate(:status, account: asshole) }
let(:activity) { Fabricate(:mention, account: recipient, status: Fabricate(:status, account: sender, thread: reply_to)) }
let(:type) { :mention }
it 'does not notify when conversation is muted' do
recipient.mute_conversation!(activity.status.conversation)
expect { subject }.to_not change(Notification, :count)
end
it 'does not notify when it is a reply to a blocked user' do
recipient.block!(asshole)
expect { subject }.to_not change(Notification, :count)
end
end
context 'with sender as recipient' do
let(:sender) { recipient }
it 'does not notify when recipient is the sender' do
expect { subject }.to_not change(Notification, :count)
end
end
describe 'email' do
before do
ActionMailer::Base.deliveries.clear
user.settings.update('notification_emails.follow': enabled)
user.save
end
context 'when email notification is enabled' do
let(:enabled) { true }
it 'sends email' do
expect { subject }.to change(ActionMailer::Base.deliveries, :count).by(1)
end
end
context 'when email notification is disabled' do
let(:enabled) { false }
it "doesn't send email" do
expect { subject }.to_not change(ActionMailer::Base.deliveries, :count).from(0)
end
end
end
end
|
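The email examples at the end of this spec exercise a small predicate inside NotifyService: mail goes out only when the notification type is not in NON_EMAIL_TYPES, the per-type setting is enabled, and the recipient is either offline or has opted into always receiving mails. A stand-alone sketch of that gate in plain Ruby, with a Hash standing in for the real user settings object (an assumption for illustration):
```ruby
# Sketch of NotifyService's email gating reduced to pure Ruby.
# `settings` is a plain Hash here, standing in for the real settings object.
NON_EMAIL_TYPES = %i(admin.report admin.sign_up update poll status).freeze

def email_needed?(type:, settings:, recipient_online:)
  send_for_type = !NON_EMAIL_TYPES.include?(type) && settings.fetch("notification_emails.#{type}", false)
  (!recipient_online || settings.fetch('always_send_emails', false)) && send_for_type
end

settings = { 'notification_emails.follow' => true, 'always_send_emails' => false }

puts email_needed?(type: :follow, settings: settings, recipient_online: false) # => true
puts email_needed?(type: :follow, settings: settings, recipient_online: true)  # => false (online, not forced)
puts email_needed?(type: :poll,   settings: settings, recipient_online: false) # => false (non-email type)
```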
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class TranslateStatusService < BaseService
CACHE_TTL = 1.day.freeze
include ERB::Util
include FormattingHelper
def call(status, target_language)
@status = status
@source_texts = source_texts
@target_language = target_language
raise Mastodon::NotPermittedError unless permitted?
status_translation = Rails.cache.fetch("v2:translations/#{@status.language}/#{@target_language}/#{content_hash}", expires_in: CACHE_TTL) do
translations = translation_backend.translate(@source_texts.values, @status.language, @target_language)
build_status_translation(translations)
end
status_translation.status = @status
status_translation
end
private
def translation_backend
@translation_backend ||= TranslationService.configured
end
def permitted?
return false unless @status.distributable? && TranslationService.configured?
languages[@status.language]&.include?(@target_language)
end
def languages
Rails.cache.fetch('translation_service/languages', expires_in: 7.days, race_condition_ttl: 1.hour) { TranslationService.configured.languages }
end
def content_hash
Digest::SHA256.base64digest(@source_texts.transform_keys { |key| key.respond_to?(:id) ? "#{key.class}-#{key.id}" : key }.to_json)
end
def source_texts
texts = {}
texts[:content] = wrap_emoji_shortcodes(status_content_format(@status)) if @status.content.present?
texts[:spoiler_text] = wrap_emoji_shortcodes(html_escape(@status.spoiler_text)) if @status.spoiler_text.present?
@status.preloadable_poll&.loaded_options&.each do |option|
texts[option] = wrap_emoji_shortcodes(html_escape(option.title))
end
@status.media_attachments.each do |media_attachment|
texts[media_attachment] = html_escape(media_attachment.description)
end
texts
end
def build_status_translation(translations)
status_translation = Translation.new(
detected_source_language: translations.first&.detected_source_language,
language: @target_language,
provider: translations.first&.provider,
content: '',
spoiler_text: '',
poll_options: [],
media_attachments: []
)
@source_texts.keys.each_with_index do |source, index|
translation = translations[index]
case source
when :content
node = unwrap_emoji_shortcodes(translation.text)
Sanitize.node!(node, Sanitize::Config::MASTODON_STRICT)
status_translation.content = node.to_html
when :spoiler_text
status_translation.spoiler_text = unwrap_emoji_shortcodes(translation.text).content
when Poll::Option
status_translation.poll_options << Translation::Option.new(
title: unwrap_emoji_shortcodes(translation.text).content
)
when MediaAttachment
status_translation.media_attachments << Translation::MediaAttachment.new(
id: source.id,
description: html_entities.decode(translation.text)
)
end
end
status_translation
end
def wrap_emoji_shortcodes(text)
EmojiFormatter.new(text, @status.emojis, { raw_shortcode: true }).to_s
end
def unwrap_emoji_shortcodes(html)
fragment = Nokogiri::HTML.fragment(html)
fragment.css('span[translate="no"]').each do |element|
element.remove_attribute('translate')
element.replace(element.children) if element.attributes.empty?
end
fragment
end
def html_entities
HTMLEntities.new
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe TranslateStatusService, type: :service do
subject(:service) { described_class.new }
let(:status) { Fabricate(:status, text: text, spoiler_text: spoiler_text, language: 'en', preloadable_poll: poll, media_attachments: media_attachments) }
let(:text) { 'Hello' }
let(:spoiler_text) { '' }
let(:poll) { nil }
let(:media_attachments) { [] }
before do
Fabricate(:custom_emoji, shortcode: 'highfive')
end
describe '#call' do
before do
translation_service = TranslationService.new
allow(translation_service).to receive(:languages).and_return({ 'en' => ['es'] })
allow(translation_service).to receive(:translate) do |texts|
texts.map do |text|
TranslationService::Translation.new(
text: text.gsub('Hello', 'Hola').gsub('higfive', 'cincoaltos'),
detected_source_language: 'en',
provider: 'Dummy'
)
end
end
allow(TranslationService).to receive_messages(configured?: true, configured: translation_service)
end
it 'returns translated status content' do
expect(service.call(status, 'es').content).to eq '<p>Hola</p>'
end
it 'returns source language' do
expect(service.call(status, 'es').detected_source_language).to eq 'en'
end
it 'returns translation provider' do
expect(service.call(status, 'es').provider).to eq 'Dummy'
end
it 'returns original status' do
expect(service.call(status, 'es').status).to eq status
end
describe 'status has content with custom emoji' do
let(:text) { 'Hello & :highfive:' }
it 'does not translate shortcode' do
expect(service.call(status, 'es').content).to eq '<p>Hola & :highfive:</p>'
end
end
describe 'status has no spoiler_text' do
it 'returns an empty string' do
expect(service.call(status, 'es').spoiler_text).to eq ''
end
end
describe 'status has spoiler_text' do
let(:spoiler_text) { 'Hello & Hello!' }
it 'translates the spoiler text' do
expect(service.call(status, 'es').spoiler_text).to eq 'Hola & Hola!'
end
end
describe 'status has spoiler_text with custom emoji' do
let(:spoiler_text) { 'Hello :highfive:' }
it 'does not translate shortcode' do
expect(service.call(status, 'es').spoiler_text).to eq 'Hola :highfive:'
end
end
describe 'status has spoiler_text with unmatched custom emoji' do
let(:spoiler_text) { 'Hello :Hello:' }
it 'translates the invalid shortcode' do
expect(service.call(status, 'es').spoiler_text).to eq 'Hola :Hola:'
end
end
describe 'status has poll' do
let(:poll) { Fabricate(:poll, options: ['Hello 1', 'Hello 2']) }
it 'translates the poll option title' do
status_translation = service.call(status, 'es')
expect(status_translation.poll_options.size).to eq 2
expect(status_translation.poll_options.first.title).to eq 'Hola 1'
end
end
describe 'status has media attachment' do
let(:media_attachments) { [Fabricate(:media_attachment, description: 'Hello & :highfive:')] }
it 'translates the media attachment description' do
status_translation = service.call(status, 'es')
media_attachment = status_translation.media_attachments.first
expect(media_attachment.id).to eq media_attachments.first.id
expect(media_attachment.description).to eq 'Hola & :highfive:'
end
end
end
describe '#source_texts' do
before do
service.instance_variable_set(:@status, status)
end
describe 'status only has content' do
it 'returns formatted content' do
expect(service.send(:source_texts)).to eq({ content: '<p>Hello</p>' })
end
end
describe 'status content contains custom emoji' do
let(:status) { Fabricate(:status, text: 'Hello :highfive:') }
it 'returns formatted content' do
source_texts = service.send(:source_texts)
expect(source_texts[:content]).to eq '<p>Hello <span translate="no">:highfive:</span></p>'
end
end
describe 'status content contains tags' do
let(:status) { Fabricate(:status, text: 'Hello #hola') }
it 'returns formatted content' do
source_texts = service.send(:source_texts)
expect(source_texts[:content]).to include '<p>Hello <a'
expect(source_texts[:content]).to include '/tags/hola'
end
end
describe 'status has spoiler text' do
let(:status) { Fabricate(:status, spoiler_text: 'Hello :highfive:') }
it 'returns formatted spoiler text' do
source_texts = service.send(:source_texts)
expect(source_texts[:spoiler_text]).to eq 'Hello <span translate="no">:highfive:</span>'
end
end
describe 'status has poll' do
let(:poll) { Fabricate(:poll, options: %w(Blue Green)) }
context 'with source texts from the service' do
let!(:source_texts) { service.send(:source_texts) }
it 'returns formatted poll options' do
expect(source_texts.size).to eq 3
expect(source_texts.values).to eq %w(<p>Hello</p> Blue Green)
end
it 'has a first key with content' do
expect(source_texts.keys.first).to eq :content
end
it 'has the first option in the second key with correct options' do
option1 = source_texts.keys.second
expect(option1).to be_a Poll::Option
expect(option1.id).to eq '0'
expect(option1.title).to eq 'Blue'
end
it 'has the second option in the third key with correct options' do
option2 = source_texts.keys.third
expect(option2).to be_a Poll::Option
expect(option2.id).to eq '1'
expect(option2.title).to eq 'Green'
end
end
end
describe 'status has poll with custom emoji' do
let(:poll) { Fabricate(:poll, options: ['Blue', 'Green :highfive:']) }
it 'returns formatted poll options' do
html = service.send(:source_texts).values.last
expect(html).to eq 'Green <span translate="no">:highfive:</span>'
end
end
describe 'status has media attachments' do
let(:text) { '' }
let(:media_attachments) { [Fabricate(:media_attachment, description: 'Hello :highfive:')] }
it 'returns media attachments without custom emoji rendering' do
source_texts = service.send(:source_texts)
expect(source_texts.size).to eq 1
key, text = source_texts.first
expect(key).to eq media_attachments.first
expect(text).to eq 'Hello :highfive:'
end
end
end
describe '#wrap_emoji_shortcodes' do
before do
service.instance_variable_set(:@status, status)
end
describe 'string contains custom emoji' do
let(:text) { ':highfive:' }
it 'renders the emoji' do
html = service.send(:wrap_emoji_shortcodes, 'Hello :highfive:'.html_safe)
expect(html).to eq 'Hello <span translate="no">:highfive:</span>'
end
end
end
describe '#unwrap_emoji_shortcodes' do
describe 'string contains custom emoji' do
it 'inserts the shortcode' do
fragment = service.send(:unwrap_emoji_shortcodes, '<p>Hello <span translate="no">:highfive:</span>!</p>')
expect(fragment.to_html).to eq '<p>Hello :highfive:!</p>'
end
it 'preserves other attributes than translate=no' do
fragment = service.send(:unwrap_emoji_shortcodes, '<p>Hello <span translate="no" class="foo">:highfive:</span>!</p>')
expect(fragment.to_html).to eq '<p>Hello <span class="foo">:highfive:</span>!</p>'
end
end
end
end
|
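One detail worth calling out from the class above: translations are cached per language pair under a SHA-256 digest of the source texts, and record keys such as poll options or media attachments are flattened to "Class-id" strings before hashing so the key does not depend on object identity. A self-contained sketch of that key construction, with a toy Struct standing in for the ActiveRecord objects (an assumption for illustration):
```ruby
require 'digest'
require 'json'

# Toy stand-in for an ActiveRecord object that responds to #id.
Media = Struct.new(:id)

# Mirrors the idea of TranslateStatusService#content_hash: keys that respond to
# #id are replaced by "Class-id" so the digest stays stable across reloads.
def content_hash(source_texts)
  normalized = source_texts.transform_keys do |key|
    key.respond_to?(:id) ? "#{key.class}-#{key.id}" : key
  end
  Digest::SHA256.base64digest(normalized.to_json)
end

texts = { content: '<p>Hello</p>', Media.new(42) => 'a picture of a cat' }
puts "v2:translations/en/es/#{content_hash(texts)}"
```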
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class FanOutOnWriteService < BaseService
include Redisable
# Push a status into home and mentions feeds
# @param [Status] status
# @param [Hash] options
# @option options [Boolean] update
# @option options [Array<Integer>] silenced_account_ids
# @option options [Boolean] skip_notifications
def call(status, options = {})
@status = status
@account = status.account
@options = options
check_race_condition!
warm_payload_cache!
fan_out_to_local_recipients!
fan_out_to_public_recipients! if broadcastable?
fan_out_to_public_streams! if broadcastable?
end
private
def check_race_condition!
# I don't know why but at some point we had an issue where
# this service was being executed with status objects
# that had a null visibility - which should not be possible
# since the column in the database is not nullable.
#
# This check re-queues the service to be run at a later time
# with the full object, if something like it occurs
raise Mastodon::RaceConditionError if @status.visibility.nil?
end
def fan_out_to_local_recipients!
deliver_to_self!
unless @options[:skip_notifications]
notify_mentioned_accounts!
notify_about_update! if update?
end
case @status.visibility.to_sym
when :public, :unlisted, :private
deliver_to_all_followers!
deliver_to_lists!
when :limited
deliver_to_mentioned_followers!
else
deliver_to_mentioned_followers!
deliver_to_conversation!
end
end
def fan_out_to_public_recipients!
deliver_to_hashtag_followers!
end
def fan_out_to_public_streams!
broadcast_to_hashtag_streams!
broadcast_to_public_streams!
end
def deliver_to_self!
FeedManager.instance.push_to_home(@account, @status, update: update?) if @account.local?
end
def notify_mentioned_accounts!
@status.active_mentions.where.not(id: @options[:silenced_account_ids] || []).joins(:account).merge(Account.local).select(:id, :account_id).reorder(nil).find_in_batches do |mentions|
LocalNotificationWorker.push_bulk(mentions) do |mention|
[mention.account_id, mention.id, 'Mention', 'mention']
end
end
end
def notify_about_update!
@status.reblogged_by_accounts.merge(Account.local).select(:id).reorder(nil).find_in_batches do |accounts|
LocalNotificationWorker.push_bulk(accounts) do |account|
[account.id, @status.id, 'Status', 'update']
end
end
end
def deliver_to_all_followers!
@account.followers_for_local_distribution.select(:id).reorder(nil).find_in_batches do |followers|
FeedInsertWorker.push_bulk(followers) do |follower|
[@status.id, follower.id, 'home', { 'update' => update? }]
end
end
end
def deliver_to_hashtag_followers!
TagFollow.where(tag_id: @status.tags.map(&:id)).select(:id, :account_id).reorder(nil).find_in_batches do |follows|
FeedInsertWorker.push_bulk(follows) do |follow|
[@status.id, follow.account_id, 'tags', { 'update' => update? }]
end
end
end
def deliver_to_lists!
@account.lists_for_local_distribution.select(:id).reorder(nil).find_in_batches do |lists|
FeedInsertWorker.push_bulk(lists) do |list|
[@status.id, list.id, 'list', { 'update' => update? }]
end
end
end
def deliver_to_mentioned_followers!
@status.mentions.joins(:account).merge(@account.followers_for_local_distribution).select(:id, :account_id).reorder(nil).find_in_batches do |mentions|
FeedInsertWorker.push_bulk(mentions) do |mention|
[@status.id, mention.account_id, 'home', { 'update' => update? }]
end
end
end
def broadcast_to_hashtag_streams!
@status.tags.map(&:name).each do |hashtag|
redis.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}", anonymous_payload)
redis.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}:local", anonymous_payload) if @status.local?
end
end
def broadcast_to_public_streams!
return if @status.reply? && @status.in_reply_to_account_id != @account.id
redis.publish('timeline:public', anonymous_payload)
redis.publish(@status.local? ? 'timeline:public:local' : 'timeline:public:remote', anonymous_payload)
if @status.with_media?
redis.publish('timeline:public:media', anonymous_payload)
redis.publish(@status.local? ? 'timeline:public:local:media' : 'timeline:public:remote:media', anonymous_payload)
end
end
def deliver_to_conversation!
AccountConversation.add_status(@account, @status) unless update?
end
def warm_payload_cache!
Rails.cache.write("fan-out/#{@status.id}", rendered_status)
end
def anonymous_payload
@anonymous_payload ||= Oj.dump(
event: update? ? :'status.update' : :update,
payload: rendered_status
)
end
def rendered_status
@rendered_status ||= InlineRenderer.render(@status, nil, :status)
end
def update?
@options[:update]
end
def broadcastable?
@status.public_visibility? && [email protected]? && [email protected]?
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe FanOutOnWriteService, type: :service do
subject { described_class.new }
let(:last_active_at) { Time.now.utc }
let(:status) { Fabricate(:status, account: alice, visibility: visibility, text: 'Hello @bob #hoge') }
let!(:alice) { Fabricate(:user, current_sign_in_at: last_active_at).account }
let!(:bob) { Fabricate(:user, current_sign_in_at: last_active_at, account_attributes: { username: 'bob' }).account }
let!(:tom) { Fabricate(:user, current_sign_in_at: last_active_at).account }
before do
bob.follow!(alice)
tom.follow!(alice)
ProcessMentionsService.new.call(status)
ProcessHashtagsService.new.call(status)
allow(redis).to receive(:publish)
subject.call(status)
end
def home_feed_of(account)
HomeFeed.new(account).get(10).map(&:id)
end
context 'when status is public' do
let(:visibility) { 'public' }
it 'is added to the home feed of its author' do
expect(home_feed_of(alice)).to include status.id
end
it 'is added to the home feed of a follower' do
expect(home_feed_of(bob)).to include status.id
expect(home_feed_of(tom)).to include status.id
end
it 'is broadcast to the hashtag stream' do
expect(redis).to have_received(:publish).with('timeline:hashtag:hoge', anything)
expect(redis).to have_received(:publish).with('timeline:hashtag:hoge:local', anything)
end
it 'is broadcast to the public stream' do
expect(redis).to have_received(:publish).with('timeline:public', anything)
expect(redis).to have_received(:publish).with('timeline:public:local', anything)
end
end
context 'when status is limited' do
let(:visibility) { 'limited' }
it 'is added to the home feed of its author' do
expect(home_feed_of(alice)).to include status.id
end
it 'is added to the home feed of the mentioned follower' do
expect(home_feed_of(bob)).to include status.id
end
it 'is not added to the home feed of the other follower' do
expect(home_feed_of(tom)).to_not include status.id
end
it 'is not broadcast publicly' do
expect(redis).to_not have_received(:publish).with('timeline:hashtag:hoge', anything)
expect(redis).to_not have_received(:publish).with('timeline:public', anything)
end
end
context 'when status is private' do
let(:visibility) { 'private' }
it 'is added to the home feed of its author' do
expect(home_feed_of(alice)).to include status.id
end
it 'is added to the home feed of a follower' do
expect(home_feed_of(bob)).to include status.id
expect(home_feed_of(tom)).to include status.id
end
it 'is not broadcast publicly' do
expect(redis).to_not have_received(:publish).with('timeline:hashtag:hoge', anything)
expect(redis).to_not have_received(:publish).with('timeline:public', anything)
end
end
context 'when status is direct' do
let(:visibility) { 'direct' }
it 'is added to the home feed of its author' do
expect(home_feed_of(alice)).to include status.id
end
it 'is added to the home feed of the mentioned follower' do
expect(home_feed_of(bob)).to include status.id
end
it 'is not added to the home feed of the other follower' do
expect(home_feed_of(tom)).to_not include status.id
end
it 'is not broadcast publicly' do
expect(redis).to_not have_received(:publish).with('timeline:hashtag:hoge', anything)
expect(redis).to_not have_received(:publish).with('timeline:public', anything)
end
end
end
|
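What the spec above expects on the Redis channels is the service's `anonymous_payload`: the rendered status wrapped in either an `update` or a `status.update` event, depending on whether the fan-out is for a new post or for an edit. A small sketch of that envelope, using stdlib JSON instead of Oj and a pre-rendered Hash standing in for InlineRenderer output (both assumptions for illustration):
```ruby
require 'json'

# Sketch of FanOutOnWriteService#anonymous_payload with stdlib JSON in place of Oj.
# `rendered_status` stands in for what InlineRenderer.render would produce.
def anonymous_payload(rendered_status, update:)
  JSON.dump(
    event: update ? 'status.update' : 'update',
    payload: rendered_status
  )
end

rendered = { 'id' => '1', 'content' => '<p>Hello</p>', 'visibility' => 'public' }

puts anonymous_payload(rendered, update: false) # published on the first fan-out
puts anonymous_payload(rendered, update: true)  # published when an edit is fanned out
```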
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ResolveURLService < BaseService
include JsonLdHelper
include Authorization
USERNAME_STATUS_RE = %r{/@(?<username>#{Account::USERNAME_RE})/(?<status_id>[0-9]+)\Z}
def call(url, on_behalf_of: nil)
@url = url
@on_behalf_of = on_behalf_of
if local_url?
process_local_url
elsif !fetched_resource.nil?
process_url
else
process_url_from_db
end
end
private
def process_url
if equals_or_includes_any?(type, ActivityPub::FetchRemoteActorService::SUPPORTED_TYPES)
ActivityPub::FetchRemoteActorService.new.call(resource_url, prefetched_body: body)
elsif equals_or_includes_any?(type, ActivityPub::Activity::Create::SUPPORTED_TYPES + ActivityPub::Activity::Create::CONVERTED_TYPES)
status = FetchRemoteStatusService.new.call(resource_url, prefetched_body: body)
authorize_with @on_behalf_of, status, :show? unless status.nil?
status
end
end
def process_url_from_db
if [500, 502, 503, 504, nil].include?(fetch_resource_service.response_code)
account = Account.find_by(uri: @url)
return account unless account.nil?
end
return unless @on_behalf_of.present? && [401, 403, 404].include?(fetch_resource_service.response_code)
# It may happen that the resource is a private toot, and thus not fetchable,
# but we can return the toot if we already know about it.
scope = Status.where(uri: @url)
# We don't have an index on `url`, so try guessing the `uri` from `url`
parsed_url = Addressable::URI.parse(@url)
parsed_url.path.match(USERNAME_STATUS_RE) do |matched|
parsed_url.path = "/users/#{matched[:username]}/statuses/#{matched[:status_id]}"
scope = scope.or(Status.where(uri: parsed_url.to_s, url: @url))
end
status = scope.first
authorize_with @on_behalf_of, status, :show? unless status.nil?
status
rescue Mastodon::NotPermittedError
nil
end
def fetched_resource
@fetched_resource ||= fetch_resource_service.call(@url)
end
def fetch_resource_service
@fetch_resource_service ||= FetchResourceService.new
end
def resource_url
fetched_resource.first
end
def body
fetched_resource.second[:prefetched_body]
end
def type
json_data['type']
end
def json_data
@json_data ||= body_to_json(body)
end
def local_url?
TagManager.instance.local_url?(@url)
end
def process_local_url
recognized_params = Rails.application.routes.recognize_path(@url)
case recognized_params[:controller]
when 'statuses'
return unless recognized_params[:action] == 'show'
status = Status.find_by(id: recognized_params[:id])
check_local_status(status)
when 'accounts'
return unless recognized_params[:action] == 'show'
Account.find_local(recognized_params[:username])
when 'home'
return unless recognized_params[:action] == 'index' && recognized_params[:username_with_domain].present?
if recognized_params[:any]&.match?(/\A[0-9]+\Z/)
status = Status.find_by(id: recognized_params[:any])
check_local_status(status)
elsif recognized_params[:any].blank?
username, domain = recognized_params[:username_with_domain].gsub(/\A@/, '').split('@')
return unless username.present? && domain.present?
Account.find_remote(username, domain)
end
end
end
def check_local_status(status)
return if status.nil?
authorize_with @on_behalf_of, status, :show?
status
rescue Mastodon::NotPermittedError
nil
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
describe ResolveURLService, type: :service do
subject { described_class.new }
describe '#call' do
it 'returns nil when there is no resource url' do
url = 'http://example.com/missing-resource'
Fabricate(:account, uri: url, domain: 'example.com')
service = instance_double(FetchResourceService)
allow(FetchResourceService).to receive(:new).and_return service
allow(service).to receive(:response_code).and_return(404)
allow(service).to receive(:call).with(url).and_return(nil)
expect(subject.call(url)).to be_nil
end
it 'returns known account on temporary error' do
url = 'http://example.com/missing-resource'
known_account = Fabricate(:account, uri: url, domain: 'example.com')
service = instance_double(FetchResourceService)
allow(FetchResourceService).to receive(:new).and_return service
allow(service).to receive(:response_code).and_return(500)
allow(service).to receive(:call).with(url).and_return(nil)
expect(subject.call(url)).to eq known_account
end
context 'when searching for a remote private status' do
let(:account) { Fabricate(:account) }
let(:poster) { Fabricate(:account, domain: 'example.com') }
let(:url) { 'https://example.com/@foo/42' }
let(:uri) { 'https://example.com/users/foo/statuses/42' }
let!(:status) { Fabricate(:status, url: url, uri: uri, account: poster, visibility: :private) }
before do
stub_request(:get, url).to_return(status: 404) if url.present?
stub_request(:get, uri).to_return(status: 404)
end
context 'when the account follows the poster' do
before do
account.follow!(poster)
end
context 'when the status uses Mastodon-style URLs' do
let(:url) { 'https://example.com/@foo/42' }
let(:uri) { 'https://example.com/users/foo/statuses/42' }
it 'returns status by url' do
expect(subject.call(url, on_behalf_of: account)).to eq(status)
end
it 'returns status by uri' do
expect(subject.call(uri, on_behalf_of: account)).to eq(status)
end
end
context 'when the status uses pleroma-style URLs' do
let(:url) { nil }
let(:uri) { 'https://example.com/objects/0123-456-789-abc-def' }
it 'returns status by uri' do
expect(subject.call(uri, on_behalf_of: account)).to eq(status)
end
end
end
context 'when the account does not follow the poster' do
context 'when the status uses Mastodon-style URLs' do
let(:url) { 'https://example.com/@foo/42' }
let(:uri) { 'https://example.com/users/foo/statuses/42' }
it 'does not return the status by url' do
expect(subject.call(url, on_behalf_of: account)).to be_nil
end
it 'does not return the status by uri' do
expect(subject.call(uri, on_behalf_of: account)).to be_nil
end
end
context 'when the status uses pleroma-style URLs' do
let(:url) { nil }
let(:uri) { 'https://example.com/objects/0123-456-789-abc-def' }
it 'returns status by uri' do
expect(subject.call(uri, on_behalf_of: account)).to be_nil
end
end
end
end
context 'when searching for a local private status' do
let(:account) { Fabricate(:account) }
let(:poster) { Fabricate(:account) }
let!(:status) { Fabricate(:status, account: poster, visibility: :private) }
let(:url) { ActivityPub::TagManager.instance.url_for(status) }
let(:uri) { ActivityPub::TagManager.instance.uri_for(status) }
context 'when the account follows the poster' do
before do
account.follow!(poster)
end
it 'returns status by url' do
expect(subject.call(url, on_behalf_of: account)).to eq(status)
end
it 'returns status by uri' do
expect(subject.call(uri, on_behalf_of: account)).to eq(status)
end
end
context 'when the account does not follow the poster' do
it 'does not return the status by url' do
expect(subject.call(url, on_behalf_of: account)).to be_nil
end
it 'does not return the status by uri' do
expect(subject.call(uri, on_behalf_of: account)).to be_nil
end
end
end
context 'when searching for a link that redirects to a local public status' do
let(:account) { Fabricate(:account) }
let(:poster) { Fabricate(:account) }
let!(:status) { Fabricate(:status, account: poster, visibility: :public) }
let(:url) { 'https://link.to/foobar' }
let(:status_url) { ActivityPub::TagManager.instance.url_for(status) }
let(:uri) { ActivityPub::TagManager.instance.uri_for(status) }
before do
stub_request(:get, url).to_return(status: 302, headers: { 'Location' => status_url })
body = ActiveModelSerializers::SerializableResource.new(status, serializer: ActivityPub::NoteSerializer, adapter: ActivityPub::Adapter).to_json
stub_request(:get, status_url).to_return(body: body, headers: { 'Content-Type' => 'application/activity+json' })
end
it 'returns status by url' do
expect(subject.call(url, on_behalf_of: account)).to eq(status)
end
end
context 'when searching for a local link of a remote private status' do
let(:account) { Fabricate(:account) }
let(:poster) { Fabricate(:account, username: 'foo', domain: 'example.com') }
let(:url) { 'https://example.com/@foo/42' }
let(:uri) { 'https://example.com/users/foo/statuses/42' }
let!(:status) { Fabricate(:status, url: url, uri: uri, account: poster, visibility: :private) }
let(:search_url) { "https://#{Rails.configuration.x.local_domain}/@[email protected]/#{status.id}" }
before do
stub_request(:get, url).to_return(status: 404) if url.present?
stub_request(:get, uri).to_return(status: 404)
end
context 'when the account follows the poster' do
before do
account.follow!(poster)
end
it 'returns the status' do
expect(subject.call(search_url, on_behalf_of: account)).to eq(status)
end
end
context 'when the account does not follow the poster' do
it 'does not return the status' do
expect(subject.call(search_url, on_behalf_of: account)).to be_nil
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
require 'zip'
class BackupService < BaseService
include Payloadable
include ContextHelper
attr_reader :account, :backup
def call(backup)
@backup = backup
@account = backup.user.account
build_archive!
end
private
def build_outbox_json!(file)
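# Serialize the collection skeleton with a placeholder item, split the JSON
# string around that placeholder, then stream activities between the two
# halves in batches so the whole outbox never has to be held in memory.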
skeleton = serialize(collection_presenter, ActivityPub::CollectionSerializer)
skeleton[:@context] = full_context
skeleton[:orderedItems] = ['!PLACEHOLDER!']
skeleton = Oj.dump(skeleton)
prepend, append = skeleton.split('"!PLACEHOLDER!"')
add_comma = false
file.write(prepend)
account.statuses.with_includes.reorder(nil).find_in_batches do |statuses|
file.write(',') if add_comma
add_comma = true
file.write(statuses.map do |status|
item = serialize_payload(ActivityPub::ActivityPresenter.from_status(status), ActivityPub::ActivitySerializer)
item.delete('@context')
unless item[:type] == 'Announce' || item[:object][:attachment].blank?
item[:object][:attachment].each do |attachment|
attachment[:url] = Addressable::URI.parse(attachment[:url]).path.delete_prefix('/system/')
end
end
Oj.dump(item)
end.join(','))
GC.start
end
file.write(append)
end
def build_archive!
tmp_file = Tempfile.new(%w(archive .zip))
Zip::File.open(tmp_file, create: true) do |zipfile|
dump_outbox!(zipfile)
dump_media_attachments!(zipfile)
dump_likes!(zipfile)
dump_bookmarks!(zipfile)
dump_actor!(zipfile)
end
archive_filename = "#{['archive', Time.now.utc.strftime('%Y%m%d%H%M%S'), SecureRandom.hex(16)].join('-')}.zip"
@backup.dump = ActionDispatch::Http::UploadedFile.new(tempfile: tmp_file, filename: archive_filename)
@backup.processed = true
@backup.save!
ensure
tmp_file.close
tmp_file.unlink
end
def dump_media_attachments!(zipfile)
MediaAttachment.attached.where(account: account).find_in_batches do |media_attachments|
media_attachments.each do |m|
path = m.file&.path
next unless path
path = path.gsub(%r{\A.*/system/}, '')
path = path.gsub(%r{\A/+}, '')
download_to_zip(zipfile, m.file, path)
end
GC.start
end
end
def dump_outbox!(zipfile)
zipfile.get_output_stream('outbox.json') do |io|
build_outbox_json!(io)
end
end
def dump_actor!(zipfile)
actor = serialize(account, ActivityPub::ActorSerializer)
actor[:icon][:url] = "avatar#{File.extname(actor[:icon][:url])}" if actor[:icon]
actor[:image][:url] = "header#{File.extname(actor[:image][:url])}" if actor[:image]
actor[:outbox] = 'outbox.json'
actor[:likes] = 'likes.json'
actor[:bookmarks] = 'bookmarks.json'
download_to_zip(zipfile, account.avatar, "avatar#{File.extname(account.avatar.path)}") if account.avatar.exists?
download_to_zip(zipfile, account.header, "header#{File.extname(account.header.path)}") if account.header.exists?
json = Oj.dump(actor)
zipfile.get_output_stream('actor.json') do |io|
io.write(json)
end
end
def dump_likes!(zipfile)
skeleton = serialize(ActivityPub::CollectionPresenter.new(id: 'likes.json', type: :ordered, size: 0, items: []), ActivityPub::CollectionSerializer)
skeleton.delete(:totalItems)
skeleton[:orderedItems] = ['!PLACEHOLDER!']
skeleton = Oj.dump(skeleton)
prepend, append = skeleton.split('"!PLACEHOLDER!"')
zipfile.get_output_stream('likes.json') do |io|
io.write(prepend)
add_comma = false
Status.reorder(nil).joins(:favourites).includes(:account).merge(account.favourites).find_in_batches do |statuses|
io.write(',') if add_comma
add_comma = true
io.write(statuses.map do |status|
Oj.dump(ActivityPub::TagManager.instance.uri_for(status))
end.join(','))
GC.start
end
io.write(append)
end
end
def dump_bookmarks!(zipfile)
skeleton = serialize(ActivityPub::CollectionPresenter.new(id: 'bookmarks.json', type: :ordered, size: 0, items: []), ActivityPub::CollectionSerializer)
skeleton.delete(:totalItems)
skeleton[:orderedItems] = ['!PLACEHOLDER!']
skeleton = Oj.dump(skeleton)
prepend, append = skeleton.split('"!PLACEHOLDER!"')
zipfile.get_output_stream('bookmarks.json') do |io|
io.write(prepend)
add_comma = false
Status.reorder(nil).joins(:bookmarks).includes(:account).merge(account.bookmarks).find_in_batches do |statuses|
io.write(',') if add_comma
add_comma = true
io.write(statuses.map do |status|
Oj.dump(ActivityPub::TagManager.instance.uri_for(status))
end.join(','))
GC.start
end
io.write(append)
end
end
def collection_presenter
ActivityPub::CollectionPresenter.new(
id: 'outbox.json',
type: :ordered,
size: account.statuses_count,
items: []
)
end
def serialize(object, serializer)
ActiveModelSerializers::SerializableResource.new(
object,
serializer: serializer,
adapter: ActivityPub::Adapter
).as_json
end
CHUNK_SIZE = 1.megabyte
def download_to_zip(zipfile, attachment, filename)
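# Stream the attachment into the archive in CHUNK_SIZE pieces; missing files
# and storage errors are logged as warnings and skipped so the backup can continue.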
adapter = Paperclip.io_adapters.for(attachment)
zipfile.get_output_stream(filename) do |io|
while (buffer = adapter.read(CHUNK_SIZE))
io.write(buffer)
end
end
rescue Errno::ENOENT, Seahorse::Client::NetworkingError => e
Rails.logger.warn "Could not backup file #{filename}: #{e}"
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe BackupService, type: :service do
subject(:service_call) { described_class.new.call(backup) }
let!(:user) { Fabricate(:user) }
let!(:attachment) { Fabricate(:media_attachment, account: user.account) }
let!(:status) { Fabricate(:status, account: user.account, text: 'Hello', visibility: :public, media_attachments: [attachment]) }
let!(:private_status) { Fabricate(:status, account: user.account, text: 'secret', visibility: :private) }
let!(:favourite) { Fabricate(:favourite, account: user.account) }
let!(:bookmark) { Fabricate(:bookmark, account: user.account) }
let!(:backup) { Fabricate(:backup, user: user) }
def read_zip_file(backup, filename)
file = Paperclip.io_adapters.for(backup.dump)
Zip::File.open(file) do |zipfile|
entry = zipfile.glob(filename).first
return entry.get_input_stream.read
end
end
context 'when the user has an avatar and header' do
before do
user.account.update!(avatar: attachment_fixture('avatar.gif'))
user.account.update!(header: attachment_fixture('emojo.png'))
end
it 'stores them as expected' do
service_call
json = export_json(:actor)
avatar_path = json.dig('icon', 'url')
header_path = json.dig('image', 'url')
expect(avatar_path).to_not be_nil
expect(header_path).to_not be_nil
expect(read_zip_file(backup, avatar_path)).to be_present
expect(read_zip_file(backup, header_path)).to be_present
end
end
it 'marks the backup as processed and exports files' do
expect { service_call }.to process_backup
expect_outbox_export
expect_likes_export
expect_bookmarks_export
end
def process_backup
change(backup, :processed).from(false).to(true)
end
def expect_outbox_export
json = export_json(:outbox)
aggregate_failures do
expect(json['@context']).to_not be_nil
expect(json['type']).to eq 'OrderedCollection'
expect(json['totalItems']).to eq 2
expect(json['orderedItems'][0]['@context']).to be_nil
expect(json['orderedItems'][0]).to include_create_item(status)
expect(json['orderedItems'][1]).to include_create_item(private_status)
end
end
def expect_likes_export
json = export_json(:likes)
aggregate_failures do
expect(json['type']).to eq 'OrderedCollection'
expect(json['orderedItems']).to eq [ActivityPub::TagManager.instance.uri_for(favourite.status)]
end
end
def expect_bookmarks_export
json = export_json(:bookmarks)
aggregate_failures do
expect(json['type']).to eq 'OrderedCollection'
expect(json['orderedItems']).to eq [ActivityPub::TagManager.instance.uri_for(bookmark.status)]
end
end
def export_json(type)
Oj.load(read_zip_file(backup, "#{type}.json"))
end
def include_create_item(status)
include({
'type' => 'Create',
'object' => include({
'id' => ActivityPub::TagManager.instance.uri_for(status),
'content' => "<p>#{status.text}</p>",
}),
})
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class AccountStatusesCleanupService < BaseService
# @param [AccountStatusesCleanupPolicy] account_policy
# @param [Integer] budget
# @return [Integer]
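#
# A minimal usage sketch (illustrative, not part of the original source):
#   deleted_count = AccountStatusesCleanupService.new.call(policy, 10)
# removes at most 10 statuses covered by the enabled policy and returns the
# number actually deleted.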
def call(account_policy, budget = 50)
return 0 unless account_policy.enabled?
cutoff_id = account_policy.compute_cutoff_id
return 0 if cutoff_id.blank?
num_deleted = 0
last_deleted = nil
account_policy.statuses_to_delete(budget, cutoff_id, account_policy.last_inspected).reorder(nil).find_each(order: :asc) do |status|
status.discard_with_reblogs
RemovalWorker.perform_async(status.id, { 'redraft' => false })
num_deleted += 1
last_deleted = status.id
end
account_policy.record_last_inspected(last_deleted.presence || cutoff_id)
num_deleted
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
describe AccountStatusesCleanupService, type: :service do
let(:account) { Fabricate(:account, username: 'alice', domain: nil) }
let(:account_policy) { Fabricate(:account_statuses_cleanup_policy, account: account) }
let!(:unrelated_status) { Fabricate(:status, created_at: 3.years.ago) }
describe '#call' do
context 'when the account has not posted anything' do
it 'returns 0 deleted toots' do
expect(subject.call(account_policy)).to eq 0
end
end
context 'when the account has posted several old statuses' do
let!(:very_old_status) { Fabricate(:status, created_at: 3.years.ago, account: account) }
let!(:old_status) { Fabricate(:status, created_at: 1.year.ago, account: account) }
let!(:another_old_status) { Fabricate(:status, created_at: 1.year.ago, account: account) }
let!(:recent_status) { Fabricate(:status, created_at: 1.day.ago, account: account) }
context 'when given a budget of 1' do
it 'reports 1 deleted toot' do
expect(subject.call(account_policy, 1)).to eq 1
end
end
context 'when given a normal budget of 10' do
it 'reports 3 deleted statuses' do
expect(subject.call(account_policy, 10)).to eq 3
end
it 'records the last deleted id' do
subject.call(account_policy, 10)
expect(account_policy.last_inspected).to eq [old_status.id, another_old_status.id].max
end
it 'actually deletes the statuses' do
subject.call(account_policy, 10)
expect(Status.find_by(id: [very_old_status.id, old_status.id, another_old_status.id])).to be_nil
end
end
context 'when called repeatedly with a budget of 2' do
it 'reports 2 then 1 deleted statuses' do
expect(subject.call(account_policy, 2)).to eq 2
expect(subject.call(account_policy, 2)).to eq 1
end
it 'actually deletes the statuses in the expected order' do
subject.call(account_policy, 2)
expect(Status.find_by(id: very_old_status.id)).to be_nil
subject.call(account_policy, 2)
expect(Status.find_by(id: [very_old_status.id, old_status.id, another_old_status.id])).to be_nil
end
end
context 'when a self-faved toot is unfaved' do
let!(:self_faved) { Fabricate(:status, created_at: 6.months.ago, account: account) }
let!(:favourite) { Fabricate(:favourite, account: account, status: self_faved) }
it 'deletes it once unfaved' do
expect(subject.call(account_policy, 20)).to eq 3
expect(Status.find_by(id: self_faved.id)).to_not be_nil
expect(subject.call(account_policy, 20)).to eq 0
favourite.destroy!
expect(subject.call(account_policy, 20)).to eq 1
expect(Status.find_by(id: self_faved.id)).to be_nil
end
end
context 'when there are more un-deletable old toots than the early search cutoff' do
before do
stub_const 'AccountStatusesCleanupPolicy::EARLY_SEARCH_CUTOFF', 5
# Old statuses that should be cut off
10.times do
Fabricate(:status, created_at: 4.years.ago, visibility: :direct, account: account)
end
# New statuses that prevent the cutoff id from reaching the last status
10.times do
Fabricate(:status, created_at: 4.seconds.ago, visibility: :direct, account: account)
end
end
it 'reports 0 deleted statuses then 0 then 3 then 0 again' do
expect(subject.call(account_policy, 10)).to eq 0
expect(subject.call(account_policy, 10)).to eq 0
expect(subject.call(account_policy, 10)).to eq 3
expect(subject.call(account_policy, 10)).to eq 0
end
it 'never causes the recorded id to get higher than oldest deletable toot' do
subject.call(account_policy, 10)
subject.call(account_policy, 10)
subject.call(account_policy, 10)
subject.call(account_policy, 10)
expect(account_policy.last_inspected).to be < Mastodon::Snowflake.id_at(account_policy.min_status_age.seconds.ago, with_random: false)
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class FetchOEmbedService
ENDPOINT_CACHE_EXPIRES_IN = 24.hours.freeze
URL_REGEX = %r{(=(https?(%3A|:)(//|%2F%2F)))([^&]*)}i
attr_reader :url, :options, :format, :endpoint_url
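# A hedged usage sketch (illustrative only; the URLs below are placeholders):
#   FetchOEmbedService.new.call('https://host.example/page.html', format: :json)
#   FetchOEmbedService.new.call(url, cached_endpoint: { endpoint: 'https://host.example/oembed?url={url}', format: :json })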
def call(url, options = {})
@url = url
@options = options
if @options[:cached_endpoint]
parse_cached_endpoint!
else
discover_endpoint!
end
fetch!
end
private
def discover_endpoint!
return if html.nil?
@format = @options[:format]
page = Nokogiri::HTML(html)
if @format.nil? || @format == :json
@endpoint_url ||= page.at_xpath('//link[@type="application/json+oembed"]|//link[@type="text/json+oembed"]')&.attribute('href')&.value
@format ||= :json if @endpoint_url
end
if @format.nil? || @format == :xml
@endpoint_url ||= page.at_xpath('//link[@type="text/xml+oembed"]')&.attribute('href')&.value
@format ||= :xml if @endpoint_url
end
return if @endpoint_url.blank?
@endpoint_url = begin
base_url = Addressable::URI.parse(@url)
# If the OEmbed endpoint is given as http but the URL we opened
# was served over https, we can assume OEmbed will be available
# through https as well
(base_url + @endpoint_url).tap do |absolute_url|
absolute_url.scheme = base_url.scheme if base_url.scheme == 'https'
end.to_s
end
cache_endpoint!
rescue Addressable::URI::InvalidURIError
@endpoint_url = nil
end
def parse_cached_endpoint!
cached = @options[:cached_endpoint]
return if cached[:endpoint].nil? || cached[:format].nil?
@endpoint_url = Addressable::Template.new(cached[:endpoint]).expand(url: @url).to_s
@format = cached[:format]
end
def cache_endpoint!
return unless URL_REGEX.match?(@endpoint_url)
url_domain = Addressable::URI.parse(@url).normalized_host
endpoint_hash = {
endpoint: @endpoint_url.gsub(URL_REGEX, '={url}'),
format: @format,
}
Rails.cache.write("oembed_endpoint:#{url_domain}", endpoint_hash, expires_in: ENDPOINT_CACHE_EXPIRES_IN)
end
def fetch!
return if @endpoint_url.blank?
body = Request.new(:get, @endpoint_url).perform do |res|
res.code == 200 ? res.body_with_limit : nil
end
validate(parse_for_format(body)) if body.present?
rescue Oj::ParseError, Ox::ParseError
nil
end
def parse_for_format(body)
case @format
when :json
Oj.load(body, mode: :strict)&.with_indifferent_access
when :xml
Ox.load(body, mode: :hash_no_attrs)&.with_indifferent_access&.dig(:oembed)
end
end
def validate(oembed)
oembed if oembed[:version].to_s == '1.0' && oembed[:type].present?
end
def html
return @html if defined?(@html)
@html = @options[:html] || Request.new(:get, @url).add_headers('Accept' => 'text/html').perform do |res|
res.code != 200 || res.mime_type != 'text/html' ? nil : res.body_with_limit
end
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
describe FetchOEmbedService, type: :service do
subject { described_class.new }
before do
stub_request(:get, 'https://host.test/provider.json').to_return(status: 404)
stub_request(:get, 'https://host.test/provider.xml').to_return(status: 404)
stub_request(:get, 'https://host.test/empty_provider.json').to_return(status: 200)
end
describe 'discover_provider' do
context 'when status code is 200 and MIME type is text/html' do
context 'when OEmbed endpoint contains URL as parameter' do
before do
stub_request(:get, 'https://www.youtube.com/watch?v=IPSbNdBmWKE').to_return(
status: 200,
headers: { 'Content-Type': 'text/html' },
body: request_fixture('oembed_youtube.html')
)
stub_request(:get, 'https://www.youtube.com/oembed?format=json&url=https%3A%2F%2Fwww.youtube.com%2Fwatch%3Fv%3DIPSbNdBmWKE').to_return(
status: 200,
headers: { 'Content-Type': 'text/html' },
body: request_fixture('oembed_json_empty.html')
)
end
it 'returns new OEmbed::Provider for JSON provider' do
subject.call('https://www.youtube.com/watch?v=IPSbNdBmWKE')
expect(subject.endpoint_url).to eq 'https://www.youtube.com/oembed?format=json&url=https%3A%2F%2Fwww.youtube.com%2Fwatch%3Fv%3DIPSbNdBmWKE'
expect(subject.format).to eq :json
end
it 'stores URL template' do
subject.call('https://www.youtube.com/watch?v=IPSbNdBmWKE')
expect(Rails.cache.read('oembed_endpoint:www.youtube.com')[:endpoint]).to eq 'https://www.youtube.com/oembed?format=json&url={url}'
end
end
context 'when both JSON and XML providers are discoverable' do
before do
stub_request(:get, 'https://host.test/oembed.html').to_return(
status: 200,
headers: { 'Content-Type': 'text/html' },
body: request_fixture('oembed_json_xml.html')
)
end
it 'returns new OEmbed::Provider for JSON provider if :format option is set to :json' do
subject.call('https://host.test/oembed.html', format: :json)
expect(subject.endpoint_url).to eq 'https://host.test/provider.json'
expect(subject.format).to eq :json
end
it 'returns new OEmbed::Provider for XML provider if :format option is set to :xml' do
subject.call('https://host.test/oembed.html', format: :xml)
expect(subject.endpoint_url).to eq 'https://host.test/provider.xml'
expect(subject.format).to eq :xml
end
it 'does not cache OEmbed endpoint' do
subject.call('https://host.test/oembed.html', format: :xml)
expect(Rails.cache.exist?('oembed_endpoint:host.test')).to be false
end
end
context 'when the JSON provider is discoverable while the XML provider is not' do
before do
stub_request(:get, 'https://host.test/oembed.html').to_return(
status: 200,
headers: { 'Content-Type': 'text/html' },
body: request_fixture('oembed_json.html')
)
end
it 'returns new OEmbed::Provider for JSON provider' do
subject.call('https://host.test/oembed.html')
expect(subject.endpoint_url).to eq 'https://host.test/provider.json'
expect(subject.format).to eq :json
end
it 'does not cache OEmbed endpoint' do
subject.call('https://host.test/oembed.html')
expect(Rails.cache.exist?('oembed_endpoint:host.test')).to be false
end
end
context 'when the XML provider is discoverable while the JSON provider is not' do
before do
stub_request(:get, 'https://host.test/oembed.html').to_return(
status: 200,
headers: { 'Content-Type': 'text/html' },
body: request_fixture('oembed_xml.html')
)
end
it 'returns new OEmbed::Provider for XML provider' do
subject.call('https://host.test/oembed.html')
expect(subject.endpoint_url).to eq 'https://host.test/provider.xml'
expect(subject.format).to eq :xml
end
it 'does not cache OEmbed endpoint' do
subject.call('https://host.test/oembed.html')
expect(Rails.cache.exist?('oembed_endpoint:host.test')).to be false
end
end
context 'when an invalid XML provider is discoverable while the JSON provider is not' do
before do
stub_request(:get, 'https://host.test/oembed.html').to_return(
status: 200,
headers: { 'Content-Type': 'text/html' },
body: request_fixture('oembed_invalid_xml.html')
)
end
it 'returns nil' do
expect(subject.call('https://host.test/oembed.html')).to be_nil
end
end
context 'when neither a JSON nor an XML provider is discoverable' do
before do
stub_request(:get, 'https://host.test/oembed.html').to_return(
status: 200,
headers: { 'Content-Type': 'text/html' },
body: request_fixture('oembed_undiscoverable.html')
)
end
it 'returns nil' do
expect(subject.call('https://host.test/oembed.html')).to be_nil
end
end
context 'when an empty JSON provider is discoverable' do
before do
stub_request(:get, 'https://host.test/oembed.html').to_return(
status: 200,
headers: { 'Content-Type': 'text/html' },
body: request_fixture('oembed_json_empty.html')
)
end
it 'returns new OEmbed::Provider for JSON provider' do
subject.call('https://host.test/oembed.html')
expect(subject.endpoint_url).to eq 'https://host.test/empty_provider.json'
expect(subject.format).to eq :json
end
end
end
context 'when endpoint is cached' do
before do
stub_request(:get, 'http://www.youtube.com/oembed?format=json&url=https://www.youtube.com/watch?v=dqwpQarrDwk').to_return(
status: 200,
headers: { 'Content-Type': 'text/html' },
body: request_fixture('oembed_json_empty.html')
)
end
it 'returns new provider without fetching original URL first' do
subject.call('https://www.youtube.com/watch?v=dqwpQarrDwk', cached_endpoint: { endpoint: 'http://www.youtube.com/oembed?format=json&url={url}', format: :json })
expect(a_request(:get, 'https://www.youtube.com/watch?v=dqwpQarrDwk')).to_not have_been_made
expect(subject.endpoint_url).to eq 'http://www.youtube.com/oembed?format=json&url=https%3A%2F%2Fwww.youtube.com%2Fwatch%3Fv%3DdqwpQarrDwk'
expect(subject.format).to eq :json
expect(a_request(:get, 'http://www.youtube.com/oembed?format=json&url=https%3A%2F%2Fwww.youtube.com%2Fwatch%3Fv%3DdqwpQarrDwk')).to have_been_made
end
end
context 'when status code is not 200' do
before do
stub_request(:get, 'https://host.test/oembed.html').to_return(
status: 400,
headers: { 'Content-Type': 'text/html' },
body: request_fixture('oembed_xml.html')
)
end
it 'returns nil' do
expect(subject.call('https://host.test/oembed.html')).to be_nil
end
end
context 'when MIME type is not text/html' do
before do
stub_request(:get, 'https://host.test/oembed.html').to_return(
status: 200,
body: request_fixture('oembed_xml.html')
)
end
it 'returns nil' do
expect(subject.call('https://host.test/oembed.html')).to be_nil
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class UpdateStatusService < BaseService
include Redisable
include LanguagesHelper
class NoChangesSubmittedError < StandardError; end
# @param [Status] status
# @param [Integer] account_id
# @param [Hash] options
# @option options [Array<Integer>] :media_ids
# @option options [Array<Hash>] :media_attributes
# @option options [Hash] :poll
# @option options [String] :text
# @option options [String] :spoiler_text
# @option options [Boolean] :sensitive
# @option options [String] :language
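#
# A minimal usage sketch (illustrative, not part of the original source):
#   UpdateStatusService.new.call(status, editor_account_id, text: 'Edited text', spoiler_text: '')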
def call(status, account_id, options = {})
@status = status
@options = options
@account_id = account_id
@media_attachments_changed = false
@poll_changed = false
Status.transaction do
create_previous_edit!
update_media_attachments! if @options.key?(:media_ids)
update_poll! if @options.key?(:poll)
update_immediate_attributes!
create_edit!
end
queue_poll_notifications!
reset_preview_card!
update_metadata!
broadcast_updates!
@status
rescue NoChangesSubmittedError
# For calls that result in no changes, swallow the error
# but get back to the original state
@status.reload
end
private
def update_media_attachments!
previous_media_attachments = @status.ordered_media_attachments.to_a
next_media_attachments = validate_media!
added_media_attachments = next_media_attachments - previous_media_attachments
(@options[:media_attributes] || []).each do |attributes|
media = next_media_attachments.find { |attachment| attachment.id == attributes[:id].to_i }
next if media.nil?
media.update!(attributes.slice(:thumbnail, :description, :focus))
@media_attachments_changed ||= media.significantly_changed?
end
MediaAttachment.where(id: added_media_attachments.map(&:id)).update_all(status_id: @status.id)
@status.ordered_media_attachment_ids = (@options[:media_ids] || []).map(&:to_i) & next_media_attachments.map(&:id)
@media_attachments_changed ||= previous_media_attachments.map(&:id) != @status.ordered_media_attachment_ids
@status.media_attachments.reload
end
def validate_media!
return [] if @options[:media_ids].blank? || !@options[:media_ids].is_a?(Enumerable)
raise Mastodon::ValidationError, I18n.t('media_attachments.validations.too_many') if @options[:media_ids].size > 4 || @options[:poll].present?
media_attachments = @status.account.media_attachments.where(status_id: [nil, @status.id]).where(scheduled_status_id: nil).where(id: @options[:media_ids].take(4).map(&:to_i)).to_a
raise Mastodon::ValidationError, I18n.t('media_attachments.validations.images_and_video') if media_attachments.size > 1 && media_attachments.find(&:audio_or_video?)
raise Mastodon::ValidationError, I18n.t('media_attachments.validations.not_ready') if media_attachments.any?(&:not_processed?)
media_attachments
end
def update_poll!
previous_poll = @status.preloadable_poll
@previous_expires_at = previous_poll&.expires_at
if @options[:poll].present?
poll = previous_poll || @status.account.polls.new(status: @status, votes_count: 0)
# If for some reason the options were changed, it invalidates all previous
# votes, so we need to remove them
@poll_changed = true if @options[:poll][:options] != poll.options || ActiveModel::Type::Boolean.new.cast(@options[:poll][:multiple]) != poll.multiple
poll.options = @options[:poll][:options]
poll.hide_totals = @options[:poll][:hide_totals] || false
poll.multiple = @options[:poll][:multiple] || false
poll.expires_in = @options[:poll][:expires_in]
poll.reset_votes! if @poll_changed
poll.save!
@status.poll_id = poll.id
elsif previous_poll.present?
previous_poll.destroy
@poll_changed = true
@status.poll_id = nil
end
@poll_changed = true if @previous_expires_at != @status.preloadable_poll&.expires_at
end
def update_immediate_attributes!
@status.text = @options[:text].presence || @options.delete(:spoiler_text) || '' if @options.key?(:text)
@status.spoiler_text = @options[:spoiler_text] || '' if @options.key?(:spoiler_text)
@status.sensitive = @options[:sensitive] || @options[:spoiler_text].present? if @options.key?(:sensitive) || @options.key?(:spoiler_text)
@status.language = valid_locale_cascade(@options[:language], @status.language, @status.account.user&.preferred_posting_language, I18n.default_locale)
# We raise here to rollback the entire transaction
raise NoChangesSubmittedError unless significant_changes?
@status.edited_at = Time.now.utc
@status.save!
end
def reset_preview_card!
return unless @status.text_previously_changed?
@status.reset_preview_card!
LinkCrawlWorker.perform_async(@status.id)
end
def update_metadata!
ProcessHashtagsService.new.call(@status)
ProcessMentionsService.new.call(@status)
end
def broadcast_updates!
DistributionWorker.perform_async(@status.id, { 'update' => true })
ActivityPub::StatusUpdateDistributionWorker.perform_async(@status.id)
end
def queue_poll_notifications!
poll = @status.preloadable_poll
# If the poll had no expiration date set but now has, or now has a sooner
# expiration date, schedule a notification
return unless poll.present? && poll.expires_at.present?
PollExpirationNotifyWorker.remove_from_scheduled(poll.id) if @previous_expires_at.present? && @previous_expires_at > poll.expires_at
PollExpirationNotifyWorker.perform_at(poll.expires_at + 5.minutes, poll.id)
end
def create_previous_edit!
# We only need to create a previous edit when no previous edits exist, e.g.
# when the status has never been edited. For other cases, we always create
# an edit, so the step can be skipped
return if @status.edits.any?
@status.snapshot!(at_time: @status.created_at, rate_limit: false)
end
def create_edit!
@status.snapshot!(account_id: @account_id)
end
def significant_changes?
@status.changed? || @poll_changed || @media_attachments_changed
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe UpdateStatusService, type: :service do
subject { described_class.new }
context 'when nothing changes' do
let!(:status) { Fabricate(:status, text: 'Foo', language: 'en') }
before do
allow(ActivityPub::DistributionWorker).to receive(:perform_async)
subject.call(status, status.account_id, text: 'Foo')
end
it 'does not create an edit' do
expect(status.reload.edits).to be_empty
end
it 'does not notify anyone' do
expect(ActivityPub::DistributionWorker).to_not have_received(:perform_async)
end
end
context 'when text changes' do
let(:status) { Fabricate(:status, text: 'Foo') }
let(:preview_card) { Fabricate(:preview_card) }
before do
PreviewCardsStatus.create(status: status, preview_card: preview_card)
subject.call(status, status.account_id, text: 'Bar')
end
it 'updates text' do
expect(status.reload.text).to eq 'Bar'
end
it 'resets preview card' do
expect(status.reload.preview_card).to be_nil
end
it 'saves edit history' do
expect(status.edits.pluck(:text)).to eq %w(Foo Bar)
end
end
context 'when content warning changes' do
let(:status) { Fabricate(:status, text: 'Foo', spoiler_text: '') }
let(:preview_card) { Fabricate(:preview_card) }
before do
PreviewCardsStatus.create(status: status, preview_card: preview_card)
subject.call(status, status.account_id, text: 'Foo', spoiler_text: 'Bar')
end
it 'updates content warning' do
expect(status.reload.spoiler_text).to eq 'Bar'
end
it 'saves edit history' do
expect(status.edits.pluck(:text, :spoiler_text)).to eq [['Foo', ''], ['Foo', 'Bar']]
end
end
context 'when media attachments change' do
let!(:status) { Fabricate(:status, text: 'Foo') }
let!(:detached_media_attachment) { Fabricate(:media_attachment, account: status.account) }
let!(:attached_media_attachment) { Fabricate(:media_attachment, account: status.account) }
before do
status.media_attachments << detached_media_attachment
subject.call(status, status.account_id, text: 'Foo', media_ids: [attached_media_attachment.id])
end
it 'updates media attachments' do
expect(status.ordered_media_attachments).to eq [attached_media_attachment]
end
it 'does not detach detached media attachments' do
expect(detached_media_attachment.reload.status_id).to eq status.id
end
it 'attaches attached media attachments' do
expect(attached_media_attachment.reload.status_id).to eq status.id
end
it 'saves edit history' do
expect(status.edits.pluck(:ordered_media_attachment_ids)).to eq [[detached_media_attachment.id], [attached_media_attachment.id]]
end
end
context 'when already-attached media changes' do
let!(:status) { Fabricate(:status, text: 'Foo') }
let!(:media_attachment) { Fabricate(:media_attachment, account: status.account, description: 'Old description') }
before do
status.media_attachments << media_attachment
subject.call(status, status.account_id, text: 'Foo', media_ids: [media_attachment.id], media_attributes: [{ id: media_attachment.id, description: 'New description' }])
end
it 'does not detach media attachment' do
expect(media_attachment.reload.status_id).to eq status.id
end
it 'updates the media attachment description' do
expect(media_attachment.reload.description).to eq 'New description'
end
it 'saves edit history' do
expect(status.edits.map { |edit| edit.ordered_media_attachments.map(&:description) }).to eq [['Old description'], ['New description']]
end
end
context 'when poll changes', :sidekiq_fake do
let(:account) { Fabricate(:account) }
let!(:status) { Fabricate(:status, text: 'Foo', account: account, poll_attributes: { options: %w(Foo Bar), account: account, multiple: false, hide_totals: false, expires_at: 7.days.from_now }) }
let!(:poll) { status.poll }
let!(:voter) { Fabricate(:account) }
before do
status.update(poll: poll)
VoteService.new.call(voter, poll, [0])
subject.call(status, status.account_id, text: 'Foo', poll: { options: %w(Bar Baz Foo), expires_in: 5.days.to_i })
end
it 'updates poll' do
poll = status.poll.reload
expect(poll.options).to eq %w(Bar Baz Foo)
end
it 'resets votes' do
poll = status.poll.reload
expect(poll.votes_count).to eq 0
expect(poll.votes.count).to eq 0
expect(poll.cached_tallies).to eq [0, 0, 0]
end
it 'saves edit history' do
expect(status.edits.pluck(:poll_options)).to eq [%w(Foo Bar), %w(Bar Baz Foo)]
end
it 'requeues expiration notification' do
poll = status.poll.reload
expect(PollExpirationNotifyWorker).to have_enqueued_sidekiq_job(poll.id).at(poll.expires_at + 5.minutes)
end
end
context 'when mentions in text change' do
let!(:account) { Fabricate(:account) }
let!(:alice) { Fabricate(:account, username: 'alice') }
let!(:bob) { Fabricate(:account, username: 'bob') }
let!(:status) { PostStatusService.new.call(account, text: 'Hello @alice') }
before do
subject.call(status, status.account_id, text: 'Hello @bob')
end
it 'changes mentions' do
expect(status.active_mentions.pluck(:account_id)).to eq [bob.id]
end
it 'keeps old mentions as silent mentions' do
expect(status.mentions.pluck(:account_id)).to contain_exactly(alice.id, bob.id)
end
end
context 'when hashtags in text change' do
let!(:account) { Fabricate(:account) }
let!(:status) { PostStatusService.new.call(account, text: 'Hello #foo') }
before do
subject.call(status, status.account_id, text: 'Hello #bar')
end
it 'changes tags' do
expect(status.tags.pluck(:name)).to eq %w(bar)
end
end
it 'notifies ActivityPub about the update' do
status = Fabricate(:status, text: 'Foo')
allow(ActivityPub::DistributionWorker).to receive(:perform_async)
subject.call(status, status.account_id, text: 'Bar')
expect(ActivityPub::DistributionWorker).to have_received(:perform_async)
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class BootstrapTimelineService < BaseService
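# Runs once for a freshly created account: auto-follows the inviter when the
# invite has autofollow enabled, and notifies every user who can manage users.
# A hedged usage sketch: BootstrapTimelineService.new.call(new_user.account)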
def call(source_account)
@source_account = source_account
autofollow_inviter!
notify_staff!
end
private
def autofollow_inviter!
return unless @source_account&.user&.invite&.autofollow?
FollowService.new.call(@source_account, @source_account.user.invite.user.account)
end
def notify_staff!
User.those_who_can(:manage_users).includes(:account).find_each do |user|
LocalNotificationWorker.perform_async(user.account_id, @source_account.id, 'Account', 'admin.sign_up')
end
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe BootstrapTimelineService, type: :service do
subject { described_class.new }
context 'when the new user has registered from an invite' do
let(:service) { instance_double(FollowService) }
let(:autofollow) { false }
let(:inviter) { Fabricate(:user, confirmed_at: 2.days.ago) }
let(:invite) { Fabricate(:invite, user: inviter, max_uses: nil, expires_at: 1.hour.from_now, autofollow: autofollow) }
let(:new_user) { Fabricate(:user, invite_code: invite.code) }
before do
allow(FollowService).to receive(:new).and_return(service)
allow(service).to receive(:call)
end
context 'when the invite has auto-follow enabled' do
let(:autofollow) { true }
it 'calls FollowService to follow the inviter' do
subject.call(new_user.account)
expect(service).to have_received(:call).with(new_user.account, inviter.account)
end
end
context 'when the invite does not have auto-follow enabled' do
let(:autofollow) { false }
it 'does not call FollowService to follow the inviter' do
subject.call(new_user.account)
expect(service).to_not have_received(:call)
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ResolveAccountService < BaseService
include DomainControlHelper
include WebfingerHelper
include Redisable
include Lockable
# Find or create an account record for a remote user. When creating,
# look up the user's webfinger and fetch ActivityPub data
# @param [String, Account] uri URI in the username@domain format or account record
# @param [Hash] options
# @option options [Boolean] :redirected Do not follow further Webfinger redirects
# @option options [Boolean] :skip_webfinger Do not attempt any webfinger query or refreshing account data
# @option options [Boolean] :skip_cache Get the latest data from origin even if cache is not due to update yet
# @option options [Boolean] :suppress_errors When failing, return nil instead of raising an error
# @return [Account]
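#
# A minimal usage sketch (illustrative, not part of the original source):
#   ResolveAccountService.new.call('[email protected]', skip_webfinger: true)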
def call(uri, options = {})
return if uri.blank?
process_options!(uri, options)
# First of all we want to check if we've got the account
# record with the URI already, and if so, we can exit early
return if domain_not_allowed?(@domain)
@account ||= Account.find_remote(@username, @domain)
return @account if @account&.local? || @domain.nil? || !webfinger_update_due?
# At this point we are in need of a Webfinger query, which may
# yield us a different username/domain through a redirect
process_webfinger!(@uri)
@domain = nil if TagManager.instance.local_domain?(@domain)
# Because the username/domain pair may be different than what
# we already checked, we need to check if we've already got
# the record with that URI, again
return if domain_not_allowed?(@domain)
@account ||= Account.find_remote(@username, @domain)
if gone_from_origin? && not_yet_deleted?
queue_deletion!
return
end
return @account if @account&.local? || gone_from_origin? || !webfinger_update_due?
# Now it is certain, it is definitely a remote account, and it
# either needs to be created, or updated from fresh data
fetch_account!
rescue Webfinger::Error => e
Rails.logger.debug { "Webfinger query for #{@uri} failed: #{e}" }
raise unless @options[:suppress_errors]
end
private
def process_options!(uri, options)
@options = { suppress_errors: true }.merge(options)
if uri.is_a?(Account)
@account = uri
@username = @account.username
@domain = @account.domain
else
@username, @domain = uri.strip.gsub(/\A@/, '').split('@')
end
@domain = if TagManager.instance.local_domain?(@domain)
nil
else
TagManager.instance.normalize_domain(@domain)
end
@uri = [@username, @domain].compact.join('@')
end
def process_webfinger!(uri)
@webfinger = webfinger!("acct:#{uri}")
confirmed_username, confirmed_domain = split_acct(@webfinger.subject)
if confirmed_username.casecmp(@username).zero? && confirmed_domain.casecmp(@domain).zero?
@username = confirmed_username
@domain = confirmed_domain
return
end
# Account doesn't match, so it may have been redirected
@webfinger = webfinger!("acct:#{confirmed_username}@#{confirmed_domain}")
@username, @domain = split_acct(@webfinger.subject)
raise Webfinger::RedirectError, "Too many webfinger redirects for URI #{uri} (stopped at #{@username}@#{@domain})" unless confirmed_username.casecmp(@username).zero? && confirmed_domain.casecmp(@domain).zero?
rescue Webfinger::GoneError
@gone = true
end
def split_acct(acct)
acct.delete_prefix('acct:').split('@').tap do |parts|
raise Webfinger::Error, 'Webfinger response is missing user or host value' unless parts.size == 2
end
end
def fetch_account!
return unless activitypub_ready?
with_redis_lock("resolve:#{@username}@#{@domain}") do
@account = ActivityPub::FetchRemoteAccountService.new.call(actor_url, suppress_errors: @options[:suppress_errors])
end
@account
end
def webfinger_update_due?
return false if @options[:check_delivery_availability] && !DeliveryFailureTracker.available?(@domain)
return false if @options[:skip_webfinger]
@options[:skip_cache] || @account.nil? || @account.possibly_stale?
end
def activitypub_ready?
['application/activity+json', 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"'].include?(@webfinger.link('self', 'type'))
end
def actor_url
@actor_url ||= @webfinger.link('self', 'href')
end
def gone_from_origin?
@gone
end
def not_yet_deleted?
@account.present? && [email protected]?
end
def queue_deletion!
@account.suspend!(origin: :remote)
AccountDeletionWorker.perform_async(@account.id, { 'reserve_username' => false, 'skip_activitypub' => true })
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ResolveAccountService, type: :service do
subject { described_class.new }
before do
stub_request(:get, 'https://example.com/.well-known/host-meta').to_return(status: 404)
stub_request(:get, 'https://quitter.no/avatar/7477-300-20160211190340.png').to_return(request_fixture('avatar.txt'))
stub_request(:get, 'https://ap.example.com/.well-known/webfinger?resource=acct:[email protected]').to_return(request_fixture('activitypub-webfinger.txt'))
stub_request(:get, 'https://ap.example.com/users/foo').to_return(request_fixture('activitypub-actor.txt'))
stub_request(:get, 'https://ap.example.com/users/foo.atom').to_return(request_fixture('activitypub-feed.txt'))
stub_request(:get, %r{https://ap\.example\.com/users/foo/\w+}).to_return(status: 404)
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:[email protected]').to_return(status: 410)
end
context 'when using skip_webfinger' do
context 'when account is known' do
let!(:remote_account) { Fabricate(:account, username: 'foo', domain: 'ap.example.com', protocol: 'activitypub') }
context 'when domain is banned' do
let!(:domain_block) { Fabricate(:domain_block, domain: 'ap.example.com', severity: :suspend) }
it 'does not return an account' do
expect(subject.call('[email protected]', skip_webfinger: true)).to be_nil
end
it 'does not make a webfinger query' do
subject.call('[email protected]', skip_webfinger: true)
expect(a_request(:get, 'https://ap.example.com/.well-known/webfinger?resource=acct:[email protected]')).to_not have_been_made
end
end
context 'when domain is not banned' do
it 'returns the expected account' do
expect(subject.call('[email protected]', skip_webfinger: true)).to eq remote_account
end
it 'does not make a webfinger query' do
subject.call('[email protected]', skip_webfinger: true)
expect(a_request(:get, 'https://ap.example.com/.well-known/webfinger?resource=acct:[email protected]')).to_not have_been_made
end
end
end
context 'when account is not known' do
it 'does not return an account' do
expect(subject.call('[email protected]', skip_webfinger: true)).to be_nil
end
it 'does not make a webfinger query' do
subject.call('[email protected]', skip_webfinger: true)
expect(a_request(:get, 'https://ap.example.com/.well-known/webfinger?resource=acct:[email protected]')).to_not have_been_made
end
end
end
context 'when there is an LRDD endpoint but no resolvable account' do
before do
stub_request(:get, 'https://quitter.no/.well-known/host-meta').to_return(request_fixture('.host-meta.txt'))
stub_request(:get, 'https://quitter.no/.well-known/webfinger?resource=acct:[email protected]').to_return(status: 404)
end
it 'returns nil' do
expect(subject.call('[email protected]')).to be_nil
end
end
context 'when there is no LRDD endpoint nor resolvable account' do
before do
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:[email protected]').to_return(status: 404)
end
it 'returns nil' do
expect(subject.call('[email protected]')).to be_nil
end
end
context 'when webfinger returns http gone' do
context 'with a previously known account' do
before do
Fabricate(:account, username: 'hoge', domain: 'example.com', last_webfingered_at: nil)
allow(AccountDeletionWorker).to receive(:perform_async)
end
it 'returns nil' do
expect(subject.call('[email protected]')).to be_nil
end
it 'queues account deletion worker' do
subject.call('[email protected]')
expect(AccountDeletionWorker).to have_received(:perform_async)
end
end
context 'with a previously unknown account' do
it 'returns nil' do
expect(subject.call('[email protected]')).to be_nil
end
end
end
context 'with a legitimate webfinger redirection' do
before do
webfinger = { subject: 'acct:[email protected]', links: [{ rel: 'self', href: 'https://ap.example.com/users/foo', type: 'application/activity+json' }] }
stub_request(:get, 'https://redirected.example.com/.well-known/webfinger?resource=acct:[email protected]').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
it 'returns new remote account' do
account = subject.call('[email protected]')
expect(account.activitypub?).to be true
expect(account.acct).to eq '[email protected]'
expect(account.inbox_url).to eq 'https://ap.example.com/users/foo/inbox'
end
end
context 'with a misconfigured redirection' do
before do
webfinger = { subject: 'acct:[email protected]', links: [{ rel: 'self', href: 'https://ap.example.com/users/foo', type: 'application/activity+json' }] }
stub_request(:get, 'https://redirected.example.com/.well-known/webfinger?resource=acct:[email protected]').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
it 'returns new remote account' do
account = subject.call('[email protected]')
expect(account.activitypub?).to be true
expect(account.acct).to eq '[email protected]'
expect(account.inbox_url).to eq 'https://ap.example.com/users/foo/inbox'
end
end
context 'with too many webfinger redirections' do
before do
webfinger = { subject: 'acct:[email protected]', links: [{ rel: 'self', href: 'https://ap.example.com/users/foo', type: 'application/activity+json' }] }
stub_request(:get, 'https://redirected.example.com/.well-known/webfinger?resource=acct:[email protected]').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
webfinger2 = { subject: 'acct:[email protected]', links: [{ rel: 'self', href: 'https://ap.example.com/users/foo', type: 'application/activity+json' }] }
stub_request(:get, 'https://evil.example.com/.well-known/webfinger?resource=acct:[email protected]').to_return(body: Oj.dump(webfinger2), headers: { 'Content-Type': 'application/jrd+json' })
end
it 'does not return a new remote account' do
expect(subject.call('[email protected]')).to be_nil
end
end
context 'with webfinger response subject missing a host value' do
let(:body) { Oj.dump({ subject: 'user@' }) }
let(:url) { 'https://host.example/.well-known/webfinger?resource=acct:[email protected]' }
before do
stub_request(:get, url).to_return(status: 200, body: body)
end
it 'returns nil with incomplete subject in response' do
expect(subject.call('[email protected]')).to be_nil
end
end
context 'with an ActivityPub account' do
it 'returns new remote account' do
account = subject.call('[email protected]')
expect(account.activitypub?).to be true
expect(account.domain).to eq 'ap.example.com'
expect(account.inbox_url).to eq 'https://ap.example.com/users/foo/inbox'
end
context 'with multiple types' do
before do
stub_request(:get, 'https://ap.example.com/users/foo').to_return(request_fixture('activitypub-actor-individual.txt'))
end
it 'returns new remote account' do
account = subject.call('[email protected]')
expect(account.activitypub?).to be true
expect(account.domain).to eq 'ap.example.com'
expect(account.inbox_url).to eq 'https://ap.example.com/users/foo/inbox'
expect(account.actor_type).to eq 'Person'
end
end
end
context 'with an already-known actor changing acct: URI' do
let!(:duplicate) { Fabricate(:account, username: 'foo', domain: 'old.example.com', uri: 'https://ap.example.com/users/foo') }
let!(:status) { Fabricate(:status, account: duplicate, text: 'foo') }
it 'returns new remote account' do
account = subject.call('[email protected]')
expect(account.activitypub?).to be true
expect(account.domain).to eq 'ap.example.com'
expect(account.inbox_url).to eq 'https://ap.example.com/users/foo/inbox'
expect(account.uri).to eq 'https://ap.example.com/users/foo'
end
it 'merges accounts' do
account = subject.call('[email protected]')
expect(status.reload.account_id).to eq account.id
expect(Account.where(uri: account.uri).count).to eq 1
end
end
context 'with an already-known acct: URI changing ActivityPub id' do
let!(:old_account) { Fabricate(:account, username: 'foo', domain: 'ap.example.com', uri: 'https://old.example.com/users/foo', last_webfingered_at: nil) }
let!(:status) { Fabricate(:status, account: old_account, text: 'foo') }
it 'returns new remote account' do
account = subject.call('[email protected]')
expect(account.activitypub?).to be true
expect(account.domain).to eq 'ap.example.com'
expect(account.inbox_url).to eq 'https://ap.example.com/users/foo/inbox'
expect(account.uri).to eq 'https://ap.example.com/users/foo'
end
end
it 'processes one remote account at a time using locks' do
wait_for_start = true
fail_occurred = false
return_values = Concurrent::Array.new
threads = Array.new(5) do
Thread.new do
true while wait_for_start
begin
return_values << described_class.new.call('[email protected]')
rescue ActiveRecord::RecordNotUnique
fail_occurred = true
ensure
RedisConfiguration.pool.checkin if Thread.current[:redis]
end
end
end
wait_for_start = false
threads.each(&:join)
expect(fail_occurred).to be false
expect(return_values).to_not include(nil)
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class FollowService < BaseService
include Redisable
include Payloadable
include DomainControlHelper
# Follow a remote user, notify remote user about the follow
# @param [Account] source_account From which to follow
# @param [Account] target_account Account to follow
# @param [Hash] options
# @option [Boolean] :reblogs Whether or not to show reblogs, defaults to true
# @option [Boolean] :notify Whether to create notifications about new posts, defaults to false
# @option [Array<String>] :languages Which languages to allow on the home feed from this account, defaults to all
# @option [Boolean] :bypass_locked
# @option [Boolean] :bypass_limit Allow following past the total follow number
# @option [Boolean] :with_rate_limit
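#
# A minimal usage sketch (illustrative, not part of the original source):
#   FollowService.new.call(alice, bob, reblogs: false)
# follows bob on behalf of alice while hiding bob's reblogs from alice's home feed.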
def call(source_account, target_account, options = {})
@source_account = source_account
@target_account = target_account
@options = { bypass_locked: false, bypass_limit: false, with_rate_limit: false }.merge(options)
raise ActiveRecord::RecordNotFound if following_not_possible?
raise Mastodon::NotPermittedError if following_not_allowed?
if @source_account.following?(@target_account)
return change_follow_options!
elsif @source_account.requested?(@target_account)
return change_follow_request_options!
end
ActivityTracker.increment('activity:interactions')
# When an account follows someone for the first time, avoid showing
# an empty home feed while the follow request is being processed
# and the feeds are being merged
mark_home_feed_as_partial! if @source_account.not_following_anyone?
if (@target_account.locked? && !@options[:bypass_locked]) || @source_account.silenced? || @target_account.activitypub?
request_follow!
elsif @target_account.local?
direct_follow!
end
end
private
def mark_home_feed_as_partial!
redis.set("account:#{@source_account.id}:regeneration", true, nx: true, ex: 1.day.seconds)
end
def following_not_possible?
@target_account.nil? || @target_account.id == @source_account.id || @target_account.unavailable?
end
def following_not_allowed?
domain_not_allowed?(@target_account.domain) || @target_account.blocking?(@source_account) || @source_account.blocking?(@target_account) || @target_account.moved? || (!@target_account.local? && @target_account.ostatus?) || @source_account.domain_blocking?(@target_account.domain)
end
def change_follow_options!
@source_account.follow!(@target_account, **follow_options)
end
def change_follow_request_options!
@source_account.request_follow!(@target_account, **follow_options)
end
def request_follow!
follow_request = @source_account.request_follow!(@target_account, **follow_options.merge(rate_limit: @options[:with_rate_limit], bypass_limit: @options[:bypass_limit]))
if @target_account.local?
LocalNotificationWorker.perform_async(@target_account.id, follow_request.id, follow_request.class.name, 'follow_request')
elsif @target_account.activitypub?
ActivityPub::DeliveryWorker.perform_async(build_json(follow_request), @source_account.id, @target_account.inbox_url, { 'bypass_availability' => true })
end
follow_request
end
def direct_follow!
follow = @source_account.follow!(@target_account, **follow_options.merge(rate_limit: @options[:with_rate_limit], bypass_limit: @options[:bypass_limit]))
LocalNotificationWorker.perform_async(@target_account.id, follow.id, follow.class.name, 'follow')
MergeWorker.perform_async(@target_account.id, @source_account.id)
follow
end
def build_json(follow_request)
Oj.dump(serialize_payload(follow_request, ActivityPub::FollowSerializer))
end
def follow_options
@options.slice(:reblogs, :notify, :languages)
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe FollowService, type: :service do
subject { described_class.new }
let(:sender) { Fabricate(:account, username: 'alice') }
context 'when local account' do
describe 'locked account' do
let(:bob) { Fabricate(:account, locked: true, username: 'bob') }
before do
subject.call(sender, bob)
end
it 'creates a follow request with reblogs' do
expect(FollowRequest.find_by(account: sender, target_account: bob, show_reblogs: true)).to_not be_nil
end
end
describe 'locked account, no reblogs' do
let(:bob) { Fabricate(:account, locked: true, username: 'bob') }
before do
subject.call(sender, bob, reblogs: false)
end
it 'creates a follow request without reblogs' do
expect(FollowRequest.find_by(account: sender, target_account: bob, show_reblogs: false)).to_not be_nil
end
end
describe 'unlocked account, from silenced account' do
let(:bob) { Fabricate(:account, username: 'bob') }
before do
sender.touch(:silenced_at)
subject.call(sender, bob)
end
it 'creates a follow request with reblogs' do
expect(FollowRequest.find_by(account: sender, target_account: bob, show_reblogs: true)).to_not be_nil
end
end
describe 'unlocked account, from a muted account' do
let(:bob) { Fabricate(:account, username: 'bob') }
before do
bob.mute!(sender)
subject.call(sender, bob)
end
it 'creates a following relation with reblogs' do
expect(sender.following?(bob)).to be true
expect(sender.muting_reblogs?(bob)).to be false
end
end
describe 'unlocked account' do
let(:bob) { Fabricate(:account, username: 'bob') }
before do
subject.call(sender, bob)
end
it 'creates a following relation with reblogs' do
expect(sender.following?(bob)).to be true
expect(sender.muting_reblogs?(bob)).to be false
end
end
describe 'unlocked account, no reblogs' do
let(:bob) { Fabricate(:account, username: 'bob') }
before do
subject.call(sender, bob, reblogs: false)
end
it 'creates a following relation without reblogs' do
expect(sender.following?(bob)).to be true
expect(sender.muting_reblogs?(bob)).to be true
end
end
describe 'already followed account' do
let(:bob) { Fabricate(:account, username: 'bob') }
before do
sender.follow!(bob)
subject.call(sender, bob)
end
it 'keeps a following relation' do
expect(sender.following?(bob)).to be true
end
end
describe 'already followed account, turning reblogs off' do
let(:bob) { Fabricate(:account, username: 'bob') }
before do
sender.follow!(bob, reblogs: true)
subject.call(sender, bob, reblogs: false)
end
it 'disables reblogs' do
expect(sender.muting_reblogs?(bob)).to be true
end
end
describe 'already followed account, turning reblogs on' do
let(:bob) { Fabricate(:account, username: 'bob') }
before do
sender.follow!(bob, reblogs: false)
subject.call(sender, bob, reblogs: true)
end
it 'enables reblogs' do
expect(sender.muting_reblogs?(bob)).to be false
end
end
describe 'already followed account, changing languages' do
let(:bob) { Fabricate(:account, username: 'bob') }
before do
sender.follow!(bob)
subject.call(sender, bob, languages: %w(en es))
end
it 'changes languages' do
expect(Follow.find_by(account: sender, target_account: bob)&.languages).to match_array %w(en es)
end
end
end
context 'when remote ActivityPub account' do
let(:bob) { Fabricate(:account, username: 'bob', domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
before do
stub_request(:post, 'http://example.com/inbox').to_return(status: 200, body: '', headers: {})
subject.call(sender, bob)
end
it 'creates follow request' do
expect(FollowRequest.find_by(account: sender, target_account: bob)).to_not be_nil
end
it 'sends a follow activity to the inbox' do
expect(a_request(:post, 'http://example.com/inbox')).to have_been_made.once
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class DeleteAccountService < BaseService
include Payloadable
ASSOCIATIONS_ON_SUSPEND = %w(
account_notes
account_pins
active_relationships
aliases
block_relationships
blocked_by_relationships
conversation_mutes
conversations
custom_filters
devices
domain_blocks
featured_tags
follow_requests
list_accounts
migrations
mute_relationships
muted_by_relationships
notifications
owned_lists
passive_relationships
report_notes
scheduled_statuses
status_pins
).freeze
# The following associations have no important side-effects
# in callbacks and all of their own associations are secured
# by foreign keys, making them safe to delete without loading
# into memory
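# (These are purged with `delete_all` rather than `destroy_all`;
# see #purge_association below.)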
ASSOCIATIONS_WITHOUT_SIDE_EFFECTS = %w(
account_notes
account_pins
aliases
conversation_mutes
conversations
custom_filters
devices
domain_blocks
featured_tags
follow_requests
list_accounts
migrations
mute_relationships
muted_by_relationships
notifications
owned_lists
scheduled_statuses
status_pins
)
ASSOCIATIONS_ON_DESTROY = %w(
reports
targeted_moderation_notes
targeted_reports
).freeze
# Suspend or remove an account and remove as much of its data
# as possible. If it's a local account and it has not been confirmed
# or never been approved, then side effects are skipped and both
# the user and account records are removed fully. Otherwise,
# it is controlled by options.
# @param [Account]
# @param [Hash] options
# @option [Boolean] :reserve_email Keep user record. Only applicable for local accounts
# @option [Boolean] :reserve_username Keep account record
# @option [Boolean] :skip_side_effects Side effects are ActivityPub and streaming API payloads
# @option [Boolean] :skip_activitypub Skip sending ActivityPub payloads. Implied by :skip_side_effects
# @option [Time] :suspended_at Only applicable when :reserve_username is true
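# Illustrative usage (not part of the original class; assumes `account` is an
# existing Account record):
#
#   DeleteAccountService.new.call(account)                           # suspend; keep username and email
#   DeleteAccountService.new.call(account, reserve_username: false)  # remove the account record as well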
def call(account, **options)
@account = account
@options = { reserve_username: true, reserve_email: true }.merge(options)
if @account.local? && @account.user_unconfirmed_or_pending?
@options[:reserve_email] = false
@options[:reserve_username] = false
@options[:skip_side_effects] = true
end
@options[:skip_activitypub] = true if @options[:skip_side_effects]
distribute_activities!
purge_content!
fulfill_deletion_request!
end
private
def distribute_activities!
return if skip_activitypub?
if @account.local?
delete_actor!
elsif @account.activitypub?
reject_follows!
undo_follows!
end
end
def reject_follows!
# When deleting a remote account, the account obviously doesn't
# actually become deleted on its origin server, i.e. unlike a
# locally deleted account it continues to have access to its home
# feed and other content. To prevent it from being able to continue
# to access toots it would receive because it follows local accounts,
# we have to force it to unfollow them.
ActivityPub::DeliveryWorker.push_bulk(Follow.where(account: @account)) do |follow|
[Oj.dump(serialize_payload(follow, ActivityPub::RejectFollowSerializer)), follow.target_account_id, @account.inbox_url]
end
end
def undo_follows!
# When deleting a remote account, the account obviously doesn't
# actually become deleted on its origin server, but following relationships
# are severed on our end. Therefore, make the remote server aware that the
# follow relationships are severed to avoid confusion and potential issues
# if the remote account gets un-suspended.
ActivityPub::DeliveryWorker.push_bulk(Follow.where(target_account: @account)) do |follow|
[Oj.dump(serialize_payload(follow, ActivityPub::UndoFollowSerializer)), follow.account_id, @account.inbox_url]
end
end
def purge_user!
return if !@account.local? || @account.user.nil?
if keep_user_record?
@account.user.disable!
@account.user.invites.where(uses: 0).destroy_all
else
@account.user.destroy
end
end
def purge_content!
purge_user!
purge_profile!
purge_statuses!
purge_mentions!
purge_media_attachments!
purge_polls!
purge_generated_notifications!
purge_favourites!
purge_bookmarks!
purge_feeds!
purge_other_associations!
@account.destroy unless keep_account_record?
end
def purge_statuses!
@account.statuses.reorder(nil).where.not(id: reported_status_ids).in_batches do |statuses|
BatchedRemoveStatusService.new.call(statuses, skip_side_effects: skip_side_effects?)
end
end
def purge_mentions!
@account.mentions.reorder(nil).where.not(status_id: reported_status_ids).in_batches.delete_all
end
def purge_media_attachments!
@account.media_attachments.find_each do |media_attachment|
next if keep_account_record? && reported_status_ids.include?(media_attachment.status_id)
media_attachment.destroy
end
end
def purge_polls!
@account.polls.reorder(nil).where.not(status_id: reported_status_ids).in_batches.delete_all
end
def purge_generated_notifications!
# By deleting polls and statuses without callbacks, we've left behind
# polymorphically associated notifications generated by this account
Notification.where(from_account: @account).in_batches.delete_all
end
def purge_favourites!
@account.favourites.in_batches do |favourites|
ids = favourites.pluck(:status_id)
StatusStat.where(status_id: ids).update_all('favourites_count = GREATEST(0, favourites_count - 1)')
Chewy.strategy.current.update(StatusesIndex, ids) if Chewy.enabled?
Rails.cache.delete_multi(ids.map { |id| "statuses/#{id}" })
favourites.delete_all
end
end
def purge_bookmarks!
@account.bookmarks.in_batches do |bookmarks|
Chewy.strategy.current.update(StatusesIndex, bookmarks.pluck(:status_id)) if Chewy.enabled?
bookmarks.delete_all
end
end
def purge_other_associations!
associations_for_destruction.each do |association_name|
purge_association(association_name)
end
end
def purge_feeds!
return unless @account.local?
FeedManager.instance.clean_feeds!(:home, [@account.id])
FeedManager.instance.clean_feeds!(:list, @account.owned_lists.pluck(:id))
end
def purge_profile!
# If the account is going to be destroyed
# there is no point wasting time updating
# its values first
return unless keep_account_record?
@account.silenced_at = nil
@account.suspended_at = @options[:suspended_at] || Time.now.utc
@account.suspension_origin = :local
@account.locked = false
@account.memorial = false
@account.discoverable = false
@account.trendable = false
@account.display_name = ''
@account.note = ''
@account.fields = []
@account.statuses_count = 0
@account.followers_count = 0
@account.following_count = 0
@account.moved_to_account = nil
@account.reviewed_at = nil
@account.requested_review_at = nil
@account.also_known_as = []
@account.avatar.destroy
@account.header.destroy
@account.save!
end
def fulfill_deletion_request!
@account.deletion_request&.destroy
end
def purge_association(association_name)
association = @account.public_send(association_name)
if ASSOCIATIONS_WITHOUT_SIDE_EFFECTS.include?(association_name)
association.in_batches.delete_all
else
association.in_batches.destroy_all
end
end
def delete_actor!
ActivityPub::DeliveryWorker.push_bulk(delivery_inboxes, limit: 1_000) do |inbox_url|
[delete_actor_json, @account.id, inbox_url]
end
ActivityPub::LowPriorityDeliveryWorker.push_bulk(low_priority_delivery_inboxes, limit: 1_000) do |inbox_url|
[delete_actor_json, @account.id, inbox_url]
end
end
def delete_actor_json
@delete_actor_json ||= Oj.dump(serialize_payload(@account, ActivityPub::DeleteActorSerializer, signer: @account, always_sign: true))
end
def delivery_inboxes
@delivery_inboxes ||= @account.followers.inboxes + Relay.enabled.pluck(:inbox_url)
end
def low_priority_delivery_inboxes
Account.inboxes - delivery_inboxes
end
def reported_status_ids
@reported_status_ids ||= Report.where(target_account: @account).unresolved.pluck(:status_ids).flatten.uniq
end
def associations_for_destruction
if keep_account_record?
ASSOCIATIONS_ON_SUSPEND
else
ASSOCIATIONS_ON_SUSPEND + ASSOCIATIONS_ON_DESTROY
end
end
def keep_user_record?
@options[:reserve_email]
end
def keep_account_record?
@options[:reserve_username]
end
def skip_side_effects?
@options[:skip_side_effects]
end
def skip_activitypub?
@options[:skip_activitypub]
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe DeleteAccountService, type: :service do
shared_examples 'common behavior' do
subject { described_class.new.call(account) }
let!(:status) { Fabricate(:status, account: account) }
let!(:mention) { Fabricate(:mention, account: local_follower) }
let!(:status_with_mention) { Fabricate(:status, account: account, mentions: [mention]) }
let!(:media_attachment) { Fabricate(:media_attachment, account: account) }
let!(:notification) { Fabricate(:notification, account: account) }
let!(:favourite) { Fabricate(:favourite, account: account, status: Fabricate(:status, account: local_follower)) }
let!(:poll) { Fabricate(:poll, account: account) }
let!(:poll_vote) { Fabricate(:poll_vote, account: local_follower, poll: poll) }
let!(:active_relationship) { Fabricate(:follow, account: account, target_account: local_follower) }
let!(:passive_relationship) { Fabricate(:follow, account: local_follower, target_account: account) }
let!(:endorsement) { Fabricate(:account_pin, account: local_follower, target_account: account) }
let!(:mention_notification) { Fabricate(:notification, account: local_follower, activity: mention, type: :mention) }
let!(:status_notification) { Fabricate(:notification, account: local_follower, activity: status, type: :status) }
let!(:poll_notification) { Fabricate(:notification, account: local_follower, activity: poll, type: :poll) }
let!(:favourite_notification) { Fabricate(:notification, account: local_follower, activity: favourite, type: :favourite) }
let!(:follow_notification) { Fabricate(:notification, account: local_follower, activity: active_relationship, type: :follow) }
let!(:account_note) { Fabricate(:account_note, account: account) }
it 'deletes associated owned records' do
expect { subject }.to change {
[
account.statuses,
account.media_attachments,
account.notifications,
account.favourites,
account.active_relationships,
account.passive_relationships,
account.polls,
account.account_notes,
].map(&:count)
}.from([2, 1, 1, 1, 1, 1, 1, 1]).to([0, 0, 0, 0, 0, 0, 0, 0])
end
it 'deletes associated target records' do
expect { subject }.to change {
[
AccountPin.where(target_account: account),
].map(&:count)
}.from([1]).to([0])
end
it 'deletes associated target notifications' do
expect { subject }.to change {
%w(
poll favourite status mention follow
).map { |type| Notification.where(type: type).count }
}.from([1, 1, 1, 1, 1]).to([0, 0, 0, 0, 0])
end
end
describe '#call on local account' do
before do
stub_request(:post, 'https://alice.com/inbox').to_return(status: 201)
stub_request(:post, 'https://bob.com/inbox').to_return(status: 201)
end
let!(:remote_alice) { Fabricate(:account, inbox_url: 'https://alice.com/inbox', domain: 'alice.com', protocol: :activitypub) }
let!(:remote_bob) { Fabricate(:account, inbox_url: 'https://bob.com/inbox', domain: 'bob.com', protocol: :activitypub) }
include_examples 'common behavior' do
let!(:account) { Fabricate(:account) }
let!(:local_follower) { Fabricate(:account) }
it 'sends a delete actor activity to all known inboxes' do
subject
expect(a_request(:post, 'https://alice.com/inbox')).to have_been_made.once
expect(a_request(:post, 'https://bob.com/inbox')).to have_been_made.once
end
end
end
describe '#call on remote account' do
before do
stub_request(:post, 'https://alice.com/inbox').to_return(status: 201)
stub_request(:post, 'https://bob.com/inbox').to_return(status: 201)
end
include_examples 'common behavior' do
let!(:account) { Fabricate(:account, inbox_url: 'https://bob.com/inbox', protocol: :activitypub, domain: 'bob.com') }
let!(:local_follower) { Fabricate(:account) }
it 'sends expected activities to followed and follower inboxes' do
subject
expect(a_request(:post, account.inbox_url).with(
body:
hash_including({
'type' => 'Reject',
'object' => hash_including({
'type' => 'Follow',
'actor' => account.uri,
'object' => ActivityPub::TagManager.instance.uri_for(local_follower),
}),
})
)).to have_been_made.once
expect(a_request(:post, account.inbox_url).with(
body: hash_including({
'type' => 'Undo',
'object' => hash_including({
'type' => 'Follow',
'actor' => ActivityPub::TagManager.instance.uri_for(local_follower),
'object' => account.uri,
}),
})
)).to have_been_made.once
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class RemoveStatusService < BaseService
include Redisable
include Payloadable
include Lockable
# Delete a status
# @param [Status] status
# @param [Hash] options
# @option [Boolean] :redraft
# @option [Boolean] :immediate
# @option [Boolean] :preserve
# @option [Boolean] :original_removed
# @option [Boolean] :skip_streaming
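# Illustrative usage (not part of the original class; assumes `status` is a
# persisted Status record):
#
#   RemoveStatusService.new.call(status)                 # delete with full side effects
#   RemoveStatusService.new.call(status, redraft: true)  # delete but keep media attachments for a redraft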
def call(status, **options)
@payload = Oj.dump(event: :delete, payload: status.id.to_s)
@status = status
@account = status.account
@options = options
with_redis_lock("distribute:#{@status.id}") do
@status.discard_with_reblogs
StatusPin.find_by(status: @status)&.destroy
remove_from_self if @account.local?
remove_from_followers
remove_from_lists
# There is no reason to send out Undo activities when the
# cause is that the original object has been removed, since
# original object being removed implicitly removes reblogs
# of it. The Delete activity of the original is forwarded
# separately.
remove_from_remote_reach if @account.local? && !@options[:original_removed]
# Since reblogs don't mention anyone, don't get reblogged,
# favourited and don't contain their own media attachments
# or hashtags, this can be skipped
unless @status.reblog?
remove_from_mentions
remove_reblogs
remove_from_hashtags
remove_from_public
remove_from_media if @status.with_media?
remove_media
end
@status.destroy! if permanently?
end
end
private
# The following FeedManager calls all do not result in redis publishes for
# streaming, as the `:update` option is false
def remove_from_self
FeedManager.instance.unpush_from_home(@account, @status)
end
def remove_from_followers
@account.followers_for_local_distribution.includes(:user).reorder(nil).find_each do |follower|
FeedManager.instance.unpush_from_home(follower, @status)
end
end
def remove_from_lists
@account.lists_for_local_distribution.select(:id, :account_id).includes(account: :user).reorder(nil).find_each do |list|
FeedManager.instance.unpush_from_list(list, @status)
end
end
def remove_from_mentions
# For limited visibility statuses, the mentions that determine
# who receives them in their home feed are a subset of followers
# and therefore the delete is already handled by sending it to all
# followers. Here we send a delete to actively mentioned accounts
# that may not follow the account
return if skip_streaming?
@status.active_mentions.find_each do |mention|
redis.publish("timeline:#{mention.account_id}", @payload)
end
end
def remove_from_remote_reach
# Followers, relays, people who got mentioned in the status,
# or who reblogged it from someone else might not follow
# the author and wouldn't normally receive the delete
# notification - so here, we explicitly send it to them
status_reach_finder = StatusReachFinder.new(@status, unsafe: true)
ActivityPub::DeliveryWorker.push_bulk(status_reach_finder.inboxes, limit: 1_000) do |inbox_url|
[signed_activity_json, @account.id, inbox_url]
end
end
def signed_activity_json
@signed_activity_json ||= Oj.dump(serialize_payload(@status, @status.reblog? ? ActivityPub::UndoAnnounceSerializer : ActivityPub::DeleteSerializer, signer: @account, always_sign: true))
end
def remove_reblogs
# We delete reblogs of the status before the original status,
# because once original status is gone, reblogs will disappear
# without us being able to do all the fancy stuff
@status.reblogs.rewhere(deleted_at: [nil, @status.deleted_at]).includes(:account).reorder(nil).find_each do |reblog|
RemoveStatusService.new.call(reblog, original_removed: true, skip_streaming: skip_streaming?)
end
end
def remove_from_hashtags
@account.featured_tags.where(tag_id: @status.tags.map(&:id)).find_each do |featured_tag|
featured_tag.decrement(@status.id)
end
return unless @status.public_visibility?
return if skip_streaming?
@status.tags.map(&:name).each do |hashtag|
redis.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}", @payload)
redis.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}:local", @payload) if @status.local?
end
end
def remove_from_public
return unless @status.public_visibility?
return if skip_streaming?
redis.publish('timeline:public', @payload)
redis.publish(@status.local? ? 'timeline:public:local' : 'timeline:public:remote', @payload)
end
def remove_from_media
return unless @status.public_visibility?
return if skip_streaming?
redis.publish('timeline:public:media', @payload)
redis.publish(@status.local? ? 'timeline:public:local:media' : 'timeline:public:remote:media', @payload)
end
def remove_media
return if @options[:redraft] || !permanently?
@status.media_attachments.destroy_all
end
def permanently?
@options[:immediate] || !(@options[:preserve] || @status.reported?)
end
def skip_streaming?
!!@options[:skip_streaming]
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe RemoveStatusService, type: :service do
subject { described_class.new }
let!(:alice) { Fabricate(:account) }
let!(:bob) { Fabricate(:account, username: 'bob', domain: 'example.com') }
let!(:jeff) { Fabricate(:account) }
let!(:hank) { Fabricate(:account, username: 'hank', protocol: :activitypub, domain: 'example.com', inbox_url: 'http://example.com/inbox') }
let!(:bill) { Fabricate(:account, username: 'bill', protocol: :activitypub, domain: 'example2.com', inbox_url: 'http://example2.com/inbox') }
before do
stub_request(:post, 'http://example.com/inbox').to_return(status: 200)
stub_request(:post, 'http://example2.com/inbox').to_return(status: 200)
jeff.follow!(alice)
hank.follow!(alice)
end
context 'when removed status is not a reblog' do
let!(:status) { PostStatusService.new.call(alice, text: 'Hello @bob@example.com ThisIsASecret') }
before do
FavouriteService.new.call(jeff, status)
Fabricate(:status, account: bill, reblog: status, uri: 'hoge')
end
it 'removes status from author\'s home feed' do
subject.call(status)
expect(HomeFeed.new(alice).get(10).pluck(:id)).to_not include(status.id)
end
it 'removes status from local follower\'s home feed' do
subject.call(status)
expect(HomeFeed.new(jeff).get(10).pluck(:id)).to_not include(status.id)
end
it 'sends Delete activity to followers' do
subject.call(status)
expect(a_request(:post, 'http://example.com/inbox').with(
body: hash_including({
'type' => 'Delete',
'object' => {
'type' => 'Tombstone',
'id' => ActivityPub::TagManager.instance.uri_for(status),
'atomUri' => OStatus::TagManager.instance.uri_for(status),
},
})
)).to have_been_made.once
end
it 'sends Delete activity to rebloggers' do
subject.call(status)
expect(a_request(:post, 'http://example2.com/inbox').with(
body: hash_including({
'type' => 'Delete',
'object' => {
'type' => 'Tombstone',
'id' => ActivityPub::TagManager.instance.uri_for(status),
'atomUri' => OStatus::TagManager.instance.uri_for(status),
},
})
)).to have_been_made.once
end
it 'removes the status from notifications' do
expect { subject.call(status) }.to change {
Notification.where(activity_type: 'Favourite', from_account: jeff, account: alice).count
}.from(1).to(0)
end
end
context 'when removed status is a private self-reblog' do
let!(:original_status) { Fabricate(:status, account: alice, text: 'Hello ThisIsASecret', visibility: :private) }
let!(:status) { ReblogService.new.call(alice, original_status) }
it 'sends Undo activity to followers' do
subject.call(status)
expect(a_request(:post, 'http://example.com/inbox').with(
body: hash_including({
'type' => 'Undo',
'object' => hash_including({
'type' => 'Announce',
'object' => ActivityPub::TagManager.instance.uri_for(original_status),
}),
})
)).to have_been_made.once
end
end
context 'when removed status is public self-reblog' do
let!(:original_status) { Fabricate(:status, account: alice, text: 'Hello ThisIsASecret', visibility: :public) }
let!(:status) { ReblogService.new.call(alice, original_status) }
it 'sends Undo activity to followers' do
subject.call(status)
expect(a_request(:post, 'http://example.com/inbox').with(
body: hash_including({
'type' => 'Undo',
'object' => hash_including({
'type' => 'Announce',
'object' => ActivityPub::TagManager.instance.uri_for(original_status),
}),
})
)).to have_been_made.once
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class BulkImportRowService
def call(row)
@account = row.bulk_import.account
@data = row.data
@type = row.bulk_import.type.to_sym
case @type
when :following, :blocking, :muting, :lists
target_acct = @data['acct']
target_domain = domain(target_acct)
@target_account = stoplight_wrap_request(target_domain) { ResolveAccountService.new.call(target_acct, { check_delivery_availability: true }) }
return false if @target_account.nil?
when :bookmarks
target_uri = @data['uri']
target_domain = Addressable::URI.parse(target_uri).normalized_host
@target_status = ActivityPub::TagManager.instance.uri_to_resource(target_uri, Status)
return false if @target_status.nil? && ActivityPub::TagManager.instance.local_uri?(target_uri)
@target_status ||= stoplight_wrap_request(target_domain) { ActivityPub::FetchRemoteStatusService.new.call(target_uri) }
return false if @target_status.nil?
end
case @type
when :following
FollowService.new.call(@account, @target_account, reblogs: @data['show_reblogs'], notify: @data['notify'], languages: @data['languages'])
when :blocking
BlockService.new.call(@account, @target_account)
when :muting
MuteService.new.call(@account, @target_account, notifications: @data['hide_notifications'])
when :bookmarks
return false unless StatusPolicy.new(@account, @target_status).show?
@account.bookmarks.find_or_create_by!(status: @target_status)
when :lists
list = @account.owned_lists.find_or_create_by!(title: @data['list_name'])
FollowService.new.call(@account, @target_account) unless @account.id == @target_account.id
list.accounts << @target_account
end
true
rescue ActiveRecord::RecordNotFound
false
end
def domain(uri)
domain = uri.is_a?(Account) ? uri.domain : uri.split('@')[1]
TagManager.instance.local_domain?(domain) ? nil : TagManager.instance.normalize_domain(domain)
end
def stoplight_wrap_request(domain, &block)
if domain.present?
Stoplight("source:#{domain}", &block)
.with_fallback { nil }
.with_threshold(1)
.with_cool_off_time(5.minutes.seconds)
.with_error_handler { |error, handle| error.is_a?(HTTP::Error) || error.is_a?(OpenSSL::SSL::SSLError) ? handle.call(error) : raise(error) }
.run
else
yield
end
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe BulkImportRowService do
subject { described_class.new }
let(:account) { Fabricate(:account) }
let(:import) { Fabricate(:bulk_import, account: account, type: import_type) }
let(:import_row) { Fabricate(:bulk_import_row, bulk_import: import, data: data) }
describe '#call' do
context 'when importing a follow' do
let(:import_type) { 'following' }
let(:target_account) { Fabricate(:account) }
let(:service_double) { instance_double(FollowService, call: nil) }
let(:data) do
{ 'acct' => target_account.acct }
end
before do
allow(FollowService).to receive(:new).and_return(service_double)
end
it 'calls FollowService with the expected arguments and returns true' do
expect(subject.call(import_row)).to be true
expect(service_double).to have_received(:call).with(account, target_account, { reblogs: nil, notify: nil, languages: nil })
end
end
context 'when importing a block' do
let(:import_type) { 'blocking' }
let(:target_account) { Fabricate(:account) }
let(:service_double) { instance_double(BlockService, call: nil) }
let(:data) do
{ 'acct' => target_account.acct }
end
before do
allow(BlockService).to receive(:new).and_return(service_double)
end
it 'calls BlockService with the expected arguments and returns true' do
expect(subject.call(import_row)).to be true
expect(service_double).to have_received(:call).with(account, target_account)
end
end
context 'when importing a mute' do
let(:import_type) { 'muting' }
let(:target_account) { Fabricate(:account) }
let(:service_double) { instance_double(MuteService, call: nil) }
let(:data) do
{ 'acct' => target_account.acct }
end
before do
allow(MuteService).to receive(:new).and_return(service_double)
end
it 'calls MuteService with the expected arguments and returns true' do
expect(subject.call(import_row)).to be true
expect(service_double).to have_received(:call).with(account, target_account, { notifications: nil })
end
end
context 'when importing a bookmark' do
let(:import_type) { 'bookmarks' }
let(:data) do
{ 'uri' => ActivityPub::TagManager.instance.uri_for(target_status) }
end
context 'when the status is public' do
let(:target_status) { Fabricate(:status) }
it 'bookmarks the status and returns true' do
expect(subject.call(import_row)).to be true
expect(account.bookmarked?(target_status)).to be true
end
end
context 'when the status is not accessible to the user' do
let(:target_status) { Fabricate(:status, visibility: :direct) }
it 'does not bookmark the status and returns false' do
expect(subject.call(import_row)).to be false
expect(account.bookmarked?(target_status)).to be false
end
end
end
context 'when importing a list row' do
let(:import_type) { 'lists' }
let(:target_account) { Fabricate(:account) }
let(:data) do
{ 'acct' => target_account.acct, 'list_name' => 'my list' }
end
shared_examples 'common behavior' do
context 'when the target account is already followed' do
before do
account.follow!(target_account)
end
it 'returns true' do
expect(subject.call(import_row)).to be true
end
it 'adds the target account to the list' do
expect { subject.call(import_row) }.to change { ListAccount.joins(:list).exists?(account_id: target_account.id, list: { title: 'my list' }) }.from(false).to(true)
end
end
context 'when the user already requested to follow the target account' do
before do
account.request_follow!(target_account)
end
it 'returns true' do
expect(subject.call(import_row)).to be true
end
it 'adds the target account to the list' do
expect { subject.call(import_row) }.to change { ListAccount.joins(:list).exists?(account_id: target_account.id, list: { title: 'my list' }) }.from(false).to(true)
end
end
context 'when the target account is neither followed nor requested' do
it 'returns true' do
expect(subject.call(import_row)).to be true
end
it 'adds the target account to the list' do
expect { subject.call(import_row) }.to change { ListAccount.joins(:list).exists?(account_id: target_account.id, list: { title: 'my list' }) }.from(false).to(true)
end
end
context 'when the target account is the user themself' do
let(:target_account) { account }
it 'returns true' do
expect(subject.call(import_row)).to be true
end
it 'adds the target account to the list' do
expect { subject.call(import_row) }.to change { ListAccount.joins(:list).exists?(account_id: target_account.id, list: { title: 'my list' }) }.from(false).to(true)
end
end
end
context 'when the list does not exist yet' do
include_examples 'common behavior'
end
context 'when the list exists' do
before do
Fabricate(:list, account: account, title: 'my list')
end
include_examples 'common behavior'
it 'does not create a new list' do
account.follow!(target_account)
expect { subject.call(import_row) }.to_not(change { List.where(title: 'my list').count })
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ActivityPub::FetchRemoteKeyService < BaseService
include JsonLdHelper
class Error < StandardError; end
# Returns actor that owns the key
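# Illustrative usage (not part of the original class; the URI is hypothetical):
#
#   ActivityPub::FetchRemoteKeyService.new.call('https://example.com/users/alice#main-key')
#   # => Account that owns the key, or nil when fetching fails and errors are suppressed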
def call(uri, id: true, prefetched_body: nil, suppress_errors: true)
raise Error, 'No key URI given' if uri.blank?
if prefetched_body.nil?
if id
@json = fetch_resource_without_id_validation(uri)
if actor_type?
@json = fetch_resource(@json['id'], true)
elsif uri != @json['id']
raise Error, "Fetched URI #{uri} has wrong id #{@json['id']}"
end
else
@json = fetch_resource(uri, id)
end
else
@json = body_to_json(prefetched_body, compare_id: id ? uri : nil)
end
raise Error, "Unable to fetch key JSON at #{uri}" if @json.nil?
raise Error, "Unsupported JSON-LD context for document #{uri}" unless supported_context?(@json)
raise Error, "Unexpected object type for key #{uri}" unless expected_type?
return find_actor(@json['id'], @json, suppress_errors) if actor_type?
@owner = fetch_resource(owner_uri, true)
raise Error, "Unable to fetch actor JSON #{owner_uri}" if @owner.nil?
raise Error, "Unsupported JSON-LD context for document #{owner_uri}" unless supported_context?(@owner)
raise Error, "Unexpected object type for actor #{owner_uri} (expected any of: #{SUPPORTED_TYPES})" unless expected_owner_type?
raise Error, "publicKey id for #{owner_uri} does not correspond to #{@json['id']}" unless confirmed_owner?
find_actor(owner_uri, @owner, suppress_errors)
rescue Error => e
Rails.logger.debug { "Fetching key #{uri} failed: #{e.message}" }
raise unless suppress_errors
end
private
def find_actor(uri, prefetched_body, suppress_errors)
actor = ActivityPub::TagManager.instance.uri_to_actor(uri)
actor ||= ActivityPub::FetchRemoteActorService.new.call(uri, prefetched_body: prefetched_body, suppress_errors: suppress_errors)
actor
end
def expected_type?
actor_type? || public_key?
end
def actor_type?
equals_or_includes_any?(@json['type'], ActivityPub::FetchRemoteActorService::SUPPORTED_TYPES)
end
def public_key?
@json['publicKeyPem'].present? && @json['owner'].present?
end
def owner_uri
@owner_uri ||= value_or_id(@json['owner'])
end
def expected_owner_type?
equals_or_includes_any?(@owner['type'], ActivityPub::FetchRemoteActorService::SUPPORTED_TYPES)
end
def confirmed_owner?
value_or_id(@owner['publicKey']) == @json['id']
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ActivityPub::FetchRemoteKeyService, type: :service do
subject { described_class.new }
let(:webfinger) { { subject: 'acct:alice@example.com', links: [{ rel: 'self', href: 'https://example.com/alice' }] } }
let(:public_key_pem) do
<<~TEXT
-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAu3L4vnpNLzVH31MeWI39
4F0wKeJFsLDAsNXGeOu0QF2x+h1zLWZw/agqD2R3JPU9/kaDJGPIV2Sn5zLyUA9S
6swCCMOtn7BBR9g9sucgXJmUFB0tACH2QSgHywMAybGfmSb3LsEMNKsGJ9VsvYoh
8lDET6X4Pyw+ZJU0/OLo/41q9w+OrGtlsTm/PuPIeXnxa6BLqnDaxC+4IcjG/FiP
ahNCTINl/1F/TgSSDZ4Taf4U9XFEIFw8wmgploELozzIzKq+t8nhQYkgAkt64euW
pva3qL5KD1mTIZQEP+LZvh3s2WHrLi3fhbdRuwQ2c0KkJA2oSTFPDpqqbPGZ3Qvu
HQIDAQAB
-----END PUBLIC KEY-----
TEXT
end
let(:public_key_id) { 'https://example.com/alice#main-key' }
let(:key_json) do
{
id: public_key_id,
owner: 'https://example.com/alice',
publicKeyPem: public_key_pem,
}
end
let(:actor_public_key) { key_json }
let(:actor) do
{
'@context': [
'https://www.w3.org/ns/activitystreams',
'https://w3id.org/security/v1',
],
id: 'https://example.com/alice',
type: 'Person',
preferredUsername: 'alice',
name: 'Alice',
summary: 'Foo bar',
inbox: 'http://example.com/alice/inbox',
publicKey: actor_public_key,
}
end
before do
stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
describe '#call' do
let(:account) { subject.call(public_key_id, id: false) }
context 'when the key is a sub-object from the actor' do
before do
stub_request(:get, public_key_id).to_return(body: Oj.dump(actor))
end
it 'returns the expected account' do
expect(account.uri).to eq 'https://example.com/alice'
end
end
context 'when the key is a separate document' do
let(:public_key_id) { 'https://example.com/alice-public-key.json' }
before do
stub_request(:get, public_key_id).to_return(body: Oj.dump(key_json.merge({ '@context': ['https://www.w3.org/ns/activitystreams', 'https://w3id.org/security/v1'] })))
end
it 'returns the expected account' do
expect(account.uri).to eq 'https://example.com/alice'
end
end
context 'when the key and owner do not match' do
let(:public_key_id) { 'https://example.com/fake-public-key.json' }
let(:actor_public_key) { 'https://example.com/alice-public-key.json' }
before do
stub_request(:get, public_key_id).to_return(body: Oj.dump(key_json.merge({ '@context': ['https://www.w3.org/ns/activitystreams', 'https://w3id.org/security/v1'] })))
end
it 'returns nil' do
expect(account).to be_nil
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ActivityPub::ProcessCollectionService < BaseService
include JsonLdHelper
def call(body, actor, **options)
@account = actor
@json = original_json = Oj.load(body, mode: :strict)
@options = options
return unless @json.is_a?(Hash)
begin
@json = compact(@json) if @json['signature'].is_a?(Hash)
rescue JSON::LD::JsonLdError => e
Rails.logger.debug { "Error when compacting JSON-LD document for #{value_or_id(@json['actor'])}: #{e.message}" }
@json = original_json.without('signature')
end
return if !supported_context? || (different_actor? && verify_account!.nil?) || suspended_actor? || @account.local?
return unless @account.is_a?(Account)
if @json['signature'].present?
# We have verified the signature, but in the compaction step above, might
# have introduced incompatibilities with other servers that do not
# normalize the JSON-LD documents (for instance, previous Mastodon
# versions), so skip redistribution if we can't get a safe document.
patch_for_forwarding!(original_json, @json)
@json.delete('signature') unless safe_for_forwarding?(original_json, @json)
end
case @json['type']
when 'Collection', 'CollectionPage'
process_items @json['items']
when 'OrderedCollection', 'OrderedCollectionPage'
process_items @json['orderedItems']
else
process_items [@json]
end
rescue Oj::ParseError
nil
end
private
def different_actor?
@json['actor'].present? && value_or_id(@json['actor']) != @account.uri
end
def suspended_actor?
@account.suspended? && !activity_allowed_while_suspended?
end
def activity_allowed_while_suspended?
%w(Delete Reject Undo Update).include?(@json['type'])
end
def process_items(items)
items.reverse_each.filter_map { |item| process_item(item) }
end
def supported_context?
super(@json)
end
def process_item(item)
activity = ActivityPub::Activity.factory(item, @account, **@options)
activity&.perform
end
def verify_account!
@options[:relayed_through_actor] = @account
@account = ActivityPub::LinkedDataSignature.new(@json).verify_actor!
@account = nil unless @account.is_a?(Account)
@account
rescue JSON::LD::JsonLdError, RDF::WriterError => e
Rails.logger.debug { "Could not verify LD-Signature for #{value_or_id(@json['actor'])}: #{e.message}" }
nil
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ActivityPub::ProcessCollectionService, type: :service do
subject { described_class.new }
let(:actor) { Fabricate(:account, domain: 'example.com', uri: 'http://example.com/account') }
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'foo',
type: 'Create',
actor: ActivityPub::TagManager.instance.uri_for(actor),
object: {
id: 'bar',
type: 'Note',
content: 'Lorem ipsum',
},
}
end
let(:json) { Oj.dump(payload) }
describe '#call' do
context 'when actor is suspended' do
before do
actor.suspend!(origin: :remote)
end
%w(Accept Add Announce Block Create Flag Follow Like Move Remove).each do |activity_type|
context "with #{activity_type} activity" do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'foo',
type: activity_type,
actor: ActivityPub::TagManager.instance.uri_for(actor),
}
end
it 'does not process payload' do
allow(ActivityPub::Activity).to receive(:factory)
subject.call(json, actor)
expect(ActivityPub::Activity).to_not have_received(:factory)
end
end
end
%w(Delete Reject Undo Update).each do |activity_type|
context "with #{activity_type} activity" do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'foo',
type: activity_type,
actor: ActivityPub::TagManager.instance.uri_for(actor),
}
end
it 'processes the payload' do
allow(ActivityPub::Activity).to receive(:factory)
subject.call(json, actor)
expect(ActivityPub::Activity).to have_received(:factory)
end
end
end
end
context 'when actor differs from sender' do
let(:forwarder) { Fabricate(:account, domain: 'example.com', uri: 'http://example.com/other_account') }
it 'does not process payload if no signature exists' do
signature_double = instance_double(ActivityPub::LinkedDataSignature, verify_actor!: nil)
allow(ActivityPub::LinkedDataSignature).to receive(:new).and_return(signature_double)
allow(ActivityPub::Activity).to receive(:factory)
subject.call(json, forwarder)
expect(ActivityPub::Activity).to_not have_received(:factory)
end
it 'processes payload with actor if valid signature exists' do
payload['signature'] = { 'type' => 'RsaSignature2017' }
signature_double = instance_double(ActivityPub::LinkedDataSignature, verify_actor!: actor)
allow(ActivityPub::LinkedDataSignature).to receive(:new).and_return(signature_double)
allow(ActivityPub::Activity).to receive(:factory).with(instance_of(Hash), actor, instance_of(Hash))
subject.call(json, forwarder)
expect(ActivityPub::Activity).to have_received(:factory).with(instance_of(Hash), actor, instance_of(Hash))
end
it 'does not process payload if invalid signature exists' do
payload['signature'] = { 'type' => 'RsaSignature2017' }
signature_double = instance_double(ActivityPub::LinkedDataSignature, verify_actor!: nil)
allow(ActivityPub::LinkedDataSignature).to receive(:new).and_return(signature_double)
allow(ActivityPub::Activity).to receive(:factory)
subject.call(json, forwarder)
expect(ActivityPub::Activity).to_not have_received(:factory)
end
context 'when receiving a fabricated status' do
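# The payload below mixes compacted and expanded JSON-LD properties that
# describe two different statuses; the expectations verify that the forged
# compact fields are not processed.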
let!(:actor) do
Fabricate(:account,
username: 'bob',
domain: 'example.com',
uri: 'https://example.com/users/bob',
private_key: nil,
public_key: <<~TEXT)
-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuuYyoyfsRkYnXRotMsId
W3euBDDfiv9oVqOxUVC7bhel8KednIMrMCRWFAkgJhbrlzbIkjVr68o1MP9qLcn7
CmH/BXHp7yhuFTr4byjdJKpwB+/i2jNEsvDH5jR8WTAeTCe0x/QHg21V3F7dSI5m
CCZ/1dSIyOXLRTWVlfDlm3rE4ntlCo+US3/7oSWbg/4/4qEnt1HC32kvklgScxua
4LR5ATdoXa5bFoopPWhul7MJ6NyWCyQyScUuGdlj8EN4kmKQJvphKHrI9fvhgOuG
TvhTR1S5InA4azSSchY0tXEEw/VNxraeX0KPjbgr6DPcwhPd/m0nhVDq0zVyVBBD
MwIDAQAB
-----END PUBLIC KEY-----
TEXT
end
let(:payload) do
{
'@context': [
'https://www.w3.org/ns/activitystreams',
nil,
{ object: 'https://www.w3.org/ns/activitystreams#object' },
],
id: 'https://example.com/users/bob/fake-status/activity',
type: 'Create',
actor: 'https://example.com/users/bob',
published: '2022-01-22T15:00:00Z',
to: [
'https://www.w3.org/ns/activitystreams#Public',
],
cc: [
'https://example.com/users/bob/followers',
],
signature: {
type: 'RsaSignature2017',
creator: 'https://example.com/users/bob#main-key',
created: '2022-03-09T21:57:25Z',
signatureValue: 'WculK0LelTQ0MvGwU9TPoq5pFzFfGYRDCJqjZ232/Udj4' \
'CHqDTGOSw5UTDLShqBOyycCkbZGrQwXG+dpyDpQLSe1UV' \
'PZ5TPQtc/9XtI57WlS2nMNpdvRuxGnnb2btPdesXZ7n3p' \
'Cxo0zjaXrJMe0mqQh5QJO22mahb4bDwwmfTHgbD3nmkD+' \
'fBfGi+UV2qWwqr+jlV4L4JqNkh0gWljF5KTePLRRZCuWi' \
'Q/FAt7c67636cdIPf7fR+usjuZltTQyLZKEGuK8VUn2Gk' \
'fsx5qns7Vcjvlz1JqlAjyO8HPBbzTTHzUG2nUOIgC3Poj' \
'CSWv6mNTmRGoLZzOscCAYQA6cKw==',
},
'@id': 'https://example.com/users/bob/statuses/107928807471117876/activity',
'@type': 'https://www.w3.org/ns/activitystreams#Create',
'https://www.w3.org/ns/activitystreams#actor': {
'@id': 'https://example.com/users/bob',
},
'https://www.w3.org/ns/activitystreams#cc': {
'@id': 'https://example.com/users/bob/followers',
},
object: {
id: 'https://example.com/users/bob/fake-status',
type: 'Note',
published: '2022-01-22T15:00:00Z',
url: 'https://www.youtube.com/watch?v=dQw4w9WgXcQ&feature=puck-was-here',
attributedTo: 'https://example.com/users/bob',
to: [
'https://www.w3.org/ns/activitystreams#Public',
],
cc: [
'https://example.com/users/bob/followers',
],
sensitive: false,
atomUri: 'https://example.com/users/bob/fake-status',
conversation: 'tag:example.com,2022-03-09:objectId=15:objectType=Conversation',
content: '<p>puck was here</p>',
'@id': 'https://example.com/users/bob/statuses/107928807471117876',
'@type': 'https://www.w3.org/ns/activitystreams#Note',
'http://ostatus.org#atomUri': 'https://example.com/users/bob/statuses/107928807471117876',
'http://ostatus.org#conversation': 'tag:example.com,2022-03-09:objectId=15:objectType=Conversation',
'https://www.w3.org/ns/activitystreams#attachment': [],
'https://www.w3.org/ns/activitystreams#attributedTo': {
'@id': 'https://example.com/users/bob',
},
'https://www.w3.org/ns/activitystreams#cc': {
'@id': 'https://example.com/users/bob/followers',
},
'https://www.w3.org/ns/activitystreams#content': [
'<p>hello world</p>',
{
'@value': '<p>hello world</p>',
'@language': 'en',
},
],
'https://www.w3.org/ns/activitystreams#published': {
'@type': 'http://www.w3.org/2001/XMLSchema#dateTime',
'@value': '2022-03-09T21:55:07Z',
},
'https://www.w3.org/ns/activitystreams#replies': {
'@id': 'https://example.com/users/bob/statuses/107928807471117876/replies',
'@type': 'https://www.w3.org/ns/activitystreams#Collection',
'https://www.w3.org/ns/activitystreams#first': {
'@type': 'https://www.w3.org/ns/activitystreams#CollectionPage',
'https://www.w3.org/ns/activitystreams#items': [],
'https://www.w3.org/ns/activitystreams#next': {
'@id': 'https://example.com/users/bob/statuses/107928807471117876/replies?only_other_accounts=true&page=true',
},
'https://www.w3.org/ns/activitystreams#partOf': {
'@id': 'https://example.com/users/bob/statuses/107928807471117876/replies',
},
},
},
'https://www.w3.org/ns/activitystreams#sensitive': false,
'https://www.w3.org/ns/activitystreams#tag': [],
'https://www.w3.org/ns/activitystreams#to': {
'@id': 'https://www.w3.org/ns/activitystreams#Public',
},
'https://www.w3.org/ns/activitystreams#url': {
'@id': 'https://example.com/@bob/107928807471117876',
},
},
'https://www.w3.org/ns/activitystreams#published': {
'@type': 'http://www.w3.org/2001/XMLSchema#dateTime',
'@value': '2022-03-09T21:55:07Z',
},
'https://www.w3.org/ns/activitystreams#to': {
'@id': 'https://www.w3.org/ns/activitystreams#Public',
},
}
end
it 'does not process forged payload' do
allow(ActivityPub::Activity).to receive(:factory)
subject.call(json, forwarder)
expect(ActivityPub::Activity).to_not have_received(:factory).with(
hash_including(
'object' => hash_including(
'id' => 'https://example.com/users/bob/fake-status'
)
),
anything,
anything
)
expect(ActivityPub::Activity).to_not have_received(:factory).with(
hash_including(
'object' => hash_including(
'content' => '<p>puck was here</p>'
)
),
anything,
anything
)
expect(Status.where(uri: 'https://example.com/users/bob/fake-status').exists?).to be false
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ActivityPub::FetchRemoteActorService < BaseService
include JsonLdHelper
include DomainControlHelper
include WebfingerHelper
class Error < StandardError; end
SUPPORTED_TYPES = %w(Application Group Organization Person Service).freeze
# Does a WebFinger roundtrip on each call, unless `only_key` is true
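# Illustrative usage (not part of the original class; URIs are hypothetical):
#
#   ActivityPub::FetchRemoteActorService.new.call('https://example.com/users/alice')
#   ActivityPub::FetchRemoteActorService.new.call(key_owner_uri, only_key: true) # skips the WebFinger roundtrip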
def call(uri, id: true, prefetched_body: nil, break_on_redirect: false, only_key: false, suppress_errors: true, request_id: nil)
return if domain_not_allowed?(uri)
return ActivityPub::TagManager.instance.uri_to_actor(uri) if ActivityPub::TagManager.instance.local_uri?(uri)
@json = begin
if prefetched_body.nil?
fetch_resource(uri, id)
else
body_to_json(prefetched_body, compare_id: id ? uri : nil)
end
rescue Oj::ParseError
raise Error, "Error parsing JSON-LD document #{uri}"
end
raise Error, "Error fetching actor JSON at #{uri}" if @json.nil?
raise Error, "Unsupported JSON-LD context for document #{uri}" unless supported_context?
raise Error, "Unexpected object type for actor #{uri} (expected any of: #{SUPPORTED_TYPES})" unless expected_type?
raise Error, "Actor #{uri} has moved to #{@json['movedTo']}" if break_on_redirect && @json['movedTo'].present?
raise Error, "Actor #{uri} has no 'preferredUsername', which is a requirement for Mastodon compatibility" if @json['preferredUsername'].blank?
@uri = @json['id']
@username = @json['preferredUsername']
@domain = Addressable::URI.parse(@uri).normalized_host
check_webfinger! unless only_key
ActivityPub::ProcessAccountService.new.call(@username, @domain, @json, only_key: only_key, verified_webfinger: !only_key, request_id: request_id)
rescue Error => e
Rails.logger.debug { "Fetching actor #{uri} failed: #{e.message}" }
raise unless suppress_errors
end
private
def check_webfinger!
webfinger = webfinger!("acct:#{@username}@#{@domain}")
confirmed_username, confirmed_domain = split_acct(webfinger.subject)
if @username.casecmp(confirmed_username).zero? && @domain.casecmp(confirmed_domain).zero?
raise Error, "Webfinger response for #{@username}@#{@domain} does not loop back to #{@uri}" if webfinger.link('self', 'href') != @uri
return
end
webfinger = webfinger!("acct:#{confirmed_username}@#{confirmed_domain}")
@username, @domain = split_acct(webfinger.subject)
raise Webfinger::RedirectError, "Too many webfinger redirects for URI #{@uri} (stopped at #{@username}@#{@domain})" unless confirmed_username.casecmp(@username).zero? && confirmed_domain.casecmp(@domain).zero?
raise Error, "Webfinger response for #{@username}@#{@domain} does not loop back to #{@uri}" if webfinger.link('self', 'href') != @uri
rescue Webfinger::RedirectError => e
raise Error, e.message
rescue Webfinger::Error => e
raise Error, "Webfinger error when resolving #{@username}@#{@domain}: #{e.message}"
end
def split_acct(acct)
acct.delete_prefix('acct:').split('@')
end
def supported_context?
super(@json)
end
def expected_type?
equals_or_includes_any?(@json['type'], SUPPORTED_TYPES)
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ActivityPub::FetchRemoteActorService, type: :service do
subject { described_class.new }
let!(:actor) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://example.com/alice',
type: 'Person',
preferredUsername: 'alice',
name: 'Alice',
summary: 'Foo bar',
inbox: 'http://example.com/alice/inbox',
}
end
describe '#call' do
let(:account) { subject.call('https://example.com/alice', id: true) }
shared_examples 'sets profile data' do
it 'returns an account' do
expect(account).to be_an Account
end
it 'sets display name' do
expect(account.display_name).to eq 'Alice'
end
it 'sets note' do
expect(account.note).to eq 'Foo bar'
end
it 'sets URL' do
expect(account.url).to eq 'https://example.com/alice'
end
end
context 'when the account does not have an inbox' do
let!(:webfinger) { { subject: 'acct:alice@example.com', links: [{ rel: 'self', href: 'https://example.com/alice' }] } }
before do
actor[:inbox] = nil
stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
it 'fetches resource' do
account
expect(a_request(:get, 'https://example.com/alice')).to have_been_made.once
end
it 'looks up webfinger' do
account
expect(a_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com')).to have_been_made.once
end
it 'returns nil' do
expect(account).to be_nil
end
end
context 'when URI and WebFinger share the same host' do
let!(:webfinger) { { subject: 'acct:alice@example.com', links: [{ rel: 'self', href: 'https://example.com/alice' }] } }
before do
stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
it 'fetches resource' do
account
expect(a_request(:get, 'https://example.com/alice')).to have_been_made.once
end
it 'looks up webfinger' do
account
expect(a_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com')).to have_been_made.once
end
it 'sets username and domain from webfinger' do
expect(account.username).to eq 'alice'
expect(account.domain).to eq 'example.com'
end
include_examples 'sets profile data'
end
context 'when WebFinger presents different domain than URI' do
let!(:webfinger) { { subject: 'acct:alice@iscool.af', links: [{ rel: 'self', href: 'https://example.com/alice' }] } }
before do
stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
stub_request(:get, 'https://iscool.af/.well-known/webfinger?resource=acct:alice@iscool.af').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
it 'fetches resource' do
account
expect(a_request(:get, 'https://example.com/alice')).to have_been_made.once
end
it 'looks up webfinger' do
account
expect(a_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com')).to have_been_made.once
end
it 'looks up "redirected" webfinger' do
account
expect(a_request(:get, 'https://iscool.af/.well-known/webfinger?resource=acct:alice@iscool.af')).to have_been_made.once
end
it 'sets username and domain from final webfinger' do
expect(account.username).to eq 'alice'
expect(account.domain).to eq 'iscool.af'
end
include_examples 'sets profile data'
end
context 'when WebFinger returns a different URI' do
let!(:webfinger) { { subject: 'acct:alice@example.com', links: [{ rel: 'self', href: 'https://example.com/bob' }] } }
before do
stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
it 'fetches resource' do
account
expect(a_request(:get, 'https://example.com/alice')).to have_been_made.once
end
it 'looks up webfinger' do
account
expect(a_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com')).to have_been_made.once
end
it 'does not create account' do
expect(account).to be_nil
end
end
context 'when WebFinger returns a different URI after a redirection' do
let!(:webfinger) { { subject: 'acct:alice@iscool.af', links: [{ rel: 'self', href: 'https://example.com/bob' }] } }
before do
stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
stub_request(:get, 'https://iscool.af/.well-known/webfinger?resource=acct:alice@iscool.af').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
it 'fetches resource' do
account
expect(a_request(:get, 'https://example.com/alice')).to have_been_made.once
end
it 'looks up webfinger' do
account
expect(a_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com')).to have_been_made.once
end
it 'looks up "redirected" webfinger' do
account
expect(a_request(:get, 'https://iscool.af/.well-known/webfinger?resource=acct:alice@iscool.af')).to have_been_made.once
end
it 'does not create account' do
expect(account).to be_nil
end
end
context 'with wrong id' do
it 'does not create account' do
expect(subject.call('https://fake.address/@foo', prefetched_body: Oj.dump(actor))).to be_nil
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ActivityPub::FetchFeaturedCollectionService < BaseService
include JsonLdHelper
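# Illustrative usage (not part of the original class):
#
#   ActivityPub::FetchFeaturedCollectionService.new.call(account, note: true, hashtag: true)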
def call(account, **options)
return if account.featured_collection_url.blank? || account.suspended? || account.local?
@account = account
@options = options
@json = fetch_resource(@account.featured_collection_url, true, local_follower)
return unless supported_context?(@json)
process_items(collection_items(@json))
end
private
def collection_items(collection)
collection = fetch_collection(collection['first']) if collection['first'].present?
return unless collection.is_a?(Hash)
case collection['type']
when 'Collection', 'CollectionPage'
collection['items']
when 'OrderedCollection', 'OrderedCollectionPage'
collection['orderedItems']
end
end
def fetch_collection(collection_or_uri)
return collection_or_uri if collection_or_uri.is_a?(Hash)
return if non_matching_uri_hosts?(@account.uri, collection_or_uri)
fetch_resource_without_id_validation(collection_or_uri, local_follower, true)
end
def process_items(items)
return if items.nil?
process_note_items(items) if @options[:note]
process_hashtag_items(items) if @options[:hashtag]
end
def process_note_items(items)
status_ids = items.filter_map do |item|
next unless item.is_a?(String) || item['type'] == 'Note'
uri = value_or_id(item)
next if ActivityPub::TagManager.instance.local_uri?(uri) || non_matching_uri_hosts?(@account.uri, uri)
status = ActivityPub::FetchRemoteStatusService.new.call(uri, on_behalf_of: local_follower, expected_actor_uri: @account.uri, request_id: @options[:request_id])
next unless status&.account_id == @account.id
status.id
rescue ActiveRecord::RecordInvalid => e
Rails.logger.debug { "Invalid pinned status #{uri}: #{e.message}" }
nil
end
to_remove = []
to_add = status_ids
StatusPin.where(account: @account).pluck(:status_id).each do |status_id|
if status_ids.include?(status_id)
to_add.delete(status_id)
else
to_remove << status_id
end
end
StatusPin.where(account: @account, status_id: to_remove).delete_all unless to_remove.empty?
to_add.each do |status_id|
StatusPin.create!(account: @account, status_id: status_id)
end
end
def process_hashtag_items(items)
names = items.filter_map { |item| item['type'] == 'Hashtag' && item['name']&.delete_prefix('#') }.map { |name| HashtagNormalizer.new.normalize(name) }
to_remove = []
to_add = names
FeaturedTag.where(account: @account).map(&:name).each do |name|
if names.include?(name)
to_add.delete(name)
else
to_remove << name
end
end
FeaturedTag.includes(:tag).where(account: @account, tags: { name: to_remove }).delete_all unless to_remove.empty?
to_add.each do |name|
FeaturedTag.create!(account: @account, name: name)
end
end
def local_follower
return @local_follower if defined?(@local_follower)
@local_follower = @account.followers.local.without_suspended.first
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ActivityPub::FetchFeaturedCollectionService, type: :service do
subject { described_class.new }
let(:actor) { Fabricate(:account, domain: 'example.com', uri: 'https://example.com/account', featured_collection_url: 'https://example.com/account/pinned') }
let!(:known_status) { Fabricate(:status, account: actor, uri: 'https://example.com/account/pinned/1') }
let(:status_json_pinned_known) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'Note',
id: 'https://example.com/account/pinned/known',
content: 'foo',
attributedTo: actor.uri,
to: 'https://www.w3.org/ns/activitystreams#Public',
}
end
let(:status_json_pinned_unknown_inlined) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'Note',
id: 'https://example.com/account/pinned/unknown-inlined',
content: 'foo',
attributedTo: actor.uri,
to: 'https://www.w3.org/ns/activitystreams#Public',
}
end
let(:status_json_pinned_unknown_reachable) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'Note',
id: 'https://example.com/account/pinned/unknown-reachable',
content: 'foo',
attributedTo: actor.uri,
to: 'https://www.w3.org/ns/activitystreams#Public',
}
end
let(:featured_with_null) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://example.com/account/collections/featured',
totalItems: 0,
type: 'OrderedCollection',
}
end
let(:items) do
[
'https://example.com/account/pinned/known', # known
status_json_pinned_unknown_inlined, # unknown inlined
'https://example.com/account/pinned/unknown-unreachable', # unknown unreachable
'https://example.com/account/pinned/unknown-reachable', # unknown reachable
'https://example.com/account/collections/featured', # featured with null
]
end
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'Collection',
id: actor.featured_collection_url,
items: items,
}.with_indifferent_access
end
shared_examples 'sets pinned posts' do
before do
stub_request(:get, 'https://example.com/account/pinned/known').to_return(status: 200, body: Oj.dump(status_json_pinned_known))
stub_request(:get, 'https://example.com/account/pinned/unknown-inlined').to_return(status: 200, body: Oj.dump(status_json_pinned_unknown_inlined))
stub_request(:get, 'https://example.com/account/pinned/unknown-unreachable').to_return(status: 404)
stub_request(:get, 'https://example.com/account/pinned/unknown-reachable').to_return(status: 200, body: Oj.dump(status_json_pinned_unknown_unreachable))
stub_request(:get, 'https://example.com/account/collections/featured').to_return(status: 200, body: Oj.dump(featured_with_null))
subject.call(actor, note: true, hashtag: false)
end
it 'sets expected posts as pinned posts' do
expect(actor.pinned_statuses.pluck(:uri)).to contain_exactly(
'https://example.com/account/pinned/known',
'https://example.com/account/pinned/unknown-inlined',
'https://example.com/account/pinned/unknown-reachable'
)
end
end
describe '#call' do
context 'when the endpoint is a Collection' do
before do
stub_request(:get, actor.featured_collection_url).to_return(status: 200, body: Oj.dump(payload))
end
it_behaves_like 'sets pinned posts'
end
context 'when the endpoint is an OrderedCollection' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'OrderedCollection',
id: actor.featured_collection_url,
orderedItems: items,
}.with_indifferent_access
end
before do
stub_request(:get, actor.featured_collection_url).to_return(status: 200, body: Oj.dump(payload))
end
it_behaves_like 'sets pinned posts'
end
context 'when the endpoint is a paginated Collection' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'Collection',
id: actor.featured_collection_url,
first: {
type: 'CollectionPage',
partOf: actor.featured_collection_url,
items: items,
},
}.with_indifferent_access
end
before do
stub_request(:get, actor.featured_collection_url).to_return(status: 200, body: Oj.dump(payload))
end
it_behaves_like 'sets pinned posts'
end
end
end
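
The suite above only exercises the `note: true, hashtag: false` path. If coverage of `process_hashtag_items` is also wanted, a context along these lines could be added inside `describe '#call'` — a minimal sketch, assuming the service routes inline `Hashtag` items to `process_hashtag_items` when called with `hashtag: true`:

```ruby
context 'when the collection contains hashtags and hashtag: true is passed' do
  # Override the shared `items` so the collection only carries Hashtag objects.
  let(:items) do
    [
      { type: 'Hashtag', name: '#foo' },
      { type: 'Hashtag', name: '#bar' },
    ]
  end

  before do
    stub_request(:get, actor.featured_collection_url).to_return(status: 200, body: Oj.dump(payload))
    subject.call(actor, note: false, hashtag: true)
  end

  it 'creates featured tags from the collection' do
    # Names are normalized by the service before FeaturedTag records are created.
    expect(actor.featured_tags.map(&:display_name)).to contain_exactly('foo', 'bar')
  end
end
```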
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ActivityPub::FetchRemoteAccountService < ActivityPub::FetchRemoteActorService
# Does a WebFinger roundtrip on each call, unless `only_key` is true
def call(uri, id: true, prefetched_body: nil, break_on_redirect: false, only_key: false, suppress_errors: true, request_id: nil)
actor = super
return actor if actor.nil? || actor.is_a?(Account)
Rails.logger.debug { "Fetching account #{uri} failed: Expected Account, got #{actor.class.name}" }
raise Error, "Expected Account, got #{actor.class.name}" unless suppress_errors
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ActivityPub::FetchRemoteAccountService, type: :service do
subject { described_class.new }
let!(:actor) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://example.com/alice',
type: 'Person',
preferredUsername: 'alice',
name: 'Alice',
summary: 'Foo bar',
inbox: 'http://example.com/alice/inbox',
}
end
describe '#call' do
let(:account) { subject.call('https://example.com/alice', id: true) }
shared_examples 'sets profile data' do
it 'returns an account' do
expect(account).to be_an Account
end
it 'sets display name' do
expect(account.display_name).to eq 'Alice'
end
it 'sets note' do
expect(account.note).to eq 'Foo bar'
end
it 'sets URL' do
expect(account.url).to eq 'https://example.com/alice'
end
end
context 'when the account does not have an inbox' do
let!(:webfinger) { { subject: 'acct:[email protected]', links: [{ rel: 'self', href: 'https://example.com/alice' }] } }
before do
actor[:inbox] = nil
stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:[email protected]').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
it 'fetches resource' do
account
expect(a_request(:get, 'https://example.com/alice')).to have_been_made.once
end
it 'looks up webfinger' do
account
expect(a_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:[email protected]')).to have_been_made.once
end
it 'returns nil' do
expect(account).to be_nil
end
end
context 'when URI and WebFinger share the same host' do
let!(:webfinger) { { subject: 'acct:[email protected]', links: [{ rel: 'self', href: 'https://example.com/alice' }] } }
before do
stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:[email protected]').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
it 'fetches resource' do
account
expect(a_request(:get, 'https://example.com/alice')).to have_been_made.once
end
it 'looks up webfinger' do
account
expect(a_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:[email protected]')).to have_been_made.once
end
it 'sets username and domain from webfinger' do
expect(account.username).to eq 'alice'
expect(account.domain).to eq 'example.com'
end
include_examples 'sets profile data'
end
context 'when WebFinger presents different domain than URI' do
let!(:webfinger) { { subject: 'acct:[email protected]', links: [{ rel: 'self', href: 'https://example.com/alice' }] } }
before do
stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:[email protected]').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
stub_request(:get, 'https://iscool.af/.well-known/webfinger?resource=acct:[email protected]').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
it 'fetches resource' do
account
expect(a_request(:get, 'https://example.com/alice')).to have_been_made.once
end
it 'looks up webfinger' do
account
expect(a_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:[email protected]')).to have_been_made.once
end
it 'looks up "redirected" webfinger' do
account
expect(a_request(:get, 'https://iscool.af/.well-known/webfinger?resource=acct:[email protected]')).to have_been_made.once
end
it 'sets username and domain from final webfinger' do
expect(account.username).to eq 'alice'
expect(account.domain).to eq 'iscool.af'
end
include_examples 'sets profile data'
end
context 'when WebFinger returns a different URI' do
let!(:webfinger) { { subject: 'acct:[email protected]', links: [{ rel: 'self', href: 'https://example.com/bob' }] } }
before do
stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:[email protected]').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
it 'fetches resource' do
account
expect(a_request(:get, 'https://example.com/alice')).to have_been_made.once
end
it 'looks up webfinger' do
account
expect(a_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:[email protected]')).to have_been_made.once
end
it 'does not create account' do
expect(account).to be_nil
end
end
context 'when WebFinger returns a different URI after a redirection' do
let!(:webfinger) { { subject: 'acct:[email protected]', links: [{ rel: 'self', href: 'https://example.com/bob' }] } }
before do
stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:[email protected]').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
stub_request(:get, 'https://iscool.af/.well-known/webfinger?resource=acct:[email protected]').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
it 'fetches resource' do
account
expect(a_request(:get, 'https://example.com/alice')).to have_been_made.once
end
it 'looks up webfinger' do
account
expect(a_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:[email protected]')).to have_been_made.once
end
it 'looks up "redirected" webfinger' do
account
expect(a_request(:get, 'https://iscool.af/.well-known/webfinger?resource=acct:[email protected]')).to have_been_made.once
end
it 'does not create account' do
expect(account).to be_nil
end
end
context 'with wrong id' do
it 'does not create account' do
expect(subject.call('https://fake.address/@foo', prefetched_body: Oj.dump(actor))).to be_nil
end
end
end
end
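
One path not covered above is a failing WebFinger lookup. A sketch of an extra context for `describe '#call'` — assuming the parent `ActivityPub::FetchRemoteActorService` swallows WebFinger errors when `suppress_errors` is left at its default — could look like this:

```ruby
context 'when the WebFinger lookup fails' do
  before do
    stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
    stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:[email protected]')
      .to_return(status: 404)
  end

  it 'fetches the actor document' do
    account
    expect(a_request(:get, 'https://example.com/alice')).to have_been_made.once
  end

  it 'does not create an account' do
    expect(account).to be_nil
  end
end
```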
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ActivityPub::FetchFeaturedTagsCollectionService < BaseService
include JsonLdHelper
def call(account, url)
return if url.blank? || account.suspended? || account.local?
@account = account
@json = fetch_resource(url, true, local_follower)
return unless supported_context?(@json)
process_items(collection_items(@json))
end
private
def collection_items(collection)
all_items = []
collection = fetch_collection(collection['first']) if collection['first'].present?
while collection.is_a?(Hash)
items = case collection['type']
when 'Collection', 'CollectionPage'
collection['items']
when 'OrderedCollection', 'OrderedCollectionPage'
collection['orderedItems']
end
break if items.blank?
all_items.concat(items)
break if all_items.size >= FeaturedTag::LIMIT
collection = collection['next'].present? ? fetch_collection(collection['next']) : nil
end
all_items
end
def fetch_collection(collection_or_uri)
return collection_or_uri if collection_or_uri.is_a?(Hash)
return if non_matching_uri_hosts?(@account.uri, collection_or_uri)
fetch_resource_without_id_validation(collection_or_uri, local_follower, true)
end
def process_items(items)
names = items.filter_map { |item| item['type'] == 'Hashtag' && item['name']&.delete_prefix('#') }.take(FeaturedTag::LIMIT)
tags = names.index_by { |name| HashtagNormalizer.new.normalize(name) }
normalized_names = tags.keys
FeaturedTag.includes(:tag).references(:tag).where(account: @account).where.not(tag: { name: normalized_names }).delete_all
FeaturedTag.includes(:tag).references(:tag).where(account: @account, tag: { name: normalized_names }).find_each do |featured_tag|
featured_tag.update(name: tags.delete(featured_tag.tag.name))
end
tags.each_value do |name|
FeaturedTag.create!(account: @account, name: name)
end
end
def local_follower
return @local_follower if defined?(@local_follower)
@local_follower = @account.followers.local.without_suspended.first
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ActivityPub::FetchFeaturedTagsCollectionService, type: :service do
subject { described_class.new }
let(:collection_url) { 'https://example.com/account/tags' }
let(:actor) { Fabricate(:account, domain: 'example.com', uri: 'https://example.com/account') }
let(:items) do
[
{ type: 'Hashtag', href: 'https://example.com/account/tagged/foo', name: 'Foo' },
{ type: 'Hashtag', href: 'https://example.com/account/tagged/bar', name: 'bar' },
{ type: 'Hashtag', href: 'https://example.com/account/tagged/baz', name: 'baZ' },
]
end
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'Collection',
id: collection_url,
items: items,
}.with_indifferent_access
end
shared_examples 'sets featured tags' do
before do
subject.call(actor, collection_url)
end
it 'sets expected tags as pinned tags' do
expect(actor.featured_tags.map(&:display_name)).to match_array %w(Foo bar baZ)
end
end
describe '#call' do
context 'when the endpoint is a Collection' do
before do
stub_request(:get, collection_url).to_return(status: 200, body: Oj.dump(payload))
end
it_behaves_like 'sets featured tags'
end
context 'when the account already has featured tags' do
before do
stub_request(:get, collection_url).to_return(status: 200, body: Oj.dump(payload))
actor.featured_tags.create!(name: 'FoO')
actor.featured_tags.create!(name: 'baz')
actor.featured_tags.create!(name: 'oh').update(name: nil)
end
it_behaves_like 'sets featured tags'
end
context 'when the endpoint is an OrderedCollection' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'OrderedCollection',
id: collection_url,
orderedItems: items,
}.with_indifferent_access
end
before do
stub_request(:get, collection_url).to_return(status: 200, body: Oj.dump(payload))
end
it_behaves_like 'sets featured tags'
end
context 'when the endpoint is a paginated Collection' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'Collection',
id: collection_url,
first: {
type: 'CollectionPage',
partOf: collection_url,
items: items,
},
}.with_indifferent_access
end
before do
stub_request(:get, collection_url).to_return(status: 200, body: Oj.dump(payload))
end
it_behaves_like 'sets featured tags'
end
end
end
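
The class above truncates the fetched names to `FeaturedTag::LIMIT`, which the suite does not exercise. A minimal sketch of a limit test for `describe '#call'` (assuming `FeaturedTag::LIMIT` is small enough to enumerate in a test) could be:

```ruby
context 'when the collection contains more tags than the limit' do
  let(:items) do
    (1..FeaturedTag::LIMIT + 3).map do |i|
      { type: 'Hashtag', href: "https://example.com/account/tagged/tag#{i}", name: "tag#{i}" }
    end
  end

  before do
    stub_request(:get, collection_url).to_return(status: 200, body: Oj.dump(payload))
    subject.call(actor, collection_url)
  end

  it 'does not create more featured tags than the limit' do
    expect(actor.featured_tags.count).to be <= FeaturedTag::LIMIT
  end
end
```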
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ActivityPub::FetchRemoteStatusService < BaseService
include JsonLdHelper
include DomainControlHelper
include Redisable
DISCOVERIES_PER_REQUEST = 1000
# Should be called when uri has already been checked for locality
def call(uri, id: true, prefetched_body: nil, on_behalf_of: nil, expected_actor_uri: nil, request_id: nil)
return if domain_not_allowed?(uri)
@request_id = request_id || "#{Time.now.utc.to_i}-status-#{uri}"
@json = if prefetched_body.nil?
fetch_resource(uri, id, on_behalf_of)
else
body_to_json(prefetched_body, compare_id: id ? uri : nil)
end
return unless supported_context?
actor_uri = nil
activity_json = nil
object_uri = nil
if expected_object_type?
actor_uri = value_or_id(first_of_value(@json['attributedTo']))
activity_json = { 'type' => 'Create', 'actor' => actor_uri, 'object' => @json }
object_uri = uri_from_bearcap(@json['id'])
elsif expected_activity_type?
actor_uri = value_or_id(first_of_value(@json['actor']))
activity_json = @json
object_uri = uri_from_bearcap(value_or_id(@json['object']))
end
return if activity_json.nil? || object_uri.nil? || !trustworthy_attribution?(@json['id'], actor_uri)
return if expected_actor_uri.present? && actor_uri != expected_actor_uri
return ActivityPub::TagManager.instance.uri_to_resource(object_uri, Status) if ActivityPub::TagManager.instance.local_uri?(object_uri)
actor = account_from_uri(actor_uri)
return if actor.nil? || actor.suspended?
# If we fetched a status that already exists, then we need to treat the
# activity as an update rather than create
activity_json['type'] = 'Update' if equals_or_includes_any?(activity_json['type'], %w(Create)) && Status.where(uri: object_uri, account_id: actor.id).exists?
with_redis do |redis|
discoveries = redis.incr("status_discovery_per_request:#{@request_id}")
redis.expire("status_discovery_per_request:#{@request_id}", 5.minutes.seconds)
return nil if discoveries > DISCOVERIES_PER_REQUEST
end
ActivityPub::Activity.factory(activity_json, actor, request_id: @request_id).perform
end
private
def trustworthy_attribution?(uri, attributed_to)
return false if uri.nil? || attributed_to.nil?
Addressable::URI.parse(uri).normalized_host.casecmp(Addressable::URI.parse(attributed_to).normalized_host).zero?
end
def account_from_uri(uri)
actor = ActivityPub::TagManager.instance.uri_to_resource(uri, Account)
actor = ActivityPub::FetchRemoteAccountService.new.call(uri, id: true, request_id: @request_id) if actor.nil? || actor.possibly_stale?
actor
end
def supported_context?
super(@json)
end
def expected_activity_type?
equals_or_includes_any?(@json['type'], %w(Create Announce))
end
def expected_object_type?
equals_or_includes_any?(@json['type'], ActivityPub::Activity::Create::SUPPORTED_TYPES + ActivityPub::Activity::Create::CONVERTED_TYPES)
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ActivityPub::FetchRemoteStatusService, type: :service do
include ActionView::Helpers::TextHelper
subject { described_class.new }
let!(:sender) { Fabricate(:account, domain: 'foo.bar', uri: 'https://foo.bar') }
let!(:recipient) { Fabricate(:account) }
let(:existing_status) { nil }
let(:note) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://foo.bar/@foo/1234',
type: 'Note',
content: 'Lorem ipsum',
attributedTo: ActivityPub::TagManager.instance.uri_for(sender),
}
end
before do
stub_request(:get, 'https://foo.bar/watch?v=12345').to_return(status: 404, body: '')
stub_request(:get, object[:id]).to_return(body: Oj.dump(object))
end
describe '#call' do
before do
existing_status
subject.call(object[:id], prefetched_body: Oj.dump(object))
end
context 'with Note object' do
let(:object) { note }
it 'creates status' do
status = sender.statuses.first
expect(status).to_not be_nil
expect(status.text).to eq 'Lorem ipsum'
end
end
context 'with Video object' do
let(:object) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://foo.bar/@foo/1234',
type: 'Video',
name: 'Nyan Cat 10 hours remix',
attributedTo: ActivityPub::TagManager.instance.uri_for(sender),
url: [
{
type: 'Link',
mimeType: 'application/x-bittorrent',
href: 'https://foo.bar/12345.torrent',
},
{
type: 'Link',
mimeType: 'text/html',
href: 'https://foo.bar/watch?v=12345',
},
],
}
end
it 'creates status' do
status = sender.statuses.first
expect(status).to_not be_nil
expect(status.url).to eq 'https://foo.bar/watch?v=12345'
expect(strip_tags(status.text)).to eq 'Nyan Cat 10 hours remixhttps://foo.bar/watch?v=12345'
end
end
context 'with Audio object' do
let(:object) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://foo.bar/@foo/1234',
type: 'Audio',
name: 'Nyan Cat 10 hours remix',
attributedTo: ActivityPub::TagManager.instance.uri_for(sender),
url: [
{
type: 'Link',
mimeType: 'application/x-bittorrent',
href: 'https://foo.bar/12345.torrent',
},
{
type: 'Link',
mimeType: 'text/html',
href: 'https://foo.bar/watch?v=12345',
},
],
}
end
it 'creates status' do
status = sender.statuses.first
expect(status).to_not be_nil
expect(status.url).to eq 'https://foo.bar/watch?v=12345'
expect(strip_tags(status.text)).to eq 'Nyan Cat 10 hours remixhttps://foo.bar/watch?v=12345'
end
end
context 'with Event object' do
let(:object) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://foo.bar/@foo/1234',
type: 'Event',
name: "Let's change the world",
attributedTo: ActivityPub::TagManager.instance.uri_for(sender),
}
end
it 'creates status' do
status = sender.statuses.first
expect(status).to_not be_nil
expect(status.url).to eq 'https://foo.bar/@foo/1234'
expect(strip_tags(status.text)).to eq "Let's change the worldhttps://foo.bar/@foo/1234"
end
end
context 'with wrong id' do
let(:note) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://real.address/@foo/1234',
type: 'Note',
content: 'Lorem ipsum',
attributedTo: ActivityPub::TagManager.instance.uri_for(sender),
}
end
let(:object) do
temp = note.dup
temp[:id] = 'https://fake.address/@foo/5678'
temp
end
it 'does not create status' do
expect(sender.statuses.first).to be_nil
end
end
context 'with a valid Create activity' do
let(:object) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://foo.bar/@foo/1234/create',
type: 'Create',
actor: ActivityPub::TagManager.instance.uri_for(sender),
object: note,
}
end
it 'creates status' do
status = sender.statuses.first
expect(status).to_not be_nil
expect(status.uri).to eq note[:id]
expect(status.text).to eq note[:content]
end
end
context 'with a Create activity with a mismatching id' do
let(:object) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://foo.bar/@foo/1234/create',
type: 'Create',
actor: ActivityPub::TagManager.instance.uri_for(sender),
object: {
id: 'https://real.address/@foo/1234',
type: 'Note',
content: 'Lorem ipsum',
attributedTo: ActivityPub::TagManager.instance.uri_for(sender),
},
}
end
it 'does not create status' do
expect(sender.statuses.first).to be_nil
end
end
context 'when status already exists' do
let(:existing_status) { Fabricate(:status, account: sender, text: 'Foo', uri: note[:id]) }
context 'with a Note object' do
let(:object) { note.merge(updated: '2021-09-08T22:39:25Z') }
it 'updates status' do
existing_status.reload
expect(existing_status.text).to eq 'Lorem ipsum'
expect(existing_status.edits).to_not be_empty
end
end
context 'with a Create activity' do
let(:object) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://foo.bar/@foo/1234/create',
type: 'Create',
actor: ActivityPub::TagManager.instance.uri_for(sender),
object: note.merge(updated: '2021-09-08T22:39:25Z'),
}
end
it 'updates status' do
existing_status.reload
expect(existing_status.text).to eq 'Lorem ipsum'
expect(existing_status.edits).to_not be_empty
end
end
end
end
context 'with statuses referencing other statuses' do
before do
stub_const 'ActivityPub::FetchRemoteStatusService::DISCOVERIES_PER_REQUEST', 5
end
context 'when using inReplyTo' do
let(:object) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://foo.bar/@foo/1',
type: 'Note',
content: 'Lorem ipsum',
inReplyTo: 'https://foo.bar/@foo/2',
attributedTo: ActivityPub::TagManager.instance.uri_for(sender),
}
end
before do
8.times do |i|
status_json = {
'@context': 'https://www.w3.org/ns/activitystreams',
id: "https://foo.bar/@foo/#{i}",
type: 'Note',
content: 'Lorem ipsum',
inReplyTo: "https://foo.bar/@foo/#{i + 1}",
attributedTo: ActivityPub::TagManager.instance.uri_for(sender),
to: 'as:Public',
}.with_indifferent_access
stub_request(:get, "https://foo.bar/@foo/#{i}").to_return(status: 200, body: status_json.to_json, headers: { 'Content-Type': 'application/activity+json' })
end
end
it 'creates at least some statuses' do
expect { subject.call(object[:id], prefetched_body: Oj.dump(object)) }.to change { sender.statuses.count }.by_at_least(2)
end
it 'creates no more statuses than the limit allows' do
expect { subject.call(object[:id], prefetched_body: Oj.dump(object)) }.to change { sender.statuses.count }.by_at_most(5)
end
end
context 'when using replies' do
let(:object) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://foo.bar/@foo/1',
type: 'Note',
content: 'Lorem ipsum',
replies: {
type: 'Collection',
id: 'https://foo.bar/@foo/1/replies',
first: {
type: 'CollectionPage',
partOf: 'https://foo.bar/@foo/1/replies',
items: ['https://foo.bar/@foo/2'],
},
},
attributedTo: ActivityPub::TagManager.instance.uri_for(sender),
}
end
before do
8.times do |i|
status_json = {
'@context': 'https://www.w3.org/ns/activitystreams',
id: "https://foo.bar/@foo/#{i}",
type: 'Note',
content: 'Lorem ipsum',
replies: {
type: 'Collection',
id: "https://foo.bar/@foo/#{i}/replies",
first: {
type: 'CollectionPage',
partOf: "https://foo.bar/@foo/#{i}/replies",
items: ["https://foo.bar/@foo/#{i + 1}"],
},
},
attributedTo: ActivityPub::TagManager.instance.uri_for(sender),
to: 'as:Public',
}.with_indifferent_access
stub_request(:get, "https://foo.bar/@foo/#{i}").to_return(status: 200, body: status_json.to_json, headers: { 'Content-Type': 'application/activity+json' })
end
end
it 'creates at least some statuses' do
expect { subject.call(object[:id], prefetched_body: Oj.dump(object)) }.to change { sender.statuses.count }.by_at_least(2)
end
it 'creates no more statuses than the limit allows' do
expect { subject.call(object[:id], prefetched_body: Oj.dump(object)) }.to change { sender.statuses.count }.by_at_most(5)
end
end
end
end
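
The `expected_actor_uri` guard in the class above is not covered by the suite. A sketch of a top-level context (kept outside `describe '#call'` so its `before` hook does not run the service first) might look like:

```ruby
context 'with a mismatching expected_actor_uri' do
  let(:object) { note }

  it 'does not create a status' do
    # 'https://foo.bar/other-account' is a hypothetical URI that simply differs from the sender's.
    expect(subject.call(object[:id], prefetched_body: Oj.dump(object), expected_actor_uri: 'https://foo.bar/other-account')).to be_nil
    expect(sender.statuses.count).to eq 0
  end
end
```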
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ActivityPub::ProcessAccountService < BaseService
include JsonLdHelper
include DomainControlHelper
include Redisable
include Lockable
SUBDOMAINS_RATELIMIT = 10
DISCOVERIES_PER_REQUEST = 400
# Should be called with confirmed valid JSON
# and WebFinger-resolved username and domain
def call(username, domain, json, options = {})
return if json['inbox'].blank? || unsupported_uri_scheme?(json['id']) || domain_not_allowed?(domain)
@options = options
@json = json
@uri = @json['id']
@username = username
@domain = TagManager.instance.normalize_domain(domain)
@collections = {}
# The key does not need to be unguessable, it just needs to be somewhat unique
@options[:request_id] ||= "#{Time.now.utc.to_i}-#{username}@#{domain}"
with_redis_lock("process_account:#{@uri}") do
@account = Account.remote.find_by(uri: @uri) if @options[:only_key]
@account ||= Account.find_remote(@username, @domain)
@old_public_key = @account&.public_key
@old_protocol = @account&.protocol
@suspension_changed = false
if @account.nil?
with_redis do |redis|
return nil if redis.pfcount("unique_subdomains_for:#{PublicSuffix.domain(@domain, ignore_private: true)}") >= SUBDOMAINS_RATELIMIT
discoveries = redis.incr("discovery_per_request:#{@options[:request_id]}")
redis.expire("discovery_per_request:#{@options[:request_id]}", 5.minutes.seconds)
return nil if discoveries > DISCOVERIES_PER_REQUEST
end
create_account
end
update_account
process_tags
process_duplicate_accounts! if @options[:verified_webfinger]
end
after_protocol_change! if protocol_changed?
after_key_change! if key_changed? && !@options[:signed_with_known_key]
clear_tombstones! if key_changed?
after_suspension_change! if suspension_changed?
unless @options[:only_key] || @account.suspended?
check_featured_collection! if @account.featured_collection_url.present?
check_featured_tags_collection! if @json['featuredTags'].present?
check_links! if @account.fields.any?(&:requires_verification?)
end
@account
rescue Oj::ParseError
nil
end
private
def create_account
@account = Account.new
@account.protocol = :activitypub
@account.username = @username
@account.domain = @domain
@account.private_key = nil
@account.suspended_at = domain_block.created_at if auto_suspend?
@account.suspension_origin = :local if auto_suspend?
@account.silenced_at = domain_block.created_at if auto_silence?
set_immediate_protocol_attributes!
@account.save!
end
def update_account
@account.last_webfingered_at = Time.now.utc unless @options[:only_key]
@account.protocol = :activitypub
set_suspension!
set_immediate_protocol_attributes!
set_fetchable_key! unless @account.suspended? && @account.suspension_origin_local?
set_immediate_attributes! unless @account.suspended?
set_fetchable_attributes! unless @options[:only_key] || @account.suspended?
@account.save_with_optional_media!
end
def set_immediate_protocol_attributes!
@account.inbox_url = @json['inbox'] || ''
@account.outbox_url = @json['outbox'] || ''
@account.shared_inbox_url = (@json['endpoints'].is_a?(Hash) ? @json['endpoints']['sharedInbox'] : @json['sharedInbox']) || ''
@account.followers_url = @json['followers'] || ''
@account.url = url || @uri
@account.uri = @uri
@account.actor_type = actor_type
@account.created_at = @json['published'] if @json['published'].present?
end
def set_immediate_attributes!
@account.featured_collection_url = @json['featured'] || ''
@account.devices_url = @json['devices'] || ''
@account.display_name = @json['name'] || ''
@account.note = @json['summary'] || ''
@account.locked = @json['manuallyApprovesFollowers'] || false
@account.fields = property_values || {}
@account.also_known_as = as_array(@json['alsoKnownAs'] || []).map { |item| value_or_id(item) }
@account.discoverable = @json['discoverable'] || false
@account.indexable = @json['indexable'] || false
@account.memorial = @json['memorial'] || false
end
def set_fetchable_key!
@account.public_key = public_key || ''
end
def set_fetchable_attributes!
begin
@account.avatar_remote_url = image_url('icon') || '' unless skip_download?
@account.avatar = nil if @account.avatar_remote_url.blank?
rescue Mastodon::UnexpectedResponseError, HTTP::TimeoutError, HTTP::ConnectionError, OpenSSL::SSL::SSLError
RedownloadAvatarWorker.perform_in(rand(30..600).seconds, @account.id)
end
begin
@account.header_remote_url = image_url('image') || '' unless skip_download?
@account.header = nil if @account.header_remote_url.blank?
rescue Mastodon::UnexpectedResponseError, HTTP::TimeoutError, HTTP::ConnectionError, OpenSSL::SSL::SSLError
RedownloadHeaderWorker.perform_in(rand(30..600).seconds, @account.id)
end
@account.statuses_count = outbox_total_items if outbox_total_items.present?
@account.following_count = following_total_items if following_total_items.present?
@account.followers_count = followers_total_items if followers_total_items.present?
@account.hide_collections = following_private? || followers_private?
@account.moved_to_account = @json['movedTo'].present? ? moved_account : nil
end
def set_suspension!
return if @account.suspended? && @account.suspension_origin_local?
if @account.suspended? && !@json['suspended']
@account.unsuspend!
@suspension_changed = true
elsif [email protected]? && @json['suspended']
@account.suspend!(origin: :remote)
@suspension_changed = true
end
end
def after_protocol_change!
ActivityPub::PostUpgradeWorker.perform_async(@account.domain)
end
def after_key_change!
RefollowWorker.perform_async(@account.id)
end
def after_suspension_change!
if @account.suspended?
Admin::SuspensionWorker.perform_async(@account.id)
else
Admin::UnsuspensionWorker.perform_async(@account.id)
end
end
def check_featured_collection!
ActivityPub::SynchronizeFeaturedCollectionWorker.perform_async(@account.id, { 'hashtag' => @json['featuredTags'].blank?, 'request_id' => @options[:request_id] })
end
def check_featured_tags_collection!
ActivityPub::SynchronizeFeaturedTagsCollectionWorker.perform_async(@account.id, @json['featuredTags'])
end
def check_links!
VerifyAccountLinksWorker.perform_in(rand(10.minutes.to_i), @account.id)
end
def process_duplicate_accounts!
return unless Account.where(uri: @account.uri).where.not(id: @account.id).exists?
AccountMergingWorker.perform_async(@account.id)
end
def actor_type
if @json['type'].is_a?(Array)
@json['type'].find { |type| ActivityPub::FetchRemoteAccountService::SUPPORTED_TYPES.include?(type) }
else
@json['type']
end
end
def image_url(key)
value = first_of_value(@json[key])
return if value.nil?
return value['url'] if value.is_a?(Hash)
image = fetch_resource_without_id_validation(value)
image['url'] if image
end
def public_key
value = first_of_value(@json['publicKey'])
return if value.nil?
return value['publicKeyPem'] if value.is_a?(Hash)
key = fetch_resource_without_id_validation(value)
key['publicKeyPem'] if key
end
def url
return if @json['url'].blank?
url_candidate = url_to_href(@json['url'], 'text/html')
if unsupported_uri_scheme?(url_candidate) || mismatching_origin?(url_candidate)
nil
else
url_candidate
end
end
def property_values
return unless @json['attachment'].is_a?(Array)
as_array(@json['attachment']).select { |attachment| attachment['type'] == 'PropertyValue' }.map { |attachment| attachment.slice('name', 'value') }
end
def mismatching_origin?(url)
needle = Addressable::URI.parse(url).host
haystack = Addressable::URI.parse(@uri).host
!haystack.casecmp(needle).zero?
end
def outbox_total_items
collection_info('outbox').first
end
def following_total_items
collection_info('following').first
end
def followers_total_items
collection_info('followers').first
end
def following_private?
!collection_info('following').last
end
def followers_private?
!collection_info('followers').last
end
def collection_info(type)
return [nil, nil] if @json[type].blank?
return @collections[type] if @collections.key?(type)
collection = fetch_resource_without_id_validation(@json[type])
total_items = collection.is_a?(Hash) && collection['totalItems'].present? && collection['totalItems'].is_a?(Numeric) ? collection['totalItems'] : nil
has_first_page = collection.is_a?(Hash) && collection['first'].present?
@collections[type] = [total_items, has_first_page]
rescue HTTP::Error, OpenSSL::SSL::SSLError, Mastodon::LengthValidationError
@collections[type] = [nil, nil]
end
def moved_account
account = ActivityPub::TagManager.instance.uri_to_resource(@json['movedTo'], Account)
account ||= ActivityPub::FetchRemoteAccountService.new.call(@json['movedTo'], id: true, break_on_redirect: true, request_id: @options[:request_id])
account
end
def skip_download?
@account.suspended? || domain_block&.reject_media?
end
def auto_suspend?
domain_block&.suspend?
end
def auto_silence?
domain_block&.silence?
end
def domain_block
return @domain_block if defined?(@domain_block)
@domain_block = DomainBlock.rule_for(@domain)
end
def key_changed?
!@old_public_key.nil? && @old_public_key != @account.public_key
end
def suspension_changed?
@suspension_changed
end
def clear_tombstones!
Tombstone.where(account_id: @account.id).delete_all
end
def protocol_changed?
!@old_protocol.nil? && @old_protocol != @account.protocol
end
def process_tags
return if @json['tag'].blank?
as_array(@json['tag']).each do |tag|
process_emoji tag if equals_or_includes?(tag['type'], 'Emoji')
end
end
def process_emoji(tag)
return if skip_download?
return if tag['name'].blank? || tag['icon'].blank? || tag['icon']['url'].blank?
shortcode = tag['name'].delete(':')
image_url = tag['icon']['url']
uri = tag['id']
updated = tag['updated']
emoji = CustomEmoji.find_by(shortcode: shortcode, domain: @account.domain)
return unless emoji.nil? || image_url != emoji.image_remote_url || (updated && updated >= emoji.updated_at)
emoji ||= CustomEmoji.new(domain: @account.domain, shortcode: shortcode, uri: uri)
emoji.image_remote_url = image_url
emoji.save
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ActivityPub::ProcessAccountService, type: :service do
subject { described_class.new }
context 'with property values' do
let(:payload) do
{
id: 'https://foo.test',
type: 'Actor',
inbox: 'https://foo.test/inbox',
attachment: [
{ type: 'PropertyValue', name: 'Pronouns', value: 'They/them' },
{ type: 'PropertyValue', name: 'Occupation', value: 'Unit test' },
{ type: 'PropertyValue', name: 'non-string', value: %w(foo bar) },
],
}.with_indifferent_access
end
it 'parses fields out of attachment' do
account = subject.call('alice', 'example.com', payload)
expect(account.fields).to be_a Array
expect(account.fields.size).to eq 2
expect(account.fields[0]).to be_a Account::Field
expect(account.fields[0].name).to eq 'Pronouns'
expect(account.fields[0].value).to eq 'They/them'
expect(account.fields[1]).to be_a Account::Field
expect(account.fields[1].name).to eq 'Occupation'
expect(account.fields[1].value).to eq 'Unit test'
end
end
context 'when account is not suspended' do
subject { described_class.new.call('alice', 'example.com', payload) }
let!(:account) { Fabricate(:account, username: 'alice', domain: 'example.com') }
let(:payload) do
{
id: 'https://foo.test',
type: 'Actor',
inbox: 'https://foo.test/inbox',
suspended: true,
}.with_indifferent_access
end
before do
allow(Admin::SuspensionWorker).to receive(:perform_async)
end
it 'suspends account remotely' do
expect(subject.suspended?).to be true
expect(subject.suspension_origin_remote?).to be true
end
it 'queues suspension worker' do
subject
expect(Admin::SuspensionWorker).to have_received(:perform_async)
end
end
context 'when account is suspended' do
subject { described_class.new.call('alice', 'example.com', payload) }
let!(:account) { Fabricate(:account, username: 'alice', domain: 'example.com', display_name: '') }
let(:payload) do
{
id: 'https://foo.test',
type: 'Actor',
inbox: 'https://foo.test/inbox',
suspended: false,
name: 'Hoge',
}.with_indifferent_access
end
before do
allow(Admin::UnsuspensionWorker).to receive(:perform_async)
account.suspend!(origin: suspension_origin)
end
context 'when locally' do
let(:suspension_origin) { :local }
it 'does not unsuspend it' do
expect(subject.suspended?).to be true
end
it 'does not update any attributes' do
expect(subject.display_name).to_not eq 'Hoge'
end
end
context 'when remotely' do
let(:suspension_origin) { :remote }
it 'unsuspends it' do
expect(subject.suspended?).to be false
end
it 'queues unsuspension worker' do
subject
expect(Admin::UnsuspensionWorker).to have_received(:perform_async)
end
it 'updates attributes' do
expect(subject.display_name).to eq 'Hoge'
end
end
end
context 'when discovering many subdomains in a short timeframe' do
subject do
8.times do |i|
domain = "test#{i}.testdomain.com"
json = {
id: "https://#{domain}/users/1",
type: 'Actor',
inbox: "https://#{domain}/inbox",
}.with_indifferent_access
described_class.new.call('alice', domain, json)
end
end
before do
stub_const 'ActivityPub::ProcessAccountService::SUBDOMAINS_RATELIMIT', 5
end
it 'creates at least some accounts' do
expect { subject }.to change { Account.remote.count }.by_at_least(2)
end
it 'creates no more accounts than the limit allows' do
expect { subject }.to change { Account.remote.count }.by_at_most(5)
end
end
context 'with accounts referencing other accounts' do
let(:payload) do
{
'@context': ['https://www.w3.org/ns/activitystreams'],
id: 'https://foo.test/users/1',
type: 'Person',
inbox: 'https://foo.test/inbox',
featured: 'https://foo.test/users/1/featured',
preferredUsername: 'user1',
}.with_indifferent_access
end
before do
stub_const 'ActivityPub::ProcessAccountService::DISCOVERIES_PER_REQUEST', 5
8.times do |i|
actor_json = {
'@context': ['https://www.w3.org/ns/activitystreams'],
id: "https://foo.test/users/#{i}",
type: 'Person',
inbox: 'https://foo.test/inbox',
featured: "https://foo.test/users/#{i}/featured",
preferredUsername: "user#{i}",
}.with_indifferent_access
status_json = {
'@context': ['https://www.w3.org/ns/activitystreams'],
id: "https://foo.test/users/#{i}/status",
attributedTo: "https://foo.test/users/#{i}",
type: 'Note',
content: "@user#{i + 1} test",
tag: [
{
type: 'Mention',
href: "https://foo.test/users/#{i + 1}",
name: "@user#{i + 1}",
},
],
to: ['as:Public', "https://foo.test/users/#{i + 1}"],
}.with_indifferent_access
featured_json = {
'@context': ['https://www.w3.org/ns/activitystreams'],
id: "https://foo.test/users/#{i}/featured",
type: 'OrderedCollection',
totalItems: 1,
orderedItems: [status_json],
}.with_indifferent_access
webfinger = {
subject: "acct:user#{i}@foo.test",
links: [{ rel: 'self', href: "https://foo.test/users/#{i}" }],
}.with_indifferent_access
stub_request(:get, "https://foo.test/users/#{i}").to_return(status: 200, body: actor_json.to_json, headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, "https://foo.test/users/#{i}/featured").to_return(status: 200, body: featured_json.to_json, headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, "https://foo.test/users/#{i}/status").to_return(status: 200, body: status_json.to_json, headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, "https://foo.test/.well-known/webfinger?resource=acct:user#{i}@foo.test").to_return(body: webfinger.to_json, headers: { 'Content-Type': 'application/jrd+json' })
end
end
it 'creates at least some accounts' do
expect { subject.call('user1', 'foo.test', payload) }.to change { Account.remote.count }.by_at_least(2)
end
it 'creates no more accounts than the limit allows' do
expect { subject.call('user1', 'foo.test', payload) }.to change { Account.remote.count }.by_at_most(5)
end
end
end
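
The class above bails out early when the payload has no `inbox`; a small sketch covering that guard, placed alongside the other top-level contexts, could be:

```ruby
context 'when the actor document has no inbox' do
  let(:payload) do
    {
      id: 'https://foo.test',
      type: 'Actor',
    }.with_indifferent_access
  end

  it 'does not create an account' do
    expect(subject.call('alice', 'example.com', payload)).to be_nil
    expect(Account.find_remote('alice', 'example.com')).to be_nil
  end
end
```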
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ActivityPub::FetchRepliesService < BaseService
include JsonLdHelper
def call(parent_status, collection_or_uri, allow_synchronous_requests: true, request_id: nil)
@account = parent_status.account
@allow_synchronous_requests = allow_synchronous_requests
@items = collection_items(collection_or_uri)
return if @items.nil?
FetchReplyWorker.push_bulk(filtered_replies) { |reply_uri| [reply_uri, { 'request_id' => request_id }] }
@items
end
private
def collection_items(collection_or_uri)
collection = fetch_collection(collection_or_uri)
return unless collection.is_a?(Hash)
collection = fetch_collection(collection['first']) if collection['first'].present?
return unless collection.is_a?(Hash)
case collection['type']
when 'Collection', 'CollectionPage'
collection['items']
when 'OrderedCollection', 'OrderedCollectionPage'
collection['orderedItems']
end
end
def fetch_collection(collection_or_uri)
return collection_or_uri if collection_or_uri.is_a?(Hash)
return unless @allow_synchronous_requests
return if non_matching_uri_hosts?(@account.uri, collection_or_uri)
fetch_resource_without_id_validation(collection_or_uri, nil, true)
end
def filtered_replies
# Only fetch replies to the same server as the original status to avoid
# amplification attacks.
# Also cap at 5 fetched replies to limit the potential for DoS.
@items.map { |item| value_or_id(item) }.reject { |uri| non_matching_uri_hosts?(@account.uri, uri) }.take(5)
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ActivityPub::FetchRepliesService, type: :service do
subject { described_class.new }
let(:actor) { Fabricate(:account, domain: 'example.com', uri: 'http://example.com/account') }
let(:status) { Fabricate(:status, account: actor) }
let(:collection_uri) { 'http://example.com/replies/1' }
let(:items) do
[
'http://example.com/self-reply-1',
'http://example.com/self-reply-2',
'http://example.com/self-reply-3',
'http://other.com/other-reply-1',
'http://other.com/other-reply-2',
'http://other.com/other-reply-3',
'http://example.com/self-reply-4',
'http://example.com/self-reply-5',
'http://example.com/self-reply-6',
]
end
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'Collection',
id: collection_uri,
items: items,
}.with_indifferent_access
end
describe '#call' do
context 'when the payload is a Collection with inlined replies' do
context 'when passing the collection itself' do
it 'spawns workers for up to 5 replies on the same server' do
allow(FetchReplyWorker).to receive(:push_bulk)
subject.call(status, payload)
expect(FetchReplyWorker).to have_received(:push_bulk).with(['http://example.com/self-reply-1', 'http://example.com/self-reply-2', 'http://example.com/self-reply-3', 'http://example.com/self-reply-4', 'http://example.com/self-reply-5'])
end
end
context 'when passing the URL to the collection' do
before do
stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload))
end
it 'spawns workers for up to 5 replies on the same server' do
allow(FetchReplyWorker).to receive(:push_bulk)
subject.call(status, collection_uri)
expect(FetchReplyWorker).to have_received(:push_bulk).with(['http://example.com/self-reply-1', 'http://example.com/self-reply-2', 'http://example.com/self-reply-3', 'http://example.com/self-reply-4', 'http://example.com/self-reply-5'])
end
end
end
context 'when the payload is an OrderedCollection with inlined replies' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'OrderedCollection',
id: collection_uri,
orderedItems: items,
}.with_indifferent_access
end
context 'when passing the collection itself' do
it 'spawns workers for up to 5 replies on the same server' do
allow(FetchReplyWorker).to receive(:push_bulk)
subject.call(status, payload)
expect(FetchReplyWorker).to have_received(:push_bulk).with(['http://example.com/self-reply-1', 'http://example.com/self-reply-2', 'http://example.com/self-reply-3', 'http://example.com/self-reply-4', 'http://example.com/self-reply-5'])
end
end
context 'when passing the URL to the collection' do
before do
stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload))
end
it 'spawns workers for up to 5 replies on the same server' do
allow(FetchReplyWorker).to receive(:push_bulk)
subject.call(status, collection_uri)
expect(FetchReplyWorker).to have_received(:push_bulk).with(['http://example.com/self-reply-1', 'http://example.com/self-reply-2', 'http://example.com/self-reply-3', 'http://example.com/self-reply-4', 'http://example.com/self-reply-5'])
end
end
end
context 'when the payload is a paginated Collection with inlined replies' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'Collection',
id: collection_uri,
first: {
type: 'CollectionPage',
partOf: collection_uri,
items: items,
},
}.with_indifferent_access
end
context 'when passing the collection itself' do
it 'spawns workers for up to 5 replies on the same server' do
allow(FetchReplyWorker).to receive(:push_bulk)
subject.call(status, payload)
expect(FetchReplyWorker).to have_received(:push_bulk).with(['http://example.com/self-reply-1', 'http://example.com/self-reply-2', 'http://example.com/self-reply-3', 'http://example.com/self-reply-4', 'http://example.com/self-reply-5'])
end
end
context 'when passing the URL to the collection' do
before do
stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload))
end
it 'spawns workers for up to 5 replies on the same server' do
allow(FetchReplyWorker).to receive(:push_bulk)
subject.call(status, collection_uri)
expect(FetchReplyWorker).to have_received(:push_bulk).with(['http://example.com/self-reply-1', 'http://example.com/self-reply-2', 'http://example.com/self-reply-3', 'http://example.com/self-reply-4', 'http://example.com/self-reply-5'])
end
end
end
end
end
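
When `allow_synchronous_requests: false` is passed and only a URI is available, the service above returns without fetching anything; a sketch of a context for `describe '#call'` covering that:

```ruby
context 'when synchronous requests are disallowed and only a URI is given' do
  it 'fetches nothing and spawns no workers' do
    allow(FetchReplyWorker).to receive(:push_bulk)

    expect(subject.call(status, collection_uri, allow_synchronous_requests: false)).to be_nil
    expect(a_request(:get, collection_uri)).to_not have_been_made
    expect(FetchReplyWorker).to_not have_received(:push_bulk)
  end
end
```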
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ActivityPub::SynchronizeFollowersService < BaseService
include JsonLdHelper
include Payloadable
def call(account, partial_collection_url)
@account = account
items = collection_items(partial_collection_url)
return if items.nil?
# There could be unresolved accounts (hence the use of filter_map) but this
# should never happen in practice, since in almost all cases we keep an
# Account record, and should we not do that, we should have sent a Delete.
# In any case there is not much we can do if that occurs.
@expected_followers = items.filter_map { |uri| ActivityPub::TagManager.instance.uri_to_resource(uri, Account) }
remove_unexpected_local_followers!
handle_unexpected_outgoing_follows!
end
private
def remove_unexpected_local_followers!
@account.followers.local.where.not(id: @expected_followers.map(&:id)).reorder(nil).find_each do |unexpected_follower|
UnfollowService.new.call(unexpected_follower, @account)
end
end
def handle_unexpected_outgoing_follows!
@expected_followers.each do |expected_follower|
next if expected_follower.following?(@account)
if expected_follower.requested?(@account)
# For some reason the follow request went through but we missed it
expected_follower.follow_requests.find_by(target_account: @account)&.authorize!
else
# Since we were not aware of the follow from our side, we do not have an
# ID for it that we can include in the Undo activity. For this reason,
# the Undo may not work with software that relies exclusively on
# matching activity IDs and not the actor and target
follow = Follow.new(account: expected_follower, target_account: @account)
ActivityPub::DeliveryWorker.perform_async(build_undo_follow_json(follow), follow.account_id, follow.target_account.inbox_url)
end
end
end
def build_undo_follow_json(follow)
Oj.dump(serialize_payload(follow, ActivityPub::UndoFollowSerializer))
end
def collection_items(collection_or_uri)
collection = fetch_collection(collection_or_uri)
return unless collection.is_a?(Hash)
collection = fetch_collection(collection['first']) if collection['first'].present?
return unless collection.is_a?(Hash)
case collection['type']
when 'Collection', 'CollectionPage'
collection['items']
when 'OrderedCollection', 'OrderedCollectionPage'
collection['orderedItems']
end
end
def fetch_collection(collection_or_uri)
return collection_or_uri if collection_or_uri.is_a?(Hash)
return if non_matching_uri_hosts?(@account.uri, collection_or_uri)
fetch_resource_without_id_validation(collection_or_uri, nil, true)
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ActivityPub::SynchronizeFollowersService, type: :service do
subject { described_class.new }
let(:actor) { Fabricate(:account, domain: 'example.com', uri: 'http://example.com/account', inbox_url: 'http://example.com/inbox') }
let(:alice) { Fabricate(:account, username: 'alice') }
let(:bob) { Fabricate(:account, username: 'bob') }
let(:eve) { Fabricate(:account, username: 'eve') }
let(:mallory) { Fabricate(:account, username: 'mallory') }
let(:collection_uri) { 'http://example.com/partial-followers' }
let(:items) do
[
ActivityPub::TagManager.instance.uri_for(alice),
ActivityPub::TagManager.instance.uri_for(eve),
ActivityPub::TagManager.instance.uri_for(mallory),
]
end
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'Collection',
id: collection_uri,
items: items,
}.with_indifferent_access
end
shared_examples 'synchronizes followers' do
before do
alice.follow!(actor)
bob.follow!(actor)
mallory.request_follow!(actor)
allow(ActivityPub::DeliveryWorker).to receive(:perform_async)
subject.call(actor, collection_uri)
end
it 'keeps expected followers' do
expect(alice.following?(actor)).to be true
end
it 'removes local followers not in the remote list' do
expect(bob.following?(actor)).to be false
end
it 'converts follow requests to follow relationships when they have been accepted' do
expect(mallory.following?(actor)).to be true
end
it 'sends an Undo Follow to the actor' do
expect(ActivityPub::DeliveryWorker).to have_received(:perform_async).with(anything, eve.id, actor.inbox_url)
end
end
describe '#call' do
context 'when the endpoint is a Collection of actor URIs' do
before do
stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload))
end
it_behaves_like 'synchronizes followers'
end
context 'when the endpoint is an OrderedCollection of actor URIs' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'OrderedCollection',
id: collection_uri,
orderedItems: items,
}.with_indifferent_access
end
before do
stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload))
end
it_behaves_like 'synchronizes followers'
end
context 'when the endpoint is a paginated Collection of actor URIs' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'Collection',
id: collection_uri,
first: {
type: 'CollectionPage',
partOf: collection_uri,
items: items,
},
}.with_indifferent_access
end
before do
stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload))
end
it_behaves_like 'synchronizes followers'
end
end
end
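
The service above refuses to fetch a collection hosted on a different domain than the account (`non_matching_uri_hosts?`). A sketch of a context for `describe '#call'` covering that guard:

```ruby
context 'when the collection is hosted on a different domain than the account' do
  let(:collection_uri) { 'http://other.com/partial-followers' }

  before do
    alice.follow!(actor)
    subject.call(actor, collection_uri)
  end

  it 'does not fetch the collection' do
    expect(a_request(:get, collection_uri)).to_not have_been_made
  end

  it 'leaves existing local followers untouched' do
    expect(alice.following?(actor)).to be true
  end
end
```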
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class ActivityPub::ProcessStatusUpdateService < BaseService
include JsonLdHelper
include Redisable
include Lockable
def call(status, activity_json, object_json, request_id: nil)
raise ArgumentError, 'Status has unsaved changes' if status.changed?
@activity_json = activity_json
@json = object_json
@status_parser = ActivityPub::Parser::StatusParser.new(@json)
@uri = @status_parser.uri
@status = status
@account = status.account
@media_attachments_changed = false
@poll_changed = false
@request_id = request_id
# Only native types can be updated at the moment
return @status if !expected_type? || already_updated_more_recently?
if @status_parser.edited_at.present? && (@status.edited_at.nil? || @status_parser.edited_at > @status.edited_at)
handle_explicit_update!
else
handle_implicit_update!
end
@status
end
private
def handle_explicit_update!
last_edit_date = @status.edited_at.presence || @status.created_at
# Only allow processing one create/update per status at a time
with_redis_lock("create:#{@uri}") do
Status.transaction do
record_previous_edit!
update_media_attachments!
update_poll!
update_immediate_attributes!
update_metadata!
create_edits!
end
download_media_files!
queue_poll_notifications!
next unless significant_changes?
reset_preview_card!
broadcast_updates!
end
forward_activity! if significant_changes? && @status_parser.edited_at > last_edit_date
end
def handle_implicit_update!
with_redis_lock("create:#{@uri}") do
update_poll!(allow_significant_changes: false)
queue_poll_notifications!
end
end
def update_media_attachments!
previous_media_attachments = @status.media_attachments.to_a
previous_media_attachments_ids = @status.ordered_media_attachment_ids || previous_media_attachments.map(&:id)
@next_media_attachments = []
as_array(@json['attachment']).each do |attachment|
media_attachment_parser = ActivityPub::Parser::MediaAttachmentParser.new(attachment)
next if media_attachment_parser.remote_url.blank? || @next_media_attachments.size > 4
begin
media_attachment = previous_media_attachments.find { |previous_media_attachment| previous_media_attachment.remote_url == media_attachment_parser.remote_url }
media_attachment ||= MediaAttachment.new(account: @account, remote_url: media_attachment_parser.remote_url)
# If a previously existing media attachment was significantly updated, mark
# media attachments as changed even if none were added or removed
@media_attachments_changed = true if media_attachment_parser.significantly_changes?(media_attachment)
media_attachment.description = media_attachment_parser.description
media_attachment.focus = media_attachment_parser.focus
media_attachment.thumbnail_remote_url = media_attachment_parser.thumbnail_remote_url
media_attachment.blurhash = media_attachment_parser.blurhash
media_attachment.status_id = @status.id
media_attachment.skip_download = unsupported_media_type?(media_attachment_parser.file_content_type) || skip_download?
media_attachment.save!
@next_media_attachments << media_attachment
rescue Addressable::URI::InvalidURIError => e
Rails.logger.debug { "Invalid URL in attachment: #{e}" }
end
end
@status.ordered_media_attachment_ids = @next_media_attachments.map(&:id)
@media_attachments_changed = true if @status.ordered_media_attachment_ids != previous_media_attachments_ids
end
def download_media_files!
@next_media_attachments.each do |media_attachment|
next if media_attachment.skip_download
media_attachment.download_file! if media_attachment.remote_url_previously_changed?
media_attachment.download_thumbnail! if media_attachment.thumbnail_remote_url_previously_changed?
media_attachment.save
rescue Mastodon::UnexpectedResponseError, HTTP::TimeoutError, HTTP::ConnectionError, OpenSSL::SSL::SSLError
RedownloadMediaWorker.perform_in(rand(30..600).seconds, media_attachment.id)
rescue Seahorse::Client::NetworkingError => e
Rails.logger.warn "Error storing media attachment: #{e}"
end
@status.media_attachments.reload
end
def update_poll!(allow_significant_changes: true)
previous_poll = @status.preloadable_poll
@previous_expires_at = previous_poll&.expires_at
poll_parser = ActivityPub::Parser::PollParser.new(@json)
if poll_parser.valid?
poll = previous_poll || @account.polls.new(status: @status)
# If for some reason the options were changed, it invalidates all previous
# votes, so we need to remove them
@poll_changed = true if poll_parser.significantly_changes?(poll)
return if @poll_changed && !allow_significant_changes
poll.last_fetched_at = Time.now.utc
poll.options = poll_parser.options
poll.multiple = poll_parser.multiple
poll.expires_at = poll_parser.expires_at
poll.voters_count = poll_parser.voters_count
poll.cached_tallies = poll_parser.cached_tallies
poll.reset_votes! if @poll_changed
poll.save!
@status.poll_id = poll.id
elsif previous_poll.present?
return unless allow_significant_changes
previous_poll.destroy!
@poll_changed = true
@status.poll_id = nil
end
end
def update_immediate_attributes!
@status.text = @status_parser.text || ''
@status.spoiler_text = @status_parser.spoiler_text || ''
@status.sensitive = @account.sensitized? || @status_parser.sensitive || false
@status.language = @status_parser.language
@significant_changes = text_significantly_changed? || @status.spoiler_text_changed? || @media_attachments_changed || @poll_changed
@status.edited_at = @status_parser.edited_at if significant_changes?
@status.save!
end
def update_metadata!
@raw_tags = []
@raw_mentions = []
@raw_emojis = []
as_array(@json['tag']).each do |tag|
if equals_or_includes?(tag['type'], 'Hashtag')
@raw_tags << tag['name'] if tag['name'].present?
elsif equals_or_includes?(tag['type'], 'Mention')
@raw_mentions << tag['href'] if tag['href'].present?
elsif equals_or_includes?(tag['type'], 'Emoji')
@raw_emojis << tag
end
end
update_tags!
update_mentions!
update_emojis!
end
def update_tags!
@status.tags = Tag.find_or_create_by_names(@raw_tags)
end
def update_mentions!
previous_mentions = @status.active_mentions.includes(:account).to_a
current_mentions = []
@raw_mentions.each do |href|
next if href.blank?
account = ActivityPub::TagManager.instance.uri_to_resource(href, Account)
account ||= ActivityPub::FetchRemoteAccountService.new.call(href, request_id: @request_id)
next if account.nil?
mention = previous_mentions.find { |x| x.account_id == account.id }
mention ||= account.mentions.new(status: @status)
current_mentions << mention
end
current_mentions.each do |mention|
mention.save if mention.new_record?
end
# If previous mentions are no longer contained in the text, convert them
# to silent mentions, since withdrawing access from someone who already
# received a notification might be more confusing
removed_mentions = previous_mentions - current_mentions
Mention.where(id: removed_mentions.map(&:id)).update_all(silent: true) unless removed_mentions.empty?
end
def update_emojis!
return if skip_download?
@raw_emojis.each do |raw_emoji|
custom_emoji_parser = ActivityPub::Parser::CustomEmojiParser.new(raw_emoji)
next if custom_emoji_parser.shortcode.blank? || custom_emoji_parser.image_remote_url.blank?
emoji = CustomEmoji.find_by(shortcode: custom_emoji_parser.shortcode, domain: @account.domain)
next unless emoji.nil? || custom_emoji_parser.image_remote_url != emoji.image_remote_url || (custom_emoji_parser.updated_at && custom_emoji_parser.updated_at >= emoji.updated_at)
begin
emoji ||= CustomEmoji.new(domain: @account.domain, shortcode: custom_emoji_parser.shortcode, uri: custom_emoji_parser.uri)
emoji.image_remote_url = custom_emoji_parser.image_remote_url
emoji.save
rescue Seahorse::Client::NetworkingError => e
Rails.logger.warn "Error storing emoji: #{e}"
end
end
end
def expected_type?
equals_or_includes_any?(@json['type'], %w(Note Question))
end
def record_previous_edit!
@previous_edit = @status.build_snapshot(at_time: @status.created_at, rate_limit: false) if @status.edits.empty?
end
def create_edits!
return unless significant_changes?
@previous_edit&.save!
@status.snapshot!(account_id: @account.id, rate_limit: false)
end
def skip_download?
return @skip_download if defined?(@skip_download)
@skip_download ||= DomainBlock.reject_media?(@account.domain)
end
def unsupported_media_type?(mime_type)
mime_type.present? && !MediaAttachment.supported_mime_types.include?(mime_type)
end
def significant_changes?
@significant_changes
end
def text_significantly_changed?
return false unless @status.text_changed?
old, new = @status.text_change
HtmlAwareFormatter.new(old, false).to_s != HtmlAwareFormatter.new(new, false).to_s
end
def already_updated_more_recently?
@status.edited_at.present? && @status_parser.edited_at.present? && @status.edited_at > @status_parser.edited_at
end
def reset_preview_card!
@status.reset_preview_card!
LinkCrawlWorker.perform_in(rand(1..59).seconds, @status.id)
end
def broadcast_updates!
::DistributionWorker.perform_async(@status.id, { 'update' => true })
end
def queue_poll_notifications!
poll = @status.preloadable_poll
# If the poll had no expiration date set but now has, or now has a sooner
# expiration date, and people have voted, schedule a notification
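    # A concrete illustration (assumed values): with @previous_expires_at = nil and the
    # edited poll now expiring tomorrow while votes exist, the guard below passes and
    # PollExpirationNotifyWorker is scheduled for `poll.expires_at + 5.minutes`.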
return unless poll.present? && poll.expires_at.present? && poll.votes.exists?
PollExpirationNotifyWorker.remove_from_scheduled(poll.id) if @previous_expires_at.present? && @previous_expires_at > poll.expires_at
PollExpirationNotifyWorker.perform_at(poll.expires_at + 5.minutes, poll.id)
end
def forward_activity!
forwarder.forward! if forwarder.forwardable?
end
def forwarder
@forwarder ||= ActivityPub::Forwarder.new(@account, @activity_json, @status)
end
end
``` | # frozen_string_literal: true
require 'rails_helper'
def poll_option_json(name, votes)
{ type: 'Note', name: name, replies: { type: 'Collection', totalItems: votes } }
end
RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do
subject { described_class.new }
let!(:status) { Fabricate(:status, text: 'Hello world', account: Fabricate(:account, domain: 'example.com')) }
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'foo',
type: 'Note',
summary: 'Show more',
content: 'Hello universe',
updated: '2021-09-08T22:39:25Z',
tag: [
{ type: 'Hashtag', name: 'hoge' },
{ type: 'Mention', href: ActivityPub::TagManager.instance.uri_for(alice) },
],
}
end
let(:json) { Oj.load(Oj.dump(payload)) }
let(:alice) { Fabricate(:account) }
let(:bob) { Fabricate(:account) }
let(:mentions) { [] }
let(:tags) { [] }
let(:media_attachments) { [] }
before do
mentions.each { |a| Fabricate(:mention, status: status, account: a) }
tags.each { |t| status.tags << t }
media_attachments.each { |m| status.media_attachments << m }
end
describe '#call' do
it 'updates text' do
subject.call(status, json, json)
expect(status.reload.text).to eq 'Hello universe'
end
it 'updates content warning' do
subject.call(status, json, json)
expect(status.reload.spoiler_text).to eq 'Show more'
end
context 'when the changes are only in sanitized-out HTML' do
let!(:status) { Fabricate(:status, text: '<p>Hello world <a href="https://joinmastodon.org" rel="nofollow">joinmastodon.org</a></p>', account: Fabricate(:account, domain: 'example.com')) }
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'foo',
type: 'Note',
updated: '2021-09-08T22:39:25Z',
content: '<p>Hello world <a href="https://joinmastodon.org" rel="noreferrer">joinmastodon.org</a></p>',
}
end
before do
subject.call(status, json, json)
end
it 'does not create any edits' do
expect(status.reload.edits).to be_empty
end
it 'does not mark status as edited' do
expect(status.edited?).to be false
end
end
context 'when the status has not been explicitly edited' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'foo',
type: 'Note',
content: 'Updated text',
}
end
before do
subject.call(status, json, json)
end
it 'does not create any edits' do
expect(status.reload.edits).to be_empty
end
it 'does not mark status as edited' do
expect(status.reload.edited?).to be false
end
it 'does not update the text' do
expect(status.reload.text).to eq 'Hello world'
end
end
context 'when the status has not been explicitly edited and features a poll' do
let(:account) { Fabricate(:account, domain: 'example.com') }
let!(:expiration) { 10.days.from_now.utc }
let!(:status) do
Fabricate(:status,
text: 'Hello world',
account: account,
poll_attributes: {
options: %w(Foo Bar),
account: account,
multiple: false,
hide_totals: false,
expires_at: expiration,
})
end
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://example.com/foo',
type: 'Question',
content: 'Hello world',
endTime: expiration.iso8601,
oneOf: [
poll_option_json('Foo', 4),
poll_option_json('Bar', 3),
],
}
end
before do
subject.call(status, json, json)
end
it 'does not create any edits' do
expect(status.reload.edits).to be_empty
end
it 'does not mark status as edited' do
expect(status.reload.edited?).to be false
end
it 'does not update the text' do
expect(status.reload.text).to eq 'Hello world'
end
it 'updates tallies' do
expect(status.poll.reload.cached_tallies).to eq [4, 3]
end
end
    context 'when the status changes a poll despite not being explicitly marked as updated' do
let(:account) { Fabricate(:account, domain: 'example.com') }
let!(:expiration) { 10.days.from_now.utc }
let!(:status) do
Fabricate(:status,
text: 'Hello world',
account: account,
poll_attributes: {
options: %w(Foo Bar),
account: account,
multiple: false,
hide_totals: false,
expires_at: expiration,
})
end
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'https://example.com/foo',
type: 'Question',
content: 'Hello world',
endTime: expiration.iso8601,
oneOf: [
poll_option_json('Foo', 4),
poll_option_json('Bar', 3),
poll_option_json('Baz', 3),
],
}
end
before do
subject.call(status, json, json)
end
it 'does not create any edits' do
expect(status.reload.edits).to be_empty
end
it 'does not mark status as edited' do
expect(status.reload.edited?).to be false
end
it 'does not update the text' do
expect(status.reload.text).to eq 'Hello world'
end
it 'does not update tallies' do
expect(status.poll.reload.cached_tallies).to eq [0, 0]
end
end
context 'when receiving an edit older than the latest processed' do
before do
status.snapshot!(at_time: status.created_at, rate_limit: false)
status.update!(text: 'Hello newer world', edited_at: Time.now.utc)
status.snapshot!(rate_limit: false)
end
it 'does not create any edits' do
        expect { subject.call(status, json, json) }.to_not(change { status.reload.edits.pluck(:id) })
end
it 'does not update the text, spoiler_text or edited_at' do
expect { subject.call(status, json, json) }.to_not(change { s = status.reload; [s.text, s.spoiler_text, s.edited_at] })
end
end
context 'with no changes at all' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'foo',
type: 'Note',
content: 'Hello world',
}
end
before do
subject.call(status, json, json)
end
it 'does not create any edits' do
expect(status.reload.edits).to be_empty
end
it 'does not mark status as edited' do
expect(status.edited?).to be false
end
end
context 'with no changes and originally with no ordered_media_attachment_ids' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'foo',
type: 'Note',
content: 'Hello world',
}
end
before do
status.update(ordered_media_attachment_ids: nil)
subject.call(status, json, json)
end
it 'does not create any edits' do
expect(status.reload.edits).to be_empty
end
it 'does not mark status as edited' do
expect(status.edited?).to be false
end
end
context 'when originally without tags' do
before do
subject.call(status, json, json)
end
it 'updates tags' do
expect(status.tags.reload.map(&:name)).to eq %w(hoge)
end
end
context 'when originally with tags' do
let(:tags) { [Fabricate(:tag, name: 'test'), Fabricate(:tag, name: 'foo')] }
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'foo',
type: 'Note',
summary: 'Show more',
content: 'Hello universe',
updated: '2021-09-08T22:39:25Z',
tag: [
{ type: 'Hashtag', name: 'foo' },
],
}
end
before do
subject.call(status, json, json)
end
it 'updates tags' do
expect(status.tags.reload.map(&:name)).to eq %w(foo)
end
end
context 'when originally without mentions' do
before do
subject.call(status, json, json)
end
it 'updates mentions' do
expect(status.active_mentions.reload.map(&:account_id)).to eq [alice.id]
end
end
context 'when originally with mentions' do
let(:mentions) { [alice, bob] }
before do
subject.call(status, json, json)
end
it 'updates mentions' do
expect(status.active_mentions.reload.map(&:account_id)).to eq [alice.id]
end
end
context 'when originally without media attachments' do
before do
stub_request(:get, 'https://example.com/foo.png').to_return(body: attachment_fixture('emojo.png'))
subject.call(status, json, json)
end
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'foo',
type: 'Note',
content: 'Hello universe',
updated: '2021-09-08T22:39:25Z',
attachment: [
{ type: 'Image', mediaType: 'image/png', url: 'https://example.com/foo.png' },
],
}
end
it 'updates media attachments' do
media_attachment = status.reload.ordered_media_attachments.first
expect(media_attachment).to_not be_nil
expect(media_attachment.remote_url).to eq 'https://example.com/foo.png'
end
it 'fetches the attachment' do
expect(a_request(:get, 'https://example.com/foo.png')).to have_been_made
end
it 'records media change in edit' do
expect(status.edits.reload.last.ordered_media_attachment_ids).to_not be_empty
end
end
context 'when originally with media attachments' do
let(:media_attachments) { [Fabricate(:media_attachment, remote_url: 'https://example.com/foo.png'), Fabricate(:media_attachment, remote_url: 'https://example.com/unused.png')] }
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'foo',
type: 'Note',
content: 'Hello universe',
updated: '2021-09-08T22:39:25Z',
attachment: [
{ type: 'Image', mediaType: 'image/png', url: 'https://example.com/foo.png', name: 'A picture' },
],
}
end
before do
allow(RedownloadMediaWorker).to receive(:perform_async)
subject.call(status, json, json)
end
it 'updates the existing media attachment in-place' do
media_attachment = status.media_attachments.ordered.reload.first
expect(media_attachment).to_not be_nil
expect(media_attachment.remote_url).to eq 'https://example.com/foo.png'
expect(media_attachment.description).to eq 'A picture'
end
it 'does not queue redownload for the existing media attachment' do
expect(RedownloadMediaWorker).to_not have_received(:perform_async)
end
it 'updates media attachments' do
expect(status.ordered_media_attachments.map(&:remote_url)).to eq %w(https://example.com/foo.png)
end
it 'records media change in edit' do
expect(status.edits.reload.last.ordered_media_attachment_ids).to_not be_empty
end
end
context 'when originally with a poll' do
before do
poll = Fabricate(:poll, status: status)
status.update(preloadable_poll: poll)
subject.call(status, json, json)
end
it 'removes poll' do
expect(status.reload.poll).to be_nil
end
it 'records media change in edit' do
expect(status.edits.reload.last.poll_options).to be_nil
end
end
context 'when originally without a poll' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
id: 'foo',
type: 'Question',
content: 'Hello universe',
updated: '2021-09-08T22:39:25Z',
closed: true,
oneOf: [
{ type: 'Note', name: 'Foo' },
{ type: 'Note', name: 'Bar' },
{ type: 'Note', name: 'Baz' },
],
}
end
before do
subject.call(status, json, json)
end
it 'creates a poll' do
poll = status.reload.poll
expect(poll).to_not be_nil
expect(poll.options).to eq %w(Foo Bar Baz)
end
it 'records media change in edit' do
expect(status.edits.reload.last.poll_options).to eq %w(Foo Bar Baz)
end
end
it 'creates edit history' do
subject.call(status, json, json)
expect(status.edits.reload.map(&:text)).to eq ['Hello world', 'Hello universe']
end
it 'sets edited timestamp' do
subject.call(status, json, json)
expect(status.reload.edited_at.to_s).to eq '2021-09-08 22:39:25 UTC'
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class UploadService
# Temporarily introduced for upload API: https://gitlab.com/gitlab-org/gitlab/-/issues/325788
attr_accessor :override_max_attachment_size
def initialize(model, file, uploader_class = FileUploader, **uploader_context)
@model = model
@file = file
@uploader_class = uploader_class
@uploader_context = uploader_context
end
def execute
return unless file && file.size <= max_attachment_size
uploader = uploader_class.new(model, nil, **uploader_context)
uploader.store!(file)
uploader
end
private
attr_reader :model, :file, :uploader_class, :uploader_context
def max_attachment_size
override_max_attachment_size || Gitlab::CurrentSettings.max_attachment_size.megabytes.to_i
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe UploadService, feature_category: :shared do
describe 'File service' do
before do
@user = create(:user)
@project = create(:project, creator_id: @user.id, namespace: @user.namespace)
end
context 'for valid gif file' do
before do
gif = fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif')
@link_to_file = upload_file(@project, gif)
end
it { expect(@link_to_file).to have_key(:alt) }
it { expect(@link_to_file).to have_key(:url) }
it { expect(@link_to_file).to have_value('banana_sample') }
it { expect(@link_to_file[:url]).to match('banana_sample.gif') }
end
context 'for valid png file' do
before do
png = fixture_file_upload('spec/fixtures/dk.png',
'image/png')
@link_to_file = upload_file(@project, png)
end
it { expect(@link_to_file).to have_key(:alt) }
it { expect(@link_to_file).to have_key(:url) }
it { expect(@link_to_file).to have_value('dk') }
it { expect(@link_to_file[:url]).to match('dk.png') }
end
context 'for valid jpg file' do
before do
jpg = fixture_file_upload('spec/fixtures/rails_sample.jpg', 'image/jpg')
@link_to_file = upload_file(@project, jpg)
end
it { expect(@link_to_file).to have_key(:alt) }
it { expect(@link_to_file).to have_key(:url) }
it { expect(@link_to_file).to have_value('rails_sample') }
it { expect(@link_to_file[:url]).to match('rails_sample.jpg') }
end
context 'for txt file' do
before do
txt = fixture_file_upload('spec/fixtures/doc_sample.txt', 'text/plain')
@link_to_file = upload_file(@project, txt)
end
it { expect(@link_to_file).to have_key(:alt) }
it { expect(@link_to_file).to have_key(:url) }
it { expect(@link_to_file).to have_value('doc_sample.txt') }
it { expect(@link_to_file[:url]).to match('doc_sample.txt') }
end
context 'for too large a file' do
before do
txt = fixture_file_upload('spec/fixtures/doc_sample.txt', 'text/plain')
allow(txt).to receive(:size) { 1000.megabytes.to_i }
@link_to_file = upload_file(@project, txt)
end
it { expect(@link_to_file).to eq({}) }
end
describe '#override_max_attachment_size' do
let(:txt) { fixture_file_upload('spec/fixtures/doc_sample.txt', 'text/plain') }
let(:service) { described_class.new(@project, txt, FileUploader) }
subject { service.execute.to_h }
before do
allow(txt).to receive(:size) { 100.megabytes.to_i }
end
it 'allows the upload' do
service.override_max_attachment_size = 101.megabytes
expect(subject.keys).to eq(%i[alt url markdown])
end
it 'disallows the upload' do
service.override_max_attachment_size = 99.megabytes
expect(subject).to eq({})
end
end
end
def upload_file(project, file)
described_class.new(project, file, FileUploader).execute.to_h
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class NoteSummary
attr_reader :note
attr_reader :metadata
def initialize(noteable, project, author, body, action: nil, commit_count: nil, created_at: nil)
@note = { noteable: noteable,
created_at: created_at || noteable.system_note_timestamp,
project: project, author: author, note: body }
@metadata = { action: action, commit_count: commit_count }.compact
if action == 'description' && noteable.saved_description_version
@metadata[:description_version] = noteable.saved_description_version
end
set_commit_params if note[:noteable].is_a?(Commit)
end
def metadata?
metadata.present?
end
def set_commit_params
note.merge!(noteable_type: 'Commit', commit_id: note[:noteable].id)
note[:noteable] = nil
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe NoteSummary, feature_category: :code_review_workflow do
let(:project) { build(:project) }
let(:noteable) { build(:issue) }
let(:user) { build(:user) }
def create_note_summary
described_class.new(noteable, project, user, 'note', action: 'icon', commit_count: 5)
end
describe '#metadata?' do
it 'returns true when metadata present' do
expect(create_note_summary.metadata?).to be_truthy
end
it 'returns false when metadata not present' do
expect(described_class.new(noteable, project, user, 'note').metadata?).to be_falsey
end
end
describe '#note' do
it 'returns note hash' do
freeze_time do
expect(create_note_summary.note).to eq(
noteable: noteable,
project: project,
author: user,
note: 'note',
created_at: Time.current
)
end
end
context 'when noteable is a commit' do
let(:noteable) { build(:commit, system_note_timestamp: Time.zone.at(43)) }
it 'returns note hash specific to commit' do
expect(create_note_summary.note).to eq(
noteable: nil, project: project, author: user, note: 'note',
noteable_type: 'Commit', commit_id: noteable.id,
created_at: Time.zone.at(43)
)
end
end
end
describe '#metadata' do
it 'returns metadata hash' do
expect(create_note_summary.metadata).to eq(action: 'icon', commit_count: 5)
end
context 'description action and noteable has saved_description_version' do
before do
noteable.saved_description_version = 1
end
subject { described_class.new(noteable, project, user, 'note', action: 'description') }
it 'sets the description_version metadata' do
expect(subject.metadata).to include(description_version: 1)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class AutoMergeService < BaseService
include Gitlab::Utils::StrongMemoize
STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS = 'merge_when_pipeline_succeeds'
STRATEGIES = [STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS].freeze
class << self
def all_strategies_ordered_by_preference
STRATEGIES
end
def get_service_class(strategy)
return unless all_strategies_ordered_by_preference.include?(strategy)
"::AutoMerge::#{strategy.camelize}Service".constantize
end
end
def execute(merge_request, strategy = nil)
strategy ||= preferred_strategy(merge_request)
service = get_service_instance(merge_request, strategy)
return :failed unless service&.available_for?(merge_request)
service.execute(merge_request)
end
def update(merge_request)
return :failed unless merge_request.auto_merge_enabled?
strategy = merge_request.auto_merge_strategy
get_service_instance(merge_request, strategy).update(merge_request)
end
def process(merge_request)
return unless merge_request.auto_merge_enabled?
strategy = merge_request.auto_merge_strategy
get_service_instance(merge_request, strategy).process(merge_request)
end
def cancel(merge_request)
return error("Can't cancel the automatic merge", 406) unless merge_request.auto_merge_enabled?
strategy = merge_request.auto_merge_strategy
get_service_instance(merge_request, strategy).cancel(merge_request)
end
def abort(merge_request, reason)
return error("Can't abort the automatic merge", 406) unless merge_request.auto_merge_enabled?
strategy = merge_request.auto_merge_strategy
get_service_instance(merge_request, strategy).abort(merge_request, reason)
end
def available_strategies(merge_request)
self.class.all_strategies_ordered_by_preference.select do |strategy|
get_service_instance(merge_request, strategy).available_for?(merge_request)
end
end
def preferred_strategy(merge_request)
available_strategies(merge_request).first
end
private
def get_service_instance(merge_request, strategy)
strong_memoize("service_instance_#{merge_request.id}_#{strategy}") do
self.class.get_service_class(strategy)&.new(project, current_user, params)
end
end
end
AutoMergeService.prepend_mod_with('AutoMergeService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe AutoMergeService, feature_category: :code_review_workflow do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:service) { described_class.new(project, user) }
before_all do
project.add_maintainer(user)
end
describe '.all_strategies_ordered_by_preference' do
subject { described_class.all_strategies_ordered_by_preference }
it 'returns all strategies in preference order' do
if Gitlab.ee?
is_expected.to contain_exactly(
AutoMergeService::STRATEGY_MERGE_TRAIN,
AutoMergeService::STRATEGY_ADD_TO_MERGE_TRAIN_WHEN_PIPELINE_SUCCEEDS,
AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS,
AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS
)
else
is_expected.to eq([AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS])
end
end
end
describe '#available_strategies' do
subject { service.available_strategies(merge_request) }
let(:merge_request) do
create(:merge_request, source_project: project)
end
let(:pipeline_status) { :running }
before do
create(
:ci_pipeline,
pipeline_status,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha,
project: merge_request.source_project
)
merge_request.update_head_pipeline
end
it 'returns available strategies' do
is_expected.to include('merge_when_pipeline_succeeds')
end
    context 'when the head pipeline succeeded' do
let(:pipeline_status) { :success }
it 'returns available strategies' do
is_expected.to be_empty
end
end
end
describe '#preferred_strategy' do
subject { service.preferred_strategy(merge_request) }
let(:merge_request) do
create(:merge_request, source_project: project)
end
let(:pipeline_status) { :running }
before do
create(
:ci_pipeline,
pipeline_status,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha,
project: merge_request.source_project
)
merge_request.update_head_pipeline
stub_licensed_features(merge_request_approvers: true) if Gitlab.ee?
end
it 'returns preferred strategy', if: Gitlab.ee? do
is_expected.to eq('merge_when_checks_pass')
end
it 'returns preferred strategy', unless: Gitlab.ee? do
is_expected.to eq('merge_when_pipeline_succeeds')
end
context 'when the head pipeline succeeded' do
let(:pipeline_status) { :success }
it 'returns available strategies' do
is_expected.to be_nil
end
end
end
describe '.get_service_class' do
subject { described_class.get_service_class(strategy) }
let(:strategy) { AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS }
it 'returns service instance' do
is_expected.to eq(AutoMerge::MergeWhenPipelineSucceedsService)
end
context 'when strategy is not present' do
let(:strategy) {}
it 'returns nil' do
is_expected.to be_nil
end
end
end
describe '#execute' do
subject { service.execute(merge_request, strategy) }
let(:merge_request) do
create(:merge_request, source_project: project)
end
let(:pipeline_status) { :running }
let(:strategy) { AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS }
before do
create(
:ci_pipeline,
pipeline_status,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha,
project: merge_request.source_project
)
merge_request.update_head_pipeline
end
it 'delegates to a relevant service instance' do
expect_next_instance_of(AutoMerge::MergeWhenPipelineSucceedsService) do |service|
expect(service).to receive(:execute).with(merge_request)
end
subject
end
context 'when the head pipeline succeeded' do
let(:pipeline_status) { :success }
it 'returns failed' do
is_expected.to eq(:failed)
end
end
context 'when strategy is not specified' do
let(:strategy) {}
before do
stub_licensed_features(merge_request_approvers: true) if Gitlab.ee?
end
it 'chooses the most preferred strategy', if: Gitlab.ee? do
is_expected.to eq(:merge_when_checks_pass)
end
it 'chooses the most preferred strategy', unless: Gitlab.ee? do
is_expected.to eq(:merge_when_pipeline_succeeds)
end
end
end
describe '#update' do
subject { service.update(merge_request) } # rubocop:disable Rails/SaveBang
context 'when auto merge is enabled' do
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
it 'delegates to a relevant service instance' do
expect_next_instance_of(AutoMerge::MergeWhenPipelineSucceedsService) do |service|
expect(service).to receive(:update).with(merge_request)
end
subject
end
end
context 'when auto merge is not enabled' do
let(:merge_request) { create(:merge_request) }
it 'returns failed' do
is_expected.to eq(:failed)
end
end
end
describe '#process' do
subject { service.process(merge_request) }
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
it 'delegates to a relevant service instance' do
expect_next_instance_of(AutoMerge::MergeWhenPipelineSucceedsService) do |service|
expect(service).to receive(:process).with(merge_request)
end
subject
end
context 'when auto merge is not enabled' do
let(:merge_request) { create(:merge_request) }
it 'returns nil' do
is_expected.to be_nil
end
end
end
describe '#cancel' do
subject { service.cancel(merge_request) }
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
it 'delegates to a relevant service instance' do
expect_next_instance_of(AutoMerge::MergeWhenPipelineSucceedsService) do |service|
expect(service).to receive(:cancel).with(merge_request)
end
subject
end
context 'when auto merge is not enabled' do
let(:merge_request) { create(:merge_request) }
it 'returns error' do
expect(subject[:message]).to eq("Can't cancel the automatic merge")
expect(subject[:status]).to eq(:error)
expect(subject[:http_status]).to eq(406)
end
end
end
describe '#abort' do
subject { service.abort(merge_request, error) }
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
let(:error) { 'an error' }
it 'delegates to a relevant service instance' do
expect_next_instance_of(AutoMerge::MergeWhenPipelineSucceedsService) do |service|
expect(service).to receive(:abort).with(merge_request, error)
end
subject
end
context 'when auto merge is not enabled' do
let(:merge_request) { create(:merge_request) }
it 'returns error' do
expect(subject[:message]).to eq("Can't abort the automatic merge")
expect(subject[:status]).to eq(:error)
expect(subject[:http_status]).to eq(406)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# RepositoryArchiveCleanUpService removes cached repository archives
# that are generated on-the-fly by Gitaly. These files are stored in the
# following form (as defined in lib/gitlab/git/repository.rb) and served
# by GitLab Workhorse:
#
# /path/to/repository/downloads/project-N/sha/@v2/archive.format
#
# Legacy paths omit the @v2 prefix.
#
# For example:
#
# /var/opt/gitlab/gitlab-rails/shared/cache/archive/project-1/master/@v2/archive.zip
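#
# Going by the note above, a legacy path would simply omit the @v2 segment, e.g.:
#
#   /var/opt/gitlab/gitlab-rails/shared/cache/archive/project-1/master/archive.zip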
class RepositoryArchiveCleanUpService
LAST_MODIFIED_TIME_IN_MINUTES = 120
# For `/path/project-N/sha/@v2/archive.zip`, `find /path -maxdepth 4` will find this file
MAX_ARCHIVE_DEPTH = 4
attr_reader :mmin, :path
def initialize(mmin = LAST_MODIFIED_TIME_IN_MINUTES)
@mmin = mmin
@path = Gitlab.config.gitlab.repository_downloads_path
end
def execute
Gitlab::Metrics.measure(:repository_archive_clean_up) do
next unless File.directory?(path)
clean_up_old_archives
clean_up_empty_directories
end
end
private
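  # Deletes stale archive files. With the default mmin (120), the `find` command built
  # below (run via Gitlab::Popen, so no shell is involved) is roughly:
  #
  #   find <repository_downloads_path> -mindepth 1 -maxdepth 4 -type f
  #     ( -name *.tar -o -name *.bz2 -o -name *.tar.gz -o -name *.zip ) -mmin +120 -delete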
def clean_up_old_archives
run(%W[find #{path} -mindepth 1 -maxdepth #{MAX_ARCHIVE_DEPTH} -type f \( -name \*.tar -o -name \*.bz2 -o -name \*.tar.gz -o -name \*.zip \) -mmin +#{mmin} -delete])
end
def clean_up_empty_directories
(1...MAX_ARCHIVE_DEPTH).reverse_each { |depth| clean_up_empty_directories_with_depth(depth) }
end
def clean_up_empty_directories_with_depth(depth)
run(%W[find #{path} -mindepth #{depth} -maxdepth #{depth} -type d -empty -delete])
end
def run(cmd)
Gitlab::Popen.popen(cmd)
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe RepositoryArchiveCleanUpService, feature_category: :source_code_management do
subject(:service) { described_class.new }
describe '#execute (new archive locations)' do
let(:sha) { "0" * 40 }
it 'removes outdated archives and directories in a new-style path' do
in_directory_with_files("project-#{non_existing_record_id}/#{sha}", %w[tar tar.bz2 tar.gz zip], 3.hours) do |dirname, files|
service.execute
files.each { |filename| expect(File.exist?(filename)).to be_falsy }
expect(File.directory?(dirname)).to be_falsy
expect(File.directory?(File.dirname(dirname))).to be_falsy
end
end
it 'removes outdated archives and directories in a versioned path' do
in_directory_with_files("project-#{non_existing_record_id}/#{sha}/@v2", %w[tar tar.bz2 tar.gz zip], 3.hours) do |dirname, files|
service.execute
files.each { |filename| expect(File.exist?(filename)).to be_falsy }
expect(File.directory?(dirname)).to be_falsy
expect(File.directory?(File.dirname(dirname))).to be_falsy
end
end
it 'does not remove directories when they contain outdated non-archives' do
in_directory_with_files("project-#{non_existing_record_id}/#{sha}", %w[tar conf rb], 2.hours) do |dirname, files|
service.execute
expect(File.directory?(dirname)).to be_truthy
end
end
it 'does not remove in-date archives in a new-style path' do
in_directory_with_files("project-#{non_existing_record_id}/#{sha}", %w[tar tar.bz2 tar.gz zip], 1.hour) do |dirname, files|
service.execute
files.each { |filename| expect(File.exist?(filename)).to be_truthy }
end
end
end
describe '#execute (legacy archive locations)' do
context 'when the downloads directory does not exist' do
it 'does not remove any archives' do
path = '/invalid/path/'
stub_repository_downloads_path(path)
allow(File).to receive(:directory?).and_call_original
expect(File).to receive(:directory?).with(path).and_return(false)
expect(service).not_to receive(:clean_up_old_archives)
expect(service).not_to receive(:clean_up_empty_directories)
service.execute
end
end
context 'when the downloads directory exists' do
shared_examples 'invalid archive files' do |dirname, extensions, mtime|
it 'does not remove files and directory' do
in_directory_with_files(dirname, extensions, mtime) do |dir, files|
service.execute
files.each { |file| expect(File.exist?(file)).to eq true }
expect(File.directory?(dir)).to eq true
end
end
end
      it 'removes files older than 2 hours that match valid archive extensions' do
        # On macOS, the `mmin` parameter for `find` rounds up, so add a full
        # minute to ensure these files are deemed old.
in_directory_with_files('sample.git', %w[tar tar.bz2 tar.gz zip], 121.minutes) do |dir, files|
service.execute
files.each { |file| expect(File.exist?(file)).to eq false }
expect(File.directory?(dir)).to eq false
end
end
      context 'with files older than 2 hours that do not match valid archive extensions' do
it_behaves_like 'invalid archive files', 'sample.git', %w[conf rb], 121.minutes
end
context 'with files older than 2 hours inside invalid directories' do
it_behaves_like 'invalid archive files', 'john/t/doe/sample.git', %w[conf rb tar tar.gz], 121.minutes
end
      context 'with files newer than 2 hours that match valid archive extensions' do
it_behaves_like 'invalid archive files', 'sample.git', %w[tar tar.bz2 tar.gz zip], 1.hour
end
      context 'with files newer than 2 hours that do not match valid archive extensions' do
it_behaves_like 'invalid archive files', 'sample.git', %w[conf rb], 1.hour
end
context 'with files newer than 2 hours inside invalid directories' do
it_behaves_like 'invalid archive files', 'sample.git', %w[conf rb tar tar.gz], 1.hour
end
end
end
def in_directory_with_files(dirname, extensions, mtime)
Dir.mktmpdir do |tmpdir|
stub_repository_downloads_path(tmpdir)
dir = File.join(tmpdir, dirname)
files = create_temporary_files(dir, extensions, mtime)
yield(dir, files)
end
end
def stub_repository_downloads_path(path)
allow(Gitlab.config.gitlab).to receive(:repository_downloads_path).and_return(path)
end
def create_temporary_files(dir, extensions, mtime)
FileUtils.mkdir_p(dir)
FileUtils.touch(extensions.map { |ext| File.join(dir, "sample.#{ext}") }, mtime: Time.now.utc - mtime)
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class ImportExportCleanUpService
LAST_MODIFIED_TIME_IN_MINUTES = 1440
DIR_DEPTH = 5
attr_reader :mmin, :path
def initialize(mmin = LAST_MODIFIED_TIME_IN_MINUTES)
@mmin = mmin
@path = Gitlab::ImportExport.storage_path
end
def execute
Gitlab::Metrics.measure(:import_export_clean_up) do
execute_cleanup
end
end
private
def execute_cleanup
clean_up_export_object_files
ensure
# We don't want a failure in cleaning up object storage from
# blocking us from cleaning up temporary storage.
clean_up_export_files if File.directory?(path)
end
def clean_up_export_files
old_directories do |dir|
FileUtils.remove_entry(dir)
logger.info(
message: 'Removed Import/Export tmp directory',
dir_path: dir
)
end
end
def clean_up_export_object_files
ImportExportUpload.with_export_file.updated_before(mmin.minutes.ago).each do |upload|
upload.remove_export_file!
upload.save!
logger.info(
message: 'Removed Import/Export export_file',
project_id: upload.project_id,
group_id: upload.group_id
)
end
end
def old_directories
IO.popen(directories_cmd) do |find|
find.each_line(chomp: true) do |directory|
yield directory
end
end
end
def directories_cmd
%W[find #{path} -mindepth #{DIR_DEPTH} -maxdepth #{DIR_DEPTH} -type d -not -path #{path} -mmin +#{mmin}]
end
def logger
@logger ||= Gitlab::Import::Logger.build
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe ImportExportCleanUpService, feature_category: :importers do
describe '#execute' do
let(:service) { described_class.new }
let(:tmp_import_export_folder) { 'tmp/gitlab_exports' }
before do
allow_next_instance_of(Gitlab::Import::Logger) do |logger|
allow(logger).to receive(:info)
end
end
context 'when the import/export tmp storage directory does not exist' do
it 'does not remove any archives' do
path = '/invalid/path/'
stub_repository_downloads_path(path)
expect(service).not_to receive(:clean_up_export_files)
service.execute
end
end
context 'when the import/export tmp storage directory exists' do
shared_examples 'removes old tmp files' do |subdir|
it 'removes old files and logs' do
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger)
.to receive(:info)
.with(
message: 'Removed Import/Export tmp directory',
dir_path: anything
)
end
validate_cleanup(subdir: subdir, mtime: 2.days.ago, expected: false)
end
it 'does not remove new files or logs' do
expect(Gitlab::Import::Logger).not_to receive(:new)
validate_cleanup(subdir: subdir, mtime: 2.hours.ago, expected: true)
end
end
include_examples 'removes old tmp files', '@hashed'
include_examples 'removes old tmp files', '@groups'
end
context 'with uploader exports' do
it 'removes old files and logs' do
upload = create(
:import_export_upload,
updated_at: 2.days.ago,
export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')
)
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger)
.to receive(:info)
.with(
message: 'Removed Import/Export export_file',
project_id: upload.project_id,
group_id: upload.group_id
)
end
expect { service.execute }.to change { upload.reload.export_file.file.nil? }.to(true)
expect(ImportExportUpload.where(export_file: nil)).to include(upload)
end
it 'does not remove new files or logs' do
upload = create(
:import_export_upload,
updated_at: 1.hour.ago,
export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')
)
expect(Gitlab::Import::Logger).not_to receive(:new)
expect { service.execute }.not_to change { upload.reload.export_file.file.nil? }
expect(ImportExportUpload.where.not(export_file: nil)).to include(upload)
end
end
def validate_cleanup(subdir:, mtime:, expected:)
in_directory_with_files(mtime: mtime, subdir: subdir) do |dir, files|
service.execute
files.each { |file| expect(File.exist?(file)).to eq(expected) }
expect(File.directory?(dir)).to eq(expected)
end
end
def in_directory_with_files(mtime:, subdir:)
Dir.mktmpdir do |tmpdir|
stub_repository_downloads_path(tmpdir)
hashed = Digest::SHA2.hexdigest(subdir)
subdir_path = [subdir, hashed[0..1], hashed[2..3], hashed, hashed[4..10]]
dir = File.join(tmpdir, tmp_import_export_folder, *[subdir_path])
FileUtils.mkdir_p(dir)
File.utime(mtime.to_i, mtime.to_i, dir)
files = FileUtils.touch(file_list(dir) + [dir], mtime: mtime.to_time)
yield(dir, files)
end
end
def stub_repository_downloads_path(path)
new_shared_settings = Settings.shared.merge('path' => path)
allow(Settings).to receive(:shared).and_return(new_shared_settings)
end
def file_list(dir)
Array.new(5) do |num|
File.join(dir, "random-#{num}.tar.gz")
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# Finds the correct checkbox in the passed-in markdown/html and toggles its state,
# returning the updated markdown/html.
# We don't care if the text has changed above or below the specific checkbox, as long
# as the checkbox still exists at exactly the same line number and the text is equal.
# If successful, new values are available in `updated_markdown` and `updated_markdown_html`
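#
# A minimal usage sketch (illustrative arguments only):
#
#   toggler = TaskListToggleService.new(markdown, markdown_html,
#     line_source: '* [ ] Task 1', line_number: 1, toggle_as_checked: true)
#   toggler.execute            # => truthy on success
#   toggler.updated_markdown   # markdown with '* [x] Task 1' on line 1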
class TaskListToggleService
attr_reader :updated_markdown, :updated_markdown_html
def initialize(markdown, markdown_html, line_source:, line_number:, toggle_as_checked:)
@markdown = markdown
@markdown_html = markdown_html
@line_source = line_source
@line_number = line_number
@toggle_as_checked = toggle_as_checked
@updated_markdown, @updated_markdown_html = nil
end
def execute
return false unless markdown && markdown_html
toggle_markdown && toggle_markdown_html
end
private
attr_reader :markdown, :markdown_html, :toggle_as_checked
attr_reader :line_source, :line_number
def toggle_markdown
source_lines = markdown.split("\n")
source_line_index = line_number - 1
markdown_task = source_lines[source_line_index]
# The source in the DB could be using either \n or \r\n line endings
return unless markdown_task.chomp == line_source
return unless source_checkbox = Taskable::ITEM_PATTERN.match(markdown_task)
currently_checked = TaskList::Item.new(source_checkbox[2]).complete?
# Check `toggle_as_checked` to make sure we don't accidentally replace
# any `[ ]` or `[x]` in the middle of the text
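    # e.g. toggling '* [x] note the [ ] placeholder' as checked is a no-op here, so the
    # literal '[ ]' later in the text is never touched by the substitution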
if currently_checked
markdown_task.sub!(Taskable::COMPLETE_PATTERN, '[ ]') unless toggle_as_checked
elsif toggle_as_checked
markdown_task.sub!(Taskable::INCOMPLETE_PATTERN, '[x]')
end
source_lines[source_line_index] = markdown_task
@updated_markdown = source_lines.join("\n")
end
def toggle_markdown_html
html = Nokogiri::HTML.fragment(markdown_html)
html_checkbox = get_html_checkbox(html)
return unless html_checkbox
if toggle_as_checked
html_checkbox[:checked] = 'checked'
else
html_checkbox.remove_attribute('checked')
end
@updated_markdown_html = html.to_html
end
# When using CommonMark, we should be able to use the embedded `sourcepos` attribute to
# target the exact line in the DOM.
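  # For example, with line_number = 6 the selector below becomes:
  #   .task-list-item[data-sourcepos^='6:'] input.task-list-item-checkbox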
def get_html_checkbox(html)
html.css(".task-list-item[data-sourcepos^='#{line_number}:'] input.task-list-item-checkbox").first
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe TaskListToggleService, feature_category: :team_planning do
let(:markdown) do
<<-EOT.strip_heredoc
* [ ] Task 1
* [x] Task 2
A paragraph
1. [X] Item 1
- [ ] Sub-item 1
- [ ] loose list
with an embedded paragraph
+ [ ] No-break space (U+00A0)
2) [ ] Another item
EOT
end
let(:markdown_html) do
<<-EOT.strip_heredoc
<ul data-sourcepos="1:1-3:0" class="task-list" dir="auto">
<li data-sourcepos="1:1-1:12" class="task-list-item">
<input type="checkbox" class="task-list-item-checkbox" disabled> Task 1
</li>
<li data-sourcepos="2:1-3:0" class="task-list-item">
<input type="checkbox" class="task-list-item-checkbox" disabled checked> Task 2
</li>
</ul>
<p data-sourcepos="4:1-4:11" dir="auto">A paragraph</p>
<ol data-sourcepos="6:1-8:0" class="task-list" dir="auto">
<li data-sourcepos="6:1-8:0" class="task-list-item">
<input type="checkbox" class="task-list-item-checkbox" checked="" disabled=""> Item 1
<ul data-sourcepos="7:4-8:0" class="task-list">
<li data-sourcepos="7:4-8:0" class="task-list-item">
<input type="checkbox" class="task-list-item-checkbox" disabled=""> Sub-item 1
</li>
</ul>
</li>
</ol>
<ul data-sourcepos="9:1-12:0" class="task-list" dir="auto">
<li data-sourcepos="9:1-12:0" class="task-list-item">
<p data-sourcepos="9:3-9:16"><input type="checkbox" class="task-list-item-checkbox" disabled=""> loose list</p>
<p data-sourcepos="11:3-11:28">with an embedded paragraph</p>
</li>
</ul>
<ul data-sourcepos="13:1-13:21" class="task-list" dir="auto">
<li data-sourcepos="13:1-13:21" class="task-list-item">
<input type="checkbox" class="task-list-item-checkbox" disabled=""> No-break space (U+00A0)
</li>
</ul>
<ol start="2" data-sourcepos="15:1-15:19" class="task-list" dir="auto">
<li data-sourcepos="15:1-15:19" class="task-list-item">
<input type="checkbox" class="task-list-item-checkbox" disabled> Another item
</li>
</ol>
EOT
end
it 'checks Task 1' do
toggler = described_class.new(
markdown, markdown_html,
toggle_as_checked: true,
line_source: '* [ ] Task 1',
line_number: 1
)
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[0]).to eq "* [x] Task 1\n"
expect(toggler.updated_markdown_html).to include('disabled checked> Task 1')
end
it 'unchecks Item 1' do
toggler = described_class.new(
markdown, markdown_html,
toggle_as_checked: false,
line_source: '1. [X] Item 1',
line_number: 6
)
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[5]).to eq "1. [ ] Item 1\n"
expect(toggler.updated_markdown_html).to include('disabled> Item 1')
end
it 'checks task in loose list' do
toggler = described_class.new(
markdown, markdown_html,
toggle_as_checked: true,
line_source: '- [ ] loose list',
line_number: 9
)
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[8]).to eq "- [x] loose list\n"
expect(toggler.updated_markdown_html).to include('disabled checked> loose list')
end
it 'checks task with no-break space' do
toggler = described_class.new(
markdown, markdown_html,
toggle_as_checked: true,
line_source: '+ [ ] No-break space (U+00A0)',
line_number: 13
)
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[12]).to eq "+ [x] No-break space (U+00A0)\n"
expect(toggler.updated_markdown_html).to include('disabled checked> No-break space (U+00A0)')
end
it 'checks Another item' do
toggler = described_class.new(
markdown, markdown_html,
toggle_as_checked: true,
line_source: '2) [ ] Another item',
line_number: 15
)
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[14]).to eq "2) [x] Another item"
expect(toggler.updated_markdown_html).to include('disabled checked> Another item')
end
it 'returns false if line_source does not match the text' do
toggler = described_class.new(
markdown, markdown_html,
toggle_as_checked: false,
line_source: '* [x] Task Added',
line_number: 2
)
expect(toggler.execute).to be_falsey
end
it 'tolerates \r\n line endings' do
rn_markdown = markdown.gsub("\n", "\r\n")
toggler = described_class.new(
rn_markdown,
markdown_html,
toggle_as_checked: true,
line_source: '* [ ] Task 1',
line_number: 1
)
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[0]).to eq "* [x] Task 1\r\n"
expect(toggler.updated_markdown_html).to include('disabled checked> Task 1')
end
it 'returns false if markdown is nil' do
toggler = described_class.new(
nil,
markdown_html,
toggle_as_checked: false,
line_source: '* [x] Task Added',
line_number: 2
)
expect(toggler.execute).to be_falsey
end
it 'returns false if markdown_html is nil' do
toggler = described_class.new(
markdown,
nil,
toggle_as_checked: false,
line_source: '* [x] Task Added',
line_number: 2
)
expect(toggler.execute).to be_falsey
end
it 'properly handles tasks in a blockquote' do
markdown =
<<-EOT.strip_heredoc
> > * [ ] Task 1
> * [x] Task 2
EOT
markdown_html = parse_markdown(markdown)
toggler = described_class.new(
markdown,
markdown_html,
toggle_as_checked: true,
line_source: '> > * [ ] Task 1',
line_number: 1
)
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[0]).to eq "> > * [x] Task 1\n"
expect(toggler.updated_markdown_html).to include('disabled checked> Task 1')
end
it 'properly handles a GitLab blockquote' do
markdown =
<<-EOT.strip_heredoc
>>>
gitlab blockquote
>>>
* [ ] Task 1
* [x] Task 2
EOT
markdown_html = parse_markdown(markdown)
toggler = described_class.new(
markdown,
markdown_html,
toggle_as_checked: true,
line_source: '* [ ] Task 1',
line_number: 5
)
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[4]).to eq "* [x] Task 1\n"
expect(toggler.updated_markdown_html).to include('disabled checked> Task 1')
end
context 'when clicking an embedded subtask' do
it 'properly handles it inside an unordered list' do
markdown =
<<-EOT.strip_heredoc
- - [ ] Task 1
- [x] Task 2
EOT
markdown_html = parse_markdown(markdown)
toggler = described_class.new(
markdown,
markdown_html,
toggle_as_checked: true,
line_source: '- - [ ] Task 1',
line_number: 1
)
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[0]).to eq "- - [x] Task 1\n"
expect(toggler.updated_markdown_html).to include('disabled checked> Task 1')
end
it 'properly handles it inside an ordered list' do
markdown =
<<-EOT.strip_heredoc
1. - [ ] Task 1
- [x] Task 2
EOT
markdown_html = parse_markdown(markdown)
toggler = described_class.new(
markdown,
markdown_html,
toggle_as_checked: true,
line_source: '1. - [ ] Task 1',
line_number: 1
)
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[0]).to eq "1. - [x] Task 1\n"
expect(toggler.updated_markdown_html).to include('disabled checked> Task 1')
end
end
def parse_markdown(markdown)
Banzai::Pipeline::FullPipeline.call(markdown, project: nil)[:output].to_html
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class UpdateContainerRegistryInfoService
def execute
registry_config = Gitlab.config.registry
return unless registry_config.enabled && registry_config.api_url.presence
# registry_info will query the /v2 route of the registry API. This route
# requires authentication, but not authorization (the response has no body,
# only headers that show the version of the registry). There might be no
# associated user when running this (e.g. from a rake task or a cron job),
# so we need to generate a valid JWT token with no access permissions to
# authenticate as a trusted client.
token = Auth::ContainerRegistryAuthenticationService.access_token({})
client = ContainerRegistry::Client.new(registry_config.api_url, token: token)
info = client.registry_info
gitlab_api_client = ContainerRegistry::GitlabApiClient.new(registry_config.api_url, token: token)
if gitlab_api_client.supports_gitlab_api?
info[:features] ||= []
info[:features] << ContainerRegistry::GitlabApiClient::REGISTRY_GITLAB_V1_API_FEATURE
end
Gitlab::CurrentSettings.update!(
container_registry_vendor: info[:vendor] || '',
container_registry_version: info[:version] || '',
container_registry_features: info[:features] || [],
container_registry_db_enabled: info[:db_enabled] || false
)
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe UpdateContainerRegistryInfoService, feature_category: :container_registry do
let_it_be(:application_settings) { Gitlab::CurrentSettings }
let_it_be(:api_url) { 'http://registry.gitlab' }
describe '#execute' do
before do
stub_access_token
stub_container_registry_config(enabled: true, api_url: api_url)
end
subject { described_class.new.execute }
shared_examples 'invalid config' do
it 'does not update the application settings' do
expect(application_settings).not_to receive(:update!)
subject
end
it 'does not raise an error' do
expect { subject }.not_to raise_error
end
end
context 'when container registry is disabled' do
before do
stub_container_registry_config(enabled: false)
end
it_behaves_like 'invalid config'
end
context 'when container registry api_url is blank' do
before do
stub_container_registry_config(api_url: '')
end
it_behaves_like 'invalid config'
end
context 'when creating a registry client instance' do
let(:token) { 'foo' }
let(:client) { ContainerRegistry::Client.new(api_url, token: token) }
before do
stub_registry_info({})
stub_supports_gitlab_api(false)
end
it 'uses a token with no access permissions' do
expect(Auth::ContainerRegistryAuthenticationService)
.to receive(:access_token).with({}).and_return(token)
expect(ContainerRegistry::Client)
.to receive(:new).with(api_url, token: token).and_return(client)
subject
end
end
    context 'when unable to detect the container registry type' do
it 'sets the application settings to their defaults' do
stub_registry_info({})
stub_supports_gitlab_api(false)
subject
application_settings.reload
expect(application_settings.container_registry_vendor).to be_blank
expect(application_settings.container_registry_version).to be_blank
expect(application_settings.container_registry_features).to eq([])
expect(application_settings.container_registry_db_enabled).to be_falsey
end
end
context 'when able to detect the container registry type' do
context 'when using the GitLab container registry' do
it 'updates application settings accordingly' do
stub_registry_info(vendor: 'gitlab', version: '2.9.1-gitlab', features: %w[a b c], db_enabled: true)
stub_supports_gitlab_api(true)
subject
application_settings.reload
expect(application_settings.container_registry_vendor).to eq('gitlab')
expect(application_settings.container_registry_version).to eq('2.9.1-gitlab')
expect(application_settings.container_registry_features)
.to match_array(%W[a b c #{ContainerRegistry::GitlabApiClient::REGISTRY_GITLAB_V1_API_FEATURE}])
expect(application_settings.container_registry_db_enabled).to be_truthy
end
end
context 'when using a third-party container registry' do
it 'updates application settings accordingly' do
stub_registry_info(vendor: 'other', version: nil, features: nil, db_enabled: false)
stub_supports_gitlab_api(false)
subject
application_settings.reload
expect(application_settings.container_registry_vendor).to eq('other')
expect(application_settings.container_registry_version).to be_blank
expect(application_settings.container_registry_features).to eq([])
expect(application_settings.container_registry_db_enabled).to be_falsey
end
end
end
end
def stub_access_token
allow(Auth::ContainerRegistryAuthenticationService)
.to receive(:access_token).with({}).and_return('foo')
end
def stub_registry_info(output)
allow_next_instance_of(ContainerRegistry::Client) do |client|
allow(client).to receive(:registry_info).and_return(output)
end
end
def stub_supports_gitlab_api(output)
allow_next_instance_of(ContainerRegistry::GitlabApiClient) do |client|
allow(client).to receive(:supports_gitlab_api?).and_return(output)
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# Base class, scoped by container (project or group).
#
# New or existing services which only require a project or group container
# should subclass BaseProjectService or BaseGroupService.
#
# If you require a different but specific, non-polymorphic container
# consider creating a new subclass, and update the related comment at
# the top of the original BaseService.
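#
# A hypothetical subclass might look like this (sketch only; names are illustrative):
#
#   class Widgets::CreateService < BaseContainerService
#     def execute
#       return error('must be scoped to a group') unless group_container?
#
#       # ... create the widget under `group` ...
#     end
#   end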
class BaseContainerService
include BaseServiceUtility
include ::Gitlab::Utils::StrongMemoize
attr_accessor :project, :group
attr_reader :container, :current_user, :params
def initialize(container:, current_user: nil, params: {})
@container = container
@current_user = current_user
@params = params.dup
handle_container_type(container)
end
def project_container?
container.is_a?(::Project)
end
def group_container?
container.is_a?(::Group)
end
def namespace_container?
container.is_a?(::Namespace)
end
def project_group
project&.group
end
strong_memoize_attr :project_group
private
def handle_container_type(container)
case container
when Project
@project = container
when Group
@group = container
when Namespaces::ProjectNamespace
@project = container.project
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe BaseContainerService, feature_category: :container_registry do
let(:project) { Project.new }
let(:user) { User.new }
describe '#initialize' do
it 'accepts container and current_user' do
subject = described_class.new(container: project, current_user: user)
expect(subject.container).to eq(project)
expect(subject.current_user).to eq(user)
end
it 'treats current_user as optional' do
subject = described_class.new(container: project)
expect(subject.current_user).to be_nil
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class GravatarService
def execute(email, size = nil, scale = 2, username: nil)
return if Gitlab::FIPS.enabled?
return unless Gitlab::CurrentSettings.gravatar_enabled?
identifier = email.presence || username.presence
return unless identifier
hash = Digest::MD5.hexdigest(identifier.strip.downcase)
size = Groups::GroupMembersHelper::AVATAR_SIZE unless size && size > 0
sprintf gravatar_url,
hash: hash,
size: size * scale,
email: ERB::Util.url_encode(email&.strip || ''),
username: ERB::Util.url_encode(username&.strip || '')
end
def gitlab_config
Gitlab.config.gitlab
end
def gravatar_config
Gitlab.config.gravatar
end
def gravatar_url
if gitlab_config.https
gravatar_config.ssl_url
else
gravatar_config.plain_url
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe GravatarService, feature_category: :user_profile do
describe '#execute' do
let(:url) { 'http://example.com/avatar?hash=%{hash}&size=%{size}&email=%{email}&username=%{username}' }
before do
allow(Gitlab.config.gravatar).to receive(:plain_url).and_return(url)
end
it 'replaces the placeholders' do
avatar_url = described_class.new.execute('[email protected]', 100, 2, username: 'user')
expect(avatar_url).to include("hash=#{Digest::MD5.hexdigest('[email protected]')}")
expect(avatar_url).to include("size=200")
expect(avatar_url).to include("email=user%40example.com")
expect(avatar_url).to include("username=user")
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class AccessTokenValidationService
# Results:
VALID = :valid
EXPIRED = :expired
REVOKED = :revoked
INSUFFICIENT_SCOPE = :insufficient_scope
IMPERSONATION_DISABLED = :impersonation_disabled
attr_reader :token, :request
def initialize(token, request: nil)
@token = token
@request = request
end
def validate(scopes: [])
if token.expired?
EXPIRED
elsif token.revoked?
REVOKED
elsif !self.include_any_scope?(scopes)
INSUFFICIENT_SCOPE
elsif token.respond_to?(:impersonation) &&
token.impersonation &&
!Gitlab.config.gitlab.impersonation_enabled
IMPERSONATION_DISABLED
else
VALID
end
end
# True if the token's scope contains any of the passed scopes.
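  # For example, a token scoped to [:api] satisfies required_scopes [:api, :read_repository],
  # while a token scoped only to [:read_user] does not.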
def include_any_scope?(required_scopes)
if required_scopes.blank?
true
else
# We're comparing each required_scope against all token scopes, which would
# take quadratic time. This consideration is irrelevant here because of the
# small number of records involved.
# https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/12300/#note_33689006
token_scopes = token.scopes.map(&:to_sym)
required_scopes.any? do |scope|
scope = API::Scope.new(scope) unless scope.is_a?(API::Scope)
scope.sufficient?(token_scopes, request)
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe AccessTokenValidationService, feature_category: :system_access do
describe ".include_any_scope?" do
let(:request) { double("request") }
it "returns true if the required scope is present in the token's scopes" do
token = double("token", scopes: [:api, :read_user])
scopes = [:api]
expect(described_class.new(token, request: request).include_any_scope?(scopes)).to be(true)
end
it "returns true if more than one of the required scopes is present in the token's scopes" do
token = double("token", scopes: [:api, :read_user, :other_scope])
scopes = [:api, :other_scope]
expect(described_class.new(token, request: request).include_any_scope?(scopes)).to be(true)
end
it "returns true if the list of required scopes is an exact match for the token's scopes" do
token = double("token", scopes: [:api, :read_user, :other_scope])
scopes = [:api, :read_user, :other_scope]
expect(described_class.new(token, request: request).include_any_scope?(scopes)).to be(true)
end
it "returns true if the list of required scopes contains all of the token's scopes, in addition to others" do
token = double("token", scopes: [:api, :read_user])
scopes = [:api, :read_user, :other_scope]
expect(described_class.new(token, request: request).include_any_scope?(scopes)).to be(true)
end
it 'returns true if the list of required scopes is blank' do
token = double("token", scopes: [])
scopes = []
expect(described_class.new(token, request: request).include_any_scope?(scopes)).to be(true)
end
it "returns false if there are no scopes in common between the required scopes and the token scopes" do
token = double("token", scopes: [:api, :read_user])
scopes = [:other_scope]
expect(described_class.new(token, request: request).include_any_scope?(scopes)).to be(false)
end
context "conditions" do
it "ignores any scopes whose `if` condition returns false" do
token = double("token", scopes: [:api, :read_user])
scopes = [API::Scope.new(:api, if: ->(_) { false })]
expect(described_class.new(token, request: request).include_any_scope?(scopes)).to be(false)
end
it "does not ignore scopes whose `if` condition is not set" do
token = double("token", scopes: [:api, :read_user])
scopes = [API::Scope.new(:api, if: ->(_) { false }), :read_user]
expect(described_class.new(token, request: request).include_any_scope?(scopes)).to be(true)
end
it "does not ignore scopes whose `if` condition returns true" do
token = double("token", scopes: [:api, :read_user])
scopes = [API::Scope.new(:api, if: ->(_) { true }), API::Scope.new(:read_user, if: ->(_) { false })]
expect(described_class.new(token, request: request).include_any_scope?(scopes)).to be(true)
end
end
end
end
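`#validate` itself is not exercised above. A sketch in the same double-based style; the specific doubles and expectations are assumptions, not part of the original spec:
```ruby
# Sketch of #validate examples, mirroring the double style used above.
describe "#validate" do
  let(:request) { double("request") }

  it "returns EXPIRED for an expired token" do
    token = double("token", expired?: true)

    expect(described_class.new(token, request: request).validate).to eq(described_class::EXPIRED)
  end

  it "returns REVOKED for a revoked, non-expired token" do
    token = double("token", expired?: false, revoked?: true)

    expect(described_class.new(token, request: request).validate).to eq(described_class::REVOKED)
  end

  it "returns VALID for an active token with a sufficient scope" do
    token = double("token", expired?: false, revoked?: false, scopes: [:api])

    expect(described_class.new(token, request: request).validate(scopes: [:api])).to eq(described_class::VALID)
  end
end
```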
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class AuditEventService
include AuditEventSaveType
# Instantiates a new service
#
# @param [User, token String] author the entity who authors the change
# @param [User, Project, Group] entity the scope which audit event belongs to
# This param is also used to determine the visibility of the audit event.
# - Project: events are visible at Project and Instance level
# - Group: events are visible at Group and Instance level
# - User: events are visible at Instance level
# @param [Hash] details extra data of audit event
# @param [Symbol] save_type the type to save the event
# Can be selected from the following, :database, :stream, :database_and_stream .
# @param [DateTime] created_at the time the action occurred
#
# @return [AuditEventService]
def initialize(author, entity, details = {}, save_type = :database_and_stream, created_at = DateTime.current)
@author = build_author(author)
@entity = entity
@details = details
@ip_address = resolve_ip_address(@author)
@save_type = save_type
@created_at = created_at
end
# Builds the @details attribute for authentication
#
# This uses the @author as the target object being audited
#
# @return [AuditEventService]
def for_authentication
mark_as_authentication_event!
@details = {
with: @details[:with],
target_id: @author.id,
target_type: 'User',
target_details: @author.name
}
self
end
# Writes event to a file and creates an event record in DB
#
# @return [AuditEvent] persisted if saves and non-persisted if fails
def security_event
log_security_event_to_file
log_authentication_event_to_database
log_security_event_to_database
end
# Writes event to a file
def log_security_event_to_file
file_logger.info(base_payload.merge(formatted_details))
end
private
attr_reader :ip_address
def build_author(author)
case author
when User
author.impersonated? ? Gitlab::Audit::ImpersonatedAuthor.new(author) : author
else
Gitlab::Audit::UnauthenticatedAuthor.new(name: author)
end
end
def resolve_ip_address(author)
Gitlab::RequestContext.instance.client_ip ||
author.current_sign_in_ip
end
def base_payload
{
author_id: @author.id,
author_name: @author.name,
entity_id: @entity.id,
entity_type: @entity.class.name,
created_at: @created_at
}
end
def authentication_event_payload
{
# @author can be a User or various Gitlab::Audit authors.
# Only capture real users for successful authentication events.
user: author_if_user,
user_name: @author.name,
ip_address: ip_address,
result: AuthenticationEvent.results[:success],
provider: @details[:with]
}
end
def author_if_user
@author if @author.is_a?(User)
end
def file_logger
@file_logger ||= Gitlab::AuditJsonLogger.build
end
def formatted_details
@details.merge(@details.slice(:from, :to).transform_values(&:to_s))
end
def mark_as_authentication_event!
@authentication_event = true
end
def authentication_event?
@authentication_event
end
def log_security_event_to_database
return if Gitlab::Database.read_only?
event = build_event
save_or_track event
event
end
def build_event
AuditEvent.new(base_payload.merge(details: @details))
end
def stream_event_to_external_destinations(_event)
# Defined in EE
end
def log_authentication_event_to_database
return unless Gitlab::Database.read_write? && authentication_event?
event = AuthenticationEvent.new(authentication_event_payload)
save_or_track event
event
end
def save_or_track(event)
event.save! if should_save_database?(@save_type)
stream_event_to_external_destinations(event) if should_save_stream?(@save_type)
rescue StandardError => e
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e, audit_event_type: event.class.to_s)
end
end
AuditEventService.prepend_mod_with('AuditEventService')
```
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe AuditEventService, :with_license, feature_category: :audit_events do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user, :with_sign_ins) }
let_it_be(:project_member) { create(:project_member, user: user) }
let(:service) { described_class.new(user, project, { action: :destroy }) }
let(:logger) { instance_double(Gitlab::AuditJsonLogger) }
describe '#security_event' do
it 'creates an event and logs to a file' do
expect(service).to receive(:file_logger).and_return(logger)
expect(logger).to receive(:info).with({ author_id: user.id,
author_name: user.name,
entity_id: project.id,
entity_type: "Project",
action: :destroy,
created_at: anything })
expect { service.security_event }.to change(AuditEvent, :count).by(1)
end
it 'formats from and to fields' do
service = described_class.new(
user, project,
{
from: true,
to: false,
action: :create,
target_id: 1
})
expect(service).to receive(:file_logger).and_return(logger)
expect(logger).to receive(:info).with({ author_id: user.id,
author_name: user.name,
entity_type: 'Project',
entity_id: project.id,
from: 'true',
to: 'false',
action: :create,
target_id: 1,
created_at: anything })
expect { service.security_event }.to change(AuditEvent, :count).by(1)
details = AuditEvent.last.details
expect(details[:from]).to be true
expect(details[:to]).to be false
expect(details[:action]).to eq(:create)
expect(details[:target_id]).to eq(1)
end
context 'when defining created_at manually' do
let(:service) { described_class.new(user, project, { action: :destroy }, :database, 3.weeks.ago) }
it 'is overridden successfully' do
freeze_time do
expect(service).to receive(:file_logger).and_return(logger)
expect(logger).to receive(:info).with({ author_id: user.id,
author_name: user.name,
entity_id: project.id,
entity_type: "Project",
action: :destroy,
created_at: 3.weeks.ago })
expect { service.security_event }.to change(AuditEvent, :count).by(1)
expect(AuditEvent.last.created_at).to eq(3.weeks.ago)
end
end
end
context 'authentication event' do
let(:audit_service) { described_class.new(user, user, with: 'standard') }
it 'creates an authentication event' do
expect(AuthenticationEvent).to receive(:new).with(
{
user: user,
user_name: user.name,
ip_address: user.current_sign_in_ip,
result: AuthenticationEvent.results[:success],
provider: 'standard'
}
).and_call_original
audit_service.for_authentication.security_event
end
it 'tracks exceptions when the event cannot be created' do
allow_next_instance_of(AuditEvent) do |event|
allow(event).to receive(:valid?).and_return(false)
end
expect(Gitlab::ErrorTracking).to(
receive(:track_and_raise_for_dev_exception)
)
audit_service.for_authentication.security_event
end
context 'with IP address', :request_store do
using RSpec::Parameterized::TableSyntax
where(:from_context, :from_author_sign_in, :output) do
'192.168.0.2' | '192.168.0.3' | '192.168.0.2'
nil | '192.168.0.3' | '192.168.0.3'
end
with_them do
let(:user) { create(:user, current_sign_in_ip: from_author_sign_in) }
let(:audit_service) { described_class.new(user, user, with: 'standard') }
before do
allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(from_context)
end
specify do
expect(AuthenticationEvent).to receive(:new).with(hash_including(ip_address: output)).and_call_original
audit_service.for_authentication.security_event
end
end
end
end
end
describe '#log_security_event_to_file' do
it 'logs security event to file' do
expect(service).to receive(:file_logger).and_return(logger)
expect(logger).to receive(:info).with({ author_id: user.id,
author_name: user.name,
entity_type: 'Project',
entity_id: project.id,
action: :destroy,
created_at: anything })
service.log_security_event_to_file
end
end
end
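One branch not covered above is the read-only database guard. A sketch that could live inside the `#security_event` block, reusing its `service` and `logger` doubles:
```ruby
# Sketch: no AuditEvent row should be written on a read-only replica.
it 'does not persist the event when the database is read-only' do
  allow(Gitlab::Database).to receive(:read_only?).and_return(true)
  allow(service).to receive(:file_logger).and_return(logger)
  allow(logger).to receive(:info)

  expect { service.security_event }.not_to change(AuditEvent, :count)
end
```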
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# SystemNoteService
#
# Used for creating system notes (e.g., when a user references a merge request
# from an issue, an issue's assignee changes, an issue is closed, etc.)
module SystemNoteService
extend self
# Called when commits are added to a merge request
#
# noteable - Noteable object
# project - Project owning noteable
# author - User performing the change
# new_commits - Array of Commits added since last push
# existing_commits - Array of Commits added in a previous push
# oldrev - Optional String SHA of a previous Commit
#
# Returns the created Note object
def add_commits(noteable, project, author, new_commits, existing_commits = [], oldrev = nil)
::SystemNotes::CommitService.new(noteable: noteable, project: project, author: author).add_commits(new_commits, existing_commits, oldrev)
end
# Called when a commit was tagged
#
# noteable - Noteable object
# project - Project owning noteable
# author - User performing the tag
# tag_name - The created tag name
#
# Returns the created Note object
def tag_commit(noteable, project, author, tag_name)
::SystemNotes::CommitService.new(noteable: noteable, project: project, author: author).tag_commit(tag_name)
end
def change_assignee(noteable, project, author, assignee)
::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).change_assignee(assignee)
end
def change_issuable_assignees(issuable, project, author, old_assignees)
::SystemNotes::IssuablesService.new(noteable: issuable, project: project, author: author).change_issuable_assignees(old_assignees)
end
def change_issuable_reviewers(issuable, project, author, old_reviewers)
::SystemNotes::IssuablesService.new(noteable: issuable, project: project, author: author).change_issuable_reviewers(old_reviewers)
end
def change_issuable_contacts(issuable, project, author, added_count, removed_count)
::SystemNotes::IssuablesService.new(noteable: issuable, project: project, author: author).change_issuable_contacts(added_count, removed_count)
end
def relate_issuable(noteable, noteable_ref, user)
::SystemNotes::IssuablesService.new(noteable: noteable, project: noteable.project, author: user).relate_issuable(noteable_ref)
end
def unrelate_issuable(noteable, noteable_ref, user)
::SystemNotes::IssuablesService.new(noteable: noteable, project: noteable.project, author: user).unrelate_issuable(noteable_ref)
end
# Called when the due_date or start_date of a Noteable is changed
#
# noteable - Noteable object
# project - Project owning noteable
# author - User performing the change
# due_date - Due date being assigned, or nil
#
# Example Note text:
#
# "removed due date"
#
# "changed due date to September 20, 2018 and changed start date to September 25, 2018"
#
# Returns the created Note object
def change_start_date_or_due_date(noteable, project, author, changed_dates)
::SystemNotes::TimeTrackingService.new(
noteable: noteable,
project: project,
author: author
).change_start_date_or_due_date(changed_dates)
end
# Called when the estimated time of a Noteable is changed
#
# noteable - Noteable object
# project - Project owning noteable
# author - User performing the change
# time_estimate - Estimated time
#
# Example Note text:
#
# "removed time estimate"
#
# "changed time estimate to 3d 5h"
#
# Returns the created Note object
def change_time_estimate(noteable, project, author)
::SystemNotes::TimeTrackingService.new(noteable: noteable, project: project, author: author).change_time_estimate
end
# Called when the spent time of a Noteable is changed
#
# noteable - Noteable object
# project - Project owning noteable
# author - User performing the change
# time_spent - Spent time
#
# Example Note text:
#
# "removed time spent"
#
# "added 2h 30m of time spent"
#
# Returns the created Note object
def change_time_spent(noteable, project, author)
::SystemNotes::TimeTrackingService.new(noteable: noteable, project: project, author: author).change_time_spent
end
# Called when a timelog is added to an issuable
#
# issuable - Issuable object (Issue, WorkItem or MergeRequest)
# project - Project owning the issuable
# author - User performing the change
# timelog - Created timelog
#
# Example Note text:
#
# "subtracted 1h 15m of time spent"
#
# "added 2h 30m of time spent"
#
# Returns the created Note object
def created_timelog(issuable, project, author, timelog)
::SystemNotes::TimeTrackingService.new(noteable: issuable, project: project, author: author).created_timelog(timelog)
end
# Called when a timelog is removed from a Noteable
#
# noteable - Noteable object
# project - Project owning the noteable
# author - User performing the change
# timelog - The removed timelog
#
# Example Note text:
# "deleted 2h 30m of time spent from 22-03-2022"
#
# Returns the created Note object
def remove_timelog(noteable, project, author, timelog)
::SystemNotes::TimeTrackingService.new(noteable: noteable, project: project, author: author).remove_timelog(timelog)
end
def close_after_error_tracking_resolve(issue, project, author)
::SystemNotes::IssuablesService.new(noteable: issue, project: project, author: author).close_after_error_tracking_resolve
end
def change_status(noteable, project, author, status, source = nil)
::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).change_status(status, source)
end
# Called when 'merge when pipeline succeeds' is executed
def merge_when_pipeline_succeeds(noteable, project, author, sha)
::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).merge_when_pipeline_succeeds(sha)
end
# Called when 'merge when pipeline succeeds' is canceled
def cancel_merge_when_pipeline_succeeds(noteable, project, author)
::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).cancel_merge_when_pipeline_succeeds
end
# Called when 'merge when pipeline succeeds' is aborted
def abort_merge_when_pipeline_succeeds(noteable, project, author, reason)
::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).abort_merge_when_pipeline_succeeds(reason)
end
def handle_merge_request_draft(noteable, project, author)
::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).handle_merge_request_draft
end
def add_merge_request_draft_from_commit(noteable, project, author, commit)
::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).add_merge_request_draft_from_commit(commit)
end
def resolve_all_discussions(merge_request, project, author)
::SystemNotes::MergeRequestsService.new(noteable: merge_request, project: project, author: author).resolve_all_discussions
end
def discussion_continued_in_issue(discussion, project, author, issue)
::SystemNotes::MergeRequestsService.new(project: project, author: author).discussion_continued_in_issue(discussion, issue)
end
def diff_discussion_outdated(discussion, project, author, change_position)
::SystemNotes::MergeRequestsService.new(project: project, author: author).diff_discussion_outdated(discussion, change_position)
end
def change_title(noteable, project, author, old_title)
::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).change_title(old_title)
end
def change_description(noteable, project, author)
::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).change_description
end
def change_issue_confidentiality(issue, project, author)
::SystemNotes::IssuablesService.new(noteable: issue, project: project, author: author).change_issue_confidentiality
end
# Called when a branch in Noteable is changed
#
# noteable - Noteable object
# project - Project owning noteable
# author - User performing the change
# branch_type - 'source' or 'target'
# event_type - the source of event: 'update' or 'delete'
# old_branch - old branch name
# new_branch - new branch name
#
# Example Note text is based on event_type:
#
# update: "changed target branch from `Old` to `New`"
# delete: "deleted the `Old` branch. This merge request now targets the `New` branch"
#
# Returns the created Note object
def change_branch(noteable, project, author, branch_type, event_type, old_branch, new_branch)
::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author)
.change_branch(branch_type, event_type, old_branch, new_branch)
end
# Called when a branch in Noteable is added or deleted
#
# noteable - Noteable object
# project - Project owning noteable
# author - User performing the change
# branch_type - :source or :target
# branch - branch name
# presence - :add or :delete
#
# Example Note text:
#
# "restored target branch `feature`"
#
# Returns the created Note object
def change_branch_presence(noteable, project, author, branch_type, branch, presence)
::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).change_branch_presence(branch_type, branch, presence)
end
# Called when a branch is created from the 'new branch' button on an issue
# Example note text:
#
# "created branch `201-issue-branch-button`"
def new_issue_branch(issue, project, author, branch, branch_project: nil)
::SystemNotes::MergeRequestsService.new(noteable: issue, project: project, author: author).new_issue_branch(branch, branch_project: branch_project)
end
def new_merge_request(issue, project, author, merge_request)
::SystemNotes::MergeRequestsService.new(noteable: issue, project: project, author: author).new_merge_request(merge_request)
end
def cross_reference(mentioned, mentioned_in, author)
::SystemNotes::IssuablesService.new(noteable: mentioned, author: author).cross_reference(mentioned_in)
end
def cross_reference_exists?(mentioned, mentioned_in)
::SystemNotes::IssuablesService.new(noteable: mentioned).cross_reference_exists?(mentioned_in)
end
def change_task_status(noteable, project, author, new_task)
::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).change_task_status(new_task)
end
def noteable_moved(noteable, project, noteable_ref, author, direction:)
::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).noteable_moved(noteable_ref, direction)
end
def noteable_cloned(noteable, project, noteable_ref, author, direction:, created_at: nil)
::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).noteable_cloned(noteable_ref, direction, created_at: created_at)
end
def mark_duplicate_issue(noteable, project, author, canonical_issue)
::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).mark_duplicate_issue(canonical_issue)
end
def mark_canonical_issue_of_duplicate(noteable, project, author, duplicate_issue)
::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).mark_canonical_issue_of_duplicate(duplicate_issue)
end
def add_email_participants(noteable, project, author, body)
::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).add_email_participants(body)
end
def discussion_lock(issuable, author)
::SystemNotes::IssuablesService.new(noteable: issuable, project: issuable.project, author: author).discussion_lock
end
def cross_reference_disallowed?(mentioned, mentioned_in)
::SystemNotes::IssuablesService.new(noteable: mentioned).cross_reference_disallowed?(mentioned_in)
end
def relate_work_item(noteable, work_item, user)
::SystemNotes::IssuablesService
.new(noteable: noteable, project: noteable.project, author: user)
.hierarchy_changed(work_item, 'relate')
end
def unrelate_work_item(noteable, work_item, user)
::SystemNotes::IssuablesService
.new(noteable: noteable, project: noteable.project, author: user)
.hierarchy_changed(work_item, 'unrelate')
end
def zoom_link_added(issue, project, author)
::SystemNotes::ZoomService.new(noteable: issue, project: project, author: author).zoom_link_added
end
def zoom_link_removed(issue, project, author)
::SystemNotes::ZoomService.new(noteable: issue, project: project, author: author).zoom_link_removed
end
def auto_resolve_prometheus_alert(noteable, project, author)
::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).auto_resolve_prometheus_alert
end
# Parameters:
# - version [DesignManagement::Version]
#
# Example Note text:
#
# "added [1 designs](link-to-version)"
# "changed [2 designs](link-to-version)"
#
# Returns [Array<Note>]: the created Note objects
def design_version_added(version)
::SystemNotes::DesignManagementService.new(noteable: version.issue, project: version.issue.project, author: version.author).design_version_added(version)
end
# Called when a new discussion is created on a design
#
# discussion_note - DiscussionNote
#
# Example Note text:
#
# "started a discussion on screen.png"
#
# Returns the created Note object
def design_discussion_added(discussion_note)
design = discussion_note.noteable
::SystemNotes::DesignManagementService.new(noteable: design.issue, project: design.project, author: discussion_note.author).design_discussion_added(discussion_note)
end
# Called when the merge request is approved by user
#
# noteable - Noteable object
# user - User performing approve
#
# Example Note text:
#
# "approved this merge request"
#
# Returns the created Note object
def approve_mr(noteable, user)
merge_requests_service(noteable, noteable.project, user).approve_mr
end
def unapprove_mr(noteable, user)
merge_requests_service(noteable, noteable.project, user).unapprove_mr
end
def change_alert_status(alert, author, reason = nil)
::SystemNotes::AlertManagementService.new(noteable: alert, project: alert.project, author: author).change_alert_status(reason)
end
def new_alert_issue(alert, issue, author)
::SystemNotes::AlertManagementService.new(noteable: alert, project: alert.project, author: author).new_alert_issue(issue)
end
def create_new_alert(alert, monitoring_tool)
::SystemNotes::AlertManagementService.new(noteable: alert, project: alert.project).create_new_alert(monitoring_tool)
end
def change_incident_severity(incident, author)
::SystemNotes::IncidentService.new(noteable: incident, project: incident.project, author: author).change_incident_severity
end
def change_incident_status(incident, author, reason = nil)
::SystemNotes::IncidentService.new(noteable: incident, project: incident.project, author: author).change_incident_status(reason)
end
def log_resolving_alert(alert, monitoring_tool)
::SystemNotes::AlertManagementService.new(noteable: alert, project: alert.project).log_resolving_alert(monitoring_tool)
end
def change_issue_type(issue, author, previous_type)
::SystemNotes::IssuablesService.new(noteable: issue, project: issue.project, author: author).change_issue_type(previous_type)
end
def add_timeline_event(timeline_event)
incidents_service(timeline_event.incident).add_timeline_event(timeline_event)
end
def edit_timeline_event(timeline_event, author, was_changed:)
incidents_service(timeline_event.incident).edit_timeline_event(timeline_event, author, was_changed: was_changed)
end
def delete_timeline_event(noteable, author)
incidents_service(noteable).delete_timeline_event(author)
end
private
def merge_requests_service(noteable, project, author)
::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author)
end
def incidents_service(incident)
::SystemNotes::IncidentsService.new(noteable: incident)
end
end
SystemNoteService.prepend_mod_with('SystemNoteService')
```
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe SystemNoteService, feature_category: :shared do
include Gitlab::Routing
include RepoHelpers
include AssetsHelpers
include DesignManagementTestHelpers
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository, group: group) }
let_it_be(:author) { create(:user) }
let(:noteable) { create(:issue, project: project) }
let(:issue) { noteable }
describe '.add_commits' do
let(:new_commits) { double }
let(:old_commits) { double }
let(:oldrev) { double }
it 'calls CommitService' do
expect_next_instance_of(::SystemNotes::CommitService) do |service|
expect(service).to receive(:add_commits).with(new_commits, old_commits, oldrev)
end
described_class.add_commits(noteable, project, author, new_commits, old_commits, oldrev)
end
end
describe '.tag_commit' do
let(:tag_name) { double }
it 'calls CommitService' do
expect_next_instance_of(::SystemNotes::CommitService) do |service|
expect(service).to receive(:tag_commit).with(tag_name)
end
described_class.tag_commit(noteable, project, author, tag_name)
end
end
describe '.change_assignee' do
let(:assignee) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:change_assignee).with(assignee)
end
described_class.change_assignee(noteable, project, author, assignee)
end
end
describe '.change_issuable_assignees' do
let(:assignees) { [double, double] }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:change_issuable_assignees).with(assignees)
end
described_class.change_issuable_assignees(noteable, project, author, assignees)
end
end
describe '.change_issuable_reviewers' do
let(:reviewers) { [double, double] }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:change_issuable_reviewers).with(reviewers)
end
described_class.change_issuable_reviewers(noteable, project, author, reviewers)
end
end
describe '.change_issuable_contacts' do
let(:added_count) { 5 }
let(:removed_count) { 3 }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:change_issuable_contacts).with(added_count, removed_count)
end
described_class.change_issuable_contacts(noteable, project, author, added_count, removed_count)
end
end
describe '.close_after_error_tracking_resolve' do
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:close_after_error_tracking_resolve)
end
described_class.close_after_error_tracking_resolve(noteable, project, author)
end
end
describe '.relate_issuable' do
let(:noteable_ref) { double }
let(:noteable) { double }
before do
allow(noteable).to receive(:project).and_return(double)
end
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:relate_issuable).with(noteable_ref)
end
described_class.relate_issuable(noteable, noteable_ref, double)
end
end
describe '.unrelate_issuable' do
let(:noteable_ref) { double }
let(:noteable) { double }
before do
allow(noteable).to receive(:project).and_return(double)
end
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:unrelate_issuable).with(noteable_ref)
end
described_class.unrelate_issuable(noteable, noteable_ref, double)
end
end
describe '.change_start_date_or_due_date' do
let(:changed_dates) { double }
it 'calls TimeTrackingService' do
expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
expect(service).to receive(:change_start_date_or_due_date).with(changed_dates)
end
described_class.change_start_date_or_due_date(noteable, project, author, changed_dates)
end
end
describe '.change_status' do
let(:status) { double }
let(:source) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:change_status).with(status, source)
end
described_class.change_status(noteable, project, author, status, source)
end
end
describe '.merge_when_pipeline_succeeds' do
it 'calls MergeRequestsService' do
sha = double
expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
expect(service).to receive(:merge_when_pipeline_succeeds).with(sha)
end
described_class.merge_when_pipeline_succeeds(noteable, project, author, sha)
end
end
describe '.cancel_merge_when_pipeline_succeeds' do
it 'calls MergeRequestsService' do
expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
expect(service).to receive(:cancel_merge_when_pipeline_succeeds)
end
described_class.cancel_merge_when_pipeline_succeeds(noteable, project, author)
end
end
describe '.abort_merge_when_pipeline_succeeds' do
it 'calls MergeRequestsService' do
reason = double
expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
expect(service).to receive(:abort_merge_when_pipeline_succeeds).with(reason)
end
described_class.abort_merge_when_pipeline_succeeds(noteable, project, author, reason)
end
end
describe '.change_title' do
let(:title) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:change_title).with(title)
end
described_class.change_title(noteable, project, author, title)
end
end
describe '.change_description' do
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:change_description)
end
described_class.change_description(noteable, project, author)
end
end
describe '.change_issue_confidentiality' do
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:change_issue_confidentiality)
end
described_class.change_issue_confidentiality(noteable, project, author)
end
end
describe '.change_branch' do
it 'calls MergeRequestsService' do
old_branch = double('old_branch')
new_branch = double('new_branch')
branch_type = double('branch_type')
event_type = double('event_type')
expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
expect(service).to receive(:change_branch).with(branch_type, event_type, old_branch, new_branch)
end
described_class.change_branch(noteable, project, author, branch_type, event_type, old_branch, new_branch)
end
end
describe '.change_branch_presence' do
it 'calls MergeRequestsService' do
presence = double
branch = double
branch_type = double
expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
expect(service).to receive(:change_branch_presence).with(branch_type, branch, presence)
end
described_class.change_branch_presence(noteable, project, author, branch_type, branch, presence)
end
end
describe '.new_issue_branch' do
it 'calls MergeRequestsService' do
branch = double
branch_project = double
expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
expect(service).to receive(:new_issue_branch).with(branch, branch_project: branch_project)
end
described_class.new_issue_branch(noteable, project, author, branch, branch_project: branch_project)
end
end
describe '.new_merge_request' do
it 'calls MergeRequestsService' do
merge_request = double
expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
expect(service).to receive(:new_merge_request).with(merge_request)
end
described_class.new_merge_request(noteable, project, author, merge_request)
end
end
describe '.zoom_link_added' do
it 'calls ZoomService' do
expect_next_instance_of(::SystemNotes::ZoomService) do |service|
expect(service).to receive(:zoom_link_added)
end
described_class.zoom_link_added(noteable, project, author)
end
end
describe '.zoom_link_removed' do
it 'calls ZoomService' do
expect_next_instance_of(::SystemNotes::ZoomService) do |service|
expect(service).to receive(:zoom_link_removed)
end
described_class.zoom_link_removed(noteable, project, author)
end
end
describe '.cross_reference' do
let(:mentioned_in) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:cross_reference).with(mentioned_in)
end
described_class.cross_reference(double, mentioned_in, double)
end
end
describe '.cross_reference_disallowed?' do
let(:mentioned_in) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:cross_reference_disallowed?).with(mentioned_in)
end
described_class.cross_reference_disallowed?(double, mentioned_in)
end
end
describe '.cross_reference_exists?' do
let(:mentioned_in) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:cross_reference_exists?).with(mentioned_in)
end
described_class.cross_reference_exists?(double, mentioned_in)
end
end
describe '.noteable_moved' do
let(:noteable_ref) { double }
let(:direction) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:noteable_moved).with(noteable_ref, direction)
end
described_class.noteable_moved(double, double, noteable_ref, double, direction: direction)
end
end
describe '.noteable_cloned' do
let(:noteable_ref) { double }
let(:direction) { double }
let(:created_at) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:noteable_cloned).with(noteable_ref, direction, created_at: created_at)
end
described_class.noteable_cloned(double, double, noteable_ref, double, direction: direction, created_at: created_at)
end
end
describe '.change_time_estimate' do
it 'calls TimeTrackingService' do
expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
expect(service).to receive(:change_time_estimate)
end
described_class.change_time_estimate(noteable, project, author)
end
end
describe '.discussion_continued_in_issue' do
let(:discussion) { create(:diff_note_on_merge_request, project: project).to_discussion }
let(:merge_request) { discussion.noteable }
let(:issue) { create(:issue, project: project) }
def reloaded_merge_request
MergeRequest.find(merge_request.id)
end
subject { described_class.discussion_continued_in_issue(discussion, project, author, issue) }
it_behaves_like 'a system note' do
let(:expected_noteable) { discussion.first_note.noteable }
let(:action) { 'discussion' }
end
it 'creates a new note in the discussion' do
# we need to completely rebuild the merge request object, or the `@discussions` on the merge request are not reloaded.
expect { subject }.to change { reloaded_merge_request.discussions.first.notes.size }.by(1)
end
it 'mentions the created issue in the system note' do
expect(subject.note).to include(issue.to_reference)
end
end
describe '.change_time_spent' do
it 'calls TimeTrackingService' do
expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
expect(service).to receive(:change_time_spent)
end
described_class.change_time_spent(noteable, project, author)
end
end
describe '.created_timelog' do
let(:issue) { create(:issue, project: project) }
let(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800) }
it 'calls TimeTrackingService' do
expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
expect(service).to receive(:created_timelog)
end
described_class.created_timelog(noteable, project, author, timelog)
end
end
describe '.remove_timelog' do
let(:issue) { create(:issue, project: project) }
let(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800) }
it 'calls TimeTrackingService' do
expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
expect(service).to receive(:remove_timelog)
end
described_class.remove_timelog(noteable, project, author, timelog)
end
end
describe '.handle_merge_request_draft' do
it 'calls MergeRequestsService' do
expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
expect(service).to receive(:handle_merge_request_draft)
end
described_class.handle_merge_request_draft(noteable, project, author)
end
end
describe '.add_merge_request_draft_from_commit' do
it 'calls MergeRequestsService' do
commit = double
expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
expect(service).to receive(:add_merge_request_draft_from_commit).with(commit)
end
described_class.add_merge_request_draft_from_commit(noteable, project, author, commit)
end
end
describe '.change_task_status' do
let(:new_task) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:change_task_status).with(new_task)
end
described_class.change_task_status(noteable, project, author, new_task)
end
end
describe '.resolve_all_discussions' do
it 'calls MergeRequestsService' do
expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
expect(service).to receive(:resolve_all_discussions)
end
described_class.resolve_all_discussions(noteable, project, author)
end
end
describe '.diff_discussion_outdated' do
it 'calls MergeRequestsService' do
discussion = double
change_position = double
expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
expect(service).to receive(:diff_discussion_outdated).with(discussion, change_position)
end
described_class.diff_discussion_outdated(discussion, project, author, change_position)
end
end
describe '.mark_duplicate_issue' do
let(:canonical_issue) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:mark_duplicate_issue).with(canonical_issue)
end
described_class.mark_duplicate_issue(noteable, project, author, canonical_issue)
end
end
describe '.mark_canonical_issue_of_duplicate' do
let(:duplicate_issue) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:mark_canonical_issue_of_duplicate).with(duplicate_issue)
end
described_class.mark_canonical_issue_of_duplicate(noteable, project, author, duplicate_issue)
end
end
describe '.discussion_lock' do
let(:issuable) { double }
before do
allow(issuable).to receive(:project).and_return(double)
end
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:discussion_lock)
end
described_class.discussion_lock(issuable, double)
end
end
describe '.auto_resolve_prometheus_alert' do
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:auto_resolve_prometheus_alert)
end
described_class.auto_resolve_prometheus_alert(noteable, project, author)
end
end
describe '.design_version_added' do
let(:version) { create(:design_version) }
it 'calls DesignManagementService' do
expect_next_instance_of(SystemNotes::DesignManagementService) do |service|
expect(service).to receive(:design_version_added).with(version)
end
described_class.design_version_added(version)
end
end
describe '.design_discussion_added' do
let(:discussion_note) { create(:diff_note_on_design) }
it 'calls DesignManagementService' do
expect_next_instance_of(SystemNotes::DesignManagementService) do |service|
expect(service).to receive(:design_discussion_added).with(discussion_note)
end
described_class.design_discussion_added(discussion_note)
end
end
describe '.approve_mr' do
it 'calls MergeRequestsService' do
expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
expect(service).to receive(:approve_mr)
end
described_class.approve_mr(noteable, author)
end
end
describe '.unapprove_mr' do
it 'calls MergeRequestsService' do
expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
expect(service).to receive(:unapprove_mr)
end
described_class.unapprove_mr(noteable, author)
end
end
describe '.change_alert_status' do
let(:alert) { build(:alert_management_alert) }
context 'with status change reason' do
let(:reason) { 'reason for status change' }
it 'calls AlertManagementService' do
expect_next_instance_of(SystemNotes::AlertManagementService) do |service|
expect(service).to receive(:change_alert_status).with(reason)
end
described_class.change_alert_status(alert, author, reason)
end
end
context 'without status change reason' do
it 'calls AlertManagementService' do
expect_next_instance_of(SystemNotes::AlertManagementService) do |service|
expect(service).to receive(:change_alert_status).with(nil)
end
described_class.change_alert_status(alert, author)
end
end
end
describe '.new_alert_issue' do
let(:alert) { build(:alert_management_alert, :with_incident) }
it 'calls AlertManagementService' do
expect_next_instance_of(SystemNotes::AlertManagementService) do |service|
expect(service).to receive(:new_alert_issue).with(alert.issue)
end
described_class.new_alert_issue(alert, alert.issue, author)
end
end
describe '.create_new_alert' do
let(:alert) { build(:alert_management_alert) }
let(:monitoring_tool) { 'Prometheus' }
it 'calls AlertManagementService' do
expect_next_instance_of(SystemNotes::AlertManagementService) do |service|
expect(service).to receive(:create_new_alert).with(monitoring_tool)
end
described_class.create_new_alert(alert, monitoring_tool)
end
end
describe '.change_incident_severity' do
let(:incident) { build(:incident) }
it 'calls IncidentService' do
expect_next_instance_of(SystemNotes::IncidentService) do |service|
expect(service).to receive(:change_incident_severity)
end
described_class.change_incident_severity(incident, author)
end
end
describe '.change_incident_status' do
let(:incident) { instance_double('Issue', project: project) }
context 'with status change reason' do
let(:reason) { 'reason for status change' }
it 'calls IncidentService' do
expect_next_instance_of(SystemNotes::IncidentService) do |service|
expect(service).to receive(:change_incident_status).with(reason)
end
described_class.change_incident_status(incident, author, reason)
end
end
context 'without status change reason' do
it 'calls IncidentService' do
expect_next_instance_of(SystemNotes::IncidentService) do |service|
expect(service).to receive(:change_incident_status).with(nil)
end
described_class.change_incident_status(incident, author)
end
end
end
describe '.log_resolving_alert' do
let(:alert) { build(:alert_management_alert) }
let(:monitoring_tool) { 'Prometheus' }
it 'calls AlertManagementService' do
expect_next_instance_of(SystemNotes::AlertManagementService) do |service|
expect(service).to receive(:log_resolving_alert).with(monitoring_tool)
end
described_class.log_resolving_alert(alert, monitoring_tool)
end
end
describe '.change_issue_type' do
let(:incident) { build(:incident) }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:change_issue_type).with('issue')
end
described_class.change_issue_type(incident, author, 'issue')
end
end
describe '.add_timeline_event' do
let(:timeline_event) { instance_double('IncidentManagement::TimelineEvent', incident: noteable, project: project) }
it 'calls IncidentsService' do
expect_next_instance_of(::SystemNotes::IncidentsService) do |service|
expect(service).to receive(:add_timeline_event).with(timeline_event)
end
described_class.add_timeline_event(timeline_event)
end
end
describe '.edit_timeline_event' do
let(:timeline_event) { instance_double('IncidentManagement::TimelineEvent', incident: noteable, project: project) }
it 'calls IncidentsService' do
expect_next_instance_of(::SystemNotes::IncidentsService) do |service|
expect(service).to receive(:edit_timeline_event).with(timeline_event, author, was_changed: :occurred_at)
end
described_class.edit_timeline_event(timeline_event, author, was_changed: :occurred_at)
end
end
describe '.delete_timeline_event' do
it 'calls IncidentsService' do
expect_next_instance_of(::SystemNotes::IncidentsService) do |service|
expect(service).to receive(:delete_timeline_event).with(author)
end
described_class.delete_timeline_event(noteable, author)
end
end
describe '.relate_work_item' do
let(:work_item) { double('work_item', issue_type: :task) }
let(:noteable) { double }
before do
allow(noteable).to receive(:project).and_return(double)
end
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:hierarchy_changed).with(work_item, 'relate')
end
described_class.relate_work_item(noteable, work_item, double)
end
end
describe '.unrelate_work_item' do
let(:work_item) { double('work_item', issue_type: :task) }
let(:noteable) { double }
before do
allow(noteable).to receive(:project).and_return(double)
end
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
expect(service).to receive(:hierarchy_changed).with(work_item, 'unrelate')
end
described_class.unrelate_work_item(noteable, work_item, double)
end
end
end
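`.add_email_participants` appears to be the only public delegation without an example above. A sketch in the same style:
```ruby
# Sketch of a delegation example for .add_email_participants.
describe '.add_email_participants' do
  let(:body) { 'added user@example.com' }

  it 'calls IssuableService' do
    expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
      expect(service).to receive(:add_email_participants).with(body)
    end

    described_class.add_email_participants(noteable, project, author, body)
  end
end
```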
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class BulkPushEventPayloadService
def initialize(event, push_data)
@event = event
@push_data = push_data
end
def execute
@event.build_push_event_payload(
action: @push_data[:action],
commit_count: 0,
ref_count: @push_data[:ref_count],
ref_type: @push_data[:ref_type]
)
@event.push_event_payload.tap(&:save!)
end
end
```
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkPushEventPayloadService, feature_category: :source_code_management do
let(:event) { create(:push_event) }
let(:push_data) do
{
action: :created,
ref_count: 4,
ref_type: :branch
}
end
subject { described_class.new(event, push_data) }
it 'creates a PushEventPayload' do
push_event_payload = subject.execute
expect(push_event_payload).to be_persisted
expect(push_event_payload.action).to eq(push_data[:action].to_s)
expect(push_event_payload.commit_count).to eq(0)
expect(push_event_payload.ref_count).to eq(push_data[:ref_count])
expect(push_event_payload.ref_type).to eq(push_data[:ref_type].to_s)
end
end
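A second example could make the enum-to-string coercion and the forced zero `commit_count` explicit; this is only a sketch, assuming the same factory and that `:removed`/`:tag` are valid enum values for the payload:
```ruby
# Sketch of an additional example for a tag-removal payload.
it 'records a tag removal with a zero commit count' do
  payload = described_class.new(event, { action: :removed, ref_count: 2, ref_type: :tag }).execute

  expect(payload).to be_persisted
  expect(payload.action).to eq('removed')
  expect(payload.ref_type).to eq('tag')
  expect(payload.commit_count).to eq(0)
end
```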
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# TodoService class
#
# Used for creating/updating todos after certain user actions
#
# Ex.
# TodoService.new.new_issue(issue, current_user)
#
class TodoService
include Gitlab::Utils::UsageData
# When creating an issue we should:
#
# * create a todo for assignee if issue is assigned
# * create a todo for each mentioned user on issue
#
def new_issue(issue, current_user)
new_issuable(issue, current_user)
end
# When updating an issue we should:
#
# * mark all pending todos related to the issue for the current user as done
#
def update_issue(issue, current_user, skip_users = [])
update_issuable(issue, current_user, skip_users)
end
# When closing an issue we should:
#
# * mark all pending todos related to the target for the current user as done
#
def close_issue(issue, current_user)
resolve_todos_for_target(issue, current_user)
end
# When we destroy a todo target we should:
#
# * refresh the todos count cache for all users with todos on the target
#
# This needs to yield back to the caller to destroy the target, because it
# collects the todo users before the todos themselves are deleted, then
# updates the todo counts for those users.
#
def destroy_target(target)
todo_user_ids = target.todos.distinct_user_ids
yield target
Users::UpdateTodoCountCacheService.new(todo_user_ids).execute if todo_user_ids.present?
end
# When we reassign an assignable object (issuable, alert) we should:
#
# * create a pending todo for new assignee if object is assigned
#
def reassigned_assignable(issuable, current_user, old_assignees = [])
create_assignment_todo(issuable, current_user, old_assignees)
end
# When we reassign a reviewable object (merge request) we should:
#
# * create a pending todo for new reviewer if object is assigned
#
def reassigned_reviewable(issuable, current_user, old_reviewers = [])
create_reviewer_todo(issuable, current_user, old_reviewers)
end
# When creating a merge request we should:
#
# * creates a pending todo for assignee if merge request is assigned
# * create a todo for each mentioned user on merge request
#
def new_merge_request(merge_request, current_user)
new_issuable(merge_request, current_user)
end
# When updating a merge request we should:
#
# * create a todo for each mentioned user on merge request
#
def update_merge_request(merge_request, current_user, skip_users = [])
update_issuable(merge_request, current_user, skip_users)
end
# When closing a merge request we should:
#
# * mark all pending todos related to the target for the current user as done
#
def close_merge_request(merge_request, current_user)
resolve_todos_for_target(merge_request, current_user)
end
# When merging a merge request we should:
#
# * mark all pending todos related to the target for the current user as done
#
def merge_merge_request(merge_request, current_user)
resolve_todos_for_target(merge_request, current_user)
end
# When a build fails on the HEAD of a merge request we should:
#
# * create a todo for each merge participant
#
def merge_request_build_failed(merge_request)
merge_request.merge_participants.each do |user|
create_build_failed_todo(merge_request, user)
end
end
# When a new commit is pushed to a merge request we should:
#
# * mark all pending todos related to the merge request for that user as done
#
def merge_request_push(merge_request, current_user)
resolve_todos_for_target(merge_request, current_user)
end
# When a build is retried to a merge request we should:
#
# * mark all pending todos related to the merge request as done for each merge participant
#
def merge_request_build_retried(merge_request)
merge_request.merge_participants.each do |user|
resolve_todos_for_target(merge_request, user)
end
end
# When a merge request could not be merged due to its unmergeable state we should:
#
# * create a todo for each merge participant
#
def merge_request_became_unmergeable(merge_request)
merge_request.merge_participants.each do |user|
create_unmergeable_todo(merge_request, user)
end
end
# When creating a note we should:
#
# * mark all pending todos related to the noteable for the note author as done
# * create a todo for each mentioned user on note
#
def new_note(note, current_user)
handle_note(note, current_user)
end
# When updating a note we should:
#
# * mark all pending todos related to the noteable for the current user as done
# * create a todo for each new user mentioned on note
#
def update_note(note, current_user, skip_users = [])
handle_note(note, current_user, skip_users)
end
# When an emoji is awarded we should:
#
# * mark all pending todos related to the awardable for the current user as done
#
def new_award_emoji(awardable, current_user)
resolve_todos_for_target(awardable, current_user)
end
# When user marks a target as todo
def mark_todo(target, current_user)
project = target.project
attributes = attributes_for_todo(project, target, current_user, Todo::MARKED)
create_todos(current_user, attributes, target_namespace(target), project)
end
def todo_exist?(issuable, current_user)
TodosFinder.new(current_user).any_for_target?(issuable, :pending)
end
# Resolves all todos related to target for the current_user
def resolve_todos_for_target(target, current_user)
attributes = attributes_for_target(target)
resolve_todos(pending_todos([current_user], attributes), current_user)
end
# Resolves all todos related to target for all users
def resolve_todos_with_attributes_for_target(target, attributes, resolution: :done, resolved_by_action: :system_done)
target_attributes = { target_id: target.id, target_type: target.class.polymorphic_name }
attributes.merge!(target_attributes)
attributes[:preload_user_association] = true
todos = PendingTodosFinder.new(attributes).execute
users = todos.map(&:user)
todos_ids = todos.batch_update(state: resolution, resolved_by_action: resolved_by_action)
users.each(&:update_todos_count_cache)
todos_ids
end
def resolve_todos(todos, current_user, resolution: :done, resolved_by_action: :system_done)
todos_ids = todos.batch_update(state: resolution, resolved_by_action: resolved_by_action)
current_user.update_todos_count_cache
todos_ids
end
def resolve_todo(todo, current_user, resolution: :done, resolved_by_action: :system_done)
return if todo.done?
todo.update(state: resolution, resolved_by_action: resolved_by_action)
current_user.update_todos_count_cache
end
def resolve_access_request_todos(member)
return if member.nil?
# Group or Project
target = member.source
todos_params = {
state: :pending,
author_id: member.user_id,
action: ::Todo::MEMBER_ACCESS_REQUESTED,
type: target.class.polymorphic_name
}
resolve_todos_with_attributes_for_target(target, todos_params)
end
def restore_todos(todos, current_user)
todos_ids = todos.batch_update(state: :pending)
current_user.update_todos_count_cache
todos_ids
end
def restore_todo(todo, current_user)
return if todo.pending?
todo.update(state: :pending)
current_user.update_todos_count_cache
end
def create_request_review_todo(target, author, reviewers)
project = target.project
attributes = attributes_for_todo(project, target, author, Todo::REVIEW_REQUESTED)
create_todos(reviewers, attributes, project.namespace, project)
end
def create_member_access_request_todos(member)
source = member.source
attributes = attributes_for_access_request_todos(source, member.user, Todo::MEMBER_ACCESS_REQUESTED)
approvers = source.access_request_approvers_to_be_notified.map(&:user)
return true if approvers.empty?
if source.instance_of? Project
project = source
namespace = project.namespace
else
project = nil
namespace = source
end
create_todos(approvers, attributes, namespace, project)
end
private
def create_todos(users, attributes, namespace, project)
users = Array(users)
return if users.empty?
users_single_todos, users_multiple_todos = users.partition { |u| Feature.disabled?(:multiple_todos, u) }
excluded_user_ids = []
if users_single_todos.present?
excluded_user_ids += pending_todos(
users_single_todos,
attributes.slice(:project_id, :target_id, :target_type, :commit_id, :discussion)
).distinct_user_ids
end
if users_multiple_todos.present? && !Todo::ACTIONS_MULTIPLE_ALLOWED.include?(attributes.fetch(:action))
excluded_user_ids += pending_todos(
users_multiple_todos,
attributes.slice(:project_id, :target_id, :target_type, :commit_id, :discussion, :action)
).distinct_user_ids
end
users.reject! { |user| excluded_user_ids.include?(user.id) }
todos = users.map do |user|
issue_type = attributes.delete(:issue_type)
track_todo_creation(user, issue_type, namespace, project)
Todo.create(attributes.merge(user_id: user.id))
end
Users::UpdateTodoCountCacheService.new(users.map(&:id)).execute
todos
end
def new_issuable(issuable, author)
create_assignment_todo(issuable, author)
create_reviewer_todo(issuable, author) if issuable.allows_reviewers?
create_mention_todos(issuable.project, issuable, author)
end
def update_issuable(issuable, author, skip_users = [])
# Skip toggling a task list item in a description
return if toggling_tasks?(issuable)
create_mention_todos(issuable.project, issuable, author, nil, skip_users)
end
def toggling_tasks?(issuable)
issuable.previous_changes.include?('description') &&
issuable.tasks? && issuable.updated_tasks.any?
end
def handle_note(note, author, skip_users = [])
return unless note.can_create_todo?
project = note.project
target = note.noteable
resolve_todos_for_target(target, author)
create_mention_todos(project, target, author, note, skip_users)
end
def create_assignment_todo(target, author, old_assignees = [])
if target.assignees.any?
project = target.project
assignees = target.assignees - old_assignees
attributes = attributes_for_todo(project, target, author, Todo::ASSIGNED)
create_todos(assignees, attributes, target_namespace(target), project)
end
end
def create_reviewer_todo(target, author, old_reviewers = [])
if target.reviewers.any?
reviewers = target.reviewers - old_reviewers
create_request_review_todo(target, author, reviewers)
end
end
def create_mention_todos(parent, target, author, note = nil, skip_users = [])
# Create Todos for directly addressed users
directly_addressed_users = filter_directly_addressed_users(parent, note || target, author, skip_users)
attributes = attributes_for_todo(parent, target, author, Todo::DIRECTLY_ADDRESSED, note)
create_todos(directly_addressed_users, attributes, parent&.namespace, parent)
# Create Todos for mentioned users
mentioned_users = filter_mentioned_users(parent, note || target, author, skip_users + directly_addressed_users)
attributes = attributes_for_todo(parent, target, author, Todo::MENTIONED, note)
create_todos(mentioned_users, attributes, parent&.namespace, parent)
end
def create_build_failed_todo(merge_request, todo_author)
project = merge_request.project
attributes = attributes_for_todo(project, merge_request, todo_author, Todo::BUILD_FAILED)
create_todos(todo_author, attributes, project.namespace, project)
end
def create_unmergeable_todo(merge_request, todo_author)
project = merge_request.project
attributes = attributes_for_todo(project, merge_request, todo_author, Todo::UNMERGEABLE)
create_todos(todo_author, attributes, project.namespace, project)
end
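# Base todo attributes for a target. Commits are referenced through commit_id
# rather than target_id, issues carry their issue_type (and group for
# group-level issues), and discussions are stored in the discussion column.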
def attributes_for_target(target)
attributes = {
project_id: target&.project&.id,
target_id: target.id,
target_type: target.class.try(:polymorphic_name) || target.class.name,
commit_id: nil
}
case target
when Commit
attributes.merge!(target_id: nil, commit_id: target.id)
when Issue
attributes[:issue_type] = target.issue_type
attributes[:group] = target.namespace if target.project.blank?
when Discussion
attributes.merge!(target_type: nil, target_id: nil, discussion: target)
end
attributes
end
def attributes_for_todo(project, target, author, action, note = nil)
attributes_for_target(target).merge!(
project_id: project&.id,
author_id: author.id,
action: action,
note: note
)
end
def filter_todo_users(users, parent, target)
reject_users_without_access(users, parent, target).uniq
end
def filter_mentioned_users(parent, target, author, skip_users = [])
mentioned_users = target.mentioned_users(author) - skip_users
filter_todo_users(mentioned_users, parent, target)
end
def filter_directly_addressed_users(parent, target, author, skip_users = [])
directly_addressed_users = target.directly_addressed_users(author) - skip_users
filter_todo_users(directly_addressed_users, parent, target)
end
def reject_users_without_access(users, parent, target)
if target.respond_to?(:to_ability_name)
select_users(users, :"read_#{target.to_ability_name}", target)
else
select_users(users, :read_project, parent)
end
end
def select_users(users, ability, subject)
users.select do |user|
user.can?(ability.to_sym, subject)
end
end
def pending_todos(users, criteria = {})
PendingTodosFinder.new(criteria.merge(users: users)).execute
end
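# Only todos created for incident issues are tracked as usage events.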
def track_todo_creation(user, issue_type, namespace, project)
return unless issue_type == 'incident'
event = "incident_management_incident_todo"
track_usage_event(event, user.id)
Gitlab::Tracking.event(
self.class.to_s,
event,
project: project,
namespace: namespace,
user: user,
label: 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly',
context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event).to_context]
)
end
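# Builds attributes for access request todos; for projects that belong to a
# group, the group_id is recorded alongside the project_id.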
def attributes_for_access_request_todos(source, author, action, note = nil)
attributes = {
target_id: source.id,
target_type: source.class.polymorphic_name,
author_id: author.id,
action: action,
note: note
}
if source.instance_of? Project
attributes[:project_id] = source.id
attributes[:group_id] = source.group.id if source.group.present?
else
attributes[:group_id] = source.id
end
attributes
end
def target_namespace(target)
project = target.project
project&.namespace || target.try(:namespace)
end
end
TodoService.prepend_mod_with('TodoService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe TodoService, feature_category: :team_planning do
include AfterNextHelpers
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:author) { create(:user) }
let_it_be(:assignee) { create(:user) }
let_it_be(:non_member) { create(:user) }
let_it_be(:member) { create(:user) }
let_it_be(:guest) { create(:user) }
let_it_be(:admin) { create(:admin) }
let_it_be(:john_doe) { create(:user) }
let_it_be(:skipped) { create(:user) }
let(:skip_users) { [skipped] }
let(:mentions) { 'FYI: ' + [author, assignee, john_doe, member, guest, non_member, admin, skipped].map(&:to_reference).join(' ') }
let(:directly_addressed) { [author, assignee, john_doe, member, guest, non_member, admin, skipped].map(&:to_reference).join(' ') }
let(:directly_addressed_and_mentioned) { member.to_reference + ", what do you think? cc: " + [guest, admin, skipped].map(&:to_reference).join(' ') }
let(:service) { described_class.new }
before_all do
project.add_guest(guest)
project.add_developer(author)
project.add_developer(assignee)
project.add_developer(member)
project.add_developer(john_doe)
project.add_developer(skipped)
end
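# Shared examples for (re)assignable targets. Including contexts are expected to
# define :described_method, :target_assigned, :addressed_target_assigned and
# :target_unassigned, and may override :additional_todo_attributes.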
shared_examples 'reassigned target' do
let(:additional_todo_attributes) { {} }
it 'creates a pending todo for new assignee' do
target_unassigned.assignees = [john_doe]
service.send(described_method, target_unassigned, author)
should_create_todo(
user: john_doe,
target: target_unassigned,
action: Todo::ASSIGNED,
**additional_todo_attributes
)
end
it 'does not create a todo if unassigned' do
target_assigned.assignees = []
should_not_create_any_todo { service.send(described_method, target_assigned, author) }
end
it 'creates a todo if new assignee is the current user' do
target_assigned.assignees = [john_doe]
service.send(described_method, target_assigned, john_doe)
should_create_todo(
user: john_doe,
target: target_assigned,
author: john_doe,
action: Todo::ASSIGNED,
**additional_todo_attributes
)
end
it 'does not create a todo for guests' do
service.send(described_method, target_assigned, author)
should_not_create_todo(user: guest, target: target_assigned, action: Todo::MENTIONED)
end
it 'does not create a directly addressed todo for guests' do
service.send(described_method, addressed_target_assigned, author)
should_not_create_todo(user: guest, target: addressed_target_assigned, action: Todo::DIRECTLY_ADDRESSED)
end
it 'does not create a todo if already assigned' do
should_not_create_any_todo { service.send(described_method, target_assigned, author, [john_doe]) }
end
end
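# Shared examples for targets that support reviewers. Including contexts are
# expected to define :described_method and a :target built from :assigned_reviewers.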
shared_examples 'reassigned reviewable target' do
context 'with no existing reviewers' do
let(:assigned_reviewers) { [] }
it 'creates a pending todo for new reviewer' do
target.reviewers = [john_doe]
service.send(described_method, target, author)
should_create_todo(user: john_doe, target: target, action: Todo::REVIEW_REQUESTED)
end
end
context 'with an existing reviewer' do
let(:assigned_reviewers) { [john_doe] }
it 'does not create a todo if unassigned' do
target.reviewers = []
should_not_create_any_todo { service.send(described_method, target, author) }
end
it 'creates a todo if new reviewer is the current user' do
target.reviewers = [john_doe]
service.send(described_method, target, john_doe)
should_create_todo(user: john_doe, target: target, author: john_doe, action: Todo::REVIEW_REQUESTED)
end
it 'does not create a todo if already assigned' do
should_not_create_any_todo { service.send(described_method, target, author, [john_doe]) }
end
end
end
describe 'Issues' do
let(:issue) { create(:issue, project: project, author: author, description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
let(:addressed_issue) { create(:issue, project: project, author: author, description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
let(:assigned_issue) { create(:issue, project: project, assignees: [john_doe]) }
let(:unassigned_issue) { create(:issue, project: project, assignees: []) }
let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignees: [assignee], description: mentions) }
let(:addressed_confident_issue) { create(:issue, :confidential, project: project, author: author, assignees: [assignee], description: directly_addressed) }
describe '#new_issue' do
it 'creates a todo if assigned' do
service.new_issue(assigned_issue, author)
should_create_todo(user: john_doe, target: assigned_issue, action: Todo::ASSIGNED)
end
it 'does not create a todo if unassigned' do
should_not_create_any_todo { service.new_issue(unassigned_issue, author) }
end
it 'creates a todo if assignee is the current user' do
unassigned_issue.assignees = [john_doe]
service.new_issue(unassigned_issue, john_doe)
should_create_todo(user: john_doe, target: unassigned_issue, author: john_doe, action: Todo::ASSIGNED)
end
it 'creates a todo for each valid mentioned user' do
service.new_issue(issue, author)
should_create_todo(user: member, target: issue, action: Todo::MENTIONED)
should_create_todo(user: guest, target: issue, action: Todo::MENTIONED)
should_create_todo(user: author, target: issue, action: Todo::MENTIONED)
should_create_todo(user: john_doe, target: issue, action: Todo::MENTIONED)
should_not_create_todo(user: non_member, target: issue, action: Todo::MENTIONED)
end
it 'creates a directly addressed todo for each valid addressed user' do
service.new_issue(addressed_issue, author)
should_create_todo(user: member, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: guest, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: author, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: john_doe, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: non_member, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
end
it 'creates correct todos for each valid user based on the type of mention' do
issue.update!(description: directly_addressed_and_mentioned)
service.new_issue(issue, author)
should_create_todo(user: member, target: issue, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: admin, target: issue, action: Todo::MENTIONED)
should_create_todo(user: guest, target: issue, action: Todo::MENTIONED)
end
it 'does not create todo if user can not see the issue when issue is confidential' do
service.new_issue(confidential_issue, john_doe)
should_create_todo(user: assignee, target: confidential_issue, author: john_doe, action: Todo::ASSIGNED)
should_create_todo(user: author, target: confidential_issue, author: john_doe, action: Todo::MENTIONED)
should_create_todo(user: member, target: confidential_issue, author: john_doe, action: Todo::MENTIONED)
should_not_create_todo(user: admin, target: confidential_issue, author: john_doe, action: Todo::MENTIONED)
should_not_create_todo(user: guest, target: confidential_issue, author: john_doe, action: Todo::MENTIONED)
should_create_todo(user: john_doe, target: confidential_issue, author: john_doe, action: Todo::MENTIONED)
end
it 'does not create directly addressed todo if user cannot see the issue when issue is confidential' do
service.new_issue(addressed_confident_issue, john_doe)
should_create_todo(user: assignee, target: addressed_confident_issue, author: john_doe, action: Todo::ASSIGNED)
should_create_todo(user: author, target: addressed_confident_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: member, target: addressed_confident_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: admin, target: addressed_confident_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: guest, target: addressed_confident_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: john_doe, target: addressed_confident_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED)
end
context 'when a private group is mentioned' do
let(:group) { create(:group, :private) }
let(:project) { create(:project, :private, group: group) }
let(:issue) { create(:issue, author: author, project: project, description: group.to_reference) }
before do
group.add_owner(author)
group.add_member(member, Gitlab::Access::DEVELOPER)
group.add_member(john_doe, Gitlab::Access::DEVELOPER)
service.new_issue(issue, author)
end
it 'creates a todo for group members' do
should_create_todo(user: member, target: issue)
should_create_todo(user: john_doe, target: issue)
end
end
context 'issue is an incident' do
let(:issue) { create(:incident, project: project, assignees: [john_doe], author: author) }
subject do
service.new_issue(issue, author)
should_create_todo(user: john_doe, target: issue, action: Todo::ASSIGNED)
end
it_behaves_like 'an incident management tracked event', :incident_management_incident_todo do
let(:current_user) { john_doe }
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:namespace) { project.namespace }
let(:category) { described_class.to_s }
let(:action) { 'incident_management_incident_todo' }
let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
let(:user) { john_doe }
end
end
end
describe '#update_issue' do
it 'creates a todo for each valid mentioned user not included in skip_users' do
service.update_issue(issue, author, skip_users)
should_create_todo(user: member, target: issue, action: Todo::MENTIONED)
should_create_todo(user: guest, target: issue, action: Todo::MENTIONED)
should_create_todo(user: john_doe, target: issue, action: Todo::MENTIONED)
should_create_todo(user: author, target: issue, action: Todo::MENTIONED)
should_not_create_todo(user: non_member, target: issue, action: Todo::MENTIONED)
should_not_create_todo(user: skipped, target: issue, action: Todo::MENTIONED)
end
it 'creates a todo for each valid user not included in skip_users based on the type of mention' do
issue.update!(description: directly_addressed_and_mentioned)
service.update_issue(issue, author, skip_users)
should_create_todo(user: member, target: issue, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: guest, target: issue, action: Todo::MENTIONED)
should_not_create_todo(user: admin, target: issue, action: Todo::MENTIONED)
should_not_create_todo(user: skipped, target: issue)
end
it 'creates a directly addressed todo for each valid addressed user not included in skip_users' do
service.update_issue(addressed_issue, author, skip_users)
should_create_todo(user: member, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: guest, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: john_doe, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: author, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: non_member, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: skipped, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
end
it 'does not create a todo if user was already mentioned and todo is pending' do
stub_feature_flags(multiple_todos: false)
create(:todo, :mentioned, user: member, project: project, target: issue, author: author)
expect { service.update_issue(issue, author, skip_users) }.not_to change(member.todos, :count)
end
it 'does not create a todo if user was already mentioned and todo is done' do
create(:todo, :mentioned, :done, user: skipped, project: project, target: issue, author: author)
expect { service.update_issue(issue, author, skip_users) }.not_to change(skipped.todos, :count)
end
it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is pending' do
stub_feature_flags(multiple_todos: false)
create(:todo, :directly_addressed, user: member, project: project, target: addressed_issue, author: author)
expect { service.update_issue(addressed_issue, author, skip_users) }.not_to change(member.todos, :count)
end
it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is done' do
create(:todo, :directly_addressed, :done, user: skipped, project: project, target: addressed_issue, author: author)
expect { service.update_issue(addressed_issue, author, skip_users) }.not_to change(skipped.todos, :count)
end
it 'does not create todo if user can not see the issue when issue is confidential' do
service.update_issue(confidential_issue, john_doe)
should_create_todo(user: author, target: confidential_issue, author: john_doe, action: Todo::MENTIONED)
should_create_todo(user: assignee, target: confidential_issue, author: john_doe, action: Todo::MENTIONED)
should_create_todo(user: member, target: confidential_issue, author: john_doe, action: Todo::MENTIONED)
should_not_create_todo(user: admin, target: confidential_issue, author: john_doe, action: Todo::MENTIONED)
should_not_create_todo(user: guest, target: confidential_issue, author: john_doe, action: Todo::MENTIONED)
should_create_todo(user: john_doe, target: confidential_issue, author: john_doe, action: Todo::MENTIONED)
end
it 'does not create a directly addressed todo if user can not see the issue when issue is confidential' do
service.update_issue(addressed_confident_issue, john_doe)
should_create_todo(user: author, target: addressed_confident_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: assignee, target: addressed_confident_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: member, target: addressed_confident_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: admin, target: addressed_confident_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: guest, target: addressed_confident_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: john_doe, target: addressed_confident_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED)
end
context 'issues with a task list' do
it 'does not create todo when tasks are marked as completed' do
issue.update!(description: "- [x] Task 1\n- [X] Task 2 #{mentions}")
service.update_issue(issue, author)
should_not_create_todo(user: admin, target: issue, action: Todo::MENTIONED)
should_not_create_todo(user: assignee, target: issue, action: Todo::MENTIONED)
should_not_create_todo(user: author, target: issue, action: Todo::MENTIONED)
should_not_create_todo(user: john_doe, target: issue, action: Todo::MENTIONED)
should_not_create_todo(user: member, target: issue, action: Todo::MENTIONED)
should_not_create_todo(user: non_member, target: issue, action: Todo::MENTIONED)
end
it 'does not create directly addressed todo when tasks are marked as completed' do
addressed_issue.update!(description: "#{directly_addressed}\n- [x] Task 1\n- [x] Task 2\n")
service.update_issue(addressed_issue, author)
should_not_create_todo(user: admin, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: assignee, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: author, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: john_doe, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: member, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: non_member, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
end
it 'does not raise an error when the description does not change' do
issue.update!(title: 'Sample')
expect { service.update_issue(issue, author) }.not_to raise_error
end
end
end
describe '#close_issue' do
it 'marks related pending todos to the target for the user as done' do
first_todo = create(:todo, :assigned, user: john_doe, project: project, target: issue, author: author)
second_todo = create(:todo, :assigned, user: john_doe, project: project, target: issue, author: author)
service.close_issue(issue, john_doe)
expect(first_todo.reload).to be_done
expect(second_todo.reload).to be_done
end
end
describe '#destroy_target' do
it 'refreshes the todos count cache for users with todos on the target' do
create(:todo, state: :pending, target: issue, user: author, author: author, project: issue.project)
create(:todo, state: :done, target: issue, user: assignee, author: assignee, project: issue.project)
expect_next(Users::UpdateTodoCountCacheService, [author.id, assignee.id]).to receive(:execute)
service.destroy_target(issue) { issue.destroy! }
end
it 'yields the target to the caller' do
expect { |b| service.destroy_target(issue, &b) }
.to yield_with_args(issue)
end
end
describe '#resolve_todos_for_target' do
it 'marks related pending todos to the target for the user as done' do
first_todo = create(:todo, :assigned, user: john_doe, project: project, target: issue, author: author)
second_todo = create(:todo, :assigned, user: john_doe, project: project, target: issue, author: author)
service.resolve_todos_for_target(issue, john_doe)
expect(first_todo.reload).to be_done
expect(second_todo.reload).to be_done
end
describe 'cached counts' do
it 'updates when todos change' do
create(:todo, :assigned, user: john_doe, project: project, target: issue, author: author)
expect(john_doe.todos_done_count).to eq(0)
expect(john_doe.todos_pending_count).to eq(1)
expect(john_doe).to receive(:update_todos_count_cache).and_call_original
service.resolve_todos_for_target(issue, john_doe)
expect(john_doe.todos_done_count).to eq(1)
expect(john_doe.todos_pending_count).to eq(0)
end
end
end
describe '#resolve_todos_with_attributes_for_target' do
it 'marks related pending todos to the target for all the users as done' do
first_todo = create(:todo, :assigned, user: member, project: project, target: issue, author: author)
second_todo = create(:todo, :review_requested, user: john_doe, project: project, target: issue, author: author)
another_todo = create(:todo, :assigned, user: john_doe, project: project, target: project, author: author)
service.resolve_todos_with_attributes_for_target(issue, {})
expect(first_todo.reload).to be_done
expect(second_todo.reload).to be_done
expect(another_todo.reload).to be_pending
end
it 'marks only the filtered related pending todos to the target for all the users as done' do
first_todo = create(:todo, :assigned, user: member, project: project, target: issue, author: author)
second_todo = create(:todo, :review_requested, user: john_doe, project: project, target: issue, author: author)
another_todo = create(:todo, :assigned, user: john_doe, project: project, target: project, author: author)
service.resolve_todos_with_attributes_for_target(issue, { action: Todo::ASSIGNED })
expect(first_todo.reload).to be_done
expect(second_todo.reload).to be_pending
expect(another_todo.reload).to be_pending
end
it 'fetches the pending todos with users preloaded' do
expect(PendingTodosFinder).to receive(:new)
.with(a_hash_including(preload_user_association: true)).and_call_original
service.resolve_todos_with_attributes_for_target(issue, { action: Todo::ASSIGNED })
end
end
describe '#new_note' do
let!(:first_todo) { create(:todo, :assigned, user: john_doe, project: project, target: issue, author: author) }
let!(:second_todo) { create(:todo, :assigned, user: john_doe, project: project, target: issue, author: author) }
let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignees: [assignee]) }
let(:note) { create(:note, project: project, noteable: issue, author: john_doe, note: mentions) }
let(:confidential_note) { create(:note, :confidential, project: project, noteable: issue, author: john_doe, note: mentions) }
let(:addressed_note) { create(:note, project: project, noteable: issue, author: john_doe, note: directly_addressed) }
let(:note_on_commit) { create(:note_on_commit, project: project, author: john_doe, note: mentions) }
let(:addressed_note_on_commit) { create(:note_on_commit, project: project, author: john_doe, note: directly_addressed) }
let(:note_on_confidential_issue) { create(:note_on_issue, noteable: confidential_issue, project: project, note: mentions) }
let(:addressed_note_on_confidential_issue) { create(:note_on_issue, noteable: confidential_issue, project: project, note: directly_addressed) }
let(:note_on_project_snippet) { create(:note_on_project_snippet, project: project, author: john_doe, note: mentions) }
let(:system_note) { create(:system_note, project: project, noteable: issue) }
it 'marks related pending todos to the noteable for the note author as done' do
first_todo = create(:todo, :assigned, user: john_doe, project: project, target: issue, author: author)
second_todo = create(:todo, :assigned, user: john_doe, project: project, target: issue, author: author)
service.new_note(note, john_doe)
expect(first_todo.reload).to be_done
expect(second_todo.reload).to be_done
end
it 'does not mark related pending todos as done when it is a system note' do
service.new_note(system_note, john_doe)
expect(first_todo.reload).to be_pending
expect(second_todo.reload).to be_pending
end
it 'creates a todo for each valid mentioned user' do
service.new_note(note, john_doe)
should_create_todo(user: member, target: issue, author: john_doe, action: Todo::MENTIONED, note: note)
should_create_todo(user: guest, target: issue, author: john_doe, action: Todo::MENTIONED, note: note)
should_create_todo(user: author, target: issue, author: john_doe, action: Todo::MENTIONED, note: note)
should_create_todo(user: john_doe, target: issue, author: john_doe, action: Todo::MENTIONED, note: note)
should_not_create_todo(user: non_member, target: issue, author: john_doe, action: Todo::MENTIONED, note: note)
end
it 'creates a todo for each valid user based on the type of mention' do
note.update!(note: directly_addressed_and_mentioned)
service.new_note(note, john_doe)
should_create_todo(user: member, target: issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED, note: note)
should_not_create_todo(user: admin, target: issue, author: john_doe, action: Todo::MENTIONED, note: note)
should_create_todo(user: guest, target: issue, author: john_doe, action: Todo::MENTIONED, note: note)
end
it 'creates a directly addressed todo for each valid addressed user' do
service.new_note(addressed_note, john_doe)
should_create_todo(user: member, target: issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED, note: addressed_note)
should_create_todo(user: guest, target: issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED, note: addressed_note)
should_create_todo(user: author, target: issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED, note: addressed_note)
should_create_todo(user: john_doe, target: issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED, note: addressed_note)
should_not_create_todo(user: non_member, target: issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED, note: addressed_note)
end
it 'does not create todo if user can not see the issue when leaving a note on a confidential issue' do
service.new_note(note_on_confidential_issue, john_doe)
should_create_todo(user: author, target: confidential_issue, author: john_doe, action: Todo::MENTIONED, note: note_on_confidential_issue)
should_create_todo(user: assignee, target: confidential_issue, author: john_doe, action: Todo::MENTIONED, note: note_on_confidential_issue)
should_create_todo(user: member, target: confidential_issue, author: john_doe, action: Todo::MENTIONED, note: note_on_confidential_issue)
should_not_create_todo(user: admin, target: confidential_issue, author: john_doe, action: Todo::MENTIONED, note: note_on_confidential_issue)
should_not_create_todo(user: guest, target: confidential_issue, author: john_doe, action: Todo::MENTIONED, note: note_on_confidential_issue)
should_create_todo(user: john_doe, target: confidential_issue, author: john_doe, action: Todo::MENTIONED, note: note_on_confidential_issue)
end
it 'does not create a directly addressed todo if user can not see the issue when leaving a note on a confidential issue' do
service.new_note(addressed_note_on_confidential_issue, john_doe)
should_create_todo(user: author, target: confidential_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED, note: addressed_note_on_confidential_issue)
should_create_todo(user: assignee, target: confidential_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED, note: addressed_note_on_confidential_issue)
should_create_todo(user: member, target: confidential_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED, note: addressed_note_on_confidential_issue)
should_not_create_todo(user: admin, target: confidential_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED, note: addressed_note_on_confidential_issue)
should_not_create_todo(user: guest, target: confidential_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED, note: addressed_note_on_confidential_issue)
should_create_todo(user: john_doe, target: confidential_issue, author: john_doe, action: Todo::DIRECTLY_ADDRESSED, note: addressed_note_on_confidential_issue)
end
it 'does not create todo if user can not read confidential note' do
service.new_note(confidential_note, john_doe)
should_not_create_todo(user: non_member, target: issue, author: john_doe, action: Todo::MENTIONED, note: confidential_note)
should_not_create_todo(user: guest, target: issue, author: john_doe, action: Todo::MENTIONED, note: confidential_note)
should_create_todo(user: member, target: issue, author: john_doe, action: Todo::MENTIONED, note: confidential_note)
should_create_todo(user: author, target: issue, author: john_doe, action: Todo::MENTIONED, note: confidential_note)
should_create_todo(user: assignee, target: issue, author: john_doe, action: Todo::MENTIONED, note: confidential_note)
should_create_todo(user: john_doe, target: issue, author: john_doe, action: Todo::MENTIONED, note: confidential_note)
end
context 'commits' do
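# Commit todos are identified by commit_id rather than target_id.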
let(:base_commit_todo_attrs) { { target_id: nil, target_type: 'Commit', author: john_doe } }
context 'leaving a note on a commit in a public project' do
let(:project) { create(:project, :repository, :public) }
it 'creates a todo for each valid mentioned user' do
expected_todo = base_commit_todo_attrs.merge(
action: Todo::MENTIONED,
note: note_on_commit,
commit_id: note_on_commit.commit_id
)
service.new_note(note_on_commit, john_doe)
should_create_todo(expected_todo.merge(user: member))
should_create_todo(expected_todo.merge(user: author))
should_create_todo(expected_todo.merge(user: john_doe))
should_create_todo(expected_todo.merge(user: guest))
should_create_todo(expected_todo.merge(user: non_member))
end
it 'creates a directly addressed todo for each valid addressed user' do
expected_todo = base_commit_todo_attrs.merge(
action: Todo::DIRECTLY_ADDRESSED,
note: addressed_note_on_commit,
commit_id: addressed_note_on_commit.commit_id
)
service.new_note(addressed_note_on_commit, john_doe)
should_create_todo(expected_todo.merge(user: member))
should_create_todo(expected_todo.merge(user: author))
should_create_todo(expected_todo.merge(user: john_doe))
should_create_todo(expected_todo.merge(user: guest))
should_create_todo(expected_todo.merge(user: non_member))
end
end
context 'leaving a note on a commit in a public project with private code' do
let_it_be(:project) { create(:project, :repository, :public, :repository_private) }
before_all do
project.add_guest(guest)
project.add_developer(author)
project.add_developer(assignee)
project.add_developer(member)
project.add_developer(john_doe)
project.add_developer(skipped)
end
it 'creates a todo for each valid mentioned user' do
expected_todo = base_commit_todo_attrs.merge(
action: Todo::MENTIONED,
note: note_on_commit,
commit_id: note_on_commit.commit_id
)
service.new_note(note_on_commit, john_doe)
should_create_todo(expected_todo.merge(user: member))
should_create_todo(expected_todo.merge(user: author))
should_create_todo(expected_todo.merge(user: john_doe))
should_create_todo(expected_todo.merge(user: guest))
should_not_create_todo(expected_todo.merge(user: non_member))
end
it 'creates a directly addressed todo for each valid addressed user' do
expected_todo = base_commit_todo_attrs.merge(
action: Todo::DIRECTLY_ADDRESSED,
note: addressed_note_on_commit,
commit_id: addressed_note_on_commit.commit_id
)
service.new_note(addressed_note_on_commit, john_doe)
should_create_todo(expected_todo.merge(user: member))
should_create_todo(expected_todo.merge(user: author))
should_create_todo(expected_todo.merge(user: john_doe))
should_create_todo(expected_todo.merge(user: guest))
should_not_create_todo(expected_todo.merge(user: non_member))
end
end
context 'leaving a note on a commit in a private project' do
let_it_be(:project) { create(:project, :repository, :private) }
before_all do
project.add_guest(guest)
project.add_developer(author)
project.add_developer(assignee)
project.add_developer(member)
project.add_developer(john_doe)
project.add_developer(skipped)
end
it 'creates a todo for each valid mentioned user' do
expected_todo = base_commit_todo_attrs.merge(
action: Todo::MENTIONED,
note: note_on_commit,
commit_id: note_on_commit.commit_id
)
service.new_note(note_on_commit, john_doe)
should_create_todo(expected_todo.merge(user: member))
should_create_todo(expected_todo.merge(user: author))
should_create_todo(expected_todo.merge(user: john_doe))
should_not_create_todo(expected_todo.merge(user: guest))
should_not_create_todo(expected_todo.merge(user: non_member))
end
it 'creates a directly addressed todo for each valid addressed user' do
expected_todo = base_commit_todo_attrs.merge(
action: Todo::DIRECTLY_ADDRESSED,
note: addressed_note_on_commit,
commit_id: addressed_note_on_commit.commit_id
)
service.new_note(addressed_note_on_commit, john_doe)
should_create_todo(expected_todo.merge(user: member))
should_create_todo(expected_todo.merge(user: author))
should_create_todo(expected_todo.merge(user: john_doe))
should_not_create_todo(expected_todo.merge(user: guest))
should_not_create_todo(expected_todo.merge(user: non_member))
end
end
end
it 'does not create a todo when leaving a note on a snippet' do
should_not_create_any_todo { service.new_note(note_on_project_snippet, john_doe) }
end
end
describe '#mark_todo' do
it 'creates a todo from an issue' do
service.mark_todo(unassigned_issue, author)
should_create_todo(user: author, target: unassigned_issue, action: Todo::MARKED)
end
context 'when issue belongs to a group' do
it 'creates a todo from an issue' do
group_issue = create(:issue, :group_level, namespace: group)
service.mark_todo(group_issue, group_issue.author)
should_create_todo(
user: group_issue.author,
author: group_issue.author,
target: group_issue,
action: Todo::MARKED,
project: nil,
group: group
)
end
end
end
describe '#todo_exist?' do
it 'returns false when no todo exist for the given issuable' do
expect(service.todo_exist?(unassigned_issue, author)).to be_falsy
end
it 'returns true when a todo exist for the given issuable' do
service.mark_todo(unassigned_issue, author)
expect(service.todo_exist?(unassigned_issue, author)).to be_truthy
end
end
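# With the multiple_todos feature flag enabled, repeated mentions and
# assignment events may create additional todos instead of being
# de-duplicated against existing pending ones.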
context 'when multiple_todos are enabled' do
before do
stub_feature_flags(multiple_todos: true)
end
it 'creates a MENTIONED todo even if user already has a pending MENTIONED todo' do
create(:todo, :mentioned, user: member, project: project, target: issue, author: author)
expect { service.update_issue(issue, author) }.to change(member.todos, :count)
end
it 'creates a DIRECTLY_ADDRESSED todo even if user already has a pending DIRECTLY_ADDRESSED todo' do
create(:todo, :directly_addressed, user: member, project: project, target: issue, author: author)
issue.update!(description: "#{member.to_reference}, what do you think?")
expect { service.update_issue(issue, author) }.to change(member.todos, :count)
end
it 'creates an ASSIGNED todo even if user already has a pending MARKED todo' do
create(:todo, :marked, user: john_doe, project: project, target: assigned_issue, author: author)
expect { service.reassigned_assignable(assigned_issue, author) }.to change(john_doe.todos, :count)
end
it 'does not create an ASSIGNED todo if user already has an ASSIGNED todo' do
create(:todo, :assigned, user: john_doe, project: project, target: assigned_issue, author: author)
expect { service.reassigned_assignable(assigned_issue, author) }.not_to change(john_doe.todos, :count)
end
it 'creates multiple todos if a user is assigned and mentioned in a new issue' do
assigned_issue.description = mentions
service.new_issue(assigned_issue, author)
should_create_todo(user: john_doe, target: assigned_issue, action: Todo::ASSIGNED)
should_create_todo(user: john_doe, target: assigned_issue, action: Todo::MENTIONED)
end
end
end
describe 'Work Items' do
let_it_be(:work_item) { create(:work_item, :task, project: project, author: author) }
describe '#mark_todo' do
it 'creates a todo from a work item' do
service.mark_todo(work_item, author)
should_create_todo(user: author, target: work_item, action: Todo::MARKED)
end
context 'when work item belongs to a group' do
it 'creates a todo from a work item' do
group_work_item = create(:work_item, :group_level, namespace: group)
service.mark_todo(group_work_item, group_work_item.author)
should_create_todo(
user: group_work_item.author,
author: group_work_item.author,
target: group_work_item,
action: Todo::MARKED,
project: nil,
group: group
)
end
end
end
describe '#todo_exist?' do
it 'returns false when no todo exist for the given work_item' do
expect(service.todo_exist?(work_item, author)).to be_falsy
end
it 'returns true when a todo exist for the given work_item' do
service.mark_todo(work_item, author)
expect(service.todo_exist?(work_item, author)).to be_truthy
end
end
describe '#resolve_todos_for_target' do
it 'marks related pending todos to the target for the user as done' do
first_todo = create(:todo, :assigned, user: john_doe, project: project, target: work_item, author: author)
second_todo = create(:todo, :assigned, user: john_doe, project: project, target: work_item, author: author)
service.resolve_todos_for_target(work_item, john_doe)
expect(first_todo.reload).to be_done
expect(second_todo.reload).to be_done
end
describe 'cached counts' do
it 'updates when todos change' do
create(:todo, :assigned, user: john_doe, project: project, target: work_item, author: author)
expect(john_doe.todos_done_count).to eq(0)
expect(john_doe.todos_pending_count).to eq(1)
expect(john_doe).to receive(:update_todos_count_cache).and_call_original
service.resolve_todos_for_target(work_item, john_doe)
expect(john_doe.todos_done_count).to eq(1)
expect(john_doe.todos_pending_count).to eq(0)
end
end
end
end
describe '#reassigned_assignable' do
let(:described_method) { :reassigned_assignable }
context 'assignable is a merge request' do
it_behaves_like 'reassigned target' do
let(:target_assigned) { create(:merge_request, source_project: project, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
let(:addressed_target_assigned) { create(:merge_request, source_project: project, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
let(:target_unassigned) { create(:merge_request, source_project: project, author: author, assignees: []) }
end
end
context 'assignable is a project level issue' do
it_behaves_like 'reassigned target' do
let(:target_assigned) { create(:issue, project: project, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
let(:addressed_target_assigned) { create(:issue, project: project, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
let(:target_unassigned) { create(:issue, project: project, author: author, assignees: []) }
end
end
context 'assignable is a project level work_item' do
it_behaves_like 'reassigned target' do
let(:target_assigned) { create(:work_item, project: project, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
let(:addressed_target_assigned) { create(:work_item, project: project, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
let(:target_unassigned) { create(:work_item, project: project, author: author, assignees: []) }
end
end
context 'assignable is a group level issue' do
it_behaves_like 'reassigned target' do
let(:additional_todo_attributes) { { project: nil, group: group } }
let(:target_assigned) { create(:issue, :group_level, namespace: group, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
let(:addressed_target_assigned) { create(:issue, :group_level, namespace: group, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
let(:target_unassigned) { create(:issue, :group_level, namespace: group, author: author, assignees: []) }
end
end
context 'assignable is a group level work item' do
it_behaves_like 'reassigned target' do
let(:additional_todo_attributes) { { project: nil, group: group } }
let(:target_assigned) { create(:work_item, :group_level, namespace: group, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
let(:addressed_target_assigned) { create(:work_item, :group_level, namespace: group, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
let(:target_unassigned) { create(:work_item, :group_level, namespace: group, author: author, assignees: []) }
end
end
context 'assignable is an alert' do
it_behaves_like 'reassigned target' do
let(:target_assigned) { create(:alert_management_alert, project: project, assignees: [john_doe]) }
let(:addressed_target_assigned) { create(:alert_management_alert, project: project, assignees: [john_doe]) }
let(:target_unassigned) { create(:alert_management_alert, project: project, assignees: []) }
end
end
end
describe '#reassigned_reviewable' do
let(:described_method) { :reassigned_reviewable }
context 'reviewable is a merge request' do
it_behaves_like 'reassigned reviewable target' do
let(:assigned_reviewers) { [] }
let(:target) { create(:merge_request, source_project: project, author: author, reviewers: assigned_reviewers) }
end
end
end
describe 'Merge Requests' do
let(:mentioned_mr) { create(:merge_request, source_project: project, author: author, description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
let(:addressed_mr) { create(:merge_request, source_project: project, author: author, description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
let(:assigned_mr) { create(:merge_request, source_project: project, author: author, assignees: [john_doe]) }
let(:unassigned_mr) { create(:merge_request, source_project: project, author: author, assignees: []) }
describe '#new_merge_request' do
it 'creates a pending todo if assigned' do
service.new_merge_request(assigned_mr, author)
should_create_todo(user: john_doe, target: assigned_mr, action: Todo::ASSIGNED)
end
it 'does not create a todo if unassigned' do
should_not_create_any_todo { service.new_merge_request(unassigned_mr, author) }
end
it 'creates a todo if assignee is the current user' do
service.new_merge_request(assigned_mr, john_doe)
should_create_todo(user: john_doe, target: assigned_mr, author: john_doe, action: Todo::ASSIGNED)
end
it 'creates a todo for each valid mentioned user' do
service.new_merge_request(mentioned_mr, author)
should_create_todo(user: member, target: mentioned_mr, action: Todo::MENTIONED)
should_not_create_todo(user: guest, target: mentioned_mr, action: Todo::MENTIONED)
should_create_todo(user: author, target: mentioned_mr, action: Todo::MENTIONED)
should_create_todo(user: john_doe, target: mentioned_mr, action: Todo::MENTIONED)
should_not_create_todo(user: non_member, target: mentioned_mr, action: Todo::MENTIONED)
end
it 'creates a todo for each valid user based on the type of mention' do
mentioned_mr.update!(description: directly_addressed_and_mentioned)
service.new_merge_request(mentioned_mr, author)
should_create_todo(user: member, target: mentioned_mr, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: admin, target: mentioned_mr, action: Todo::MENTIONED)
end
it 'creates a directly addressed todo for each valid addressed user' do
service.new_merge_request(addressed_mr, author)
should_create_todo(user: member, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: guest, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: author, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: john_doe, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: non_member, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
end
end
describe '#update_merge_request' do
it 'creates a todo for each valid mentioned user not included in skip_users' do
service.update_merge_request(mentioned_mr, author, skip_users)
should_create_todo(user: member, target: mentioned_mr, action: Todo::MENTIONED)
should_not_create_todo(user: guest, target: mentioned_mr, action: Todo::MENTIONED)
should_create_todo(user: john_doe, target: mentioned_mr, action: Todo::MENTIONED)
should_create_todo(user: author, target: mentioned_mr, action: Todo::MENTIONED)
should_not_create_todo(user: non_member, target: mentioned_mr, action: Todo::MENTIONED)
should_not_create_todo(user: skipped, target: mentioned_mr, action: Todo::MENTIONED)
end
it 'creates a todo for each valid user not included in skip_users based on the type of mention' do
mentioned_mr.update!(description: directly_addressed_and_mentioned)
service.update_merge_request(mentioned_mr, author, skip_users)
should_create_todo(user: member, target: mentioned_mr, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: admin, target: mentioned_mr, action: Todo::MENTIONED)
should_not_create_todo(user: skipped, target: mentioned_mr)
end
it 'creates a directly addressed todo for each valid addressed user not included in skip_users' do
service.update_merge_request(addressed_mr, author, skip_users)
should_create_todo(user: member, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: guest, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: john_doe, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: author, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: non_member, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: skipped, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
end
it 'does not create a todo if user was already mentioned and todo is pending' do
stub_feature_flags(multiple_todos: false)
create(:todo, :mentioned, user: member, project: project, target: mentioned_mr, author: author)
expect { service.update_merge_request(mentioned_mr, author) }.not_to change(member.todos, :count)
end
it 'does not create a todo if user was already mentioned and todo is done' do
create(:todo, :mentioned, :done, user: skipped, project: project, target: mentioned_mr, author: author)
expect { service.update_merge_request(mentioned_mr, author, skip_users) }.not_to change(skipped.todos, :count)
end
it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is pending' do
stub_feature_flags(multiple_todos: false)
create(:todo, :directly_addressed, user: member, project: project, target: addressed_mr, author: author)
expect { service.update_merge_request(addressed_mr, author) }.not_to change(member.todos, :count)
end
it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is done' do
create(:todo, :directly_addressed, user: skipped, project: project, target: addressed_mr, author: author)
expect { service.update_merge_request(addressed_mr, author, skip_users) }.not_to change(skipped.todos, :count)
end
context 'with a task list' do
it 'does not create todo when tasks are marked as completed' do
mentioned_mr.update!(description: "- [x] Task 1\n- [X] Task 2 #{mentions}")
service.update_merge_request(mentioned_mr, author)
should_not_create_todo(user: admin, target: mentioned_mr, action: Todo::MENTIONED)
should_not_create_todo(user: assignee, target: mentioned_mr, action: Todo::MENTIONED)
should_not_create_todo(user: author, target: mentioned_mr, action: Todo::MENTIONED)
should_not_create_todo(user: john_doe, target: mentioned_mr, action: Todo::MENTIONED)
should_not_create_todo(user: member, target: mentioned_mr, action: Todo::MENTIONED)
should_not_create_todo(user: non_member, target: mentioned_mr, action: Todo::MENTIONED)
should_not_create_todo(user: guest, target: mentioned_mr, action: Todo::MENTIONED)
end
it 'does not create directly addressed todo when tasks are marked as completed' do
addressed_mr.update!(description: "#{directly_addressed}\n- [x] Task 1\n- [X] Task 2")
service.update_merge_request(addressed_mr, author)
should_not_create_todo(user: admin, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: assignee, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: author, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: john_doe, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: member, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: non_member, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: guest, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
end
it 'does not raise an error when the description does not change' do
mentioned_mr.update!(title: 'Sample')
expect { service.update_merge_request(mentioned_mr, author) }.not_to raise_error
end
end
end
describe '#close_merge_request' do
it 'marks related pending todos to the target for the user as done' do
first_todo = create(:todo, :assigned, user: john_doe, project: project, target: mentioned_mr, author: author)
second_todo = create(:todo, :assigned, user: john_doe, project: project, target: mentioned_mr, author: author)
service.close_merge_request(mentioned_mr, john_doe)
expect(first_todo.reload).to be_done
expect(second_todo.reload).to be_done
end
end
describe '#merge_merge_request' do
it 'marks related pending todos to the target for the user as done' do
first_todo = create(:todo, :assigned, user: john_doe, project: project, target: mentioned_mr, author: author)
second_todo = create(:todo, :assigned, user: john_doe, project: project, target: mentioned_mr, author: author)
service.merge_merge_request(mentioned_mr, john_doe)
expect(first_todo.reload).to be_done
expect(second_todo.reload).to be_done
end
it 'does not create todo for guests' do
service.merge_merge_request(mentioned_mr, john_doe)
should_not_create_todo(user: guest, target: mentioned_mr, action: Todo::MENTIONED)
end
it 'does not create directly addressed todo for guests' do
service.merge_merge_request(addressed_mr, john_doe)
should_not_create_todo(user: guest, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
end
end
describe '#new_award_emoji' do
it 'marks related pending todos to the target for the user as done' do
todo = create(:todo, user: john_doe, project: project, target: mentioned_mr, author: author)
service.new_award_emoji(mentioned_mr, john_doe)
expect(todo.reload).to be_done
end
end
describe '#merge_request_build_failed' do
let(:merge_participants) { [unassigned_mr.author, admin] }
before do
allow(unassigned_mr).to receive(:merge_participants).and_return(merge_participants)
end
it 'creates a pending todo for each merge_participant' do
service.merge_request_build_failed(unassigned_mr)
merge_participants.each do |participant|
should_create_todo(user: participant, author: participant, target: unassigned_mr, action: Todo::BUILD_FAILED)
end
end
end
describe '#merge_request_push' do
it 'marks related pending todos to the target for the user as done' do
first_todo = create(:todo, :build_failed, user: author, project: project, target: mentioned_mr, author: john_doe)
second_todo = create(:todo, :build_failed, user: john_doe, project: project, target: mentioned_mr, author: john_doe)
service.merge_request_push(mentioned_mr, author)
expect(first_todo.reload).to be_done
expect(second_todo.reload).not_to be_done
end
end
describe '#merge_request_became_unmergeable' do
let(:merge_participants) { [admin, create(:user)] }
before do
allow(unassigned_mr).to receive(:merge_participants).and_return(merge_participants)
end
it 'creates a pending todo for each merge_participant' do
unassigned_mr.update!(merge_when_pipeline_succeeds: true, merge_user: admin)
service.merge_request_became_unmergeable(unassigned_mr)
merge_participants.each do |participant|
should_create_todo(user: participant, author: participant, target: unassigned_mr, action: Todo::UNMERGEABLE)
end
end
end
describe '#mark_todo' do
it 'creates a todo from a merge request' do
service.mark_todo(unassigned_mr, author)
should_create_todo(user: author, target: unassigned_mr, action: Todo::MARKED)
end
end
describe '#new_note' do
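# This block builds its own project, so the memberships granted in the
# top-level before_all are set up again below.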
let_it_be(:project) { create(:project, :repository) }
before_all do
project.add_guest(guest)
project.add_developer(author)
project.add_developer(assignee)
project.add_developer(member)
project.add_developer(john_doe)
project.add_developer(skipped)
end
let(:mention) { john_doe.to_reference }
let(:diff_note_on_merge_request) { create(:diff_note_on_merge_request, project: project, noteable: unassigned_mr, author: author, note: "Hey #{mention}") }
let(:addressed_diff_note_on_merge_request) { create(:diff_note_on_merge_request, project: project, noteable: unassigned_mr, author: author, note: "#{mention}, hey!") }
let(:legacy_diff_note_on_merge_request) { create(:legacy_diff_note_on_merge_request, project: project, noteable: unassigned_mr, author: author, note: "Hey #{mention}") }
it 'creates a todo for mentioned user on new diff note' do
service.new_note(diff_note_on_merge_request, author)
should_create_todo(user: john_doe, target: unassigned_mr, author: author, action: Todo::MENTIONED, note: diff_note_on_merge_request)
end
it 'creates a directly addressed todo for addressed user on new diff note' do
service.new_note(addressed_diff_note_on_merge_request, author)
should_create_todo(user: john_doe, target: unassigned_mr, author: author, action: Todo::DIRECTLY_ADDRESSED, note: addressed_diff_note_on_merge_request)
end
it 'creates a todo for mentioned user on legacy diff note' do
service.new_note(legacy_diff_note_on_merge_request, author)
should_create_todo(user: john_doe, target: unassigned_mr, author: author, action: Todo::MENTIONED, note: legacy_diff_note_on_merge_request)
end
it 'does not create todo for guests' do
note_on_merge_request = create(:note_on_merge_request, project: project, noteable: mentioned_mr, note: mentions)
service.new_note(note_on_merge_request, author)
should_not_create_todo(user: guest, target: mentioned_mr, action: Todo::MENTIONED)
end
end
end
describe 'Designs' do
include DesignManagementTestHelpers
let(:issue) { create(:issue, project: project) }
let(:design) { create(:design, issue: issue) }
before do
enable_design_management
project.add_guest(author)
project.add_developer(john_doe)
end
let(:note) do
build(
:diff_note_on_design,
noteable: design,
author: author,
note: "Hey #{john_doe.to_reference}"
)
end
it 'creates a todo for mentioned user on new diff note' do
service.new_note(note, author)
should_create_todo(
user: john_doe,
target: design,
action: Todo::MENTIONED,
note: note
)
end
end
describe '#update_note' do
let_it_be(:noteable) { create(:issue, project: project) }
let(:note) { create(:note, project: project, note: mentions, noteable: noteable) }
let(:addressed_note) { create(:note, project: project, note: directly_addressed, noteable: noteable) }
it 'creates a todo for each valid mentioned user not included in skip_users' do
service.update_note(note, author, skip_users)
should_create_todo(user: member, target: noteable, action: Todo::MENTIONED)
should_create_todo(user: guest, target: noteable, action: Todo::MENTIONED)
should_create_todo(user: john_doe, target: noteable, action: Todo::MENTIONED)
should_create_todo(user: author, target: noteable, action: Todo::MENTIONED)
should_not_create_todo(user: non_member, target: noteable, action: Todo::MENTIONED)
should_not_create_todo(user: skipped, target: noteable, action: Todo::MENTIONED)
end
it 'creates a todo for each valid user not included in skip_users based on the type of mention' do
note.update!(note: directly_addressed_and_mentioned)
service.update_note(note, author, skip_users)
should_create_todo(user: member, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: guest, target: noteable, action: Todo::MENTIONED)
should_not_create_todo(user: admin, target: noteable, action: Todo::MENTIONED)
should_not_create_todo(user: skipped, target: noteable)
end
it 'creates a directly addressed todo for each valid addressed user not included in skip_users' do
service.update_note(addressed_note, author, skip_users)
should_create_todo(user: member, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: guest, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: john_doe, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: author, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: non_member, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: skipped, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
end
context 'users already have pending todos and the multiple_todos feature is off' do
before do
stub_feature_flags(multiple_todos: false)
end
let_it_be(:pending_todo_for_member) { create(:todo, :mentioned, user: member, project: project, target: noteable) }
let_it_be(:pending_todo_for_guest) { create(:todo, :mentioned, user: guest, project: project, target: noteable) }
let_it_be(:pending_todo_for_admin) { create(:todo, :mentioned, user: admin, project: project, target: noteable) }
let_it_be(:note_mentioning_1_user) do
create(:note, project: project, note: "FYI #{member.to_reference}", noteable: noteable)
end
let_it_be(:note_mentioning_3_users) do
create(:note, project: project, note: 'FYI: ' + [member, guest, admin].map(&:to_reference).join(' '), noteable: noteable)
end
it 'does not create a todo if user was already mentioned and todo is pending' do
expect { service.update_note(note_mentioning_1_user, author, skip_users) }.not_to change(member.todos, :count)
end
it 'does not create N+1 queries for pending todos' do
# Excluding queries for user permissions because those do execute N+1 queries
allow_any_instance_of(User).to receive(:can?).and_return(true)
control_count = ActiveRecord::QueryRecorder.new { service.update_note(note_mentioning_1_user, author, skip_users) }.count
expect { service.update_note(note_mentioning_3_users, author, skip_users) }.not_to exceed_query_limit(control_count)
end
end
it 'does not create a todo if user was already mentioned and todo is done' do
create(:todo, :mentioned, :done, user: skipped, project: project, target: noteable, author: author)
expect { service.update_note(note, author, skip_users) }.not_to change(skipped.todos, :count)
end
it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is pending' do
stub_feature_flags(multiple_todos: false)
create(:todo, :directly_addressed, user: member, project: project, target: noteable, author: author)
expect { service.update_note(addressed_note, author, skip_users) }.not_to change(member.todos, :count)
end
it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is done' do
create(:todo, :directly_addressed, :done, user: skipped, project: project, target: noteable, author: author)
expect { service.update_note(addressed_note, author, skip_users) }.not_to change(skipped.todos, :count)
end
end
it 'updates cached counts when a todo is created' do
issue = create(:issue, project: project, assignees: [john_doe], author: author)
expect_next(Users::UpdateTodoCountCacheService, [john_doe.id]).to receive(:execute)
service.new_issue(issue, author)
end
shared_examples 'updating todos state' do |state, new_state, new_resolved_by = nil|
let!(:first_todo) { create(:todo, state, user: john_doe) }
let!(:second_todo) { create(:todo, state, user: john_doe) }
let(:collection) { Todo.all }
it 'updates related todos for the user with the new_state' do
method_call
expect(collection.all? { |todo| todo.reload.state?(new_state) }).to be_truthy
end
if new_resolved_by
it 'updates resolution mechanism' do
method_call
expect(collection.all? { |todo| todo.reload.resolved_by_action == new_resolved_by }).to be_truthy
end
end
it 'returns the updated ids' do
expect(method_call).to match_array([first_todo.id, second_todo.id])
end
describe 'cached counts' do
it 'updates when todos change' do
expect(john_doe.todos.where(state: new_state).count).to eq(0)
expect(john_doe.todos.where(state: state).count).to eq(2)
expect(john_doe).to receive(:update_todos_count_cache).and_call_original
method_call
expect(john_doe.todos.where(state: new_state).count).to eq(2)
expect(john_doe.todos.where(state: state).count).to eq(0)
end
end
end
describe '#resolve_todos' do
it_behaves_like 'updating todos state', :pending, :done, 'mark_done' do
subject(:method_call) do
service.resolve_todos(collection, john_doe, resolution: :done, resolved_by_action: :mark_done)
end
end
end
describe '#restore_todos' do
it_behaves_like 'updating todos state', :done, :pending do
subject(:method_call) do
service.restore_todos(collection, john_doe)
end
end
end
describe '#resolve_todo' do
let!(:todo) { create(:todo, :assigned, user: john_doe) }
it 'marks pending todo as done' do
expect do
service.resolve_todo(todo, john_doe)
todo.reload
end.to change { todo.done? }.to(true)
end
it 'saves resolution mechanism' do
expect do
service.resolve_todo(todo, john_doe, resolved_by_action: :mark_done)
todo.reload
end.to change { todo.resolved_by_mark_done? }.to(true)
end
context 'cached counts' do
it 'updates when todos change' do
expect(john_doe.todos_done_count).to eq(0)
expect(john_doe.todos_pending_count).to eq(1)
expect(john_doe).to receive(:update_todos_count_cache).and_call_original
service.resolve_todo(todo, john_doe)
expect(john_doe.todos_done_count).to eq(1)
expect(john_doe.todos_pending_count).to eq(0)
end
end
end
describe '#resolve_access_request_todos' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:group_requester) { create(:group_member, :access_request, group: group, user: assignee) }
let_it_be(:project_requester) { create(:project_member, :access_request, project: project, user: non_member) }
let_it_be(:another_pending_todo) { create(:todo, state: :pending, user: john_doe) }
# access request by another user
let_it_be(:another_group_todo) do
create(:todo, state: :pending, target: group, action: Todo::MEMBER_ACCESS_REQUESTED)
end
let_it_be(:another_project_todo) do
create(:todo, state: :pending, target: project, action: Todo::MEMBER_ACCESS_REQUESTED)
end
it 'marks the todos for group access request handlers as done' do
access_request_todos = [member, john_doe].map do |group_user|
create(:todo,
user: group_user,
state: :pending,
action: Todo::MEMBER_ACCESS_REQUESTED,
author: group_requester.user,
target: group
)
end
expect do
service.resolve_access_request_todos(group_requester)
end.to change {
Todo.pending.where(target: group).for_author(group_requester.user)
.for_action(Todo::MEMBER_ACCESS_REQUESTED).count
}.from(2).to(0)
expect(access_request_todos.each(&:reload)).to all be_done
expect(another_pending_todo.reload).not_to be_done
expect(another_group_todo.reload).not_to be_done
end
it 'marks the todos for project access request handlers as done' do
# The project has 1 owner already. Adding another owner here
project.add_member(john_doe, Gitlab::Access::OWNER)
access_request_todo = create(:todo,
user: john_doe,
state: :pending,
action: Todo::MEMBER_ACCESS_REQUESTED,
author: project_requester.user,
target: project
)
expect do
service.resolve_access_request_todos(project_requester)
end.to change {
Todo.pending.where(target: project).for_author(project_requester.user)
.for_action(Todo::MEMBER_ACCESS_REQUESTED).count
}.from(2).to(0) # The original owner todo was created with the pending access request
expect(access_request_todo.reload).to be_done
expect(another_pending_todo.reload).to be_pending
expect(another_project_todo.reload).to be_pending
end
end
describe '#restore_todo' do
let!(:todo) { create(:todo, :done, user: john_doe) }
it 'marks resolved todo as pending' do
expect do
service.restore_todo(todo, john_doe)
todo.reload
end.to change { todo.pending? }.to(true)
end
context 'cached counts' do
it 'updates when todos change' do
expect(john_doe.todos_done_count).to eq(1)
expect(john_doe.todos_pending_count).to eq(0)
expect(john_doe).to receive(:update_todos_count_cache).and_call_original
service.restore_todo(todo, john_doe)
expect(john_doe.todos_done_count).to eq(0)
expect(john_doe.todos_pending_count).to eq(1)
end
end
end
describe '#create_request_review_todo' do
let(:target) { create(:merge_request, author: author, source_project: project) }
let(:reviewer) { create(:user) }
it 'creates a todo for reviewer' do
service.create_request_review_todo(target, author, reviewer)
should_create_todo(user: reviewer, target: target, action: Todo::REVIEW_REQUESTED)
end
end
describe '#create_member_access_request_todos' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, group: group) }
shared_examples 'member access request is raised' do
context 'when the source has more than 10 owners' do
it 'creates todos for 10 recently active source owners' do
users = create_list(:user, 12, :with_sign_ins)
users.each do |user|
source.add_owner(user)
end
ten_most_recently_active_source_owners = users.sort_by(&:last_sign_in_at).last(10)
excluded_source_owners = users - ten_most_recently_active_source_owners
service.create_member_access_request_todos(requester1)
ten_most_recently_active_source_owners.each do |owner|
expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 1
end
excluded_source_owners.each do |owner|
expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 0
end
end
end
context 'when total owners are less than 10' do
it 'creates todos for all source owners' do
users = create_list(:user, 4, :with_sign_ins)
users.each do |user|
source.add_owner(user)
end
service.create_member_access_request_todos(requester1)
users.each do |owner|
expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 1
end
end
end
context 'when multiple access requests are raised' do
it 'creates todos for 10 recently active source owners for multiple requests' do
users = create_list(:user, 12, :with_sign_ins)
users.each do |user|
source.add_owner(user)
end
ten_most_recently_active_source_owners = users.sort_by(&:last_sign_in_at).last(10)
excluded_source_owners = users - ten_most_recently_active_source_owners
service.create_member_access_request_todos(requester1)
service.create_member_access_request_todos(requester2)
ten_most_recently_active_source_owners.each do |owner|
expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 1
expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester2.user).count).to eq 1
end
excluded_source_owners.each do |owner|
expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 0
expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester2.user).count).to eq 0
end
end
end
end
context 'when request is raised for group' do
it_behaves_like 'member access request is raised' do
let_it_be(:source) { create(:group, :public) }
let_it_be(:requester1) { create(:group_member, :access_request, group: source, user: assignee) }
let_it_be(:requester2) { create(:group_member, :access_request, group: source, user: non_member) }
end
end
context 'when request is raised for project' do
it_behaves_like 'member access request is raised' do
let_it_be(:source) { create(:project, :public) }
let_it_be(:requester1) { create(:project_member, :access_request, project: source, user: assignee) }
let_it_be(:requester2) { create(:project_member, :access_request, project: source, user: non_member) }
end
end
end
def should_create_todo(attributes = {})
attributes.reverse_merge!(
project: project,
author: author,
state: :pending
)
expect(Todo.where(attributes).count).to eq 1
end
def should_not_create_todo(attributes = {})
attributes.reverse_merge!(
project: project,
author: author,
state: :pending
)
expect(Todo.where(attributes).count).to eq 0
end
def should_not_create_any_todo
expect { yield }.not_to change(Todo, :count)
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Base class for services that count a single resource such as the number of
# issues for a project.
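#
# A minimal subclass sketch, shown for illustration only (the class name,
# relation, and cache key below are assumptions, not part of this codebase):
#
#   class ProjectIssuesCountService < BaseCountService
#     def initialize(project)
#       @project = project
#     end
#
#     def relation_for_count
#       @project.issues
#     end
#
#     def cache_key
#       ['projects', 'count_service', @project.id, 'issues']
#     end
#   end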
class BaseCountService
def relation_for_count
raise(
NotImplementedError,
'"relation_for_count" must be implemented and return an ActiveRecord::Relation'
)
end
def count
Rails.cache.fetch(cache_key, cache_options) { uncached_count }.to_i
end
def count_stored?
Rails.cache.read(cache_key).present?
end
def refresh_cache(&block)
update_cache_for_key(cache_key, &block)
end
def uncached_count
relation_for_count.count
end
def delete_cache
::Gitlab::Cache.delete(cache_key)
end
def raw?
false
end
def cache_key
raise NotImplementedError, 'cache_key must be implemented and return a String, Array, or Hash'
end
# subclasses can override to add any specific options, such as
# super.merge({ expires_in: 5.minutes })
def cache_options
{ raw: raw? }
end
def update_cache_for_key(key, &block)
Rails.cache.write(key, block ? yield : uncached_count, raw: raw?)
end
end
BaseCountService.prepend_mod
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe BaseCountService, :use_clean_rails_memory_store_caching, feature_category: :shared do
let(:service) { described_class.new }
describe '#relation_for_count' do
it 'raises NotImplementedError' do
expect { service.relation_for_count }.to raise_error(NotImplementedError)
end
end
describe '#count' do
it 'returns the number of values' do
expect(service)
.to receive(:cache_key)
.and_return('foo')
expect(service)
.to receive(:uncached_count)
.and_return(5)
expect(service.count).to eq(5)
end
end
describe '#uncached_count' do
it 'returns the uncached number of values' do
expect(service)
.to receive(:relation_for_count)
.and_return(double(:relation, count: 5))
expect(service.uncached_count).to eq(5)
end
end
describe '#refresh_cache' do
it 'refreshes the cache' do
allow(service)
.to receive(:cache_key)
.and_return('foo')
allow(service)
.to receive(:uncached_count)
.and_return(4)
service.refresh_cache
expect(Rails.cache.fetch(service.cache_key, raw: service.raw?)).to eq(4)
end
end
describe '#delete_cache' do
it 'deletes the cache' do
allow(service)
.to receive(:cache_key)
.and_return('foo')
allow(service)
.to receive(:uncached_count)
.and_return(4)
service.refresh_cache
service.delete_cache
expect(Rails.cache.fetch(service.cache_key, raw: service.raw?)).to be_nil
end
end
describe '#raw?' do
it 'returns false' do
expect(service.raw?).to eq(false)
end
end
describe '#cache_key' do
it 'raises NotImplementedError' do
expect { service.cache_key }.to raise_error(NotImplementedError)
end
end
describe '#cache_options' do
it 'returns the default in options' do
expect(service.cache_options).to eq({ raw: false })
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Service class for creating push event payloads as stored in the
# "push_event_payloads" table.
#
# Example:
#
# data = Gitlab::DataBuilder::Push.build(...)
# event = Event.create(...)
#
# PushEventPayloadService.new(event, data).execute
class PushEventPayloadService
# event - The event this push payload belongs to.
# push_data - A Hash produced by `Gitlab::DataBuilder::Push.build` to use for
# building the push payload.
def initialize(event, push_data)
@event = event
@push_data = push_data
end
# Creates and returns a new PushEventPayload row.
#
# This method will raise upon encountering validation errors.
#
# Returns an instance of PushEventPayload.
def execute
@event.build_push_event_payload(
commit_count: commit_count,
action: action,
ref_type: ref_type,
commit_from: commit_from_id,
commit_to: commit_to_id,
ref: trimmed_ref,
commit_title: commit_title,
event_id: @event.id
)
@event.push_event_payload.save!
@event.push_event_payload
end
# Returns the commit title to use.
#
# The commit title is limited to the first line and a maximum of 70
# characters.
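#
# For example (hypothetical messages): "Fix login\n\nMore details" yields
# "Fix login", and a single 100-character line is truncated to 67 characters
# followed by "...".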
def commit_title
commit = @push_data.fetch(:commits).last
return unless commit && commit[:message]
raw_msg = commit[:message]
# Find where the first line ends, without turning the entire message into an
# Array of lines (this is a waste of memory for large commit messages).
index = raw_msg.index("\n")
message = index ? raw_msg[0..index] : raw_msg
message.strip.truncate(70)
end
def commit_from_id
if create?
nil
else
revision_before
end
end
def commit_to_id
if remove?
nil
else
revision_after
end
end
def commit_count
@push_data.fetch(:total_commits_count)
end
def ref
@push_data.fetch(:ref)
end
def revision_before
@push_data.fetch(:before)
end
def revision_after
@push_data.fetch(:after)
end
def trimmed_ref
Gitlab::Git.ref_name(ref)
end
def create?
Gitlab::Git.blank_ref?(revision_before)
end
def remove?
Gitlab::Git.blank_ref?(revision_after)
end
def action
if create?
:created
elsif remove?
:removed
else
:pushed
end
end
def ref_type
if Gitlab::Git.tag_ref?(ref)
:tag
else
:branch
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe PushEventPayloadService, feature_category: :source_code_management do
let(:event) { create(:push_event) }
describe '#execute' do
let(:push_data) do
{
commits: [
{
id: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
message: 'This is a commit'
}
],
before: '0000000000000000000000000000000000000000',
after: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
total_commits_count: 1,
ref: 'refs/heads/my-branch'
}
end
it 'creates a new PushEventPayload row' do
payload = described_class.new(event, push_data).execute
expect(payload.commit_count).to eq(1)
expect(payload.action).to eq('created')
expect(payload.ref_type).to eq('branch')
expect(payload.commit_from).to be_nil
expect(payload.commit_to).to eq(push_data[:after])
expect(payload.ref).to eq('my-branch')
expect(payload.commit_title).to eq('This is a commit')
expect(payload.event_id).to eq(event.id)
end
it 'sets the push_event_payload association of the used event' do
payload = described_class.new(event, push_data).execute
expect(event.push_event_payload).to eq(payload)
end
end
describe '#commit_title' do
it 'returns nil if no commits were pushed' do
service = described_class.new(event, commits: [])
expect(service.commit_title).to be_nil
end
it 'returns a String limited to 70 characters' do
service = described_class.new(event, commits: [{ message: 'a' * 100 }])
expect(service.commit_title).to eq(('a' * 67) + '...')
end
it 'does not truncate the commit message if it is shorter than 70 characters' do
service = described_class.new(event, commits: [{ message: 'Hello' }])
expect(service.commit_title).to eq('Hello')
end
it 'includes the first line of a commit message if the message spans multiple lines' do
service = described_class
.new(event, commits: [{ message: "Hello\n\nworld" }])
expect(service.commit_title).to eq('Hello')
end
end
describe '#commit_from_id' do
it 'returns nil when creating a new ref' do
service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
expect(service.commit_from_id).to be_nil
end
it 'returns the ID of the first commit when pushing to an existing ref' do
service = described_class.new(event, before: '123')
expect(service.commit_from_id).to eq('123')
end
end
describe '#commit_to_id' do
it 'returns nil when removing an existing ref' do
service = described_class.new(event, after: Gitlab::Git::BLANK_SHA)
expect(service.commit_to_id).to be_nil
end
end
describe '#commit_count' do
it 'returns the number of commits' do
service = described_class.new(event, total_commits_count: 1)
expect(service.commit_count).to eq(1)
end
it 'raises when the push data does not contain the commits count' do
service = described_class.new(event, {})
expect { service.commit_count }.to raise_error(KeyError)
end
end
describe '#ref' do
it 'returns the name of the ref' do
service = described_class.new(event, ref: 'refs/heads/foo')
expect(service.ref).to eq('refs/heads/foo')
end
it 'raises when the push data does not contain the ref name' do
service = described_class.new(event, {})
expect { service.ref }.to raise_error(KeyError)
end
end
describe '#revision_before' do
it 'returns the revision from before the push' do
service = described_class.new(event, before: 'foo')
expect(service.revision_before).to eq('foo')
end
it 'raises when the push data does not contain the before revision' do
service = described_class.new(event, {})
expect { service.revision_before }.to raise_error(KeyError)
end
end
describe '#revision_after' do
it 'returns the revision from after the push' do
service = described_class.new(event, after: 'foo')
expect(service.revision_after).to eq('foo')
end
it 'raises when the push data does not contain the after revision' do
service = described_class.new(event, {})
expect { service.revision_after }.to raise_error(KeyError)
end
end
describe '#trimmed_ref' do
it 'returns the ref name without its prefix' do
service = described_class.new(event, ref: 'refs/heads/foo')
expect(service.trimmed_ref).to eq('foo')
end
end
describe '#create?' do
it 'returns true when creating a new ref' do
service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
expect(service.create?).to eq(true)
end
it 'returns false when pushing to an existing ref' do
service = described_class.new(event, before: 'foo')
expect(service.create?).to eq(false)
end
end
describe '#remove?' do
it 'returns true when removing an existing ref' do
service = described_class.new(event, after: Gitlab::Git::BLANK_SHA)
expect(service.remove?).to eq(true)
end
it 'returns false when pushing to an existing ref' do
service = described_class.new(event, after: 'foo')
expect(service.remove?).to eq(false)
end
end
describe '#action' do
it 'returns :created when creating a ref' do
service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
expect(service.action).to eq(:created)
end
it 'returns :removed when removing an existing ref' do
service = described_class.new(event, before: '123', after: Gitlab::Git::BLANK_SHA)
expect(service.action).to eq(:removed)
end
it 'returns :pushed when pushing to an existing ref' do
service = described_class.new(event, before: '123', after: '456')
expect(service.action).to eq(:pushed)
end
end
describe '#ref_type' do
it 'returns :tag for a tag' do
service = described_class.new(event, ref: 'refs/tags/1.2')
expect(service.ref_type).to eq(:tag)
end
it 'returns :branch for a branch' do
service = described_class.new(event, ref: 'refs/heads/master')
expect(service.ref_type).to eq(:branch)
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
require 'resolv'
class VerifyPagesDomainService < BaseService
# The maximum number of seconds to be spent on each DNS lookup
RESOLVER_TIMEOUT_SECONDS = 15
# How long verification lasts for
VERIFICATION_PERIOD = 7.days
REMOVAL_DELAY = 1.week.freeze
attr_reader :domain
def initialize(domain)
@domain = domain
end
def execute
return error("No verification code set for #{domain.domain}") unless domain.verification_code.present?
if !verification_enabled? || dns_record_present?
verify_domain!
elsif expired?
disable_domain!
else
unverify_domain!
end
end
private
def verify_domain!
was_disabled = !domain.enabled?
was_unverified = domain.unverified?
# Prevent any pre-existing grace period from being truncated
reverify = [domain.enabled_until, VERIFICATION_PERIOD.from_now].compact.max
domain.assign_attributes(verified_at: Time.current, enabled_until: reverify, remove_at: nil)
domain.save!(validate: false)
if was_disabled
notify(:enabled)
elsif was_unverified
notify(:verification_succeeded)
end
after_successful_verification
success
end
def after_successful_verification
# method overridden in EE
end
def unverify_domain!
was_verified = domain.verified?
domain.assign_attributes(verified_at: nil)
domain.remove_at ||= REMOVAL_DELAY.from_now unless domain.enabled?
domain.save!(validate: false)
notify(:verification_failed) if was_verified
error("Couldn't verify #{domain.domain}")
end
def disable_domain!
domain.assign_attributes(verified_at: nil, enabled_until: nil)
domain.remove_at ||= REMOVAL_DELAY.from_now
domain.save!(validate: false)
notify(:disabled)
error("Couldn't verify #{domain.domain}. It is now disabled.")
end
# A domain is only expired until `disable!` has been called
def expired?
domain.enabled_until&.past?
end
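# Verification passes when a TXT record containing the verification code is
# found on the domain itself or on its verification subdomain. As a sketch
# (the exact record format comes from the PagesDomain model, not shown here),
# the record a user adds might look like:
#
#   _gitlab-pages-verification-code.example.com. TXT "gitlab-pages-verification-code=<code>"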
def dns_record_present?
Resolv::DNS.open do |resolver|
resolver.timeouts = RESOLVER_TIMEOUT_SECONDS
check(domain.domain, resolver) || check(domain.verification_domain, resolver)
end
end
def check(domain_name, resolver)
# Append '.' to domain_name, indicating absolute FQDN
records = parse(txt_records(domain_name + '.', resolver))
records.any? do |record|
record == domain.keyed_verification_code || record == domain.verification_code
end
rescue StandardError => err
log_error("Failed to check TXT records on #{domain_name} for #{domain.domain}: #{err}")
false
end
def txt_records(domain_name, resolver)
resolver.getresources(domain_name, Resolv::DNS::Resource::IN::TXT)
end
def parse(records)
records.flat_map(&:strings).flat_map(&:split)
end
def verification_enabled?
Gitlab::CurrentSettings.pages_domain_verification_enabled?
end
def notify(type)
return unless verification_enabled?
Gitlab::AppLogger.info("Pages domain '#{domain.domain}' changed state to '#{type}'")
notification_service.public_send("pages_domain_#{type}", domain) # rubocop:disable GitlabSecurity/PublicSend
end
end
VerifyPagesDomainService.prepend_mod
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe VerifyPagesDomainService, feature_category: :pages do
using RSpec::Parameterized::TableSyntax
include EmailHelpers
let(:error_status) { { status: :error, message: "Couldn't verify #{domain.domain}" } }
subject(:service) { described_class.new(domain) }
describe '#execute' do
where(:domain_sym, :code_sym) do
:domain | :verification_code
:domain | :keyed_verification_code
:verification_domain | :verification_code
:verification_domain | :keyed_verification_code
end
with_them do
let(:domain_name) { domain.send(domain_sym) }
let(:verification_code) { domain.send(code_sym) }
shared_examples 'verifies and enables the domain' do
it 'verifies and enables the domain' do
expect(service.execute).to eq(status: :success)
expect(domain).to be_verified
expect(domain).to be_enabled
expect(domain.remove_at).to be_nil
end
end
shared_examples 'successful enablement and verification' do
context 'when txt record contains verification code' do
before do
stub_resolver(domain_name => ['something else', verification_code])
end
include_examples 'verifies and enables the domain'
end
context 'when txt record contains verification code with other text' do
before do
stub_resolver(domain_name => "something #{verification_code} else")
end
include_examples 'verifies and enables the domain'
end
end
shared_examples 'unverifies and disables domain' do
it 'unverifies domain' do
expect(service.execute).to eq(error_status)
expect(domain).not_to be_verified
end
it 'disables domain and schedules it for removal in 1 week' do
service.execute
expect(domain).not_to be_enabled
expect(domain.remove_at).to be_like_time(7.days.from_now)
end
end
context 'when domain is disabled (or new)' do
let(:domain) { create(:pages_domain, :disabled) }
include_examples 'successful enablement and verification'
context 'when txt record does not contain verification code' do
before do
stub_resolver(domain_name => 'something else')
end
include_examples 'unverifies and disables domain'
end
context 'when no txt records are present' do
before do
stub_resolver
end
include_examples 'unverifies and disables domain'
end
end
context 'when domain is verified' do
let(:domain) { create(:pages_domain) }
include_examples 'successful enablement and verification'
shared_examples 'unverifying domain' do
it 'unverifies but does not disable domain' do
expect(service.execute).to eq(error_status)
expect(domain).not_to be_verified
expect(domain).to be_enabled
end
it 'does not schedule domain for removal' do
service.execute
expect(domain.remove_at).to be_nil
end
end
context 'when txt record does not contain verification code' do
before do
stub_resolver(domain_name => 'something else')
end
include_examples 'unverifying domain'
end
context 'when no txt records are present' do
before do
stub_resolver
end
include_examples 'unverifying domain'
end
end
context 'when domain is expired' do
let(:domain) { create(:pages_domain, :expired) }
context 'when the right code is present' do
before do
stub_resolver(domain_name => domain.keyed_verification_code)
end
include_examples 'verifies and enables the domain'
end
context 'when the right code is not present' do
before do
stub_resolver
end
let(:error_status) { { status: :error, message: "Couldn't verify #{domain.domain}. It is now disabled." } }
include_examples 'unverifies and disables domain'
end
end
context 'when domain is disabled and scheduled for removal' do
let(:domain) { create(:pages_domain, :disabled, :scheduled_for_removal) }
context 'when the right code is present' do
before do
stub_resolver(domain.domain => domain.keyed_verification_code)
end
it 'verifies and enables domain' do
expect(service.execute).to eq(status: :success)
expect(domain).to be_verified
expect(domain).to be_enabled
end
it 'prevents domain from being removed' do
expect { service.execute }.to change { domain.remove_at }.to(nil)
end
end
context 'when the right code is not present' do
before do
stub_resolver
end
it 'keeps domain scheduled for removal but does not change removal time' do
expect { service.execute }.not_to change { domain.remove_at }
expect(domain.remove_at).to be_present
end
end
end
context 'invalid domain' do
let(:domain) { build(:pages_domain, :expired, :with_missing_chain) }
before do
domain.save!(validate: false)
end
it 'can be disabled' do
error_status[:message] += '. It is now disabled.'
stub_resolver
expect(service.execute).to eq(error_status)
expect(domain).not_to be_verified
expect(domain).not_to be_enabled
end
end
end
context 'timeout behaviour' do
let(:domain) { create(:pages_domain) }
it 'sets a timeout on the DNS query' do
expect(stub_resolver).to receive(:timeouts=).with(described_class::RESOLVER_TIMEOUT_SECONDS)
service.execute
end
end
context 'email notifications' do
let(:notification_service) { instance_double('NotificationService') }
where(:factory, :verification_succeeds, :expected_notification) do
nil | true | nil
nil | false | :verification_failed
:reverify | true | nil
:reverify | false | :verification_failed
:unverified | true | :verification_succeeded
:unverified | false | nil
:expired | true | nil
:expired | false | :disabled
:disabled | true | :enabled
:disabled | false | nil
end
with_them do
let(:domain) { create(:pages_domain, *[factory].compact) }
before do
allow(service).to receive(:notification_service) { notification_service }
if verification_succeeds
stub_resolver(domain.domain => domain.verification_code)
else
stub_resolver
end
end
it 'sends a notification if appropriate' do
if expected_notification
expect(notification_service).to receive(:"pages_domain_#{expected_notification}").with(domain)
end
service.execute
end
end
context 'pages verification disabled' do
let(:domain) { create(:pages_domain, :disabled) }
before do
stub_application_setting(pages_domain_verification_enabled: false)
allow(service).to receive(:notification_service) { notification_service }
end
it 'skips email notifications' do
expect(notification_service).not_to receive(:pages_domain_enabled)
service.execute
end
end
end
context 'no verification code' do
let(:domain) { create(:pages_domain) }
it 'returns an error' do
domain.verification_code = ''
disallow_resolver!
expect(service.execute).to eq(status: :error, message: "No verification code set for #{domain.domain}")
end
end
context 'pages domain verification is disabled' do
let(:domain) { create(:pages_domain, :disabled) }
before do
stub_application_setting(pages_domain_verification_enabled: false)
end
it 'extends domain validity by unconditionally reverifying' do
disallow_resolver!
service.execute
expect(domain).to be_verified
expect(domain).to be_enabled
end
it 'does not shorten any grace period' do
grace = Time.current + 1.year
domain.update!(enabled_until: grace)
disallow_resolver!
service.execute
expect(domain.enabled_until).to be_like_time(grace)
end
end
end
def disallow_resolver!
expect(Resolv::DNS).not_to receive(:open)
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# This service passes Markdown content through our GFM rewriter classes
# which rewrite references to GitLab objects and uploads within the content
# based on their visibility by the `target_parent`.
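#
# A hypothetical invocation when moving an issue between projects
# (variable names are illustrative only):
#
#   MarkdownContentRewriterService
#     .new(current_user, issue, :description, old_project, new_project)
#     .execute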
class MarkdownContentRewriterService
include Gitlab::Utils::StrongMemoize
REWRITERS = [Gitlab::Gfm::ReferenceRewriter, Gitlab::Gfm::UploadsRewriter].freeze
def initialize(current_user, object, field, source_parent, target_parent)
@current_user = current_user
@source_parent = source_parent
@target_parent = target_parent
@object = object
@field = field
validate_parameters!
@content = object[field].dup.presence
@html_field = object.cached_markdown_fields.html_field(field)
@content_html = object.cached_html_for(field)
@rewriters =
REWRITERS.map do |rewriter_class|
rewriter_class.new(@content, content_html, source_parent, current_user)
end
@result = {
field => nil,
html_field => nil
}.with_indifferent_access
end
def execute
return result unless content
unless safe_to_copy_markdown?
rewriters.each do |rewriter|
rewriter.rewrite(target_parent)
end
end
result[field] = content
result[html_field] = content_html if safe_to_copy_markdown?
result[:skip_markdown_cache_validation] = safe_to_copy_markdown?
result
end
def safe_to_copy_markdown?
strong_memoize(:safe_to_copy_markdown) do
rewriters.none?(&:needs_rewrite?)
end
end
private
def validate_parameters!
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39654#note_399095117
raise ArgumentError, 'The rewriter classes require that `source_parent` is a `Project`' \
unless source_parent.is_a?(Project)
if object.cached_markdown_fields[field].nil?
raise ArgumentError, 'The `field` attribute does not contain cached markdown'
end
end
attr_reader :current_user, :content, :source_parent,
:target_parent, :rewriters, :content_html,
:field, :html_field, :object, :result
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe MarkdownContentRewriterService, feature_category: :team_planning do
let_it_be(:user) { create(:user) }
let_it_be(:source_parent) { create(:project, :public) }
let_it_be(:target_parent) { create(:project, :public) }
let(:content) { 'My content' }
let(:issue) { create(:issue, project: source_parent, description: content) }
describe '#initialize' do
it 'raises an error if source_parent is not a Project' do
expect do
described_class.new(user, issue, :description, create(:group), target_parent)
end.to raise_error(ArgumentError, 'The rewriter classes require that `source_parent` is a `Project`')
end
it 'raises an error if field does not have cached markdown' do
expect do
described_class.new(user, issue, :author, source_parent, target_parent)
end.to raise_error(ArgumentError, 'The `field` attribute does not contain cached markdown')
end
end
describe '#execute' do
subject { described_class.new(user, issue, :description, source_parent, target_parent).execute }
context 'when content does not need a rewrite' do
it 'returns original content and cached html' do
expect(subject).to eq({
'description' => issue.description,
'description_html' => issue.description_html,
'skip_markdown_cache_validation' => true
})
end
end
context 'when content needs a rewrite' do
it 'calls the rewriter classes successfully', :aggregate_failures do
described_class::REWRITERS.each do |rewriter_class|
service = double
allow(service).to receive(:needs_rewrite?).and_return(true)
expect(service).to receive(:rewrite).with(target_parent)
expect(rewriter_class).to receive(:new).and_return(service)
end
subject
end
end
# Perform simple integration-style tests for each rewriter class
# to prove they run correctly.
context 'when content has references' do
let_it_be(:issue_to_reference) { create(:issue, project: source_parent) }
let(:content) { "See ##{issue_to_reference.iid}" }
it 'rewrites content' do
expect(subject).to eq({
'description' => "See #{source_parent.full_path}##{issue_to_reference.iid}",
'description_html' => nil,
'skip_markdown_cache_validation' => false
})
end
end
context 'when content contains an upload' do
let(:image_uploader) { build(:file_uploader, project: source_parent) }
let(:content) { "Text and #{image_uploader.markdown_link}" }
it 'rewrites content' do
new_content = subject
expect(new_content[:description]).not_to eq(content)
expect(new_content[:description].length).to eq(content.length)
expect(new_content[:description_html]).to eq(nil)
end
end
end
describe '#safe_to_copy_markdown?' do
subject do
rewriter = described_class.new(user, issue, :description, source_parent, target_parent)
rewriter.safe_to_copy_markdown?
end
context 'when content has references' do
let(:milestone) { create(:milestone, project: source_parent) }
let(:content) { "Description that references #{milestone.to_reference}" }
it { is_expected.to eq(false) }
end
context 'when content has uploaded file references' do
let(:image_uploader) { build(:file_uploader, project: source_parent) }
let(:content) { "Text and #{image_uploader.markdown_link}" }
it { is_expected.to eq(false) }
end
context 'when content does not have references or uploads' do
let(:content) { "simples text with ```code```" }
it { is_expected.to eq(true) }
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
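# Entry point for global, group, and project search. A hypothetical
# invocation (parameter values are illustrative only):
#
#   SearchService.new(current_user, search: 'foo', scope: 'issues', page: 1)
#     .search_objects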
class SearchService
include Gitlab::Allowable
include Gitlab::Utils::StrongMemoize
DEFAULT_PER_PAGE = Gitlab::SearchResults::DEFAULT_PER_PAGE
MAX_PER_PAGE = 200
attr_reader :params
def initialize(current_user, params = {})
@current_user = current_user
@params = Gitlab::Search::Params.new(params, detect_abuse: true)
end
# rubocop: disable CodeReuse/ActiveRecord
def project
strong_memoize(:project) do
if params[:project_id].present? && valid_request?
the_project = Project.find_by(id: params[:project_id])
can?(current_user, :read_project, the_project) ? the_project : nil
end
end
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def group
strong_memoize(:group) do
if params[:group_id].present? && valid_request?
the_group = Group.find_by(id: params[:group_id])
can?(current_user, :read_group, the_group) ? the_group : nil
end
end
end
# rubocop: enable CodeReuse/ActiveRecord
def projects
# overridden in EE
end
def global_search?
project.blank? && group.blank?
end
def search_type
'basic'
end
def show_snippets?
strong_memoize(:show_snippets) do
params[:snippets] == 'true'
end
end
delegate :scope, to: :search_service
delegate :valid_terms_count?, :valid_query_length?, to: :params
def search_results
strong_memoize(:search_results) do
abuse_detected? ? Gitlab::EmptySearchResults.new : search_service.execute
end
end
def search_objects(preload_method = nil)
@search_objects ||= redact_unauthorized_results(
search_results.objects(scope, page: page, per_page: per_page, preload_method: preload_method)
)
end
def search_highlight
search_results.highlight_map(scope)
end
def search_aggregations
search_results.aggregations(scope)
end
def abuse_detected?
strong_memoize(:abuse_detected) do
params.abusive?
end
end
def abuse_messages
return [] unless params.abusive?
params.abuse_detection.errors.full_messages
end
def valid_request?
strong_memoize(:valid_request) do
params.valid?
end
end
def level
@level ||=
if project
'project'
elsif group
'group'
else
'global'
end
end
def global_search_enabled_for_scope?
return false if show_snippets? && Feature.disabled?(:global_search_snippet_titles_tab, current_user, type: :ops)
case params[:scope]
when 'blobs'
Feature.enabled?(:global_search_code_tab, current_user, type: :ops)
when 'commits'
Feature.enabled?(:global_search_commits_tab, current_user, type: :ops)
when 'issues'
Feature.enabled?(:global_search_issues_tab, current_user, type: :ops)
when 'merge_requests'
Feature.enabled?(:global_search_merge_requests_tab, current_user, type: :ops)
when 'snippet_titles'
Feature.enabled?(:global_search_snippet_titles_tab, current_user, type: :ops)
when 'wiki_blobs'
Feature.enabled?(:global_search_wiki_tab, current_user, type: :ops)
when 'users'
Feature.enabled?(:global_search_users_tab, current_user, type: :ops)
else
true
end
end
private
def page
[1, params[:page].to_i].max
end
def per_page
per_page_param = params[:per_page].to_i
return DEFAULT_PER_PAGE unless per_page_param > 0
[MAX_PER_PAGE, per_page_param].min
end
def visible_result?(object)
return true unless object.respond_to?(:to_ability_name) && DeclarativePolicy.has_policy?(object)
Ability.allowed?(current_user, :"read_#{object.to_ability_name}", object)
end
def redact_unauthorized_results(results_collection)
redacted_results = results_collection.reject { |object| visible_result?(object) }
if redacted_results.any?
redacted_log = redacted_results.each_with_object({}) do |object, memo|
memo[object.id] = { ability: :"read_#{object.to_ability_name}", id: object.id, class_name: object.class.name }
end
log_redacted_search_results(redacted_log.values)
return results_collection.id_not_in(redacted_log.keys) if results_collection.is_a?(ActiveRecord::Relation)
end
return results_collection if results_collection.is_a?(ActiveRecord::Relation)
permitted_results = results_collection - redacted_results
Kaminari.paginate_array(
permitted_results,
total_count: results_collection.total_count,
limit: results_collection.limit_value,
offset: results_collection.offset_value
)
end
def log_redacted_search_results(filtered_results)
logger.error(message: "redacted_search_results", filtered: filtered_results, current_user_id: current_user&.id, query: params[:search])
end
def logger
@logger ||= ::Gitlab::RedactedSearchResultsLogger.build
end
def search_service
@search_service ||=
if project
Search::ProjectService.new(current_user, project, params)
elsif show_snippets?
Search::SnippetService.new(current_user, params)
elsif group
Search::GroupService.new(current_user, group, params)
else
Search::GlobalService.new(current_user, params)
end
end
attr_reader :current_user
end
SearchService.prepend_mod_with('SearchService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe SearchService, feature_category: :global_search do
let_it_be(:user) { create(:user) }
let_it_be(:accessible_group) { create(:group, :private) }
let_it_be(:inaccessible_group) { create(:group, :private) }
let_it_be(:group_member) { create(:group_member, group: accessible_group, user: user) }
let_it_be(:accessible_project) { create(:project, :repository, :private, name: 'accessible_project') }
let_it_be(:note) { create(:note_on_issue, project: accessible_project) }
let_it_be(:inaccessible_project) { create(:project, :repository, :private, name: 'inaccessible_project') }
let(:snippet) { create(:snippet, author: user) }
let(:group_project) { create(:project, group: accessible_group, name: 'group_project') }
let(:public_project) { create(:project, :public, name: 'public_project') }
let(:page) { 1 }
let(:per_page) { described_class::DEFAULT_PER_PAGE }
let(:valid_search) { "what is love?" }
subject(:search_service) { described_class.new(user, search: search, scope: scope, page: page, per_page: per_page) }
before do
accessible_project.add_maintainer(user)
end
describe '#project' do
context 'when the project is accessible' do
it 'returns the project' do
project = described_class.new(user, project_id: accessible_project.id, search: valid_search).project
expect(project).to eq accessible_project
end
it 'returns the project for guests' do
search_project = create :project
search_project.add_guest(user)
project = described_class.new(user, project_id: search_project.id, search: valid_search).project
expect(project).to eq search_project
end
end
context 'when the project is not accessible' do
it 'returns nil' do
project = described_class.new(user, project_id: inaccessible_project.id, search: valid_search).project
expect(project).to be_nil
end
end
context 'when there is no project_id' do
it 'returns nil' do
project = described_class.new(user, search: valid_search).project
expect(project).to be_nil
end
end
end
describe '#group' do
context 'when the group is accessible' do
it 'returns the group' do
group = described_class.new(user, group_id: accessible_group.id, search: valid_search).group
expect(group).to eq accessible_group
end
end
context 'when the group is not accessible' do
it 'returns nil' do
group = described_class.new(user, group_id: inaccessible_group.id, search: valid_search).group
expect(group).to be_nil
end
end
context 'when there is no group_id' do
it 'returns nil' do
group = described_class.new(user, search: valid_search).group
expect(group).to be_nil
end
end
end
describe '#search_type' do
subject { described_class.new(user, search: valid_search).search_type }
it { is_expected.to eq('basic') }
end
describe '#show_snippets?' do
context 'when :snippets is \'true\'' do
it 'returns true' do
show_snippets = described_class.new(user, snippets: 'true').show_snippets?
expect(show_snippets).to be_truthy
end
end
context 'when :snippets is not \'true\'' do
it 'returns false' do
show_snippets = described_class.new(user, snippets: 'tru').show_snippets?
expect(show_snippets).to be_falsey
end
end
context 'when :snippets is missing' do
it 'returns false' do
show_snippets = described_class.new(user).show_snippets?
expect(show_snippets).to be_falsey
end
end
end
describe '#scope' do
context 'with accessible project_id' do
context 'and allowed scope' do
it 'returns the specified scope' do
scope = described_class.new(user, project_id: accessible_project.id, scope: 'notes', search: valid_search).scope
expect(scope).to eq 'notes'
end
end
context 'and disallowed scope' do
it 'returns the default scope' do
scope = described_class.new(user, project_id: accessible_project.id, scope: 'projects', search: valid_search).scope
expect(scope).to eq 'blobs'
end
end
context 'and no scope' do
it 'returns the default scope' do
scope = described_class.new(user, project_id: accessible_project.id, search: valid_search).scope
expect(scope).to eq 'blobs'
end
end
end
context 'with \'true\' snippets' do
context 'and allowed scope' do
it 'returns the specified scope' do
scope = described_class.new(user, snippets: 'true', scope: 'snippet_titles').scope
expect(scope).to eq 'snippet_titles'
end
end
context 'and disallowed scope' do
it 'returns the default scope' do
scope = described_class.new(user, snippets: 'true', scope: 'projects').scope
expect(scope).to eq 'snippet_titles'
end
end
context 'and no scope' do
it 'returns the default scope' do
scope = described_class.new(user, snippets: 'true').scope
expect(scope).to eq 'snippet_titles'
end
end
end
context 'with no project_id, no snippets' do
context 'and allowed scope' do
it 'returns the specified scope' do
scope = described_class.new(user, scope: 'issues').scope
expect(scope).to eq 'issues'
end
end
context 'and disallowed scope' do
it 'returns the default scope' do
scope = described_class.new(user, scope: 'blobs').scope
expect(scope).to eq 'projects'
end
end
context 'and no scope' do
it 'returns the default scope' do
scope = described_class.new(user).scope
expect(scope).to eq 'projects'
end
end
end
end
describe '#search_results' do
context 'with accessible project_id' do
it 'returns an instance of Gitlab::ProjectSearchResults' do
search_results = described_class.new(
user,
project_id: accessible_project.id,
scope: 'notes',
search: note.note).search_results
expect(search_results).to be_a Gitlab::ProjectSearchResults
end
end
context 'with accessible project_id and \'true\' snippets' do
it 'returns an instance of Gitlab::ProjectSearchResults' do
search_results = described_class.new(
user,
project_id: accessible_project.id,
snippets: 'true',
scope: 'notes',
search: note.note).search_results
expect(search_results).to be_a Gitlab::ProjectSearchResults
end
end
context 'with \'true\' snippets' do
it 'returns an instance of Gitlab::SnippetSearchResults' do
search_results = described_class.new(
user,
snippets: 'true',
search: snippet.title).search_results
expect(search_results).to be_a Gitlab::SnippetSearchResults
end
end
context 'with no project_id and no snippets' do
it 'returns an instance of Gitlab::SearchResults' do
search_results = described_class.new(
user,
search: public_project.name).search_results
expect(search_results).to be_a Gitlab::SearchResults
end
end
end
describe '#search_objects' do
let(:search) { '' }
let(:scope) { nil }
describe 'per_page: parameter' do
context 'when nil' do
let(:per_page) { nil }
it "defaults to #{described_class::DEFAULT_PER_PAGE}" do
expect_any_instance_of(Gitlab::SearchResults)
.to receive(:objects)
.with(anything, hash_including(per_page: described_class::DEFAULT_PER_PAGE))
.and_call_original
subject.search_objects
end
end
context 'when empty string' do
let(:per_page) { '' }
it "defaults to #{described_class::DEFAULT_PER_PAGE}" do
expect_any_instance_of(Gitlab::SearchResults)
.to receive(:objects)
.with(anything, hash_including(per_page: described_class::DEFAULT_PER_PAGE))
.and_call_original
subject.search_objects
end
end
context 'when negative' do
let(:per_page) { '-1' }
it "defaults to #{described_class::DEFAULT_PER_PAGE}" do
expect_any_instance_of(Gitlab::SearchResults)
.to receive(:objects)
.with(anything, hash_including(per_page: described_class::DEFAULT_PER_PAGE))
.and_call_original
subject.search_objects
end
end
context 'when present' do
let(:per_page) { '50' }
it "converts to integer and passes to search results" do
expect_any_instance_of(Gitlab::SearchResults)
.to receive(:objects)
.with(anything, hash_including(per_page: 50))
.and_call_original
subject.search_objects
end
end
context "when greater than #{described_class::MAX_PER_PAGE}" do
let(:per_page) { described_class::MAX_PER_PAGE + 1 }
it "passes #{described_class::MAX_PER_PAGE}" do
expect_any_instance_of(Gitlab::SearchResults)
.to receive(:objects)
.with(anything, hash_including(per_page: described_class::MAX_PER_PAGE))
.and_call_original
subject.search_objects
end
end
end
describe 'page: parameter' do
context 'when < 1' do
let(:page) { 0 }
it "defaults to 1" do
expect_any_instance_of(Gitlab::SearchResults)
.to receive(:objects)
.with(anything, hash_including(page: 1))
.and_call_original
subject.search_objects
end
end
context 'when nil' do
let(:page) { nil }
it "defaults to 1" do
expect_any_instance_of(Gitlab::SearchResults)
.to receive(:objects)
.with(anything, hash_including(page: 1))
.and_call_original
subject.search_objects
end
end
end
context 'with accessible project_id' do
it 'returns objects in the project' do
search_objects = described_class.new(
user,
project_id: accessible_project.id,
scope: 'notes',
search: note.note).search_objects
expect(search_objects.first).to eq note
end
end
context 'with accessible project_id and \'true\' snippets' do
it 'returns objects in the project' do
search_objects = described_class.new(
user,
project_id: accessible_project.id,
snippets: 'true',
scope: 'notes',
search: note.note).search_objects
expect(search_objects.first).to eq note
end
end
context 'with \'true\' snippets' do
it 'returns objects in snippets' do
search_objects = described_class.new(
user,
snippets: 'true',
search: snippet.title).search_objects
expect(search_objects.first).to eq snippet
end
end
context 'with accessible group_id' do
it 'returns objects in the group' do
search_objects = described_class.new(
user,
group_id: accessible_group.id,
search: group_project.name).search_objects
expect(search_objects.first).to eq group_project
end
end
context 'with no project_id, group_id or snippets' do
it 'returns objects in global' do
search_objects = described_class.new(
user,
search: public_project.name).search_objects
expect(search_objects.first).to eq public_project
end
end
it_behaves_like 'a redacted search results'
end
describe '#valid_request?' do
let(:scope) { 'issues' }
let(:search) { 'foobar' }
let(:params) { instance_double(Gitlab::Search::Params) }
before do
allow(Gitlab::Search::Params).to receive(:new).and_return(params)
allow(params).to receive(:valid?).and_return double(:valid?)
end
it 'is the return value of params.valid?' do
expect(subject.valid_request?).to eq(params.valid?)
end
end
describe '#abuse_messages' do
let(:scope) { 'issues' }
let(:search) { 'foobar' }
let(:params) { instance_double(Gitlab::Search::Params) }
before do
allow(Gitlab::Search::Params).to receive(:new).and_return(params)
end
it 'returns an empty array when not abusive' do
allow(params).to receive(:abusive?).and_return false
expect(subject.abuse_messages).to match_array([])
end
it 'calls on abuse_detection.errors.full_messages when abusive' do
allow(params).to receive(:abusive?).and_return true
expect(params).to receive_message_chain(:abuse_detection, :errors, :full_messages)
subject.abuse_messages
end
end
describe 'abusive search handling' do
subject { described_class.new(user, raw_params) }
let(:raw_params) { { search: search, scope: scope } }
let(:search) { 'foobar' }
let(:search_service) { double(:search_service) }
before do
expect(Gitlab::Search::Params).to receive(:new)
.with(raw_params, detect_abuse: true).and_call_original
allow(subject).to receive(:search_service).and_return search_service
end
context 'a search is abusive' do
let(:scope) { '1;drop%20table' }
it 'does NOT execute search service' do
expect(search_service).not_to receive(:execute)
subject.search_results
end
end
context 'a search is NOT abusive' do
let(:scope) { 'issues' }
it 'executes search service' do
expect(search_service).to receive(:execute)
subject.search_results
end
end
end
describe '.global_search_enabled_for_scope?' do
using RSpec::Parameterized::TableSyntax
let(:search) { 'foobar' }
where(:scope, :feature_flag, :enabled, :expected) do
'blobs' | :global_search_code_tab | false | false
'blobs' | :global_search_code_tab | true | true
'commits' | :global_search_commits_tab | false | false
'commits' | :global_search_commits_tab | true | true
'issues' | :global_search_issues_tab | false | false
'issues' | :global_search_issues_tab | true | true
'merge_requests' | :global_search_merge_requests_tab | false | false
'merge_requests' | :global_search_merge_requests_tab | true | true
'snippet_titles' | :global_search_snippet_titles_tab | false | false
'snippet_titles' | :global_search_snippet_titles_tab | true | true
'wiki_blobs' | :global_search_wiki_tab | false | false
'wiki_blobs' | :global_search_wiki_tab | true | true
'users' | :global_search_users_tab | false | false
'users' | :global_search_users_tab | true | true
'random' | :random | nil | true
end
with_them do
it 'returns false when feature_flag is not enabled and returns true when feature_flag is enabled' do
stub_feature_flags(feature_flag => enabled)
expect(subject.global_search_enabled_for_scope?).to eq expected
end
end
context 'when snippet search is enabled' do
let(:scope) { 'snippet_titles' }
before do
allow(described_class).to receive(:show_snippets?).and_return(true)
end
it 'returns false when feature_flag is not enabled' do
stub_feature_flags(global_search_snippet_titles_tab: false)
expect(subject.global_search_enabled_for_scope?).to eq false
end
it 'returns true when feature_flag is enabled' do
stub_feature_flags(global_search_snippet_titles_tab: true)
expect(subject.global_search_enabled_for_scope?).to eq true
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
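# Builds hook payloads for a handful of models and fires system hooks and
# file hooks. A hypothetical call site (receiver and event are illustrative):
#
#   SystemHooksService.new.execute_hooks_for(group_member, :create)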
class SystemHooksService
def execute_hooks_for(model, event)
data = build_event_data(model, event)
model.run_after_commit_or_now do
SystemHooksService.new.execute_hooks(data)
end
end
def execute_hooks(data, hooks_scope = :all)
SystemHook.executable.hooks_for(hooks_scope).find_each do |hook|
hook.async_execute(data, 'system_hooks')
end
Gitlab::FileHook.execute_all_async(data)
end
private
def build_event_data(model, event)
builder_class = case model
when GroupMember
Gitlab::HookData::GroupMemberBuilder
when Group
Gitlab::HookData::GroupBuilder
when ProjectMember
Gitlab::HookData::ProjectMemberBuilder
when User
Gitlab::HookData::UserBuilder
when Project
Gitlab::HookData::ProjectBuilder
when Key
Gitlab::HookData::KeyBuilder
end
builder_class.new(model).build(event)
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe SystemHooksService, feature_category: :webhooks do
describe '#execute_hooks_for' do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project) }
let_it_be(:group_member) { create(:group_member, source: group, user: user) }
let_it_be(:project_member) { create(:project_member, source: project, user: user) }
let_it_be(:key) { create(:key, user: user) }
let_it_be(:deploy_key) { create(:key) }
let(:event) { :create }
using RSpec::Parameterized::TableSyntax
where(:model_name, :builder_class) do
:group_member | Gitlab::HookData::GroupMemberBuilder
:group | Gitlab::HookData::GroupBuilder
:project_member | Gitlab::HookData::ProjectMemberBuilder
:user | Gitlab::HookData::UserBuilder
:project | Gitlab::HookData::ProjectBuilder
:key | Gitlab::HookData::KeyBuilder
:deploy_key | Gitlab::HookData::KeyBuilder
end
with_them do
it 'builds the data with the relevant builder class and then calls #execute_hooks with the obtained data' do
data = double
model = public_send(model_name)
expect_next_instance_of(builder_class, model) do |builder|
expect(builder).to receive(:build).with(event).and_return(data)
end
service = described_class.new
expect_next_instance_of(SystemHooksService) do |system_hook_service|
expect(system_hook_service).to receive(:execute_hooks).with(data)
end
service.execute_hooks_for(model, event)
end
end
end
describe '#execute_hooks' do
let(:data) { { key: :value } }
subject { described_class.new.execute_hooks(data) }
it 'executes system hooks with the given data' do
hook = create(:system_hook)
allow(SystemHook).to receive_message_chain(:hooks_for, :find_each).and_yield(hook)
expect(hook).to receive(:async_execute).with(data, 'system_hooks')
subject
end
it 'executes FileHook with the given data' do
expect(Gitlab::FileHook).to receive(:execute_all_async).with(data)
subject
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
require 'securerandom'
# Compare 2 refs for one repo or between repositories
# and return Compare object that responds to commits and diffs
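# A minimal usage sketch (not part of the original class; `project` and
# `other_project` stand in for Project records with repositories):
#
#   compare = CompareService.new(project, 'feature').execute(other_project, 'master')
#   compare&.commits
#   compare&.diffs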
class CompareService
attr_reader :start_project, :start_ref_name
def initialize(new_start_project, new_start_ref_name)
@start_project = new_start_project
@start_ref_name = new_start_ref_name
end
def execute(target_project, target_ref, base_sha: nil, straight: false)
raw_compare = target_project.repository.compare_source_branch(target_ref, start_project.repository, start_ref_name, straight: straight)
return unless raw_compare && raw_compare.base && raw_compare.head
Compare.new(raw_compare, start_project, base_sha: base_sha, straight: straight)
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe CompareService, feature_category: :source_code_management do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:service) { described_class.new(project, 'feature') }
describe '#execute' do
context 'compare with base, like feature...fix' do
subject { service.execute(project, 'fix', straight: false) }
it { expect(subject.diffs.size).to eq(1) }
end
context 'straight compare, like feature..fix' do
subject { service.execute(project, 'fix', straight: true) }
it { expect(subject.diffs.size).to eq(3) }
end
context 'compare with target branch that does not exist' do
subject { service.execute(project, 'non-existent-ref') }
it { expect(subject).to be_nil }
end
context 'compare with source branch that does not exist' do
let(:service) { described_class.new(project, 'non-existent-branch') }
subject { service.execute(project, 'non-existent-ref') }
it { expect(subject).to be_nil }
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class UserAgentDetailService
def initialize(spammable:, perform_spam_check:)
@spammable = spammable
@perform_spam_check = perform_spam_check
end
def create
spam_params = Gitlab::RequestContext.instance.spam_params
if !perform_spam_check || spam_params&.user_agent.blank? || spam_params&.ip_address.blank?
message = 'Skipped UserAgentDetail creation because necessary spam_params were not provided'
return ServiceResponse.success(message: message)
end
spammable.create_user_agent_detail(user_agent: spam_params.user_agent, ip_address: spam_params.ip_address)
end
private
attr_reader :spammable, :perform_spam_check
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe UserAgentDetailService, feature_category: :instance_resiliency do
describe '#create', :request_store do
let_it_be(:spammable) { create(:issue) }
using RSpec::Parameterized::TableSyntax
where(:perform_spam_check, :spam_params_present, :user_agent, :ip_address, :creates_user_agent_detail) do
true | true | 'UA' | 'IP' | true
true | false | 'UA' | 'IP' | false
false | true | 'UA' | 'IP' | false
true | true | '' | 'IP' | false
true | true | nil | 'IP' | false
true | true | 'UA' | '' | false
true | true | 'UA' | nil | false
end
with_them do
let(:spam_params) do
instance_double('Spam::SpamParams', user_agent: user_agent, ip_address: ip_address) if spam_params_present
end
before do
allow(Gitlab::RequestContext.instance).to receive(:spam_params).and_return(spam_params)
end
subject { described_class.new(spammable: spammable, perform_spam_check: perform_spam_check).create } # rubocop:disable Rails/SaveBang
it 'creates a user agent detail when expected' do
if creates_user_agent_detail
expect { subject }.to change { UserAgentDetail.count }.by(1)
else
expect(subject).to be_a ServiceResponse
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class X509CertificateRevokeService
def execute(certificate)
return unless certificate.revoked?
certificate.x509_commit_signatures.update_all(verification_status: :unverified)
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe X509CertificateRevokeService, feature_category: :system_access do
describe '#execute' do
let(:service) { described_class.new }
let!(:x509_signature_1) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified) }
let!(:x509_signature_2) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified) }
context 'for revoked certificates' do
let(:x509_certificate) { create(:x509_certificate, certificate_status: :revoked) }
it 'updates all commit signatures' do
expect do
service.execute(x509_certificate)
x509_signature_1.reload
x509_signature_2.reload
end
.to change(x509_signature_1, :verification_status).from('verified').to('unverified')
.and change(x509_signature_2, :verification_status).from('verified').to('unverified')
end
end
context 'for good certificates' do
let(:x509_certificate) { create(:x509_certificate) }
it 'does not update any commit signature' do
expect do
service.execute(x509_certificate)
x509_signature_1.reload
x509_signature_2.reload
end
.to not_change(x509_signature_1, :verification_status)
.and not_change(x509_signature_2, :verification_status)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# EventCreateService class
#
# Used for creating events feed on dashboard after certain user action
#
# Ex.
# EventCreateService.new.open_issue(issue, current_user)
#
class EventCreateService
IllegalActionError = Class.new(StandardError)
DEGIGN_EVENT_LABEL = 'usage_activity_by_stage_monthly.create.action_monthly_active_users_design_management'
MR_EVENT_LABEL = 'usage_activity_by_stage_monthly.create.merge_requests_users'
MR_EVENT_PROPERTY = 'merge_request_action'
def open_issue(issue, current_user)
create_record_event(issue, current_user, :created)
end
def close_issue(issue, current_user)
create_record_event(issue, current_user, :closed)
end
def reopen_issue(issue, current_user)
create_record_event(issue, current_user, :reopened)
end
def open_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :created).tap do
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:merge_request_action, values: current_user.id)
track_snowplow_event(
action: :created,
project: merge_request.project,
user: current_user,
label: MR_EVENT_LABEL,
property: MR_EVENT_PROPERTY
)
end
end
def close_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :closed).tap do
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:merge_request_action, values: current_user.id)
track_snowplow_event(
action: :closed,
project: merge_request.project,
user: current_user,
label: MR_EVENT_LABEL,
property: MR_EVENT_PROPERTY
)
end
end
def reopen_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :reopened)
end
def merge_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :merged).tap do
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:merge_request_action, values: current_user.id)
track_snowplow_event(
action: :merged,
project: merge_request.project,
user: current_user,
label: MR_EVENT_LABEL,
property: MR_EVENT_PROPERTY
)
end
end
def open_milestone(milestone, current_user)
create_record_event(milestone, current_user, :created)
end
def close_milestone(milestone, current_user)
create_record_event(milestone, current_user, :closed)
end
def reopen_milestone(milestone, current_user)
create_record_event(milestone, current_user, :reopened)
end
def destroy_milestone(milestone, current_user)
create_record_event(milestone, current_user, :destroyed)
end
def leave_note(note, current_user)
create_record_event(note, current_user, :commented).tap do
if note.is_a?(DiffNote) && note.for_merge_request?
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:merge_request_action, values: current_user.id)
track_snowplow_event(
action: :commented,
project: note.project,
user: current_user,
label: MR_EVENT_LABEL,
property: MR_EVENT_PROPERTY
)
end
end
end
def join_project(project, current_user)
create_event(project, current_user, :joined)
end
def leave_project(project, current_user)
create_event(project, current_user, :left)
end
def expired_leave_project(project, current_user)
create_event(project, current_user, :expired)
end
def create_project(project, current_user)
create_event(project, current_user, :created)
end
def push(project, current_user, push_data)
create_push_event(PushEventPayloadService, project, current_user, push_data)
end
def bulk_push(project, current_user, push_data)
create_push_event(BulkPushEventPayloadService, project, current_user, push_data)
end
def save_designs(current_user, create: [], update: [])
records = create.zip([:created].cycle) + update.zip([:updated].cycle)
return [] if records.empty?
event_meta = { user: current_user, label: DEGIGN_EVENT_LABEL, property: :design_action }
track_snowplow_event(action: :create, project: create.first.project, **event_meta) if create.any?
track_snowplow_event(action: :update, project: update.first.project, **event_meta) if update.any?
inserted_events = create_record_events(records, current_user)
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:design_action, values: current_user.id)
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:git_write_action, values: current_user.id)
inserted_events
end
def destroy_designs(designs, current_user)
return [] unless designs.present?
track_snowplow_event(
action: :destroy,
project: designs.first.project,
user: current_user,
label: DEGIGN_EVENT_LABEL,
property: :design_action
)
inserted_events = create_record_events(designs.zip([:destroyed].cycle), current_user)
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:design_action, values: current_user.id)
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:git_write_action, values: current_user.id)
inserted_events
end
# Create a new wiki page event
#
# @param [WikiPage::Meta] wiki_page_meta The event target
# @param [User] author The event author
# @param [Symbol] action One of the Event::WIKI_ACTIONS
# @param [String] fingerprint The de-duplication fingerprint
#
# The fingerprint, if provided, should be sufficient to find duplicate events.
# Suitable values would be, for example, the current page SHA.
#
# @return [Event] the event
def wiki_event(wiki_page_meta, author, action, fingerprint)
raise IllegalActionError, action unless Event::WIKI_ACTIONS.include?(action)
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:wiki_action, values: author.id)
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:git_write_action, values: author.id)
duplicate = Event.for_wiki_meta(wiki_page_meta).for_fingerprint(fingerprint).first
return duplicate if duplicate.present?
create_record_event(wiki_page_meta, author, action, fingerprint.presence)
end
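# Illustrative call (hypothetical `meta`, `author` and `page_sha` values, not
# part of the original class): passing the page's current SHA as the
# fingerprint makes a repeated call return the existing event instead of
# creating a duplicate:
#
#   EventCreateService.new.wiki_event(meta, author, :updated, page_sha)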
def approve_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :approved)
end
private
def create_record_event(record, current_user, status, fingerprint = nil)
create_event(
record.resource_parent,
current_user,
status,
fingerprint: fingerprint,
target_id: record.id,
target_type: record.class.name
)
end
# If creating several events, this method will insert them all in a single
# statement
#
# @param [[Eventable, Symbol, String]] a list of tuples of records, a valid status, and fingerprint
# @param [User] the author of the event
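#
# Illustrative call (hypothetical `design_a`/`design_b` records), matching the
# [record, status, fingerprint] tuple shape consumed below:
#
#   create_record_events([[design_a, :created, nil], [design_b, :destroyed, 'abc123']], current_user)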
def create_record_events(tuples, current_user)
base_attrs = {
created_at: Time.now.utc,
updated_at: Time.now.utc,
author_id: current_user.id
}
attribute_sets = tuples.map do |record, status, fingerprint|
action = Event.actions[status]
raise IllegalActionError, "#{status} is not a valid status" if action.nil?
parent_attrs(record.resource_parent)
.merge(base_attrs)
.merge(action: action, fingerprint: fingerprint, target_id: record.id, target_type: record.class.name)
end
Event.insert_all(attribute_sets, returning: %w[id])
end
def create_push_event(service_class, project, current_user, push_data)
# We're using an explicit transaction here so that any errors that may occur
# when creating push payload data will result in the event creation being
# rolled back as well.
event = Event.transaction do
new_event = create_event(project, current_user, :pushed)
service_class.new(new_event, push_data).execute
new_event
end
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:project_action, values: current_user.id)
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:git_write_action, values: current_user.id)
namespace = project.namespace
Gitlab::Tracking.event(
self.class.to_s,
:push,
label: 'usage_activity_by_stage_monthly.create.action_monthly_active_users_project_repo',
namespace: namespace,
user: current_user,
project: project,
property: 'project_action',
context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: 'project_action').to_context]
)
Users::LastPushEventService.new(current_user)
.cache_last_push_event(event)
Users::ActivityService.new(author: current_user, namespace: namespace, project: project).execute
end
def create_event(resource_parent, current_user, status, attributes = {})
attributes.reverse_merge!(
action: status,
author_id: current_user.id
)
attributes.merge!(parent_attrs(resource_parent))
if attributes[:fingerprint].present?
Event.safe_find_or_create_by!(attributes)
else
Event.create!(attributes)
end
end
def parent_attrs(resource_parent)
resource_parent_attr = case resource_parent
when Project
:project_id
when Group
:group_id
end
return {} unless resource_parent_attr
{ resource_parent_attr => resource_parent.id }
end
def track_snowplow_event(action:, project:, user:, label:, property:)
Gitlab::Tracking.event(
self.class.to_s,
action.to_s,
label: label,
namespace: project.namespace,
user: user,
project: project,
property: property.to_s,
context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: property.to_s).to_context]
)
end
end
EventCreateService.prepend_mod_with('EventCreateService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redis_shared_state, feature_category: :service_ping do
include SnowplowHelpers
let(:service) { described_class.new }
let(:dates) { { start_date: Date.today.beginning_of_week, end_date: Date.today.next_week } }
let_it_be(:user, reload: true) { create :user }
let_it_be(:project) { create(:project) }
shared_examples 'it records the event in the event counter' do
specify do
tracking_params = { event_names: event_action, **dates }
expect { subject }
.to change { Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(**tracking_params) }
.by(1)
end
end
shared_examples 'it records a git write event' do
specify do
tracking_params = { event_names: 'git_write_action', **dates }
expect { subject }
.to change { Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(**tracking_params) }
.by(1)
end
end
describe 'Issues' do
describe '#open_issue' do
let(:issue) { create(:issue) }
it { expect(service.open_issue(issue, issue.author)).to be_truthy }
it "creates new event" do
expect { service.open_issue(issue, issue.author) }.to change { Event.count }
end
end
describe '#close_issue' do
let(:issue) { create(:issue) }
it { expect(service.close_issue(issue, issue.author)).to be_truthy }
it "creates new event" do
expect { service.close_issue(issue, issue.author) }.to change { Event.count }
end
end
describe '#reopen_issue' do
let(:issue) { create(:issue) }
it { expect(service.reopen_issue(issue, issue.author)).to be_truthy }
it "creates new event" do
expect { service.reopen_issue(issue, issue.author) }.to change { Event.count }
end
end
end
describe 'Merge Requests', :snowplow do
describe '#open_mr' do
subject(:open_mr) { service.open_mr(merge_request, merge_request.author) }
let(:merge_request) { create(:merge_request) }
it { expect(open_mr).to be_truthy }
it "creates new event" do
expect { open_mr }.to change { Event.count }
end
it_behaves_like "it records the event in the event counter" do
let(:event_action) { :merge_request_action }
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:category) { described_class.name }
let(:action) { 'created' }
let(:label) { described_class::MR_EVENT_LABEL }
let(:namespace) { project.namespace }
let(:project) { merge_request.project }
let(:user) { merge_request.author }
let(:property) { described_class::MR_EVENT_PROPERTY }
let(:context) do
[Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: property).to_context]
end
end
end
describe '#close_mr' do
subject(:close_mr) { service.close_mr(merge_request, merge_request.author) }
let(:merge_request) { create(:merge_request) }
it { expect(close_mr).to be_truthy }
it "creates new event" do
expect { close_mr }.to change { Event.count }
end
it_behaves_like "it records the event in the event counter" do
let(:event_action) { :merge_request_action }
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:category) { described_class.name }
let(:action) { 'closed' }
let(:label) { described_class::MR_EVENT_LABEL }
let(:namespace) { project.namespace }
let(:project) { merge_request.project }
let(:user) { merge_request.author }
let(:property) { described_class::MR_EVENT_PROPERTY }
let(:context) do
[Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: property).to_context]
end
end
end
describe '#merge_mr' do
subject(:merge_mr) { service.merge_mr(merge_request, merge_request.author) }
let(:merge_request) { create(:merge_request) }
it { expect(merge_mr).to be_truthy }
it "creates new event" do
expect { merge_mr }.to change { Event.count }
end
it_behaves_like "it records the event in the event counter" do
let(:event_action) { :merge_request_action }
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:category) { described_class.name }
let(:action) { 'merged' }
let(:label) { described_class::MR_EVENT_LABEL }
let(:namespace) { project.namespace }
let(:project) { merge_request.project }
let(:user) { merge_request.author }
let(:property) { described_class::MR_EVENT_PROPERTY }
let(:context) do
[Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: property).to_context]
end
end
end
describe '#reopen_mr' do
let(:merge_request) { create(:merge_request) }
it { expect(service.reopen_mr(merge_request, merge_request.author)).to be_truthy }
it "creates new event" do
expect { service.reopen_mr(merge_request, merge_request.author) }.to change { Event.count }
end
end
describe '#approve_mr' do
let(:merge_request) { create(:merge_request) }
it { expect(service.approve_mr(merge_request, user)).to be_truthy }
it 'creates new event' do
expect { service.approve_mr(merge_request, user) }
.to change { Event.approved_action.where(target: merge_request).count }.by(1)
end
end
end
describe 'Milestone' do
describe '#open_milestone' do
let(:milestone) { create(:milestone) }
it { expect(service.open_milestone(milestone, user)).to be_truthy }
it "creates new event" do
expect { service.open_milestone(milestone, user) }.to change { Event.count }
end
end
describe '#close_milestone' do
let(:milestone) { create(:milestone) }
it { expect(service.close_milestone(milestone, user)).to be_truthy }
it "creates new event" do
expect { service.close_milestone(milestone, user) }.to change { Event.count }
end
end
describe '#destroy_milestone' do
let(:milestone) { create(:milestone) }
it { expect(service.destroy_milestone(milestone, user)).to be_truthy }
it "creates new event" do
expect { service.destroy_milestone(milestone, user) }.to change { Event.count }
end
end
end
shared_examples_for 'service for creating a push event' do |service_class|
it 'creates a new event' do
expect { subject }.to change { Event.count }
end
it 'creates the push event payload' do
expect(service_class).to receive(:new)
.with(an_instance_of(PushEvent), push_data)
.and_call_original
subject
end
it 'updates user last activity' do
expect { subject }.to change { user.last_activity_on }.to(Date.today)
end
it 'caches the last push event for the user' do
expect_next_instance_of(Users::LastPushEventService) do |instance|
expect(instance).to receive(:cache_last_push_event).with(an_instance_of(PushEvent))
end
subject
end
it 'does not create any event data when an error is raised' do
payload_service = double(:service)
allow(payload_service).to receive(:execute)
.and_raise(RuntimeError)
allow(service_class).to receive(:new)
.and_return(payload_service)
expect { subject }.to raise_error(RuntimeError)
expect(Event.count).to eq(0)
expect(PushEventPayload.count).to eq(0)
end
end
describe '#wiki_event' do
let_it_be(:user) { create(:user) }
let_it_be(:wiki_page) { create(:wiki_page) }
let_it_be(:meta) { create(:wiki_page_meta, :for_wiki_page, wiki_page: wiki_page) }
let(:fingerprint) { generate(:sha) }
def create_event
service.wiki_event(meta, user, action, fingerprint)
end
where(:action) { Event::WIKI_ACTIONS.map { |action| [action] } }
with_them do
subject { create_event }
it 'creates the event' do
expect(create_event).to have_attributes(
wiki_page?: true,
valid?: true,
persisted?: true,
action: action.to_s,
wiki_page: wiki_page,
author: user,
fingerprint: fingerprint
)
end
it 'is idempotent', :aggregate_failures do
event = nil
expect { event = create_event }.to change(Event, :count).by(1)
duplicate = nil
expect { duplicate = create_event }.not_to change(Event, :count)
expect(duplicate).to eq(event)
end
it_behaves_like "it records the event in the event counter" do
let(:event_action) { :wiki_action }
end
it_behaves_like "it records a git write event"
end
(Event.actions.keys - Event::WIKI_ACTIONS).each do |bad_action|
context "The action is #{bad_action}" do
let(:action) { bad_action }
it 'raises an error' do
expect { create_event }.to raise_error(described_class::IllegalActionError)
end
end
end
end
describe '#push', :snowplow do
let(:push_data) do
{
commits: [
{
id: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
message: 'This is a commit'
}
],
before: '0000000000000000000000000000000000000000',
after: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
total_commits_count: 1,
ref: 'refs/heads/my-branch'
}
end
subject { service.push(project, user, push_data) }
it_behaves_like 'service for creating a push event', PushEventPayloadService
it_behaves_like "it records the event in the event counter" do
let(:event_action) { :project_action }
end
it_behaves_like "it records a git write event"
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:category) { described_class.to_s }
let(:action) { :push }
let(:namespace) { project.namespace }
let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_project_repo' }
let(:property) { 'project_action' }
end
end
describe '#bulk_push', :snowplow do
let(:push_data) do
{
action: :created,
ref_count: 4,
ref_type: :branch
}
end
subject { service.bulk_push(project, user, push_data) }
it_behaves_like 'service for creating a push event', BulkPushEventPayloadService
it_behaves_like "it records the event in the event counter" do
let(:event_action) { :project_action }
end
it_behaves_like "it records a git write event"
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:category) { described_class.to_s }
let(:action) { :push }
let(:namespace) { project.namespace }
let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_project_repo' }
let(:property) { 'project_action' }
end
end
describe 'Project' do
describe '#join_project' do
subject { service.join_project(project, user) }
it { is_expected.to be_truthy }
it { expect { subject }.to change { Event.count }.from(0).to(1) }
end
describe '#expired_leave_project' do
subject { service.expired_leave_project(project, user) }
it { is_expected.to be_truthy }
it { expect { subject }.to change { Event.count }.from(0).to(1) }
end
end
describe 'design events', :snowplow do
let_it_be(:design) { create(:design, project: project) }
let_it_be(:author) { user }
before do
allow(Gitlab::Tracking).to receive(:event) # rubocop:disable RSpec/ExpectGitlabTracking
end
describe '#save_designs' do
let_it_be(:updated) { create_list(:design, 5) }
let_it_be(:created) { create_list(:design, 3) }
subject(:result) { service.save_designs(author, create: created, update: updated) }
specify { expect { result }.to change { Event.count }.by(8) }
# An additional query due to event tracking
specify { expect { result }.not_to exceed_query_limit(2) }
it 'creates 3 created design events' do
ids = result.pluck('id')
events = Event.created_action.where(id: ids)
expect(events.map(&:design)).to match_array(created)
end
it 'creates 5 updated design events' do
ids = result.pluck('id')
events = Event.updated_action.where(id: ids)
expect(events.map(&:design)).to match_array(updated)
end
it_behaves_like "it records the event in the event counter" do
let(:event_action) { :design_action }
end
it_behaves_like "it records a git write event"
describe 'Snowplow tracking' do
let(:project) { design.project }
let(:namespace) { project.namespace }
let(:category) { described_class.name }
let(:property) { :design_action.to_s }
let(:label) { ::EventCreateService::DEGIGN_EVENT_LABEL }
context 'for create event' do
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
subject(:design_service) { service.save_designs(author, create: [design]) }
let(:action) { 'create' }
end
end
context 'for update event' do
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
subject(:design_service) { service.save_designs(author, update: [design]) }
let(:action) { 'update' }
end
end
end
end
describe '#destroy_designs' do
let_it_be(:designs) { create_list(:design, 5) }
let_it_be(:author) { create(:user) }
subject(:result) { service.destroy_designs(designs, author) }
specify { expect { result }.to change { Event.count }.by(5) }
# An additional query due to event tracking
specify { expect { result }.not_to exceed_query_limit(2) }
it 'creates 5 destroyed design events' do
ids = result.pluck('id')
events = Event.destroyed_action.where(id: ids)
expect(events.map(&:design)).to match_array(designs)
end
it_behaves_like "it records the event in the event counter" do
let(:event_action) { :design_action }
end
it_behaves_like "it records a git write event"
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
subject(:design_service) { service.destroy_designs([design], author) }
let(:project) { design.project }
let(:namespace) { project.namespace }
let(:category) { described_class.name }
let(:action) { 'destroy' }
let(:user) { author }
let(:property) { :design_action.to_s }
let(:label) { ::EventCreateService::DEGIGN_EVENT_LABEL }
end
end
end
describe '#leave_note', :snowplow do
subject(:leave_note) { service.leave_note(note, author) }
let(:note) { create(:note) }
let(:author) { create(:user) }
let(:event_action) { :merge_request_action }
it { expect(leave_note).to be_truthy }
it "creates new event" do
expect { leave_note }.to change { Event.count }.by(1)
end
context 'when it is a diff note' do
let(:note) { create(:diff_note_on_merge_request) }
it_behaves_like "it records the event in the event counter"
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:note) { create(:diff_note_on_merge_request) }
let(:category) { described_class.name }
let(:action) { 'commented' }
let(:property) { described_class::MR_EVENT_PROPERTY }
let(:label) { described_class::MR_EVENT_LABEL }
let(:namespace) { project.namespace }
let(:project) { note.project }
let(:user) { author }
let(:context) do
[Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: property).to_context]
end
end
end
context 'when it is not a diff note' do
it 'does not change the unique action counter' do
tracking_params = { event_names: event_action, start_date: Date.yesterday, end_date: Date.today }
expect { subject }.not_to change { Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(**tracking_params) }
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class WebHookService
class InternalErrorResponse
ERROR_MESSAGE = 'internal error'
attr_reader :body, :headers, :code
def success?
false
end
def redirection?
false
end
def internal_server_error?
true
end
def initialize
@headers = Gitlab::HTTP::Response::Headers.new({})
@body = ''
@code = ERROR_MESSAGE
end
end
REQUEST_BODY_SIZE_LIMIT = 25.megabytes
# Response body is for UI display only. It does not make much sense to save
# whatever the receivers throw back at us
RESPONSE_BODY_SIZE_LIMIT = 8.kilobytes
# The headers are for debugging purpose. They are displayed on the UI only.
RESPONSE_HEADERS_COUNT_LIMIT = 50
RESPONSE_HEADERS_SIZE_LIMIT = 1.kilobytes
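# Illustration of how these limits are applied (via #string_size_limit below,
# which uses String#truncate_bytes); the values here are only an example:
#
#   ('a' * 10_000).truncate_bytes(RESPONSE_BODY_SIZE_LIMIT)
#   # => first ~8 KB of the string, ending in an ellipsis ("…")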
attr_accessor :hook, :data, :hook_name, :request_options
attr_reader :uniqueness_token
def self.hook_to_event(hook_name, hook = nil)
return hook.class.name.titleize if hook.is_a?(SystemHook)
hook_name.to_s.singularize.titleize
end
def initialize(hook, data, hook_name, uniqueness_token = nil, force: false)
@hook = hook
@data = data.to_h
@hook_name = hook_name.to_s
@uniqueness_token = uniqueness_token
@force = force
@request_options = {
timeout: Gitlab.config.gitlab.webhook_timeout,
allow_local_requests: hook.allow_local_requests?
}
end
def disabled?
!@force && !hook.executable?
end
def execute
if Gitlab::SilentMode.enabled?
log_silent_mode_enabled
return ServiceResponse.error(message: 'Silent mode enabled')
end
return ServiceResponse.error(message: 'Hook disabled') if disabled?
if recursion_blocked?
log_recursion_blocked
return ServiceResponse.error(message: 'Recursive webhook blocked')
end
Gitlab::WebHooks::RecursionDetection.register!(hook)
start_time = Gitlab::Metrics::System.monotonic_time
response = if parsed_url.userinfo.blank?
make_request(parsed_url.to_s)
else
make_request_with_auth
end
log_execution(
response: response,
execution_duration: ::Gitlab::Metrics::System.monotonic_time - start_time
)
ServiceResponse.success(message: response.body, payload: { http_status: response.code })
rescue *Gitlab::HTTP::HTTP_ERRORS,
Gitlab::Json::LimitedEncoder::LimitExceeded, URI::InvalidURIError => e
execution_duration = ::Gitlab::Metrics::System.monotonic_time - start_time
error_message = e.to_s
log_execution(
response: InternalErrorResponse.new,
execution_duration: execution_duration,
error_message: error_message
)
Gitlab::AppLogger.error("WebHook Error after #{execution_duration.to_i.seconds}s => #{e}")
ServiceResponse.error(message: error_message)
end
def async_execute
Gitlab::ApplicationContext.with_context(hook.application_context) do
break log_silent_mode_enabled if Gitlab::SilentMode.enabled?
break log_rate_limited if rate_limit!
break log_recursion_blocked if recursion_blocked?
params = {
"recursion_detection_request_uuid" => Gitlab::WebHooks::RecursionDetection::UUID.instance.request_uuid
}.compact
WebHookWorker.perform_async(hook.id, data.deep_stringify_keys, hook_name.to_s, params)
end
end
private
def parsed_url
@parsed_url ||= URI.parse(hook.interpolated_url)
rescue WebHook::InterpolationError => e
# Behavior-preserving fallback.
Gitlab::ErrorTracking.track_exception(e)
@parsed_url = URI.parse(hook.url)
end
def make_request(url, basic_auth = false)
Gitlab::HTTP.post(url,
body: Gitlab::Json::LimitedEncoder.encode(data, limit: REQUEST_BODY_SIZE_LIMIT),
headers: build_headers,
verify: hook.enable_ssl_verification,
basic_auth: basic_auth,
**request_options)
end
def make_request_with_auth
post_url = parsed_url.to_s.gsub("#{parsed_url.userinfo}@", '')
basic_auth = {
username: CGI.unescape(parsed_url.user),
password: CGI.unescape(parsed_url.password.presence || '')
}
make_request(post_url, basic_auth)
end
def log_execution(response:, execution_duration:, error_message: nil)
category = response_category(response)
log_data = {
trigger: hook_name,
url: hook.url,
interpolated_url: hook.interpolated_url,
execution_duration: execution_duration,
request_headers: build_headers,
request_data: data,
response_headers: safe_response_headers(response),
response_body: safe_response_body(response),
response_status: response.code,
internal_error_message: error_message
}
if @force # executed as part of test - run log-execution inline.
::WebHooks::LogExecutionService.new(hook: hook, log_data: log_data, response_category: category).execute
else
queue_log_execution_with_retry(log_data, category)
end
end
def queue_log_execution_with_retry(log_data, category)
retried = false
begin
::WebHooks::LogExecutionWorker.perform_async(
hook.id, log_data.deep_stringify_keys, category.to_s, uniqueness_token.to_s
)
rescue Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError
raise if retried
# Strip request data
log_data[:request_data] = ::WebHookLog::OVERSIZE_REQUEST_DATA
retried = true
retry
end
end
def response_category(response)
if response.success? || response.redirection?
:ok
elsif response.internal_server_error?
:error
else
:failed
end
end
def build_headers
@headers ||= begin
headers = {
'Content-Type' => 'application/json',
'User-Agent' => "GitLab/#{Gitlab::VERSION}",
Gitlab::WebHooks::GITLAB_EVENT_HEADER => self.class.hook_to_event(hook_name, hook),
Gitlab::WebHooks::GITLAB_UUID_HEADER => SecureRandom.uuid,
Gitlab::WebHooks::GITLAB_INSTANCE_HEADER => Gitlab.config.gitlab.base_url
}
headers['X-Gitlab-Token'] = Gitlab::Utils.remove_line_breaks(hook.token) if hook.token.present?
headers.merge!(Gitlab::WebHooks::RecursionDetection.header(hook))
end
end
# Make response headers more stylish
# Net::HTTPHeader has downcased hash with arrays: { 'content-type' => ['text/html; charset=utf-8'] }
# This method format response to capitalized hash with strings: { 'Content-Type' => 'text/html; charset=utf-8' }
# rubocop:disable Style/HashTransformValues
def safe_response_headers(response)
response.headers.each_capitalized.first(RESPONSE_HEADERS_COUNT_LIMIT).to_h do |header_key, header_value|
[enforce_utf8(header_key), string_size_limit(enforce_utf8(header_value), RESPONSE_HEADERS_SIZE_LIMIT)]
end
end
# rubocop:enable Style/HashTransformValues
def safe_response_body(response)
return '' unless response.body
response_body = enforce_utf8(response.body)
string_size_limit(response_body, RESPONSE_BODY_SIZE_LIMIT)
end
# Increments rate-limit counter.
# Returns true if hook should be rate-limited.
def rate_limit!
Gitlab::WebHooks::RateLimiter.new(hook).rate_limit!
end
def recursion_blocked?
Gitlab::WebHooks::RecursionDetection.block?(hook)
end
def log_rate_limited
log_auth_error('Webhook rate limit exceeded')
end
def log_recursion_blocked
log_auth_error(
'Recursive webhook blocked from executing',
recursion_detection: ::Gitlab::WebHooks::RecursionDetection.to_log(hook)
)
end
def log_silent_mode_enabled
log_auth_error('GitLab is in silent mode')
end
def log_auth_error(message, params = {})
Gitlab::AuthLogger.error(
params.merge(
{ message: message, hook_id: hook.id, hook_type: hook.type, hook_name: hook_name },
Gitlab::ApplicationContext.current
)
)
end
def string_size_limit(str, limit)
str.truncate_bytes(limit)
end
def enforce_utf8(str)
Gitlab::EncodingHelper.encode_utf8(str)
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state, feature_category: :webhooks do
include StubRequests
let(:ellipsis) { '…' }
let_it_be(:project) { create(:project) }
let_it_be_with_reload(:project_hook) { create(:project_hook, project: project) }
let(:data) do
{ before: 'oldrev', after: 'newrev', ref: 'ref' }
end
let(:serialized_data) { data.deep_stringify_keys }
let(:service_instance) { described_class.new(project_hook, data, :push_hooks) }
describe '#initialize' do
before do
stub_application_setting(setting_name => setting)
end
shared_examples_for 'respects outbound network setting' do
context 'when local requests are allowed' do
let(:setting) { true }
it { expect(hook.request_options[:allow_local_requests]).to be_truthy }
end
context 'when local requests are not allowed' do
let(:setting) { false }
it { expect(hook.request_options[:allow_local_requests]).to be_falsey }
end
end
context 'when SystemHook' do
let(:setting_name) { :allow_local_requests_from_system_hooks }
let(:hook) { described_class.new(build(:system_hook), data, :system_hook) }
include_examples 'respects outbound network setting'
end
context 'when ProjectHook' do
let(:setting_name) { :allow_local_requests_from_web_hooks_and_services }
let(:hook) { described_class.new(build(:project_hook), data, :project_hook) }
include_examples 'respects outbound network setting'
end
end
describe '#disabled?' do
using RSpec::Parameterized::TableSyntax
subject { described_class.new(hook, data, :push_hooks, force: forced) }
let(:hook) { double(executable?: executable, allow_local_requests?: false) }
where(:forced, :executable, :disabled) do
false | true | false
false | false | true
true | true | false
true | false | false
end
with_them do
it { is_expected.to have_attributes(disabled?: disabled) }
end
end
describe '#execute' do
let(:uuid) { SecureRandom.uuid }
let!(:recursion_uuid) { SecureRandom.uuid }
let(:headers) do
{
'Content-Type' => 'application/json',
'User-Agent' => "GitLab/#{Gitlab::VERSION}",
'X-Gitlab-Webhook-UUID' => uuid,
'X-Gitlab-Event' => 'Push Hook',
'X-Gitlab-Event-UUID' => recursion_uuid,
'X-Gitlab-Instance' => Gitlab.config.gitlab.base_url
}
end
before do
# Set stable values for the `X-Gitlab-Webhook-UUID` and `X-Gitlab-Event-UUID` headers.
allow(SecureRandom).to receive(:uuid).and_return(uuid)
Gitlab::WebHooks::RecursionDetection.set_request_uuid(recursion_uuid)
end
context 'when there is an interpolation error' do
let(:error) { ::WebHook::InterpolationError.new('boom') }
before do
stub_full_request(project_hook.url, method: :post)
allow(project_hook).to receive(:interpolated_url).and_raise(error)
end
it 'logs the error' do
expect(Gitlab::ErrorTracking).to receive(:track_exception).with(error)
expect(service_instance).to receive(:log_execution).with(
execution_duration: (be > 0),
response: have_attributes(code: 200)
)
service_instance.execute
end
end
context 'when there are URL variables' do
before do
project_hook.update!(
url: 'http://example.com/{one}/{two}',
url_variables: { 'one' => 'a', 'two' => 'b' }
)
end
it 'POSTs to the interpolated URL, and logs the hook.url' do
stub_full_request(project_hook.interpolated_url, method: :post)
expect(service_instance).to receive(:queue_log_execution_with_retry).with(
include(url: project_hook.url),
:ok
)
service_instance.execute
expect(WebMock)
.to have_requested(:post, stubbed_hostname(project_hook.interpolated_url)).once
end
context 'there is userinfo' do
before do
project_hook.update!(
url: 'http://{foo}:{bar}@example.com',
url_variables: { 'foo' => 'a', 'bar' => 'b' }
)
stub_full_request('http://example.com', method: :post)
end
it 'POSTs to the interpolated URL, and logs the hook.url' do
expect(service_instance).to receive(:queue_log_execution_with_retry).with(
include(url: project_hook.url),
:ok
)
service_instance.execute
expect(WebMock)
.to have_requested(:post, stubbed_hostname('http://example.com'))
.with(headers: headers.merge('Authorization' => 'Basic YTpi'))
.once
end
end
end
context 'when token is defined' do
before do
project_hook.token = generate(:token)
end
it 'POSTs to the webhook URL' do
stub_full_request(project_hook.url, method: :post)
service_instance.execute
expect(WebMock).to have_requested(:post, stubbed_hostname(project_hook.url)).with(
headers: headers.merge({ 'X-Gitlab-Token' => project_hook.token })
).once
end
end
context 'with SystemHook' do
let_it_be(:system_hook) { create(:system_hook) }
let(:service_instance) { described_class.new(system_hook, data, :push_hooks) }
before do
stub_full_request(system_hook.url, method: :post)
end
it 'POSTs to the webhook URL with correct headers' do
service_instance.execute
expect(WebMock).to have_requested(:post, stubbed_hostname(system_hook.url)).with(
headers: headers.merge({ 'X-Gitlab-Event' => 'System Hook' })
).once
end
end
it 'POSTs the data as JSON and returns expected headers' do
stub_full_request(project_hook.url, method: :post)
service_instance.execute
expect(WebMock).to have_requested(:post, stubbed_hostname(project_hook.url)).with(
headers: headers
).once
end
context 'when the data is a Gitlab::DataBuilder::Pipeline' do
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:data) { ::Gitlab::DataBuilder::Pipeline.new(pipeline) }
it 'can log the request payload' do
stub_full_request(project_hook.url, method: :post)
# we call this with force to ensure that the logs are written inline,
# which tests that we can serialize the data to the DB correctly.
service = described_class.new(project_hook, data, :push_hooks, force: true)
expect { service.execute }.to change(::WebHookLog, :count).by(1)
end
end
context 'when auth credentials are present' do
let_it_be(:url) { 'https://example.org' }
let_it_be(:project_hook) { create(:project_hook, url: 'https://demo:demo@example.org/') }
it 'uses the credentials' do
stub_full_request(url, method: :post)
service_instance.execute
expect(WebMock).to have_requested(:post, stubbed_hostname(url)).with(
headers: headers.merge('Authorization' => 'Basic ZGVtbzpkZW1v')
).once
end
end
context 'when auth credentials are partial present' do
let_it_be(:url) { 'https://example.org' }
let_it_be(:project_hook) { create(:project_hook, url: 'https://demo@example.org/') }
it 'uses the credentials anyways' do
stub_full_request(url, method: :post)
service_instance.execute
expect(WebMock).to have_requested(:post, stubbed_hostname(url)).with(
headers: headers.merge('Authorization' => 'Basic ZGVtbzo=')
).once
end
end
it 'catches exceptions' do
stub_full_request(project_hook.url, method: :post).to_raise(StandardError.new('Some error'))
expect { service_instance.execute }.to raise_error(StandardError)
end
it 'does not execute disabled hooks' do
allow(service_instance).to receive(:disabled?).and_return(true)
expect(service_instance.execute).to have_attributes(status: :error, message: 'Hook disabled')
end
it 'executes and registers the hook with the recursion detection', :aggregate_failures do
stub_full_request(project_hook.url, method: :post)
cache_key = Gitlab::WebHooks::RecursionDetection.send(:cache_key_for_hook, project_hook)
::Gitlab::Redis::SharedState.with do |redis|
expect { service_instance.execute }.to change {
redis.sismember(cache_key, project_hook.id)
}.to(true)
end
expect(WebMock).to have_requested(:post, stubbed_hostname(project_hook.url))
.with(headers: headers)
.once
end
it 'blocks and logs if a recursive web hook is detected', :aggregate_failures do
stub_full_request(project_hook.url, method: :post)
Gitlab::WebHooks::RecursionDetection.register!(project_hook)
expect(Gitlab::AuthLogger).to receive(:error).with(
include(
message: 'Recursive webhook blocked from executing',
hook_id: project_hook.id,
hook_type: 'ProjectHook',
hook_name: 'push_hooks',
recursion_detection: Gitlab::WebHooks::RecursionDetection.to_log(project_hook),
'correlation_id' => kind_of(String)
)
)
service_instance.execute
expect(WebMock).not_to have_requested(:post, stubbed_hostname(project_hook.url))
end
it 'blocks and logs if the recursion count limit would be exceeded', :aggregate_failures do
stub_full_request(project_hook.url, method: :post)
stub_const("#{Gitlab::WebHooks::RecursionDetection.name}::COUNT_LIMIT", 3)
previous_hooks = create_list(:project_hook, 3)
previous_hooks.each { Gitlab::WebHooks::RecursionDetection.register!(_1) }
expect(Gitlab::AuthLogger).to receive(:error).with(
include(
message: 'Recursive webhook blocked from executing',
hook_id: project_hook.id,
hook_type: 'ProjectHook',
hook_name: 'push_hooks',
recursion_detection: Gitlab::WebHooks::RecursionDetection.to_log(project_hook),
'correlation_id' => kind_of(String)
)
)
service_instance.execute
expect(WebMock).not_to have_requested(:post, stubbed_hostname(project_hook.url))
end
context 'when silent mode is enabled' do
before do
stub_application_setting(silent_mode_enabled: true)
end
it 'blocks and logs an error' do
stub_full_request(project_hook.url, method: :post)
expect(Gitlab::AuthLogger).to receive(:error).with(include(message: 'GitLab is in silent mode'))
expect(service_instance.execute).to be_error
expect(WebMock).not_to have_requested(:post, stubbed_hostname(project_hook.url))
end
end
it 'handles exceptions' do
exceptions = Gitlab::HTTP::HTTP_ERRORS + [
Gitlab::Json::LimitedEncoder::LimitExceeded, URI::InvalidURIError
]
allow(Gitlab::WebHooks::RecursionDetection).to receive(:block?).and_return(false)
exceptions.each do |exception_class|
exception = exception_class.new('Exception message')
project_hook.enable!
stub_full_request(project_hook.url, method: :post).to_raise(exception)
expect(service_instance.execute).to have_attributes(status: :error, message: exception.to_s)
expect { service_instance.execute }.not_to raise_error
end
end
context 'when url is not encoded' do
let_it_be(:project_hook) { create(:project_hook, url: 'http://server.com/my path/') }
it 'handles exceptions' do
expect(service_instance.execute).to have_attributes(
status: :error,
message: 'bad URI(is not URI?): "http://server.com/my path/"'
)
expect { service_instance.execute }.not_to raise_error
end
end
context 'when request body size is too big' do
it 'does not perform the request' do
stub_const("#{described_class}::REQUEST_BODY_SIZE_LIMIT", 10.bytes)
expect(service_instance.execute).to have_attributes(
status: :error,
message: 'Gitlab::Json::LimitedEncoder::LimitExceeded'
)
end
end
it 'handles 200 status code' do
stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: 'Success')
expect(service_instance.execute).to have_attributes(
status: :success,
payload: { http_status: 200 },
message: 'Success'
)
end
it 'handles 2xx status codes' do
stub_full_request(project_hook.url, method: :post).to_return(status: 201, body: 'Success')
expect(service_instance.execute).to have_attributes(
status: :success,
payload: { http_status: 201 },
message: 'Success'
)
end
context 'execution logging' do
let(:default_log_data) do
{
trigger: 'push_hooks',
url: project_hook.url,
interpolated_url: project_hook.interpolated_url,
request_headers: headers,
request_data: data,
response_body: 'Success',
response_headers: {},
response_status: 200,
execution_duration: be > 0,
internal_error_message: nil
}
end
context 'with success' do
before do
stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: 'Success')
end
context 'when forced' do
let(:service_instance) { described_class.new(project_hook, data, :push_hooks, force: true) }
it 'logs execution inline' do
expect(::WebHooks::LogExecutionWorker).not_to receive(:perform_async)
expect(::WebHooks::LogExecutionService)
.to receive(:new)
.with(hook: project_hook, log_data: default_log_data, response_category: :ok)
.and_return(double(execute: nil))
service_instance.execute
end
end
it 'queues LogExecutionWorker correctly' do
expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
.with(
project_hook.id,
hash_including(default_log_data.deep_stringify_keys),
'ok',
''
)
service_instance.execute
end
it 'queues LogExecutionWorker correctly, resulting in a log record (integration-style test)', :sidekiq_inline do
expect { service_instance.execute }.to change(::WebHookLog, :count).by(1)
end
it 'does not log in the service itself' do
expect { service_instance.execute }.not_to change(::WebHookLog, :count)
end
end
context 'with bad request' do
before do
stub_full_request(project_hook.url, method: :post).to_return(status: 400, body: 'Bad request')
end
it 'queues LogExecutionWorker correctly' do
expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
.with(
project_hook.id,
hash_including(
default_log_data.merge(
response_body: 'Bad request',
response_status: 400
).deep_stringify_keys
),
'failed',
''
)
service_instance.execute
end
end
context 'with exception' do
before do
stub_full_request(project_hook.url, method: :post).to_raise(SocketError.new('Some HTTP Post error'))
end
it 'queues LogExecutionWorker correctly' do
expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
.with(
project_hook.id,
hash_including(
default_log_data.merge(
response_body: '',
response_status: 'internal error',
internal_error_message: 'Some HTTP Post error'
).deep_stringify_keys
),
'error',
''
)
service_instance.execute
end
end
context 'with unsafe response body' do
before do
stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: "\xBB")
end
it 'queues LogExecutionWorker with sanitized response_body' do
expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
.with(
project_hook.id,
hash_including(default_log_data.merge(response_body: '').deep_stringify_keys),
'ok',
''
)
service_instance.execute
end
end
context 'with oversize response body' do
let(:oversize_body) { 'a' * (described_class::RESPONSE_BODY_SIZE_LIMIT + 1) }
let(:stripped_body) { 'a' * (described_class::RESPONSE_BODY_SIZE_LIMIT - ellipsis.bytesize) + ellipsis }
before do
stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: oversize_body)
end
it 'queues LogExecutionWorker with stripped response_body' do
expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
.with(
project_hook.id,
hash_including(default_log_data.merge(response_body: stripped_body).deep_stringify_keys),
'ok',
''
)
service_instance.execute
end
end
context 'with massive amount of headers' do
let(:response_headers) do
(1..described_class::RESPONSE_HEADERS_COUNT_LIMIT + 1).to_a.to_h do |num|
["header-#{num}", SecureRandom.hex(num)]
end
end
let(:expected_response_headers) do
(1..described_class::RESPONSE_HEADERS_COUNT_LIMIT).to_a.to_h do |num|
# Capitalized
["Header-#{num}", response_headers["header-#{num}"]]
end
end
before do
stub_full_request(project_hook.url, method: :post).to_return(
status: 200, body: 'Success', headers: response_headers
)
end
it 'queues LogExecutionWorker with limited amount of headers' do
expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
.with(
project_hook.id,
hash_including(default_log_data.merge(response_headers: expected_response_headers).deep_stringify_keys),
'ok',
''
)
service_instance.execute
end
end
context 'with oversize header' do
let(:oversize_header) { 'a' * (described_class::RESPONSE_HEADERS_SIZE_LIMIT + 1) }
let(:stripped_header) { 'a' * (described_class::RESPONSE_HEADERS_SIZE_LIMIT - ellipsis.bytesize) + ellipsis }
let(:response_headers) { { 'oversized-header' => oversize_header } }
let(:expected_response_headers) { { 'Oversized-Header' => stripped_header } }
before do
stub_full_request(project_hook.url, method: :post).to_return(
status: 200, body: 'Success', headers: response_headers
)
end
it 'queues LogExecutionWorker with stripped header value' do
expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
.with(
project_hook.id,
hash_including(default_log_data.merge(response_headers: expected_response_headers).deep_stringify_keys),
'ok',
''
)
service_instance.execute
end
end
context 'with log data exceeding Sidekiq limit' do
before do
stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: 'Success')
end
it 'queues LogExecutionWorker with request_data overridden in the second attempt' do
expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
.with(
project_hook.id,
hash_including(default_log_data.deep_stringify_keys),
'ok',
''
)
.and_raise(
Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError.new(WebHooks::LogExecutionWorker, 100, 50)
)
.ordered
expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
.with(
project_hook.id,
hash_including(default_log_data.merge(
request_data: WebHookLog::OVERSIZE_REQUEST_DATA
).deep_stringify_keys),
'ok',
''
)
.and_call_original
.ordered
service_instance.execute
end
context 'new log data still exceeds limit' do
before do
allow(WebHooks::LogExecutionWorker).to receive(:perform_async).and_raise(
Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError.new(WebHooks::LogExecutionWorker, 100, 50)
)
end
it 'raises an exception' do
expect do
service_instance.execute
end.to raise_error(Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError)
end
end
end
end
end
describe '#async_execute' do
def expect_to_perform_worker(hook)
expect(WebHookWorker).to receive(:perform_async).with(
hook.id, serialized_data, 'push_hooks', an_instance_of(Hash)
)
end
def expect_to_rate_limit(hook, threshold:, throttled: false)
expect(Gitlab::ApplicationRateLimiter).to receive(:throttled?)
.with(:web_hook_calls, scope: [hook.parent.root_namespace], threshold: threshold)
.and_return(throttled)
end
context 'when rate limiting is not configured' do
it 'queues a worker without tracking the call' do
expect(Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
expect_to_perform_worker(project_hook)
service_instance.async_execute
end
end
context 'when rate limiting is configured' do
let_it_be(:threshold) { 3 }
let_it_be(:plan_limits) { create(:plan_limits, :default_plan, web_hook_calls: threshold) }
it 'queues a worker and tracks the call' do
expect_to_rate_limit(project_hook, threshold: threshold)
expect_to_perform_worker(project_hook)
service_instance.async_execute
end
context 'when the hook is throttled (via mock)' do
before do
expect_to_rate_limit(project_hook, threshold: threshold, throttled: true)
end
it 'does not queue a worker and logs an error' do
expect(WebHookWorker).not_to receive(:perform_async)
expect(Gitlab::AuthLogger).to receive(:error).with(
include(
message: 'Webhook rate limit exceeded',
hook_id: project_hook.id,
hook_type: 'ProjectHook',
hook_name: 'push_hooks',
"correlation_id" => kind_of(String),
"meta.project" => project.full_path,
"meta.related_class" => 'ProjectHook',
"meta.root_namespace" => project.root_namespace.full_path
)
)
service_instance.async_execute
end
end
context 'when the hook is throttled (via Redis)', :clean_gitlab_redis_rate_limiting, :freeze_time do
before do
expect_to_perform_worker(project_hook).exactly(threshold).times
threshold.times { service_instance.async_execute }
end
it 'stops queueing workers and logs errors' do
expect(Gitlab::AuthLogger).to receive(:error).twice
2.times { service_instance.async_execute }
end
it 'still queues workers for other hooks' do
other_hook = create(:project_hook)
expect_to_perform_worker(other_hook)
described_class.new(other_hook, data, :push_hooks).async_execute
end
end
end
context 'recursion detection' do
before do
# Set a request UUID so `RecursionDetection.block?` will query redis.
Gitlab::WebHooks::RecursionDetection.set_request_uuid(SecureRandom.uuid)
end
it 'does not queue a worker and logs an error if the call chain limit would be exceeded' do
stub_const("#{Gitlab::WebHooks::RecursionDetection.name}::COUNT_LIMIT", 3)
previous_hooks = create_list(:project_hook, 3)
previous_hooks.each { Gitlab::WebHooks::RecursionDetection.register!(_1) }
expect(WebHookWorker).not_to receive(:perform_async)
expect(Gitlab::AuthLogger).to receive(:error).with(
include(
message: 'Recursive webhook blocked from executing',
hook_id: project_hook.id,
hook_type: 'ProjectHook',
hook_name: 'push_hooks',
recursion_detection: Gitlab::WebHooks::RecursionDetection.to_log(project_hook),
'correlation_id' => kind_of(String),
'meta.project' => project.full_path,
'meta.related_class' => 'ProjectHook',
'meta.root_namespace' => project.root_namespace.full_path
)
)
service_instance.async_execute
end
it 'does not queue a worker and logs an error if a recursive call chain is detected' do
Gitlab::WebHooks::RecursionDetection.register!(project_hook)
expect(WebHookWorker).not_to receive(:perform_async)
expect(Gitlab::AuthLogger).to receive(:error).with(
include(
message: 'Recursive webhook blocked from executing',
hook_id: project_hook.id,
hook_type: 'ProjectHook',
hook_name: 'push_hooks',
recursion_detection: Gitlab::WebHooks::RecursionDetection.to_log(project_hook),
'correlation_id' => kind_of(String),
'meta.project' => project.full_path,
'meta.related_class' => 'ProjectHook',
'meta.root_namespace' => project.root_namespace.full_path
)
)
service_instance.async_execute
end
end
context 'when silent mode is enabled' do
before do
stub_application_setting(silent_mode_enabled: true)
end
it 'does not queue a worker and logs an error' do
expect(WebHookWorker).not_to receive(:perform_async)
expect(Gitlab::AuthLogger).to receive(:error).with(include(message: 'GitLab is in silent mode'))
service_instance.async_execute
end
end
context 'when hook has custom context attributes' do
it 'includes the attributes in the worker context' do
expect(WebHookWorker).to receive(:perform_async) do
expect(Gitlab::ApplicationContext.current).to include(
'meta.project' => project_hook.project.full_path,
'meta.root_namespace' => project.root_ancestor.path,
'meta.related_class' => 'ProjectHook'
)
end
service_instance.async_execute
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class CohortsService
MONTHS_INCLUDED = 12
def execute
{
months_included: MONTHS_INCLUDED,
cohorts: cohorts
}
end
# Get an array of hashes that looks like:
#
# [
# {
# registration_month: Date.new(2017, 3),
# activity_months: [3, 2, 1],
# total: 3
# inactive: 0
# },
# etc.
#
# The `activity_months` array is always ordered from oldest to newest, so its
# running totals are non-strictly decreasing from left to right.
def cohorts
months = Array.new(MONTHS_INCLUDED) { |i| i.months.ago.beginning_of_month.to_date }
Array.new(MONTHS_INCLUDED) do
registration_month = months.last
activity_months = running_totals(months, registration_month)
# Even if no users registered in this month, we always want to have a
# value to fill in the table.
inactive = counts_by_month[[registration_month, nil]].to_i
months.pop
{
registration_month: registration_month,
activity_months: activity_months[1..],
total: activity_months.first[:total],
inactive: inactive
}
end
end
private
# Calculate a running sum of active users, so users active in later months
# count as active in this month, too. Start with the most recent month first,
# for calculating the running totals, and then reverse for displaying in the
# table.
#
# Each month has a total, and a percentage of the overall total, as keys.
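#
# A hedged worked example (the counts are invented, not taken from real data):
# if the per-activity-month counts for a registration month are [0, 1, 2],
# ordered newest to oldest, the reduce builds running sums [0, 1, 3] and the
# reverse yields [3, 1, 0] from oldest to newest: 3 users registered and were
# active, 1 was still active one month later, none after that, giving
# percentages [100, 33, 0].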
def running_totals(all_months, registration_month)
month_totals =
all_months
.map { |activity_month| counts_by_month[[registration_month, activity_month]] }
.reduce([]) { |result, total| result << result.last.to_i + total.to_i }
.reverse
overall_total = month_totals.first
month_totals.map do |total|
{ total: total, percentage: total == 0 ? 0 : 100 * total / overall_total }
end
end
# Get a hash that looks like:
#
# {
# [created_at_month, last_activity_on_month] => count,
# [created_at_month, last_activity_on_month_2] => count_2,
# # etc.
# }
#
# created_at_month can never be nil, but last_activity_on_month can (when a
# user has never logged in, just been created). This covers the last
# MONTHS_INCLUDED months.
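#
# A hypothetical sketch of the resulting hash (dates and counts are invented
# for illustration only):
#
#   {
#     [Date.new(2017, 3, 1), Date.new(2017, 4, 1)] => 2,
#     [Date.new(2017, 3, 1), nil] => 1
#   }
#
# i.e. two users who registered in March 2017 were last active in April 2017,
# and one of the March registrations never logged in at all.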
# rubocop: disable CodeReuse/ActiveRecord
def counts_by_month
@counts_by_month ||=
begin
created_at_month = column_to_date('created_at')
last_activity_on_month = column_to_date('last_activity_on')
User
.where('created_at > ?', MONTHS_INCLUDED.months.ago.end_of_month)
.group(created_at_month, last_activity_on_month)
.reorder(Arel.sql("#{created_at_month} ASC, #{last_activity_on_month} ASC"))
.count
end
end
# rubocop: enable CodeReuse/ActiveRecord
def column_to_date(column)
"CAST(DATE_TRUNC('month', #{column}) AS date)"
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe CohortsService, feature_category: :shared do
describe '#execute' do
def month_start(months_ago)
months_ago.months.ago.beginning_of_month.to_date
end
# In the interests of speed and clarity, this example has minimal data.
it 'returns a list of user cohorts' do
6.times do |months_ago|
months_ago_time = (months_ago * 2).months.ago
create(:user, created_at: months_ago_time, last_activity_on: Time.current)
create(:user, created_at: months_ago_time, last_activity_on: months_ago_time)
end
create(:user) # this user is inactive and belongs to the current month
expected_cohorts = [
{
registration_month: month_start(11),
activity_months: Array.new(11) { { total: 0, percentage: 0 } },
total: 0,
inactive: 0
},
{
registration_month: month_start(10),
activity_months: Array.new(10) { { total: 1, percentage: 50 } },
total: 2,
inactive: 0
},
{
registration_month: month_start(9),
activity_months: Array.new(9) { { total: 0, percentage: 0 } },
total: 0,
inactive: 0
},
{
registration_month: month_start(8),
activity_months: Array.new(8) { { total: 1, percentage: 50 } },
total: 2,
inactive: 0
},
{
registration_month: month_start(7),
activity_months: Array.new(7) { { total: 0, percentage: 0 } },
total: 0,
inactive: 0
},
{
registration_month: month_start(6),
activity_months: Array.new(6) { { total: 1, percentage: 50 } },
total: 2,
inactive: 0
},
{
registration_month: month_start(5),
activity_months: Array.new(5) { { total: 0, percentage: 0 } },
total: 0,
inactive: 0
},
{
registration_month: month_start(4),
activity_months: Array.new(4) { { total: 1, percentage: 50 } },
total: 2,
inactive: 0
},
{
registration_month: month_start(3),
activity_months: Array.new(3) { { total: 0, percentage: 0 } },
total: 0,
inactive: 0
},
{
registration_month: month_start(2),
activity_months: Array.new(2) { { total: 1, percentage: 50 } },
total: 2,
inactive: 0
},
{
registration_month: month_start(1),
activity_months: Array.new(1) { { total: 0, percentage: 0 } },
total: 0,
inactive: 0
},
{
registration_month: month_start(0),
activity_months: [],
total: 2,
inactive: 1
}
]
expect(described_class.new.execute).to eq(months_included: 12, cohorts: expected_cohorts)
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class UserProjectAccessChangedService
DELAY = 1.hour
MEDIUM_DELAY = 10.minutes
HIGH_PRIORITY = :high
MEDIUM_PRIORITY = :medium
LOW_PRIORITY = :low
def initialize(user_ids)
@user_ids = Array.wrap(user_ids)
end
def execute(priority: HIGH_PRIORITY)
return if @user_ids.empty?
bulk_args = @user_ids.map { |id| [id] }
result =
case priority
when HIGH_PRIORITY
AuthorizedProjectsWorker.bulk_perform_async(bulk_args) # rubocop:disable Scalability/BulkPerformWithContext
when MEDIUM_PRIORITY
AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker.bulk_perform_in(MEDIUM_DELAY, bulk_args, batch_size: 100, batch_delay: 30.seconds) # rubocop:disable Scalability/BulkPerformWithContext
when LOW_PRIORITY
if Feature.disabled?(:do_not_run_safety_net_auth_refresh_jobs)
with_related_class_context do
# We wrap the execution in `with_related_class_context` so as to obtain
# the location of the original caller
# in jobs enqueued from within `AuthorizedProjectUpdate::UserRefreshFromReplicaWorker`
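#
# As a rough illustration (an assumption about the payload, not an exact dump):
# if the original caller_id was 'Projects::UpdateService', each enqueued job
# carries "meta.related_class" => "Projects::UpdateService" in its logging
# context, which is the key that Labkit::Context.log_key(:related_class) resolves to.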
AuthorizedProjectUpdate::UserRefreshFromReplicaWorker.bulk_perform_in( # rubocop:disable Scalability/BulkPerformWithContext
DELAY, bulk_args, batch_size: 100, batch_delay: 30.seconds)
end
end
end
::User.sticking.bulk_stick(:user, @user_ids)
result
end
private
def with_related_class_context(&block)
current_caller_id = Gitlab::ApplicationContext.current_context_attribute('meta.caller_id').presence
Gitlab::ApplicationContext.with_context(related_class: current_caller_id, &block)
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe UserProjectAccessChangedService, feature_category: :system_access do
describe '#execute' do
it 'permits high-priority operation' do
expect(AuthorizedProjectsWorker).to receive(:bulk_perform_async)
.with([[1], [2]])
described_class.new([1, 2]).execute
end
context 'for low priority operation' do
context 'when the feature flag `do_not_run_safety_net_auth_refresh_jobs` is disabled' do
before do
stub_feature_flags(do_not_run_safety_net_auth_refresh_jobs: false)
end
it 'permits low-priority operation' do
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
receive(:bulk_perform_in).with(
described_class::DELAY,
[[1], [2]],
{ batch_delay: 30.seconds, batch_size: 100 }
)
)
described_class.new([1, 2]).execute(priority: described_class::LOW_PRIORITY)
end
end
it 'does not perform low-priority operation' do
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).not_to receive(:bulk_perform_in)
described_class.new([1, 2]).execute(priority: described_class::LOW_PRIORITY)
end
end
it 'permits medium-priority operation' do
expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to(
receive(:bulk_perform_in).with(
described_class::MEDIUM_DELAY,
[[1], [2]],
{ batch_delay: 30.seconds, batch_size: 100 }
)
)
described_class.new([1, 2]).execute(priority: described_class::MEDIUM_PRIORITY)
end
it 'sets the current caller_id as related_class in the context of all the enqueued jobs' do
Gitlab::ApplicationContext.with_context(caller_id: 'Foo') do
described_class.new([1, 2]).execute(priority: described_class::LOW_PRIORITY)
end
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker.jobs).to all(
include(Labkit::Context.log_key(:related_class) => 'Foo')
)
end
end
context 'with load balancing enabled' do
let(:service) { described_class.new([1, 2]) }
before do
expect(AuthorizedProjectsWorker).to receive(:bulk_perform_async)
.with([[1], [2]])
.and_return(10)
end
it 'sticks all the updated users and returns the original result', :aggregate_failures do
expect(ApplicationRecord.sticking).to receive(:bulk_stick).with(:user, [1, 2])
expect(service.execute).to eq(10)
end
it 'avoids N+1 cached queries', :use_sql_query_cache, :request_store do
# Run this once to establish a baseline
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
service.execute
end
service = described_class.new([1, 2, 3, 4, 5])
allow(AuthorizedProjectsWorker).to receive(:bulk_perform_async)
.with([[1], [2], [3], [4], [5]])
.and_return(10)
expect { service.execute }.not_to exceed_all_query_limit(control_count.count)
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# rubocop:disable GitlabSecurity/PublicSend
# NotificationService class
#
# Used for notifying users with emails about different events
#
# Ex.
# NotificationService.new.new_issue(issue, current_user)
#
# When calculating the recipients of a notification is expensive (for instance,
# in the new issue case), `#async` will make that calculation happen in Sidekiq
# instead:
#
# NotificationService.new.async.new_issue(issue, current_user)
#
class NotificationService
# These should not be called by the MailScheduler::NotificationServiceWorker -
# what would it even mean?
EXCLUDED_ACTIONS = %i[async].freeze
def self.permitted_actions
@permitted_actions ||= gitlab_extensions.flat_map do |klass|
klass.public_instance_methods(false) - EXCLUDED_ACTIONS
end.to_set
end
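# A hedged sketch of how a caller could use `permitted_actions` to guard
# dynamic dispatch (the surrounding worker code here is an assumption, not
# shown in this file):
#
#   if NotificationService.permitted_actions.include?(meth.to_sym)
#     NotificationService.new.public_send(meth, *args)
#   end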
class Async
attr_reader :parent
delegate :respond_to_missing?, to: :parent
def initialize(parent)
@parent = parent
end
def method_missing(meth, *args)
return super unless parent.respond_to?(meth)
MailScheduler::NotificationServiceWorker.perform_async(meth.to_s, *args)
end
end
def async
@async ||= Async.new(self)
end
def disabled_two_factor(user)
return unless user.can?(:receive_notifications)
mailer.disabled_two_factor_email(user).deliver_later
end
# Always notify the user about an added SSH key,
# but only if the SSH key is not a deploy key.
#
# This is a security email, so it will be sent
# even if the user has disabled notifications. However,
# it won't be sent to internal users like the
# ghost user or the EE support bot.
def new_key(key)
if key.user&.can?(:receive_notifications)
mailer.new_ssh_key_email(key.id).deliver_later
end
end
# Always notify the user about an added GPG key
#
# This is a security email, so it will be sent even if the user has disabled
# notifications
def new_gpg_key(gpg_key)
if gpg_key.user&.can?(:receive_notifications)
mailer.new_gpg_key_email(gpg_key.id).deliver_later
end
end
def resource_access_tokens_about_to_expire(bot_user, token_names)
recipients = bot_user.resource_bot_owners.select { |owner| owner.can?(:receive_notifications) }
resource = bot_user.resource_bot_resource
recipients.each do |recipient|
mailer.resource_access_tokens_about_to_expire_email(
recipient,
resource,
token_names
).deliver_later
end
end
# Notify the owner of the account when a new personal access token is created
def access_token_created(user, token_name)
return unless user.can?(:receive_notifications)
mailer.access_token_created_email(user, token_name).deliver_later
end
# Notify the owner of the personal access token when it is about to expire,
# and mark the token with about_to_expire_delivered
def access_token_about_to_expire(user, token_names)
return unless user.can?(:receive_notifications)
mailer.access_token_about_to_expire_email(user, token_names).deliver_later
end
# Notify the user when at least one of their personal access tokens has expired today
def access_token_expired(user, token_names = [])
return unless user.can?(:receive_notifications)
mailer.access_token_expired_email(user, token_names).deliver_later
end
# Notify the user when one of their personal access tokens is revoked
def access_token_revoked(user, token_name, source = nil)
return unless user.can?(:receive_notifications)
mailer.access_token_revoked_email(user, token_name, source).deliver_later
end
# Notify the user when at least one of their SSH keys has expired today
def ssh_key_expired(user, fingerprints)
return unless user.can?(:receive_notifications)
mailer.ssh_key_expired_email(user, fingerprints).deliver_later
end
# Notify the user when at least one of their SSH keys is expiring soon
def ssh_key_expiring_soon(user, fingerprints)
return unless user.can?(:receive_notifications)
mailer.ssh_key_expiring_soon_email(user, fingerprints).deliver_later
end
# Notify a user when a previously unknown IP or device is used to
# sign in to their account
def unknown_sign_in(user, ip, time)
return unless user.can?(:receive_notifications)
mailer.unknown_sign_in_email(user, ip, time).deliver_later
end
# Notify a user when a wrong 2FA OTP has been entered to
# try to sign in to their account
def two_factor_otp_attempt_failed(user, ip)
return unless user.can?(:receive_notifications)
mailer.two_factor_otp_attempt_failed_email(user, ip).deliver_later
end
# Notify a user when a new email address is added to their account
def new_email_address_added(user, email)
return unless user.can?(:receive_notifications)
mailer.new_email_address_added_email(user, email).deliver_later
end
# When we create an issue we should send an email to:
#
# * issue assignee if their notification level is not Disabled
# * project team members with notification level higher than Participating
# * watchers of the issue's labels
# * users with custom level checked with "new issue"
#
def new_issue(issue, current_user)
new_resource_email(issue, current_user, :new_issue_email)
end
# When issue text is updated, we should send an email to:
#
# * newly mentioned project team members with notification level higher than Participating
#
def new_mentions_in_issue(issue, new_mentioned_users, current_user)
new_mentions_in_resource_email(
issue,
new_mentioned_users,
current_user,
:new_mention_in_issue_email
)
end
# When we close an issue we should send an email to:
#
# * issue author if their notification level is not Disabled
# * issue assignee if their notification level is not Disabled
# * project team members with notification level higher than Participating
# * users with custom level checked with "close issue"
#
def close_issue(issue, current_user, params = {})
close_resource_email(issue, current_user, :closed_issue_email, closed_via: params[:closed_via])
end
# When we reassign an issue we should send an email to:
#
# * issue old assignees if their notification level is not Disabled
# * issue new assignees if their notification level is not Disabled
# * users with custom level checked with "reassign issue"
#
def reassigned_issue(issue, current_user, previous_assignees = [])
recipients = NotificationRecipients::BuildService.build_recipients(
issue,
current_user,
action: "reassign",
previous_assignees: previous_assignees
)
previous_assignee_ids = previous_assignees.map(&:id)
recipients.each do |recipient|
mailer.send(
:reassigned_issue_email,
recipient.user.id,
issue.id,
previous_assignee_ids,
current_user.id,
recipient.reason
).deliver_later
end
end
# When we add labels to an issue we should send an email to:
#
# * watchers of the issue's labels
#
def relabeled_issue(issue, added_labels, current_user)
relabeled_resource_email(issue, added_labels, current_user, :relabeled_issue_email)
end
# When we create a merge request we should send an email to:
#
# * mr author
# * mr assignees if their notification level is not Disabled
# * project team members with notification level higher than Participating
# * watchers of the mr's labels
# * users with custom level checked with "new merge request"
#
# In EE, approvers of the merge request are also included
def new_merge_request(merge_request, current_user)
new_resource_email(merge_request, current_user, :new_merge_request_email)
end
NEW_COMMIT_EMAIL_DISPLAY_LIMIT = 20
def push_to_merge_request(merge_request, current_user, new_commits: [], existing_commits: [])
total_new_commits_count = new_commits.count
truncated_new_commits = new_commits.first(NEW_COMMIT_EMAIL_DISPLAY_LIMIT).map do |commit|
{ short_id: commit.short_id, title: commit.title }
end
# We don't need the list of all existing commits. We need the first, the
# last, and the total number of existing commits only.
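# For example (counts invented): with 5 existing commits only the first and the
# last are kept below, while total_existing_commits_count stays 5 so the email
# can still report how many commits were omitted.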
total_existing_commits_count = existing_commits.count
existing_commits = [existing_commits.first, existing_commits.last] if total_existing_commits_count > 2
existing_commits = existing_commits.map do |commit|
{ short_id: commit.short_id, title: commit.title }
end
recipients = NotificationRecipients::BuildService.build_recipients(merge_request, current_user, action: "push_to")
recipients.each do |recipient|
mailer.send(
:push_to_merge_request_email,
recipient.user.id, merge_request.id, current_user.id, recipient.reason,
new_commits: truncated_new_commits, total_new_commits_count: total_new_commits_count,
existing_commits: existing_commits, total_existing_commits_count: total_existing_commits_count
).deliver_later
end
end
def change_in_merge_request_draft_status(merge_request, current_user)
recipients = NotificationRecipients::BuildService.build_recipients(merge_request, current_user, action: "draft_status_change")
recipients.each do |recipient|
mailer.send(
:change_in_merge_request_draft_status_email,
recipient.user.id,
merge_request.id,
current_user.id,
recipient.reason
).deliver_later
end
end
# When a merge request is found to be unmergeable, we should send an email to:
#
# * mr author
# * mr merge user if set
#
def merge_request_unmergeable(merge_request)
merge_request_unmergeable_email(merge_request)
end
# When merge request text is updated, we should send an email to:
#
# * newly mentioned project team members with notification level higher than Participating
#
def new_mentions_in_merge_request(merge_request, new_mentioned_users, current_user)
new_mentions_in_resource_email(
merge_request,
new_mentioned_users,
current_user,
:new_mention_in_merge_request_email
)
end
# When we reassign a merge_request we should send an email to:
#
# * merge_request old assignees if their notification level is not Disabled
# * merge_request new assignees if their notification level is not Disabled
# * users with custom level checked with "reassign merge request"
#
def reassigned_merge_request(merge_request, current_user, previous_assignees = [])
recipients = NotificationRecipients::BuildService.build_recipients(
merge_request,
current_user,
action: "reassign",
previous_assignees: previous_assignees
)
previous_assignee_ids = previous_assignees.map(&:id)
recipients.each do |recipient|
mailer.reassigned_merge_request_email(
recipient.user.id,
merge_request.id,
previous_assignee_ids,
current_user.id,
recipient.reason
).deliver_later
end
end
# When we change reviewer in a merge_request we should send an email to:
#
# * merge_request old reviewers if their notification level is not Disabled
# * merge_request new reviewers if their notification level is not Disabled
# * users with custom level checked with "change reviewer merge request"
#
def changed_reviewer_of_merge_request(merge_request, current_user, previous_reviewers = [])
recipients = NotificationRecipients::BuildService.build_recipients(
merge_request,
current_user,
action: "change_reviewer",
previous_assignees: previous_reviewers
)
previous_reviewer_ids = previous_reviewers.map(&:id)
recipients.each do |recipient|
mailer.changed_reviewer_of_merge_request_email(
recipient.user.id,
merge_request.id,
previous_reviewer_ids,
current_user.id,
recipient.reason
).deliver_later
end
end
def review_requested_of_merge_request(merge_request, current_user, reviewer)
recipients = NotificationRecipients::BuildService.build_requested_review_recipients(merge_request, current_user, reviewer)
deliver_option = review_request_deliver_options(merge_request.project)
recipients.each do |recipient|
mailer
.request_review_merge_request_email(recipient.user.id, merge_request.id, current_user.id, recipient.reason)
.deliver_later(deliver_option)
end
end
# When we add labels to a merge request we should send an email to:
#
# * watchers of the mr's labels
#
def relabeled_merge_request(merge_request, added_labels, current_user)
relabeled_resource_email(merge_request, added_labels, current_user, :relabeled_merge_request_email)
end
def close_mr(merge_request, current_user)
close_resource_email(merge_request, current_user, :closed_merge_request_email)
end
def reopen_issue(issue, current_user)
reopen_resource_email(issue, current_user, :issue_status_changed_email, 'reopened')
end
def merge_mr(merge_request, current_user)
close_resource_email(
merge_request,
current_user,
:merged_merge_request_email,
skip_current_user: !merge_request.auto_merge_enabled?
)
end
def reopen_mr(merge_request, current_user)
reopen_resource_email(
merge_request,
current_user,
:merge_request_status_email,
'reopened'
)
end
def resolve_all_discussions(merge_request, current_user)
recipients = NotificationRecipients::BuildService.build_recipients(
merge_request,
current_user,
action: "resolve_all_discussions")
recipients.each do |recipient|
mailer.resolved_all_discussions_email(recipient.user.id, merge_request.id, current_user.id, recipient.reason).deliver_later
end
end
# Notify new user with email after creation
def new_user(user, token = nil)
return true unless notifiable?(user, :mention)
# Don't email omniauth created users
mailer.new_user_email(user.id, token).deliver_later unless user.identities.any?
end
# Notify users on new note in system
def new_note(note)
return true unless note.noteable_type.present?
# ignore gitlab service messages
return true if note.system_note_with_references?
send_new_note_notifications(note)
send_service_desk_notification(note)
end
def send_new_note_notifications(note)
notify_method = "note_#{note.noteable_ability_name}_email".to_sym
recipients = NotificationRecipients::BuildService.build_new_note_recipients(note)
recipients.each do |recipient|
mailer.send(notify_method, recipient.user.id, note.id, recipient.reason).deliver_later
end
end
def send_service_desk_notification(note)
return unless note.noteable_type == 'Issue'
return if note.confidential
return unless note.project.service_desk_enabled?
issue = note.noteable
recipients = issue.email_participants_emails
return unless recipients.any?
support_bot = Users::Internal.support_bot
recipients.delete(issue.external_author) if note.author == support_bot
recipients.each do |recipient|
mailer.service_desk_new_note_email(issue.id, note.id, recipient).deliver_later
Gitlab::Metrics::BackgroundTransaction.current&.add_event(:service_desk_new_note_email)
end
end
# Notify users when a new release is created
def send_new_release_notifications(release)
unless release.author&.can_trigger_notifications?
warn_skipping_notifications(release.author, release)
return false
end
recipients = NotificationRecipients::BuildService.build_recipients(release,
release.author,
action: "new")
recipients.each do |recipient|
mailer.new_release_email(recipient.user.id, release, recipient.reason).deliver_later
end
end
def new_instance_access_request(user)
recipients = User.instance_access_request_approvers_to_be_notified # https://gitlab.com/gitlab-org/gitlab/-/issues/277016 will change this
return true if recipients.empty?
recipients.each do |recipient|
mailer.instance_access_request_email(user, recipient).deliver_later
end
end
def user_admin_rejection(name, email)
mailer.user_admin_rejection_email(name, email).deliver_later
end
def user_deactivated(name, email)
mailer.user_deactivated_email(name, email).deliver_later
end
# Members
def new_access_request(member)
return true unless member.notifiable?(:subscription)
recipients = member.source.access_request_approvers_to_be_notified
return true if recipients.empty?
recipients.each { |recipient| deliver_access_request_email(recipient, member) }
end
def decline_access_request(member)
return true unless member.notifiable?(:subscription)
mailer.member_access_denied_email(member.real_source_type, member.source_id, member.user_id).deliver_later
end
def decline_invite(member)
# Must always send, regardless of project/namespace configuration since it's a
# response to the user's action.
mailer.member_invite_declined_email(
member.real_source_type,
member.source.id,
member.invite_email,
member.created_by_id
).deliver_later
end
# Project invite
def invite_project_member(project_member, token)
return true unless project_member.notifiable?(:subscription)
mailer.member_invited_email(project_member.real_source_type, project_member.id, token).deliver_later
end
def accept_project_invite(project_member)
return true unless project_member.notifiable?(:subscription)
mailer.member_invite_accepted_email(project_member.real_source_type, project_member.id).deliver_later
end
def new_project_member(project_member)
return true unless project_member.notifiable?(:mention, skip_read_ability: true)
mailer.member_access_granted_email(project_member.real_source_type, project_member.id).deliver_later
end
def update_project_member(project_member)
return true unless project_member.notifiable?(:mention)
mailer.member_access_granted_email(project_member.real_source_type, project_member.id).deliver_later
end
def member_about_to_expire(member)
return true unless member.notifiable?(:mention)
mailer.member_about_to_expire_email(member.real_source_type, member.id).deliver_later
end
# Group invite
def invite_group_member(group_member, token)
mailer.member_invited_email(group_member.real_source_type, group_member.id, token).deliver_later
end
def invite_member_reminder(group_member, token, reminder_index)
mailer.member_invited_reminder_email(group_member.real_source_type, group_member.id, token, reminder_index).deliver_later
end
def accept_group_invite(group_member)
mailer.member_invite_accepted_email(group_member.real_source_type, group_member.id).deliver_later
end
def new_group_member(group_member)
return true unless group_member.notifiable?(:mention)
mailer.member_access_granted_email(group_member.real_source_type, group_member.id).deliver_later
end
def update_group_member(group_member)
return true unless group_member.notifiable?(:mention)
mailer.member_access_granted_email(group_member.real_source_type, group_member.id).deliver_later
end
def updated_group_member_expiration(group_member)
return true unless group_member.notifiable?(:mention)
mailer.member_expiration_date_updated_email(group_member.real_source_type, group_member.id).deliver_later
end
def project_was_moved(project, old_path_with_namespace)
recipients = project_moved_recipients(project)
recipients = notifiable_users(recipients, :custom, custom_action: :moved_project, project: project)
recipients.each do |recipient|
mailer.project_was_moved_email(
project.id,
recipient.id,
old_path_with_namespace
).deliver_later
end
end
def issue_moved(issue, new_issue, current_user)
recipients = NotificationRecipients::BuildService.build_recipients(issue, current_user, action: 'moved')
recipients.map do |recipient|
email = mailer.issue_moved_email(recipient.user, issue, new_issue, current_user, recipient.reason)
email.deliver_later
email
end
end
def issue_cloned(issue, new_issue, current_user)
recipients = NotificationRecipients::BuildService.build_recipients(issue, current_user, action: 'cloned')
recipients.map do |recipient|
email = mailer.issue_cloned_email(recipient.user, issue, new_issue, current_user, recipient.reason)
email.deliver_later
email
end
end
def project_exported(project, current_user)
return true unless notifiable?(current_user, :mention, project: project)
mailer.project_was_exported_email(current_user, project).deliver_later
end
def project_not_exported(project, current_user, errors)
return true unless notifiable?(current_user, :mention, project: project)
mailer.project_was_not_exported_email(current_user, project, errors).deliver_later
end
def pipeline_finished(pipeline, ref_status: nil, recipients: nil)
# Must always check project configuration since recipients could be a list of emails
# from the PipelinesEmailService integration.
return if pipeline.project.emails_disabled?
status = pipeline_notification_status(ref_status, pipeline)
email_template = "pipeline_#{status}_email"
return unless mailer.respond_to?(email_template)
recipients ||= notifiable_users(
[pipeline.user], :watch,
custom_action: :"#{status}_pipeline",
target: pipeline
).map do |user|
user.notification_email_for(pipeline.project.group)
end
recipients.each do |recipient|
mailer.public_send(email_template, pipeline, recipient).deliver_later
end
end
def autodevops_disabled(pipeline, recipients)
return if pipeline.project.emails_disabled?
recipients.each do |recipient|
mailer.autodevops_disabled_email(pipeline, recipient).deliver_later
end
end
def pages_domain_verification_succeeded(domain)
project_maintainers_recipients(domain, action: 'succeeded').each do |recipient|
mailer.pages_domain_verification_succeeded_email(domain, recipient.user).deliver_later
end
end
def pages_domain_verification_failed(domain)
project_maintainers_recipients(domain, action: 'failed').each do |recipient|
mailer.pages_domain_verification_failed_email(domain, recipient.user).deliver_later
end
end
def pages_domain_enabled(domain)
project_maintainers_recipients(domain, action: 'enabled').each do |recipient|
mailer.pages_domain_enabled_email(domain, recipient.user).deliver_later
end
end
def pages_domain_disabled(domain)
project_maintainers_recipients(domain, action: 'disabled').each do |recipient|
mailer.pages_domain_disabled_email(domain, recipient.user).deliver_later
end
end
def pages_domain_auto_ssl_failed(domain)
project_maintainers_recipients(domain, action: 'disabled').each do |recipient|
mailer.pages_domain_auto_ssl_failed_email(domain, recipient.user).deliver_later
end
end
def issue_due(issue)
recipients = NotificationRecipients::BuildService.build_recipients(
issue,
issue.author,
action: 'due',
custom_action: :issue_due,
skip_current_user: false
)
recipients.each do |recipient|
mailer.send(:issue_due_email, recipient.user.id, issue.id, recipient.reason).deliver_later
end
end
def repository_cleanup_success(project, user)
return if project.emails_disabled?
mailer.send(:repository_cleanup_success_email, project, user).deliver_later
end
def repository_cleanup_failure(project, user, error)
return if project.emails_disabled?
mailer.send(:repository_cleanup_failure_email, project, user, error).deliver_later
end
def remote_mirror_update_failed(remote_mirror)
recipients = project_maintainers_recipients(remote_mirror, action: 'update_failed')
recipients.each do |recipient|
mailer.remote_mirror_update_failed_email(remote_mirror.id, recipient.user.id).deliver_later
end
end
def prometheus_alerts_fired(project, alerts)
return if project.emails_disabled?
owners_and_maintainers_without_invites(project).to_a.product(alerts).each do |recipient, alert|
mailer.prometheus_alert_fired_email(project, recipient.user, alert).deliver_later
end
end
def group_was_exported(group, current_user)
return true unless notifiable?(current_user, :mention, group: group)
mailer.group_was_exported_email(current_user, group).deliver_later
end
def group_was_not_exported(group, current_user, errors)
return true unless notifiable?(current_user, :mention, group: group)
mailer.group_was_not_exported_email(current_user, group, errors).deliver_later
end
# Notify users on new review in system
def new_review(review)
recipients = NotificationRecipients::BuildService.build_new_review_recipients(review)
deliver_options = new_review_deliver_options(review)
recipients.each do |recipient|
mailer
.new_review_email(recipient.user.id, review.id)
.deliver_later(deliver_options)
end
end
def merge_when_pipeline_succeeds(merge_request, current_user)
recipients = ::NotificationRecipients::BuildService.build_recipients(
merge_request,
current_user,
action: 'merge_when_pipeline_succeeds',
custom_action: :merge_when_pipeline_succeeds
)
recipients.each do |recipient|
mailer.merge_when_pipeline_succeeds_email(recipient.user.id, merge_request.id, current_user.id).deliver_later
end
end
def approve_mr(merge_request, current_user)
approve_mr_email(merge_request, merge_request.target_project, current_user)
end
def unapprove_mr(merge_request, current_user)
unapprove_mr_email(merge_request, merge_request.target_project, current_user)
end
def inactive_project_deletion_warning(project, deletion_date)
owners_and_maintainers_without_invites(project).each do |recipient|
mailer.inactive_project_deletion_warning_email(project, recipient.user, deletion_date).deliver_later
end
end
def removed_milestone(target, current_user)
method = case target
when Issue
:removed_milestone_issue_email
when MergeRequest
:removed_milestone_merge_request_email
end
recipients = NotificationRecipients::BuildService.build_recipients(
target,
current_user,
action: 'removed_milestone'
)
recipients.each do |recipient|
mailer.send(method, recipient.user.id, target.id, current_user.id).deliver_later
end
end
def changed_milestone(target, milestone, current_user)
method = case target
when Issue
:changed_milestone_issue_email
when MergeRequest
:changed_milestone_merge_request_email
end
recipients = NotificationRecipients::BuildService.build_recipients(
target,
current_user,
action: 'changed_milestone'
)
recipients.each do |recipient|
mailer.send(method, recipient.user.id, target.id, milestone, current_user.id).deliver_later
end
end
def new_achievement_email(user, achievement)
mailer.new_achievement_email(user, achievement)
end
protected
def new_resource_email(target, current_user, method)
unless current_user&.can_trigger_notifications?
warn_skipping_notifications(current_user, target)
return false
end
recipients = NotificationRecipients::BuildService.build_recipients(target, target.author, action: "new")
recipients.each do |recipient|
mailer.send(method, recipient.user.id, target.id, recipient.reason).deliver_later
end
end
def new_mentions_in_resource_email(target, new_mentioned_users, current_user, method)
unless current_user&.can_trigger_notifications?
warn_skipping_notifications(current_user, target)
return false
end
recipients = NotificationRecipients::BuildService.build_recipients(target, current_user, action: "new")
recipients = recipients.select { |r| new_mentioned_users.include?(r.user) }
recipients.each do |recipient|
mailer.send(method, recipient.user.id, target.id, current_user.id, recipient.reason).deliver_later
end
end
def close_resource_email(target, current_user, method, skip_current_user: true, closed_via: nil)
action = method == :merged_merge_request_email ? "merge" : "close"
recipients = NotificationRecipients::BuildService.build_recipients(
target,
current_user,
action: action,
skip_current_user: skip_current_user
)
recipients.each do |recipient|
mailer.send(method, recipient.user.id, target.id, current_user.id, reason: recipient.reason, closed_via: closed_via).deliver_later
end
end
def relabeled_resource_email(target, labels, current_user, method)
recipients = labels.flat_map { |l| l.subscribers(target.project) }.uniq
recipients = notifiable_users(
recipients, :subscription,
target: target,
acting_user: current_user
)
label_names = labels.map(&:name)
recipients.each do |recipient|
mailer.send(method, recipient.id, target.id, label_names, current_user.id).deliver_later
end
end
def reopen_resource_email(target, current_user, method, status)
recipients = NotificationRecipients::BuildService.build_recipients(target, current_user, action: "reopen")
recipients.each do |recipient|
mailer.send(method, recipient.user.id, target.id, status, current_user.id, recipient.reason).deliver_later
end
end
def merge_request_unmergeable_email(merge_request)
recipients = NotificationRecipients::BuildService.build_merge_request_unmergeable_recipients(merge_request)
recipients.each do |recipient|
mailer.merge_request_unmergeable_email(recipient.user.id, merge_request.id).deliver_later
end
end
def mailer
Notify
end
private
def approve_mr_email(merge_request, project, current_user)
recipients = ::NotificationRecipients::BuildService.build_recipients(merge_request, current_user, action: 'approve')
recipients.each do |recipient|
mailer.approved_merge_request_email(recipient.user.id, merge_request.id, current_user.id).deliver_later
end
end
def unapprove_mr_email(merge_request, project, current_user)
recipients = ::NotificationRecipients::BuildService.build_recipients(merge_request, current_user, action: 'unapprove')
recipients.each do |recipient|
mailer.unapproved_merge_request_email(recipient.user.id, merge_request.id, current_user.id).deliver_later
end
end
def pipeline_notification_status(ref_status, pipeline)
if Ci::Ref.failing_state?(ref_status)
'failed'
elsif ref_status
ref_status
else
pipeline.status
end
end
def owners_and_maintainers_without_invites(project)
recipients = project.members.active_without_invites_and_requests.owners_and_maintainers
if recipients.empty? && project.group
recipients = project.group.members.active_without_invites_and_requests.owners_and_maintainers
end
recipients
end
def project_moved_recipients(project)
finder = MembersFinder.new(project, nil, params: {
active_without_invites_and_requests: true,
owners_and_maintainers: true
})
finder.execute.preload_user_and_notification_settings.map(&:user)
end
def project_maintainers_recipients(target, action:)
NotificationRecipients::BuildService.build_project_maintainers_recipients(target, action: action)
end
def notifiable?(...)
NotificationRecipients::BuildService.notifiable?(...)
end
def notifiable_users(...)
NotificationRecipients::BuildService.notifiable_users(...)
end
def deliver_access_request_email(recipient, member)
mailer.member_access_requested_email(member.real_source_type, member.id, recipient.user.id).deliver_later
end
def warn_skipping_notifications(user, object)
Gitlab::AppLogger.warn(message: "Skipping sending notifications", user: user.id, klass: object.class.to_s, object_id: object.id)
end
def new_review_deliver_options(review)
# Overridden in EE
{}
end
def review_request_deliver_options(project)
# Overridden in EE
{}
end
end
NotificationService.prepend_mod_with('NotificationService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
include EmailSpec::Matchers
include ExternalAuthorizationServiceHelpers
include NotificationHelpers
let_it_be_with_refind(:project, reload: true) { create(:project, :public) }
let_it_be_with_refind(:assignee) { create(:user) }
let(:notification) { described_class.new }
around(:example, :deliver_mails_inline) do |example|
# This is a temporary `around` hook until all the examples check the
# background jobs queue instead of the delivered emails array.
# `perform_enqueued_jobs` makes the ActiveJob jobs (e.g. mailer jobs) run inline
# compared to `Sidekiq::Testing.inline!` which makes the Sidekiq jobs run inline.
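# Roughly, as an illustrative comparison only:
#   perform_enqueued_jobs { notification.new_key(key) } # runs the ActiveJob mailer jobs inline
#   Sidekiq::Testing.inline! { ... }                     # would run the Sidekiq jobs inline instead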
perform_enqueued_jobs { example.run }
end
shared_examples 'altered milestone notification on issue' do
it 'sends the email to the correct people' do
should_email(subscriber_to_new_milestone)
issue.assignees.each do |a|
should_email(a)
end
should_email(@u_watcher)
should_email(@u_guest_watcher)
should_email(@u_participant_mentioned)
should_email(@subscriber)
should_email(@subscribed_participant)
should_email(@watcher_and_subscriber)
should_not_email(@u_guest_custom)
should_not_email(@u_committer)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_lazy_participant)
should_not_email(issue.author)
should_not_email(@u_disabled)
should_not_email(@u_custom_global)
should_not_email(@u_mentioned)
end
end
shared_examples 'altered milestone notification on merge request' do
it 'sends the email to the correct people' do
should_email(subscriber_to_new_milestone)
merge_request.assignees.each do |a|
should_email(a)
end
should_email(@u_watcher)
should_email(@u_guest_watcher)
should_email(@u_participant_mentioned)
should_email(@subscriber)
should_email(@subscribed_participant)
should_email(@watcher_and_subscriber)
should_not_email(@u_guest_custom)
should_not_email(@u_committer)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_lazy_participant)
should_not_email(merge_request.author)
should_not_email(@u_disabled)
should_not_email(@u_custom_global)
should_not_email(@u_mentioned)
end
end
shared_examples 'notifications for new mentions' do
it 'sends no emails when no new mentions are present' do
send_notifications
should_not_email_anyone
end
it 'emails new mentions with a watch level higher than mention' do
send_notifications(@u_watcher, @u_participant_mentioned, @u_custom_global, @u_mentioned)
should_only_email(@u_watcher, @u_participant_mentioned, @u_custom_global, @u_mentioned)
end
it 'does not email new mentions with a watch level equal to or less than mention' do
send_notifications(@u_disabled)
should_not_email_anyone
end
it 'emails new mentions despite being unsubscribed' do
send_notifications(@unsubscribed_mentioned)
should_only_email(@unsubscribed_mentioned)
end
it 'sends the proper notification reason header' do
send_notifications(@u_watcher)
should_only_email(@u_watcher)
email = find_email_for(@u_watcher)
expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::MENTIONED)
end
end
shared_examples 'is not able to send notifications' do |check_delivery_jobs_queue: false|
it 'does not send any notification' do
user_1 = create(:user)
recipient_1 = NotificationRecipient.new(user_1, :custom, custom_action: :new_release)
allow(NotificationRecipients::BuildService).to receive(:build_new_release_recipients).and_return([recipient_1])
expect(Gitlab::AppLogger).to receive(:warn).with(message: 'Skipping sending notifications', user: current_user.id, klass: object.class.to_s, object_id: object.id)
if check_delivery_jobs_queue
expect do
action
end.to not_enqueue_mail_with(Notify, notification_method, @u_mentioned, anything, anything)
.and(not_enqueue_mail_with(Notify, notification_method, @u_guest_watcher, anything, anything))
.and(not_enqueue_mail_with(Notify, notification_method, user_1, anything, anything))
.and(not_enqueue_mail_with(Notify, notification_method, current_user, anything, anything))
else
action
should_not_email(@u_mentioned)
should_not_email(@u_guest_watcher)
should_not_email(user_1)
should_not_email(current_user)
end
end
end
# Next shared examples are intended to test notifications of "participants"
#
# they take the following parameters:
# * issuable
# * notification trigger
# * participant
#
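# A hypothetical usage sketch (the names below are illustrative, not lifted
# from a specific example in this file):
#
#   context 'participating notifications on close' do
#     let(:issuable) { issue }
#     let(:participant) { create(:user) }
#     let(:notification_trigger) { notification.close_issue(issue, @u_disabled) }
#     it_behaves_like 'participating notifications'
#   end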
shared_examples 'participating by note notification' do |check_delivery_jobs_queue: false|
it 'emails the participant' do
create(:note_on_issue, noteable: issuable, project_id: project.id, note: 'anything', author: participant)
if check_delivery_jobs_queue
expect do
notification_trigger
end.to enqueue_mail_with(Notify, mailer_method, *expectation_args_for_user(participant))
else
notification_trigger
should_email(participant)
end
end
context 'for subgroups' do
before do
build_group(project)
end
it 'emails the participant' do
create(:note_on_issue, noteable: issuable, project_id: project.id, note: 'anything', author: @pg_participant)
if check_delivery_jobs_queue
expect do
notification_trigger
end.to enqueue_mail_with(Notify, mailer_method, *expectation_args_for_user(@pg_participant))
else
notification_trigger
should_email_nested_group_user(@pg_participant)
end
end
end
end
shared_examples 'participating by confidential note notification' do |check_delivery_jobs_queue: false|
context 'when user is mentioned on confidential note' do
let_it_be(:guest_1) { create(:user) }
let_it_be(:guest_2) { create(:user) }
let_it_be(:reporter) { create(:user) }
before do
issuable.resource_parent.add_guest(guest_1)
issuable.resource_parent.add_guest(guest_2)
issuable.resource_parent.add_reporter(reporter)
end
it 'only emails authorized users' do
confidential_note_text = "#{guest_1.to_reference} and #{guest_2.to_reference} and #{reporter.to_reference}"
note_text = "Mentions #{guest_2.to_reference}"
create(:note_on_issue, noteable: issuable, project_id: project.id, note: confidential_note_text, confidential: true)
create(:note_on_issue, noteable: issuable, project_id: project.id, note: note_text)
if check_delivery_jobs_queue
expect do
notification_trigger
end.to enqueue_mail_with(Notify, mailer_method, *expectation_args_for_user(guest_2))
.and(enqueue_mail_with(Notify, mailer_method, *expectation_args_for_user(reporter)))
.and(not_enqueue_mail_with(Notify, mailer_method, *expectation_args_for_user(guest_1)))
else
reset_delivered_emails!
notification_trigger
should_not_email(guest_1)
should_email(guest_2)
should_email(reporter)
end
end
end
end
shared_examples 'participating by assignee notification' do |check_delivery_jobs_queue: false|
it 'emails the participant' do
issuable.assignees << participant
if check_delivery_jobs_queue
expect do
notification_trigger
end.to enqueue_mail_with(Notify, mailer_method, *expectation_args_for_user(participant))
else
notification_trigger
should_email(participant)
end
end
end
shared_examples 'participating by author notification' do |check_delivery_jobs_queue: false|
it 'emails the participant' do
issuable.author = participant
if check_delivery_jobs_queue
expect do
notification_trigger
end.to enqueue_mail_with(Notify, mailer_method, *expectation_args_for_user(participant))
else
notification_trigger
should_email(participant)
end
end
end
shared_examples 'participating by reviewer notification' do
it 'emails the participant' do
issuable.reviewers << participant
notification_trigger
should_email(participant)
end
end
shared_examples_for 'participating notifications' do |check_delivery_jobs_queue: false|
it_behaves_like 'participating by note notification', check_delivery_jobs_queue: check_delivery_jobs_queue
it_behaves_like 'participating by author notification', check_delivery_jobs_queue: check_delivery_jobs_queue
it_behaves_like 'participating by assignee notification', check_delivery_jobs_queue: check_delivery_jobs_queue
end
shared_examples 'declines the invite' do
specify do
member = source.members.last
expect do
notification.decline_invite(member)
end.to change { ActionMailer::Base.deliveries.size }.by(1)
end
end
describe '.permitted_actions' do
it 'includes public methods' do
expect(described_class.permitted_actions).to include(:access_token_created)
end
it 'excludes EXCLUDED_ACTIONS' do
described_class::EXCLUDED_ACTIONS.each do |action|
expect(described_class.permitted_actions).not_to include(action)
end
end
it 'excludes protected and private methods' do
expect(described_class.permitted_actions).not_to include(:new_resource_email)
expect(described_class.permitted_actions).not_to include(:approve_mr_email)
end
end
describe '#async' do
let(:async) { notification.async }
let_it_be(:key) { create(:personal_key) }
it 'returns an Async object with the correct parent' do
expect(async).to be_a(described_class::Async)
expect(async.parent).to eq(notification)
end
context 'when receiving a public method' do
it 'schedules a MailScheduler::NotificationServiceWorker' do
expect(MailScheduler::NotificationServiceWorker)
.to receive(:perform_async).with('new_key', key)
async.new_key(key)
end
end
context 'when receiving a private method' do
it 'raises NoMethodError' do
expect { async.notifiable?(key) }.to raise_error(NoMethodError)
end
end
context 'when receiving a non-existent method' do
it 'raises NoMethodError' do
expect { async.foo(key) }.to raise_error(NoMethodError)
end
end
end
describe 'Keys' do
describe '#new_key' do
let(:key_options) { {} }
let!(:key) { build_stubbed(:personal_key, key_options) }
subject { notification.new_key(key) }
it "sends email to key owner" do
expect { subject }.to have_enqueued_email(key.id, mail: "new_ssh_key_email")
end
describe "never emails the ghost user" do
let(:key_options) { { user: Users::Internal.ghost } }
it "does not send email to key owner" do
expect { subject }.not_to have_enqueued_email(key.id, mail: "new_ssh_key_email")
end
end
end
end
describe 'GpgKeys' do
describe '#new_gpg_key' do
let(:key_options) { {} }
let(:key) { create(:gpg_key, key_options) }
subject { notification.new_gpg_key(key) }
it "sends email to key owner" do
expect { subject }.to have_enqueued_email(key.id, mail: "new_gpg_key_email")
end
describe "never emails the ghost user" do
let(:key_options) { { user: Users::Internal.ghost } }
it "does not send email to key owner" do
expect { subject }.not_to have_enqueued_email(key.id, mail: "new_gpg_key_email")
end
end
end
end
describe 'AccessToken' do
describe '#access_token_created' do
let_it_be(:user) { create(:user) }
let_it_be(:pat) { create(:personal_access_token, user: user) }
subject(:notification_service) { notification.access_token_created(user, pat.name) }
it 'sends email to the token owner' do
expect { notification_service }.to have_enqueued_email(user, pat.name, mail: "access_token_created_email")
end
context 'when user is not allowed to receive notifications' do
before do
user.block!
end
it 'does not send email to the token owner' do
expect { notification_service }.not_to have_enqueued_email(user, pat.name, mail: "access_token_created_email")
end
end
end
describe '#resource_access_tokens_about_to_expire' do
let_it_be(:project_bot) { create(:user, :project_bot) }
let_it_be(:expiring_token) { create(:personal_access_token, user: project_bot, expires_at: 5.days.from_now) }
let_it_be(:owner1) { create(:user) }
let_it_be(:owner2) { create(:user) }
subject(:notification_service) do
notification.resource_access_tokens_about_to_expire(project_bot, [expiring_token.name])
end
context 'when the resource is a group' do
let(:group) { create(:group) }
before do
group.add_owner(owner1)
group.add_owner(owner2)
group.add_reporter(project_bot)
end
it 'sends emails to the group owners' do
expect { notification_service }.to(
have_enqueued_email(
owner1,
project_bot.resource_bot_resource,
[expiring_token.name],
mail: "resource_access_tokens_about_to_expire_email"
).and(
have_enqueued_email(
owner2,
project_bot.resource_bot_resource,
[expiring_token.name],
mail: "resource_access_tokens_about_to_expire_email"
)
)
)
end
end
context 'when the resource is a project' do
let(:project) { create(:project) }
before do
project.add_maintainer(owner1)
project.add_maintainer(owner2)
project.add_reporter(project_bot)
end
it 'sends emails to the project maintainers' do
expect { notification_service }.to(
have_enqueued_email(
owner1,
project_bot.resource_bot_resource,
[expiring_token.name],
mail: "resource_access_tokens_about_to_expire_email"
).and(
have_enqueued_email(
owner2,
project_bot.resource_bot_resource,
[expiring_token.name],
mail: "resource_access_tokens_about_to_expire_email"
)
)
)
end
end
end
describe '#access_token_about_to_expire' do
let_it_be(:user) { create(:user) }
let_it_be(:pat) { create(:personal_access_token, user: user, expires_at: 5.days.from_now) }
subject { notification.access_token_about_to_expire(user, [pat.name]) }
it 'sends email to the token owner' do
expect { subject }.to have_enqueued_email(user, [pat.name], mail: "access_token_about_to_expire_email")
end
end
describe '#access_token_expired' do
let_it_be(:user) { create(:user) }
let_it_be(:pat) { create(:personal_access_token, user: user) }
subject { notification.access_token_expired(user, pat.name) }
it 'sends email to the token owner' do
expect { subject }.to have_enqueued_email(user, pat.name, mail: "access_token_expired_email")
end
context 'when user is not allowed to receive notifications' do
before do
user.block!
end
it 'does not send email to the token owner' do
expect { subject }.not_to have_enqueued_email(user, pat.name, mail: "access_token_expired_email")
end
end
end
describe '#access_token_revoked' do
let_it_be(:user) { create(:user) }
let_it_be(:pat) { create(:personal_access_token, user: user) }
subject(:notification_service) { notification.access_token_revoked(user, pat.name) }
it 'sends email to the token owner without source' do
expect { notification_service }.to have_enqueued_email(user, pat.name, nil, mail: "access_token_revoked_email")
end
it 'sends email to the token owner with source' do
expect do
notification.access_token_revoked(user, pat.name, 'secret_detection')
end.to have_enqueued_email(user, pat.name, 'secret_detection', mail: "access_token_revoked_email")
end
context 'when user is not allowed to receive notifications' do
before do
user.block!
end
it 'does not send email to the token owner' do
expect { notification_service }.not_to have_enqueued_email(user, pat.name, mail: "access_token_revoked_email")
end
end
end
end
describe 'SSH Keys' do
let_it_be_with_reload(:user) { create(:user) }
let_it_be(:fingerprints) { ["aa:bb:cc:dd:ee:zz"] }
shared_context 'block user' do
before do
user.block!
end
end
describe '#ssh_key_expired' do
subject { notification.ssh_key_expired(user, fingerprints) }
it 'sends email to the key owner' do
expect { subject }.to have_enqueued_email(user, fingerprints, mail: "ssh_key_expired_email")
end
context 'when user is not allowed to receive notifications' do
include_context 'block user'
it 'does not send email to the key owner' do
expect { subject }.not_to have_enqueued_email(user, fingerprints, mail: "ssh_key_expired_email")
end
end
end
describe '#ssh_key_expiring_soon' do
subject { notification.ssh_key_expiring_soon(user, fingerprints) }
it 'sends email to the key owner' do
expect { subject }.to have_enqueued_email(user, fingerprints, mail: "ssh_key_expiring_soon_email")
end
context 'when user is not allowed to receive notifications' do
include_context 'block user'
it 'does not send email to the key owner' do
expect { subject }.not_to have_enqueued_email(user, fingerprints, mail: "ssh_key_expiring_soon_email")
end
end
end
end
describe '#unknown_sign_in' do
let_it_be(:user) { create(:user) }
let_it_be(:ip) { '127.0.0.1' }
let_it_be(:time) { Time.current }
subject { notification.unknown_sign_in(user, ip, time) }
it 'sends email to the user' do
expect { subject }.to have_enqueued_email(user, ip, time, mail: 'unknown_sign_in_email')
end
end
describe '#disabled_two_factor' do
let_it_be(:user) { create(:user) }
subject { notification.disabled_two_factor(user) }
it 'sends email to the user' do
expect { subject }.to have_enqueued_email(user, mail: 'disabled_two_factor_email')
end
end
describe '#new_email_address_added' do
let_it_be(:user) { create(:user) }
let_it_be(:email) { create(:email, user: user) }
subject { notification.new_email_address_added(user, email) }
it 'sends email to the user' do
expect { subject }.to have_enqueued_email(user, email, mail: 'new_email_address_added_email')
end
end
describe 'Notes' do
context 'issue note' do
let_it_be(:project) { create(:project, :private) }
let_it_be_with_reload(:issue) { create(:issue, project: project, assignees: [assignee]) }
let_it_be(:mentioned_issue) { create(:issue, assignees: issue.assignees) }
let_it_be_with_reload(:author) { create(:user) }
let(:note) { create(:note_on_issue, author: author, noteable: issue, project_id: issue.project_id, note: '@mention referenced, @unsubscribed_mentioned and @outsider also') }
subject { notification.new_note(note) }
context 'issue_email_participants' do
before do
allow(Notify).to receive(:service_desk_new_note_email)
.with(Integer, Integer, String).and_return(mailer)
allow(::Gitlab::Email::IncomingEmail).to receive(:enabled?) { true }
allow(::Gitlab::Email::IncomingEmail).to receive(:supports_wildcard?) { true }
end
let(:subject) { described_class.new }
let(:mailer) { double(deliver_later: true) }
let(:issue) { create(:issue, author: Users::Internal.support_bot) }
let(:project) { issue.project }
let(:note) { create(:note, noteable: issue, project: project) }
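# Service Desk setup: these `let`s override the outer context so the noteable is an
# issue authored by the Service Desk support bot, and `Notify.service_desk_new_note_email`
# is stubbed to return a lightweight mailer double. The shared examples below assert both
# the delivery itself and the `:service_desk_new_note_email` metric counter.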
shared_examples 'notification with exact metric events' do |number_of_events|
it 'adds metric event' do
metric_transaction = double('Gitlab::Metrics::WebTransaction', increment: true, observe: true)
allow(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(metric_transaction)
expect(metric_transaction).to receive(:add_event).with(:service_desk_new_note_email).exactly(number_of_events).times
subject.new_note(note)
end
end
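# The metric assertion above works by swapping the current BackgroundTransaction for a
# double, so `add_event(:service_desk_new_note_email)` calls can be counted exactly
# without touching the real metrics backend.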
shared_examples 'no participants are notified' do
it 'does not send the email' do
expect(Notify).not_to receive(:service_desk_new_note_email)
subject.new_note(note)
end
it_behaves_like 'notification with exact metric events', 0
end
it_behaves_like 'no participants are notified'
context 'do exist and note not confidential' do
let!(:issue_email_participant) { issue.issue_email_participants.create!(email: '[email protected]') }
before do
issue.update!(external_author: '[email protected]')
project.update!(service_desk_enabled: true)
end
it 'sends the email' do
expect(Notify).to receive(:service_desk_new_note_email)
.with(issue.id, note.id, issue.external_author)
subject.new_note(note)
end
it_behaves_like 'notification with exact metric events', 1
context 'when service desk is disabled' do
before do
project.update!(service_desk_enabled: false)
end
it_behaves_like 'no participants are notified'
end
end
context 'do exist and note is confidential' do
let(:note) { create(:note, noteable: issue, project: project, confidential: true) }
let!(:issue_email_participant) { issue.issue_email_participants.create!(email: '[email protected]') }
before do
issue.update!(external_author: '[email protected]')
project.update!(service_desk_enabled: true)
end
it_behaves_like 'no participants are notified'
end
end
describe '#new_note' do
before_all do
build_team(project)
project.add_maintainer(issue.author)
project.add_maintainer(assignee)
project.add_maintainer(author)
@u_custom_off = create_user_with_notification(:custom, 'custom_off')
project.add_guest(@u_custom_off)
create(
:note_on_issue,
author: @u_custom_off,
noteable: issue,
project_id: issue.project_id,
note: 'i think @subscribed_participant should see this'
)
update_custom_notification(:new_note, @u_guest_custom, resource: project)
update_custom_notification(:new_note, @u_custom_global)
end
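# Note on the setup above: @u_custom_off has the :new_note event switched off via their
# custom level, but authoring an earlier note on the issue presumably makes them a
# participant, which is why they still appear among the expected recipients below.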
context 'with users' do
before_all do
add_users(project)
add_user_subscriptions(issue)
end
before do
reset_delivered_emails!
end
it 'sends emails to recipients', :aggregate_failures do
subject
expect_delivery_jobs_count(10)
expect_enqueud_email(@u_watcher.id, note.id, nil, mail: "note_issue_email")
expect_enqueud_email(note.noteable.author.id, note.id, nil, mail: "note_issue_email")
expect_enqueud_email(note.noteable.assignees.first.id, note.id, nil, mail: "note_issue_email")
expect_enqueud_email(@u_custom_global.id, note.id, nil, mail: "note_issue_email")
expect_enqueud_email(@u_mentioned.id, note.id, "mentioned", mail: "note_issue_email")
expect_enqueud_email(@subscriber.id, note.id, "subscribed", mail: "note_issue_email")
expect_enqueud_email(@watcher_and_subscriber.id, note.id, "subscribed", mail: "note_issue_email")
expect_enqueud_email(@subscribed_participant.id, note.id, "subscribed", mail: "note_issue_email")
expect_enqueud_email(@u_custom_off.id, note.id, nil, mail: "note_issue_email")
expect_enqueud_email(@unsubscribed_mentioned.id, note.id, "mentioned", mail: "note_issue_email")
end
it "emails the note author if they've opted into notifications about their activity", :deliver_mails_inline do
note.author.notified_of_own_activity = true
notification.new_note(note)
should_email(note.author)
expect(find_email_for(note.author)).to have_header('X-GitLab-NotificationReason', 'own_activity')
end
it_behaves_like 'project emails are disabled', check_delivery_jobs_queue: true do
let(:notification_target) { note }
let(:notification_trigger) { notification.new_note(note) }
end
end
it 'filters out "mentioned in" notes' do
mentioned_note = SystemNoteService.cross_reference(mentioned_issue, issue, issue.author)
reset_delivered_emails!
notification.new_note(mentioned_note)
expect_no_delivery_jobs
end
context 'participating' do
context 'by note' do
before do
note.author = @u_lazy_participant
note.save!
end
it { expect { subject }.not_to have_enqueued_email(@u_lazy_participant.id, note.id, mail: "note_issue_email") }
end
end
context 'in project that belongs to a group' do
let_it_be(:parent_group) { create(:group) }
before do
note.project.namespace_id = group.id
group.add_member(@u_watcher, GroupMember::MAINTAINER)
group.add_member(@u_custom_global, GroupMember::MAINTAINER)
note.project.save!
@u_watcher.notification_settings_for(note.project).participating!
@u_watcher.notification_settings_for(group).global!
update_custom_notification(:new_note, @u_custom_global)
reset_delivered_emails!
end
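# With the project moved under the group, @u_watcher's project-level setting is
# :participating while their group-level setting is :global. The more specific project
# setting takes precedence, and since @u_watcher does not participate in the discussion
# they are not emailed (see 'new note notifications' below).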
shared_examples 'new note notifications' do
it 'sends notifications', :deliver_mails_inline do
notification.new_note(note)
should_email(note.noteable.author)
should_email(note.noteable.assignees.first)
should_email(@u_mentioned)
should_email(@u_custom_global)
should_not_email(@u_guest_custom)
should_not_email(@u_guest_watcher)
should_not_email(@u_watcher)
should_not_email(note.author)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
expect(find_email_for(@u_mentioned)).to have_header('X-GitLab-NotificationReason', 'mentioned')
expect(find_email_for(@u_custom_global)).to have_header('X-GitLab-NotificationReason', '')
end
end
context 'which is a top-level group' do
let!(:group) { parent_group }
it_behaves_like 'new note notifications'
it_behaves_like 'project emails are disabled', check_delivery_jobs_queue: true do
let(:notification_target) { note }
let(:notification_trigger) { notification.new_note(note) }
end
end
context 'which is a subgroup' do
let!(:group) { create(:group, parent: parent_group) }
it_behaves_like 'new note notifications'
it 'overrides child objects with global level' do
user = create(:user)
parent_group.add_developer(user)
user.notification_settings_for(parent_group).watch!
reset_delivered_emails!
notification.new_note(note)
expect_enqueud_email(user.id, note.id, nil, mail: "note_issue_email")
end
end
end
end
end
context 'confidential issue note' do
let(:author) { create(:user) }
let(:non_member) { create(:user) }
let(:member) { create(:user) }
let(:guest) { create(:user) }
let(:admin) { create(:admin) }
let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignees: [assignee]) }
let(:note) do
create(
:note_on_issue,
noteable: confidential_issue,
project: project,
note: "#{author.to_reference} #{assignee.to_reference} #{non_member.to_reference} #{member.to_reference} #{admin.to_reference}"
)
end
let(:guest_watcher) { create_user_with_notification(:watch, "guest-watcher-confidential") }
subject { notification.new_note(note) }
before do
project.add_developer(member)
project.add_guest(guest)
reset_delivered_emails!
end
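# Only four delivery jobs are expected below: the author, the assignee, the project
# developer and the admin can read the confidential issue, while the mentioned guest
# and non-member are filtered out because they cannot.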
it 'filters out users that can not read the issue' do
subject
expect_delivery_jobs_count(4)
expect_enqueud_email(author.id, note.id, "mentioned", mail: "note_issue_email")
expect_enqueud_email(assignee.id, note.id, "mentioned", mail: "note_issue_email")
expect_enqueud_email(member.id, note.id, "mentioned", mail: "note_issue_email")
expect_enqueud_email(admin.id, note.id, "mentioned", mail: "note_issue_email")
end
context 'on project that belongs to subgroup' do
let(:group_reporter) { create(:user) }
let(:group_guest) { create(:user) }
let(:parent_group) { create(:group) }
let(:child_group) { create(:group, parent: parent_group) }
let(:project) { create(:project, namespace: child_group) }
context 'when user is group guest member' do
before do
parent_group.add_reporter(group_reporter)
parent_group.add_guest(group_guest)
group_guest.notification_settings_for(parent_group).watch!
group_reporter.notification_settings_for(parent_group).watch!
reset_delivered_emails!
end
it 'does not email guest user' do
subject
expect_enqueud_email(group_reporter.id, note.id, nil, mail: "note_issue_email")
expect_not_enqueud_email(group_guest.id, "mentioned", mail: "note_issue_email")
end
end
end
end
context 'issue note mention', :deliver_mails_inline do
let_it_be(:issue) { create(:issue, project: project, assignees: [assignee]) }
let_it_be(:mentioned_issue) { create(:issue, assignees: issue.assignees) }
let_it_be(:user_to_exclude) { create(:user) }
let_it_be(:author) { create(:user) }
let(:user_mentions) do
other_members = [
@unsubscribed_mentioned,
@u_guest_watcher,
@pg_watcher,
@u_mentioned,
@u_not_mentioned,
@u_disabled,
@pg_disabled
]
(issue.project.team.members + other_members).map(&:to_reference).join(' ')
end
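# `user_mentions` builds one space-separated reference string covering the whole project
# team plus the users listed above, so mentioning everyone individually is meant to be
# equivalent to an `@all` mention in the shared examples below.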
let(:note) { create(:note_on_issue, author: author, noteable: issue, project_id: issue.project_id, note: note_content) }
before_all do
build_team(project)
build_group(project)
add_users(project)
add_user_subscriptions(issue)
project.add_maintainer(author)
end
before do
reset_delivered_emails!
end
describe '#new_note' do
it 'notifies parent group members with mention level' do
note = create(:note_on_issue, noteable: issue, project_id: issue.project_id, note: "@#{@pg_mention.username}")
notification.new_note(note)
should_email_nested_group_user(@pg_mention)
end
shared_examples 'correct team members are notified' do
it 'notifies the team members' do
notification.new_note(note)
# Make sure @unsubscribed_mentioned is part of the team
expect(note.project.team.members).to include(@unsubscribed_mentioned)
# Notify all team members
note.project.team.members.each do |member|
# User with disabled notification should not be notified
next if member.id == @u_disabled.id
# Author should not be notified
next if member.id == note.author.id
should_email(member)
end
should_email(@u_guest_watcher)
should_email(note.noteable.author)
should_email(note.noteable.assignees.first)
should_email_nested_group_user(@pg_watcher)
should_email(@u_mentioned)
should_email(@u_not_mentioned)
should_not_email(note.author)
should_not_email(@u_disabled)
should_not_email_nested_group_user(@pg_disabled)
end
it 'filters out "mentioned in" notes' do
mentioned_note = SystemNoteService.cross_reference(mentioned_issue, issue, issue.author)
expect(Notify).not_to receive(:note_issue_email)
notification.new_note(mentioned_note)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { note }
let(:notification_trigger) { notification.new_note(note) }
end
context 'when note is confidential' do
let(:note) { create(:note_on_issue, author: author, noteable: issue, project_id: issue.project_id, note: note_content, confidential: true) }
let(:guest) { create(:user) }
it 'does not notify users that cannot read note' do
project.add_guest(guest)
reset_delivered_emails!
notification.new_note(note)
should_not_email(guest)
end
end
end
context 'when `disable_all_mention` FF is disabled' do
before do
stub_feature_flags(disable_all_mention: false)
end
context 'when `@all` mention is used' do
let(:note_content) { "@all mentioned" }
it_behaves_like 'correct team members are notified'
end
context 'when users are individually mentioned' do
# `user_mentions` concatenates individual user mentions
# so that the end result is the same as `@all`.
let(:note_content) { "#{user_mentions} mentioned" }
it_behaves_like 'correct team members are notified'
end
end
context 'when `disable_all_mention` FF is enabled' do
before do
stub_feature_flags(disable_all_mention: true)
end
context 'when `@all` mention is used' do
before_all do
# user_to_exclude is in the note's project but is neither mentioned nor participating.
project.add_maintainer(user_to_exclude)
end
let(:note_content) { "@all mentioned" }
it "does not notify users who are not participating or mentioned" do
reset_delivered_emails!
notification.new_note(note)
should_email(note.noteable.author)
should_not_email(user_to_exclude)
end
end
context 'when users are individually mentioned' do
# `user_mentions` concatenates individual user mentions
# so that the end result is the same as `@all`.
let(:note_content) { "#{user_mentions} mentioned" }
it_behaves_like 'correct team members are notified'
end
end
end
end
context 'project snippet note', :deliver_mails_inline do
let(:user_mentions) do
other_members = [
@u_custom_global,
@u_guest_watcher,
snippet.author, # the snippet is the note's noteable; include its author
author, # note's author
@u_disabled,
@u_mentioned,
@u_not_mentioned
]
(snippet.project.team.members + other_members).map(&:to_reference).join(' ')
end
let(:snippet) { create(:project_snippet, project: project, author: create(:user)) }
let(:author) { create(:user) }
let(:note) { create(:note_on_project_snippet, author: author, noteable: snippet, project_id: project.id, note: note_content) }
describe '#new_note' do
shared_examples 'correct team members are notified' do
before do
build_team(project)
build_group(project)
project.add_maintainer(author)
# make sure these users can read the project snippet!
project.add_guest(@u_guest_watcher)
project.add_guest(@u_guest_custom)
add_member_for_parent_group(@pg_watcher, project)
reset_delivered_emails!
end
it 'notifies the team members' do
notification.new_note(note)
# Notify all team members
note.project.team.members.each do |member|
# User with disabled notification should not be notified
next if member.id == @u_disabled.id
# Author should not be notified
next if member.id == note.author.id
should_email(member)
end
# it emails custom global users on mention
should_email(@u_custom_global)
should_email(@u_guest_watcher)
should_email(note.noteable.author)
should_not_email(note.author)
should_email(@u_mentioned)
should_not_email(@u_disabled)
should_email(@u_not_mentioned)
end
end
context 'when `disable_all_mention` FF is disabled' do
before do
stub_feature_flags(disable_all_mention: false)
end
context 'when `@all` mention is used' do
let(:note_content) { "@all mentioned" }
it_behaves_like 'correct team members are notified'
end
context 'when users are individually mentioned' do
# `user_mentions` concatenates individual user mentions
# so that the end result is the same as `@all`.
let(:note_content) { "#{user_mentions} mentioned" }
it_behaves_like 'correct team members are notified'
end
end
context 'when `disable_all_mention` FF is enabled' do
before do
stub_feature_flags(disable_all_mention: true)
end
context 'when `@all` mention is used' do
let(:user_to_exclude) { create(:user) }
let(:note_content) { "@all mentioned" }
before do
project.add_maintainer(author)
project.add_maintainer(user_to_exclude)
reset_delivered_emails!
end
it "does not notify users who are not participating or mentioned" do
notification.new_note(note)
should_email(note.noteable.author)
should_not_email(user_to_exclude)
end
end
context 'when users are individually mentioned' do
# `user_mentions` concatenates individual user mentions
# so that the end result is the same as `@all`.
let(:note_content) { "#{user_mentions} mentioned" }
it_behaves_like 'correct team members are notified'
end
end
end
end
context 'personal snippet note', :deliver_mails_inline do
let(:snippet) { create(:personal_snippet, :public, author: @u_snippet_author) }
let(:note) { create(:note_on_personal_snippet, noteable: snippet, note: '@mentioned note', author: @u_note_author) }
before do
@u_watcher = create_global_setting_for(create(:user), :watch)
@u_participant = create_global_setting_for(create(:user), :participating)
@u_disabled = create_global_setting_for(create(:user), :disabled)
@u_mentioned = create_global_setting_for(create(:user, username: 'mentioned'), :mention)
@u_mentioned_level = create_global_setting_for(create(:user, username: 'participator'), :mention)
@u_note_author = create(:user, username: 'note_author')
@u_snippet_author = create(:user, username: 'snippet_author')
@u_not_mentioned = create_global_setting_for(create(:user, username: 'regular'), :participating)
reset_delivered_emails!
end
let!(:notes) do
[
create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_watcher),
create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_participant),
create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_mentioned),
create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_disabled),
create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_note_author)
]
end
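# Personal snippets have no project, so recipients come from global notification levels
# and participation only; each note above makes its author a participant in the snippet
# discussion, which drives the expectations in '#new_note' below.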
describe '#new_note' do
it 'notifies the participants' do
notification.new_note(note)
# it emails participants
should_email(@u_watcher)
should_email(@u_participant)
should_email(@u_watcher)
should_email(@u_snippet_author)
# it emails mentioned users
should_email(@u_mentioned)
# it does not email participants with mention notification level
should_not_email(@u_mentioned_level)
# it does not email note author
should_not_email(@u_note_author)
end
end
end
context 'commit note', :deliver_mails_inline do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:note) { create(:note_on_commit, project: project) }
before_all do
build_team(project)
build_group(project)
update_custom_notification(:new_note, @u_guest_custom, resource: project)
update_custom_notification(:new_note, @u_custom_global)
end
before do
reset_delivered_emails!
allow(note.noteable).to receive(:author).and_return(@u_committer)
end
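# The commit (note.noteable) has its author stubbed to @u_committer, so the commit author
# is treated as the noteable author when recipients are resolved below.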
describe '#new_note, #perform_enqueued_jobs' do
it 'emails watchers, custom recipients and the committer' do
notification.new_note(note)
should_email(@u_guest_watcher)
should_email(@u_custom_global)
should_email(@u_guest_custom)
should_email(@u_committer)
should_email(@u_watcher)
should_email_nested_group_user(@pg_watcher)
should_not_email(@u_mentioned)
should_not_email(note.author)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
should_not_email_nested_group_user(@pg_disabled)
end
it 'emails mentioned users when the note contains a mention' do
note.update_attribute(:note, '@mention referenced')
notification.new_note(note)
should_email(@u_guest_watcher)
should_email(@u_committer)
should_email(@u_watcher)
should_email(@u_mentioned)
should_email_nested_group_user(@pg_watcher)
should_not_email(note.author)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
should_not_email_nested_group_user(@pg_disabled)
end
it 'does not email the committer when they use the mention notification level' do
@u_committer = create_global_setting_for(@u_committer, :mention)
notification.new_note(note)
should_not_email(@u_committer)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { note }
let(:notification_trigger) { notification.new_note(note) }
end
end
end
context "merge request diff note", :deliver_mails_inline do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:merge_request) { create(:merge_request, source_project: project, assignees: [user], author: create(:user)) }
let_it_be(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request) }
before_all do
build_team(note.project)
project.add_maintainer(merge_request.author)
merge_request.assignees.each { |assignee| project.add_maintainer(assignee) }
end
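# build_team adds the default set of notification users (including @u_watcher); together
# with the MR author and assignee they account for the three SentNotification records
# asserted in '#new_note' below.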
describe '#new_note' do
it "records sent notifications" do
# Three SentNotification records are created: one each for the MR assignee, the MR author, and @u_watcher
expect(SentNotification).to receive(:record_note).with(note, any_args).exactly(3).times.and_call_original
notification.new_note(note)
expect(SentNotification.last(3).map(&:recipient).map(&:id))
.to contain_exactly(*merge_request.assignees.pluck(:id), merge_request.author.id, @u_watcher.id)
expect(SentNotification.last.in_reply_to_discussion_id).to eq(note.discussion_id)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { note }
let(:notification_trigger) { notification.new_note(note) }
end
end
end
context 'design diff note', :deliver_mails_inline do
include DesignManagementTestHelpers
let_it_be(:design) { create(:design, :with_file) }
let_it_be(:project) { design.project }
let_it_be(:member_and_mentioned) { create(:user, developer_projects: [project]) }
let_it_be(:member_and_author_of_second_note) { create(:user, developer_projects: [project]) }
let_it_be(:member_and_not_mentioned) { create(:user, developer_projects: [project]) }
let_it_be(:non_member_and_mentioned) { create(:user) }
let_it_be(:note) do
create(
:diff_note_on_design,
noteable: design,
note: "Hello #{member_and_mentioned.to_reference}, G'day #{non_member_and_mentioned.to_reference}"
)
end
let_it_be(:note_2) do
create(:diff_note_on_design, noteable: design, author: member_and_author_of_second_note)
end
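# note_2 exists only to make member_and_author_of_second_note a participant in the design
# discussion; presumably that participation is why they are expected to be emailed for
# `note` below even though they are not mentioned in it.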
context 'design management is enabled' do
before do
enable_design_management
end
it 'sends new note notifications', :aggregate_failures do
notification.new_note(note)
should_email(design.authors.first)
should_email(member_and_mentioned)
should_email(member_and_author_of_second_note)
should_not_email(member_and_not_mentioned)
should_not_email(non_member_and_mentioned)
should_not_email(note.author)
end
end
context 'design management is disabled' do
before do
enable_design_management(false)
end
it 'does not notify anyone' do
notification.new_note(note)
should_not_email_anyone
end
end
end
end
describe '#send_new_release_notifications', :deliver_mails_inline do
let(:release) { create(:release, project: project, author: current_user) }
let(:object) { release }
let(:action) { notification.send_new_release_notifications(release) }
before_all do
build_team(project)
update_custom_notification(:new_release, @u_guest_custom, resource: project)
update_custom_notification(:new_release, @u_custom_global)
end
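# Custom-level users only receive release notifications when the :new_release event is
# enabled for them, which is what the two update_custom_notification calls above arrange
# for @u_guest_custom and @u_custom_global.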
context 'when release author is blocked' do
let(:current_user) { create(:user, :blocked) }
include_examples 'is not able to send notifications'
end
context 'when release author is a ghost' do
let(:current_user) { create(:user, :ghost) }
include_examples 'is not able to send notifications'
end
context 'when recipients for a new release exist' do
let(:current_user) { create(:user) }
it 'notifies the expected users' do
notification.send_new_release_notifications(release)
should_only_email(
@u_watcher,
@u_guest_watcher,
@u_custom_global,
@u_guest_custom
)
end
end
end
describe 'Participating project notification settings have priority over group and global settings if available', :deliver_mails_inline do
let_it_be(:group) { create(:group) }
let_it_be(:maintainer) { group.add_owner(create(:user, username: 'maintainer')).user }
let_it_be(:user1) { group.add_developer(create(:user, username: 'user_with_project_and_custom_setting')).user }
let_it_be(:project) { create(:project, :public, namespace: group) }
let(:issue) { create :issue, project: project, assignees: [assignee], description: '' }
before do
reset_delivered_emails!
create_notification_setting(user1, project, :participating)
end
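# user1 has a project-level :participating setting. Since user1 neither participates in
# nor is mentioned in the new issue, that setting should win over whatever group or
# global level each context below configures. A rough sketch of the assumed precedence
# (not the actual implementation):
#   effective_level = setting_for(project) || setting_for(group) || global_setting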
context 'custom on group' do
[nil, true].each do |new_issue_value|
value_caption = new_issue_value || 'nil'
it "does not send an email to user1 when a new issue is created and new_issue is set to #{value_caption}" do
update_custom_notification(:new_issue, user1, resource: group, value: new_issue_value)
notification.new_issue(issue, maintainer)
should_not_email(user1)
end
end
end
context 'watch on group' do
it 'does not send an email' do
user1.notification_settings_for(group).update!(level: :watch)
notification.new_issue(issue, maintainer)
should_not_email(user1)
end
end
context 'custom on global, global on group' do
it 'does not send an email' do
user1.notification_settings_for(nil).update!(level: :custom)
user1.notification_settings_for(group).update!(level: :global)
notification.new_issue(issue, maintainer)
should_not_email(user1)
end
end
context 'watch on global, global on group' do
it 'does not send an email' do
user1.notification_settings_for(nil).update!(level: :watch)
user1.notification_settings_for(group).update!(level: :global)
notification.new_issue(issue, maintainer)
should_not_email(user1)
end
end
end
describe 'Issues', :aggregate_failures do
let(:another_project) { create(:project, :public, namespace: group) }
let(:issue) { create :issue, project: project, assignees: [assignee], description: 'cc @participant @unsubscribed_mentioned' }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :public, namespace: group) }
before_all do
build_team(project)
build_group(project)
add_users(project)
end
before do
project.reload
add_user_subscriptions(issue)
reset_delivered_emails!
update_custom_notification(:new_issue, @u_guest_custom, resource: project)
update_custom_notification(:new_issue, @u_custom_global)
issue.author.notified_of_own_activity = false
end
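# `notified_of_own_activity = false` keeps the issue author out of the recipient list by
# default; the update_custom_notification calls enable the :new_issue event for the
# custom-level users so they show up in the expectations below.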
describe '#new_issue' do
it 'notifies the expected users' do
expect do
notification.new_issue(issue, @u_disabled)
end.to enqueue_mail_with(Notify, :new_issue_email, assignee, issue, 'assigned')
.and(enqueue_mail_with(Notify, :new_issue_email, @u_watcher, issue, nil))
.and(enqueue_mail_with(Notify, :new_issue_email, @u_guest_watcher, issue, nil))
.and(enqueue_mail_with(Notify, :new_issue_email, @u_guest_custom, issue, nil))
.and(enqueue_mail_with(Notify, :new_issue_email, @u_custom_global, issue, nil))
.and(enqueue_mail_with(Notify, :new_issue_email, @u_participant_mentioned, issue, 'mentioned'))
.and(enqueue_mail_with(Notify, :new_issue_email, @g_global_watcher.id, issue.id, nil))
.and(enqueue_mail_with(Notify, :new_issue_email, @g_watcher, issue, nil))
.and(enqueue_mail_with(Notify, :new_issue_email, @unsubscribed_mentioned, issue, 'mentioned'))
.and(enqueue_mail_with(Notify, :new_issue_email, @pg_watcher, issue, nil))
.and(not_enqueue_mail_with(Notify, :new_issue_email, @u_mentioned, anything, anything))
.and(not_enqueue_mail_with(Notify, :new_issue_email, @u_participating, anything, anything))
.and(not_enqueue_mail_with(Notify, :new_issue_email, @u_disabled, anything, anything))
.and(not_enqueue_mail_with(Notify, :new_issue_email, @u_lazy_participant, anything, anything))
.and(not_enqueue_mail_with(Notify, :new_issue_email, @pg_disabled, anything, anything))
.and(not_enqueue_mail_with(Notify, :new_issue_email, @pg_mention, anything, anything))
end
context 'when user has an only mention notification setting' do
before do
create_global_setting_for(issue.assignees.first, :mention)
end
it 'does not send assignee notifications' do
expect do
notification.new_issue(issue, @u_disabled)
end.to not_enqueue_mail_with(Notify, :new_issue_email, issue.assignees.first, anything, anything)
end
end
it 'properly prioritizes notification reason' do
# have assignee be both assigned and mentioned
issue.update_attribute(:description, "/cc #{assignee.to_reference} #{@u_mentioned.to_reference}")
expect do
notification.new_issue(issue, @u_disabled)
end.to enqueue_mail_with(Notify, :new_issue_email, assignee, issue, 'assigned')
.and(enqueue_mail_with(Notify, :new_issue_email, @u_mentioned, issue, 'mentioned'))
end
it 'adds "assigned" reason for assignees if any' do
expect do
notification.new_issue(issue, @u_disabled)
end.to enqueue_mail_with(Notify, :new_issue_email, assignee, issue, 'assigned')
end
it "emails any mentioned users with the mention level" do
issue.description = @u_mentioned.to_reference
expect do
notification.new_issue(issue, @u_disabled)
end.to enqueue_mail_with(Notify, :new_issue_email, @u_mentioned, issue, 'mentioned')
end
it "emails the author if they've opted into notifications about their activity" do
issue.author.notified_of_own_activity = true
expect do
notification.new_issue(issue, issue.author)
end.to enqueue_mail_with(Notify, :new_issue_email, issue.author, issue, 'own_activity')
end
it "doesn't email the author if they haven't opted into notifications about their activity" do
expect do
notification.new_issue(issue, issue.author)
end.to not_enqueue_mail_with(Notify, :new_issue_email, issue.author, anything, anything)
end
it "emails subscribers of the issue's labels and adds `subscribed` reason" do
user_1 = create(:user)
user_2 = create(:user)
user_3 = create(:user)
user_4 = create(:user)
label = create(:label, project: project, issues: [issue])
group_label = create(:group_label, group: group, issues: [issue])
issue.reload
label.toggle_subscription(user_1, project)
group_label.toggle_subscription(user_2, project)
group_label.toggle_subscription(user_3, another_project)
group_label.toggle_subscription(user_4)
expect do
notification.new_issue(issue, issue.author)
end.to enqueue_mail_with(Notify, :new_issue_email, user_1, issue, NotificationReason::SUBSCRIBED)
.and(enqueue_mail_with(Notify, :new_issue_email, user_2, issue, NotificationReason::SUBSCRIBED))
.and(enqueue_mail_with(Notify, :new_issue_email, user_4, issue, NotificationReason::SUBSCRIBED))
.and(not_enqueue_mail_with(Notify, :new_issue_email, user_3, anything, anything))
end
it_behaves_like 'project emails are disabled', check_delivery_jobs_queue: true do
let(:notification_target) { issue }
let(:notification_trigger) { notification.new_issue(issue, @u_disabled) }
end
context 'confidential issues' do
let(:author) { create(:user) }
let(:non_member) { create(:user) }
let(:member) { create(:user) }
let(:guest) { create(:user) }
let(:admin) { create(:admin) }
let(:confidential_issue) { create(:issue, :confidential, project: project, title: 'Confidential issue', author: author, assignees: [assignee]) }
it "emails subscribers of the issue's labels that can read the issue" do
project.add_developer(member)
project.add_guest(guest)
label = create(:label, project: project, issues: [confidential_issue])
confidential_issue.reload
label.toggle_subscription(non_member, project)
label.toggle_subscription(author, project)
label.toggle_subscription(assignee, project)
label.toggle_subscription(member, project)
label.toggle_subscription(guest, project)
label.toggle_subscription(admin, project)
expect do
notification.new_issue(confidential_issue, issue.author)
end.to enqueue_mail_with(Notify, :new_issue_email, assignee, confidential_issue, NotificationReason::ASSIGNED)
.and(enqueue_mail_with(Notify, :new_issue_email, member, confidential_issue, NotificationReason::SUBSCRIBED))
.and(enqueue_mail_with(Notify, :new_issue_email, admin, confidential_issue, NotificationReason::SUBSCRIBED))
.and(not_enqueue_mail_with(Notify, :new_issue_email, @u_guest_watcher, anything, anything))
.and(not_enqueue_mail_with(Notify, :new_issue_email, non_member, anything, anything))
.and(not_enqueue_mail_with(Notify, :new_issue_email, author, anything, anything))
.and(not_enqueue_mail_with(Notify, :new_issue_email, guest, anything, anything))
end
end
context 'when the author is not allowed to trigger notifications' do
let(:object) { issue }
let(:action) { notification.new_issue(issue, current_user) }
let(:notification_method) { :new_issue_email }
context 'because they are blocked' do
let(:current_user) { create(:user, :blocked) }
include_examples 'is not able to send notifications', check_delivery_jobs_queue: true
end
context 'because they are a ghost' do
let(:current_user) { create(:user, :ghost) }
include_examples 'is not able to send notifications', check_delivery_jobs_queue: true
end
end
end
describe '#new_mentions_in_issue' do
let(:notification_method) { :new_mentions_in_issue }
let(:mentionable) { issue }
let(:object) { mentionable }
let(:action) { send_notifications(@u_mentioned, current_user: current_user) }
it 'sends no emails when no new mentions are present' do
send_notifications
expect_no_delivery_jobs
end
it 'emails new mentions with a watch level higher than mention' do
expect do
send_notifications(@u_watcher, @u_participant_mentioned, @u_custom_global, @u_mentioned)
end.to have_only_enqueued_mail_with_args(
Notify,
:new_mention_in_issue_email,
[@u_watcher.id, mentionable.id, anything, anything],
[@u_participant_mentioned.id, mentionable.id, anything, anything],
[@u_custom_global.id, mentionable.id, anything, anything],
[@u_mentioned.id, mentionable.id, anything, anything]
)
end
it 'does not email new mentions with a watch level equal to or less than mention' do
send_notifications(@u_disabled)
expect_no_delivery_jobs
end
it 'emails new mentions despite being unsubscribed' do
expect do
send_notifications(@unsubscribed_mentioned)
end.to have_only_enqueued_mail_with_args(
Notify,
:new_mention_in_issue_email,
[@unsubscribed_mentioned.id, mentionable.id, anything, anything]
)
end
it 'sends the proper notification reason header' do
expect do
send_notifications(@u_watcher)
end.to have_only_enqueued_mail_with_args(
Notify,
:new_mention_in_issue_email,
[@u_watcher.id, mentionable.id, anything, NotificationReason::MENTIONED]
)
end
it_behaves_like 'project emails are disabled', check_delivery_jobs_queue: true do
let(:notification_target) { issue }
let(:notification_trigger) { send_notifications(@u_watcher, @u_participant_mentioned, @u_custom_global, @u_mentioned) }
end
context 'where current_user is blocked' do
let(:current_user) { create(:user, :blocked) }
include_examples 'is not able to send notifications', check_delivery_jobs_queue: true
end
context 'where current_user is a ghost' do
let(:current_user) { create(:user, :ghost) }
include_examples 'is not able to send notifications', check_delivery_jobs_queue: true
end
end
describe '#reassigned_issue' do
let(:anything_args) { [anything, anything, anything, anything] }
let(:mailer_method) { :reassigned_issue_email }
before do
update_custom_notification(:reassign_issue, @u_guest_custom, resource: project)
update_custom_notification(:reassign_issue, @u_custom_global)
end
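# `anything_args` stands in for the four trailing arguments of reassigned_issue_email
# (everything after the recipient), so the examples below only pin down the recipient
# and, where relevant, the NotificationReason.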
it 'emails new assignee' do
expect do
notification.reassigned_issue(issue, @u_disabled, [assignee])
end.to enqueue_mail_with(Notify, :reassigned_issue_email, issue.assignees.first, *anything_args)
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_watcher, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_guest_watcher, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_guest_custom, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_custom_global, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_participant_mentioned, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @subscriber, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @unsubscriber, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @u_participating, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @u_disabled, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @u_lazy_participant, *anything_args))
end
it 'adds "assigned" reason for new assignee' do
expect do
notification.reassigned_issue(issue, @u_disabled, [assignee])
end.to enqueue_mail_with(
Notify,
:reassigned_issue_email,
issue.assignees.first,
anything,
anything,
anything,
NotificationReason::ASSIGNED
)
end
it 'emails previous assignee even if they have the "on mention" notification level' do
issue.assignees = [@u_mentioned]
expect do
notification.reassigned_issue(issue, @u_disabled, [@u_watcher])
end.to enqueue_mail_with(Notify, :reassigned_issue_email, @u_mentioned, *anything_args)
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_watcher, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_guest_watcher, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_guest_custom, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_participant_mentioned, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @subscriber, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_custom_global, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @unsubscriber, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @u_participating, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @u_disabled, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @u_lazy_participant, *anything_args))
end
it 'emails new assignee even if they have the "on mention" notification level' do
issue.assignees = [@u_mentioned]
expect(issue.assignees.first).to eq(@u_mentioned)
expect do
notification.reassigned_issue(issue, @u_disabled, [@u_mentioned])
end.to enqueue_mail_with(Notify, :reassigned_issue_email, issue.assignees.first, *anything_args)
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_watcher, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_guest_watcher, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_guest_custom, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_participant_mentioned, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @subscriber, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_custom_global, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @unsubscriber, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @u_participating, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @u_disabled, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @u_lazy_participant, *anything_args))
end
it 'does not email new assignee if they are the current user' do
issue.assignees = [@u_mentioned]
notification.reassigned_issue(issue, @u_mentioned, [@u_mentioned])
expect(issue.assignees.first).to eq(@u_mentioned)
expect do
notification.reassigned_issue(issue, @u_mentioned, [@u_mentioned])
end.to enqueue_mail_with(Notify, :reassigned_issue_email, @u_watcher, *anything_args)
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_guest_watcher, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_guest_custom, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_participant_mentioned, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @subscriber, *anything_args))
.and(enqueue_mail_with(Notify, :reassigned_issue_email, @u_custom_global, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, issue.assignees.first, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @unsubscriber, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @u_participating, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @u_disabled, *anything_args))
.and(not_enqueue_mail_with(Notify, :reassigned_issue_email, @u_lazy_participant, *anything_args))
end
it_behaves_like 'participating notifications', check_delivery_jobs_queue: true do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { issue }
let(:notification_trigger) { notification.reassigned_issue(issue, @u_disabled, [assignee]) }
end
it_behaves_like 'participating by confidential note notification', check_delivery_jobs_queue: true do
let(:issuable) { issue }
let(:notification_trigger) { notification.reassigned_issue(issue, @u_disabled, [assignee]) }
end
it_behaves_like 'project emails are disabled', check_delivery_jobs_queue: true do
let(:notification_target) { issue }
let(:notification_trigger) { notification.reassigned_issue(issue, @u_disabled, [assignee]) }
end
end
describe '#relabeled_issue', :deliver_mails_inline do
let(:group_label_1) { create(:group_label, group: group, title: 'Group Label 1', issues: [issue]) }
let(:group_label_2) { create(:group_label, group: group, title: 'Group Label 2') }
let(:label_1) { create(:label, project: project, title: 'Label 1', issues: [issue]) }
let(:label_2) { create(:label, project: project, title: 'Label 2') }
let!(:subscriber_to_group_label_1) { create(:user) { |u| group_label_1.toggle_subscription(u, project) } }
let!(:subscriber_1_to_group_label_2) { create(:user) { |u| group_label_2.toggle_subscription(u, project) } }
let!(:subscriber_2_to_group_label_2) { create(:user) { |u| group_label_2.toggle_subscription(u) } }
let!(:subscriber_to_group_label_2_on_another_project) { create(:user) { |u| group_label_2.toggle_subscription(u, another_project) } }
let!(:subscriber_to_label_1) { create(:user) { |u| label_1.toggle_subscription(u, project) } }
let!(:subscriber_to_label_2) { create(:user) { |u| label_2.toggle_subscription(u, project) } }
it "emails the current user if they've opted into notifications about their activity" do
subscriber_to_label_2.notified_of_own_activity = true
notification.relabeled_issue(issue, [group_label_2, label_2], subscriber_to_label_2)
should_email(subscriber_to_label_2)
end
it "doesn't email the current user if they haven't opted into notifications about their activity" do
notification.relabeled_issue(issue, [group_label_2, label_2], subscriber_to_label_2)
should_not_email(subscriber_to_label_2)
end
it "doesn't send email to anyone but subscribers of the given labels" do
notification.relabeled_issue(issue, [group_label_2, label_2], @u_disabled)
should_not_email(subscriber_to_label_1)
should_not_email(subscriber_to_group_label_1)
should_not_email(subscriber_to_group_label_2_on_another_project)
should_email(subscriber_1_to_group_label_2)
should_email(subscriber_2_to_group_label_2)
should_email(subscriber_to_label_2)
should_not_email(issue.assignees.first)
should_not_email(issue.author)
should_not_email(@u_watcher)
should_not_email(@u_guest_watcher)
should_not_email(@u_participant_mentioned)
should_not_email(@subscriber)
should_not_email(@watcher_and_subscriber)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
end
it "doesn't send multiple email when a user is subscribed to multiple given labels" do
subscriber_to_both = create(:user) do |user|
[label_1, label_2].each { |label| label.toggle_subscription(user, project) }
end
notification.relabeled_issue(issue, [label_1, label_2], @u_disabled)
should_email(subscriber_to_label_1)
should_email(subscriber_to_label_2)
should_email(subscriber_to_both)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { issue }
let(:notification_trigger) { notification.relabeled_issue(issue, [group_label_2, label_2], @u_disabled) }
end
context 'confidential issues' do
let(:author) { create(:user) }
let(:non_member) { create(:user) }
let(:member) { create(:user) }
let(:guest) { create(:user) }
let(:admin) { create(:admin) }
let(:confidential_issue) { create(:issue, :confidential, project: project, title: 'Confidential issue', author: author, assignees: [assignee]) }
let!(:label_1) { create(:label, project: project, issues: [confidential_issue]) }
let!(:label_2) { create(:label, project: project) }
it "emails subscribers of the issue's labels that can read the issue" do
project.add_developer(member)
project.add_guest(guest)
label_2.toggle_subscription(non_member, project)
label_2.toggle_subscription(author, project)
label_2.toggle_subscription(assignee, project)
label_2.toggle_subscription(member, project)
label_2.toggle_subscription(guest, project)
label_2.toggle_subscription(admin, project)
reset_delivered_emails!
notification.relabeled_issue(confidential_issue, [label_2], @u_disabled)
should_not_email(non_member)
should_not_email(guest)
should_email(author)
should_email(assignee)
should_email(member)
should_email(admin)
end
end
end
describe '#removed_milestone on Issue', :deliver_mails_inline do
context 'with a milestone assigned to the issue' do
let(:milestone) { create(:milestone, project: project, issues: [issue]) }
let!(:subscriber_to_new_milestone) { create(:user) { |u| issue.toggle_subscription(u, project) } }
it_behaves_like 'altered milestone notification on issue' do
before do
notification.removed_milestone(issue, issue.author)
end
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { issue }
let(:notification_trigger) { notification.removed_milestone(issue, issue.author) }
end
it_behaves_like 'participating by confidential note notification' do
let(:issuable) { issue }
let(:notification_trigger) { notification.removed_milestone(issue, issue.author) }
end
end
context 'confidential issues' do
let(:author) { create(:user) }
let(:non_member) { create(:user) }
let(:member) { create(:user) }
let(:guest) { create(:user) }
let(:admin) { create(:admin) }
let(:confidential_issue) { create(:issue, :confidential, project: project, title: 'Confidential issue', author: author, assignees: [assignee]) }
let(:milestone) { create(:milestone, project: project, issues: [confidential_issue]) }
it "emails subscribers of the issue's milestone that can read the issue" do
project.add_developer(member)
project.add_guest(guest)
confidential_issue.subscribe(non_member, project)
confidential_issue.subscribe(author, project)
confidential_issue.subscribe(assignee, project)
confidential_issue.subscribe(member, project)
confidential_issue.subscribe(guest, project)
confidential_issue.subscribe(admin, project)
reset_delivered_emails!
notification.removed_milestone(confidential_issue, @u_disabled)
should_not_email(non_member)
should_not_email(guest)
should_email(author)
should_email(assignee)
should_email(member)
should_email(admin)
end
end
end
describe '#changed_milestone on Issue', :deliver_mails_inline do
context 'with a new milestone assigned to the issue' do
let(:new_milestone) { create(:milestone, project: project, issues: [issue]) }
let!(:subscriber_to_new_milestone) { create(:user) { |u| issue.toggle_subscription(u, project) } }
it_behaves_like 'altered milestone notification on issue' do
before do
notification.changed_milestone(issue, new_milestone, issue.author)
end
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { issue }
let(:notification_trigger) { notification.changed_milestone(issue, new_milestone, issue.author) }
end
end
context 'confidential issues' do
let(:author) { create(:user) }
let(:non_member) { create(:user) }
let(:member) { create(:user) }
let(:guest) { create(:user) }
let(:admin) { create(:admin) }
let(:confidential_issue) { create(:issue, :confidential, project: project, title: 'Confidential issue', author: author, assignees: [assignee]) }
let(:new_milestone) { create(:milestone, project: project, issues: [confidential_issue]) }
it "emails subscribers of the issue's milestone that can read the issue" do
project.add_developer(member)
project.add_guest(guest)
confidential_issue.subscribe(non_member, project)
confidential_issue.subscribe(author, project)
confidential_issue.subscribe(assignee, project)
confidential_issue.subscribe(member, project)
confidential_issue.subscribe(guest, project)
confidential_issue.subscribe(admin, project)
reset_delivered_emails!
notification.changed_milestone(confidential_issue, new_milestone, @u_disabled)
should_not_email(non_member)
should_not_email(guest)
should_email(author)
should_email(assignee)
should_email(member)
should_email(admin)
end
end
end
describe '#close_issue', :deliver_mails_inline do
before do
update_custom_notification(:close_issue, @u_guest_custom, resource: project)
update_custom_notification(:close_issue, @u_custom_global)
end
it 'sends email to issue assignee and issue author' do
notification.close_issue(issue, @u_disabled)
should_email(issue.assignees.first)
should_email(issue.author)
should_email(@u_watcher)
should_email(@u_guest_watcher)
should_email(@u_guest_custom)
should_email(@u_custom_global)
should_email(@u_participant_mentioned)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
it_behaves_like 'participating notifications' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { issue }
let(:notification_trigger) { notification.close_issue(issue, @u_disabled) }
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { issue }
let(:notification_trigger) { notification.close_issue(issue, @u_disabled) }
end
it_behaves_like 'participating by confidential note notification' do
let(:issuable) { issue }
let(:notification_trigger) { notification.close_issue(issue, @u_disabled) }
end
it 'adds "subscribed" reason to subscriber emails' do
user_1 = create(:user)
issue.subscribe(user_1)
issue.reload
notification.close_issue(issue, @u_disabled)
email = find_email_for(user_1)
expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::SUBSCRIBED)
end
end
describe '#reopen_issue', :deliver_mails_inline do
before do
update_custom_notification(:reopen_issue, @u_guest_custom, resource: project)
update_custom_notification(:reopen_issue, @u_custom_global)
end
it 'sends email to issue notification recipients' do
notification.reopen_issue(issue, @u_disabled)
should_email(issue.assignees.first)
should_email(issue.author)
should_email(@u_watcher)
should_email(@u_guest_watcher)
should_email(@u_guest_custom)
should_email(@u_custom_global)
should_email(@u_participant_mentioned)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
it_behaves_like 'participating notifications' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { issue }
let(:notification_trigger) { notification.reopen_issue(issue, @u_disabled) }
end
it_behaves_like 'participating by confidential note notification' do
let(:issuable) { issue }
let(:notification_trigger) { notification.reopen_issue(issue, @u_disabled) }
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { issue }
let(:notification_trigger) { notification.reopen_issue(issue, @u_disabled) }
end
end
describe '#issue_moved', :deliver_mails_inline do
let(:new_issue) { create(:issue) }
it 'sends email to issue notification recipients' do
notification.issue_moved(issue, new_issue, @u_disabled)
should_email(issue.assignees.first)
should_email(issue.author)
should_email(@u_watcher)
should_email(@u_guest_watcher)
should_email(@u_participant_mentioned)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
it_behaves_like 'participating notifications' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { issue }
let(:notification_trigger) { notification.issue_moved(issue, new_issue, @u_disabled) }
end
it_behaves_like 'participating by confidential note notification' do
let(:issuable) { issue }
let(:notification_trigger) { notification.issue_moved(issue, new_issue, @u_disabled) }
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { issue }
let(:notification_trigger) { notification.issue_moved(issue, new_issue, @u_disabled) }
end
end
describe '#issue_cloned', :deliver_mails_inline do
let(:new_issue) { create(:issue) }
it 'sends email to issue notification recipients' do
notification.issue_cloned(issue, new_issue, @u_disabled)
should_email(issue.assignees.first)
should_email(issue.author)
should_email(@u_watcher)
should_email(@u_guest_watcher)
should_email(@u_participant_mentioned)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
it_behaves_like 'participating notifications' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { issue }
let(:notification_trigger) { notification.issue_cloned(issue, new_issue, @u_disabled) }
end
it_behaves_like 'participating by confidential note notification' do
let(:issuable) { issue }
let(:notification_trigger) { notification.issue_cloned(issue, new_issue, @u_disabled) }
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { issue }
let(:notification_trigger) { notification.issue_cloned(issue, new_issue, @u_disabled) }
end
end
describe '#issue_due', :deliver_mails_inline do
before do
issue.update!(due_date: Date.today)
update_custom_notification(:issue_due, @u_guest_custom, resource: project)
update_custom_notification(:issue_due, @u_custom_global)
end
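# The due date is set to today so the issue qualifies for due-date notifications; unlike
# most issue events, watchers are deliberately excluded here (see the first example below).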
it 'sends email to issue notification recipients, excluding watchers' do
notification.issue_due(issue)
should_email(issue.assignees.first)
should_email(issue.author)
should_email(@u_guest_custom)
should_email(@u_custom_global)
should_email(@u_participant_mentioned)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_not_email(@u_watcher)
should_not_email(@u_guest_watcher)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
it 'sends the email from the author' do
notification.issue_due(issue)
email = find_email_for(@subscriber)
expect(email.header[:from].display_names).to eq(["#{issue.author.name} (@#{issue.author.username})"])
end
it_behaves_like 'participating notifications' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { issue }
let(:notification_trigger) { notification.issue_due(issue) }
end
it_behaves_like 'participating by confidential note notification' do
let(:issuable) { issue }
let(:notification_trigger) { notification.issue_due(issue) }
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { issue }
let(:notification_trigger) { notification.issue_due(issue) }
end
end
end
describe 'Merge Requests', :deliver_mails_inline do
let(:another_project) { create(:project, :public, namespace: group) }
let(:assignees) { Array.wrap(assignee) }
let(:merge_request) { create :merge_request, author: author, source_project: project, assignees: assignees, description: 'cc @participant' }
let_it_be_with_reload(:author) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :public, :repository, namespace: group) }
before_all do
build_team(project)
add_users(project)
project.add_maintainer(author)
project.add_maintainer(assignee)
end
before do
add_user_subscriptions(merge_request)
update_custom_notification(:new_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:new_merge_request, @u_custom_global)
reset_delivered_emails!
end
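# The :new_merge_request custom event is enabled for @u_guest_custom and @u_custom_global
# above, so both appear among the expected recipients in '#new_merge_request' below.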
describe '#new_merge_request' do
it 'notifies watchers, subscribers, custom recipients and assignees' do
notification.new_merge_request(merge_request, @u_disabled)
merge_request.assignees.each { |assignee| should_email(assignee) }
should_email(@u_watcher)
should_email(@watcher_and_subscriber)
should_email(@u_participant_mentioned)
should_email(@u_guest_watcher)
should_email(@u_guest_custom)
should_email(@u_custom_global)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
it 'adds "assigned" reason for assignee, if any' do
notification.new_merge_request(merge_request, @u_disabled)
merge_request.assignees.each do |assignee|
email = find_email_for(assignee)
expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::ASSIGNED)
end
end
it "emails any mentioned users with the mention level" do
merge_request.description = @u_mentioned.to_reference
notification.new_merge_request(merge_request, @u_disabled)
should_email(@u_mentioned)
end
it "emails the author if they've opted into notifications about their activity" do
merge_request.author.notified_of_own_activity = true
notification.new_merge_request(merge_request, merge_request.author)
should_email(merge_request.author)
email = find_email_for(merge_request.author)
expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::OWN_ACTIVITY)
end
it "doesn't email the author if they haven't opted into notifications about their activity" do
notification.new_merge_request(merge_request, merge_request.author)
should_not_email(merge_request.author)
end
it "emails subscribers of the merge request's labels" do
user_1 = create(:user)
user_2 = create(:user)
user_3 = create(:user)
user_4 = create(:user)
label = create(:label, project: project, merge_requests: [merge_request])
group_label = create(:group_label, group: group, merge_requests: [merge_request])
label.toggle_subscription(user_1, project)
group_label.toggle_subscription(user_2, project)
group_label.toggle_subscription(user_3, another_project)
group_label.toggle_subscription(user_4)
notification.new_merge_request(merge_request, @u_disabled)
should_email(user_1)
should_email(user_2)
should_not_email(user_3)
should_email(user_4)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.new_merge_request(merge_request, @u_disabled) }
end
describe 'Approvals' do
let(:notification_target) { merge_request }
let(:maintainer) { create(:user) }
describe '#approve_mr' do
it 'will notify the author, subscribers, and assigned users' do
notification.approve_mr(merge_request, maintainer)
merge_request.assignees.each { |assignee| should_email(assignee) }
should_email(merge_request.author)
should_email(@u_watcher)
should_email(@u_participant_mentioned)
should_email(@subscribed_participant)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_email(@u_guest_watcher)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
expect(email_recipients.size).to eq(8)
# assignee, author, @u_watcher,
# @u_participant_mentioned, @subscribed_participant,
# @subscriber, @watcher_and_subscriber, @u_guest_watcher
end
end
describe '#unapprove_mr' do
it 'will notify the author, subscribers, and assigned users' do
notification.unapprove_mr(merge_request, maintainer)
merge_request.assignees.each { |assignee| should_email(assignee) }
should_email(merge_request.author)
should_email(@u_watcher)
should_email(@u_participant_mentioned)
should_email(@subscribed_participant)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_email(@u_guest_watcher)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
expect(email_recipients.size).to eq(8)
# assignee, author, @u_watcher,
# @u_participant_mentioned, @subscribed_participant,
# @subscriber, @watcher_and_subscriber, @u_guest_watcher
end
end
end
context 'participating' do
it_behaves_like 'participating by assignee notification' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { merge_request }
let(:notification_trigger) { notification.new_merge_request(merge_request, @u_disabled) }
end
it_behaves_like 'participating by note notification' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { merge_request }
let(:notification_trigger) { notification.new_merge_request(merge_request, @u_disabled) }
end
context 'by author' do
let(:participant) { create(:user, username: 'user-participant') }
before do
merge_request.author = participant
merge_request.save!
notification.new_merge_request(merge_request, @u_disabled)
end
it { should_not_email(participant) }
end
end
context 'when the author is not allowed to trigger notifications' do
let(:current_user) { nil }
let(:object) { merge_request }
let(:action) { notification.new_merge_request(merge_request, current_user) }
context 'because they are blocked' do
let(:current_user) { create(:user, :blocked) }
it_behaves_like 'is not able to send notifications'
end
context 'because they are a ghost' do
let(:current_user) { create(:user, :ghost) }
it_behaves_like 'is not able to send notifications'
end
end
end
describe '#new_mentions_in_merge_request' do
let(:notification_method) { :new_mentions_in_merge_request }
let(:mentionable) { merge_request }
let(:object) { mentionable }
let(:action) { send_notifications(@u_mentioned, current_user: current_user) }
include_examples 'notifications for new mentions'
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { send_notifications(@u_watcher, @u_participant_mentioned, @u_custom_global, @u_mentioned) }
end
context 'where current_user is blocked' do
let(:current_user) { create(:user, :blocked) }
include_examples 'is not able to send notifications'
end
context 'where current_user is a ghost' do
let(:current_user) { create(:user, :ghost) }
include_examples 'is not able to send notifications'
end
end
describe '#reassigned_merge_request' do
let(:current_user) { create(:user) }
before do
update_custom_notification(:reassign_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:reassign_merge_request, @u_custom_global)
end
it do
notification.reassigned_merge_request(merge_request, current_user, [assignee])
merge_request.assignees.each { |assignee| should_email(assignee) }
should_email(merge_request.author)
should_email(@u_watcher)
should_email(@u_participant_mentioned)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_email(@u_guest_watcher)
should_email(@u_guest_custom)
should_email(@u_custom_global)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
it 'adds "assigned" reason for new assignee' do
notification.reassigned_merge_request(merge_request, current_user, [assignee])
merge_request.assignees.each do |assignee|
email = find_email_for(assignee)
expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::ASSIGNED)
end
end
it_behaves_like 'participating notifications' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { merge_request }
let(:notification_trigger) { notification.reassigned_merge_request(merge_request, current_user, [assignee]) }
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.reassigned_merge_request(merge_request, current_user, [assignee]) }
end
end
describe '#changed_reviewer_of_merge_request' do
let(:merge_request) { create(:merge_request, author: author, source_project: project, reviewers: [reviewer], description: 'cc @participant') }
let_it_be(:current_user) { create(:user) }
let_it_be(:reviewer) { create(:user) }
before do
update_custom_notification(:change_reviewer_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:change_reviewer_merge_request, @u_custom_global)
end
it 'sends emails to relevant users only', :aggregate_failures do
notification.changed_reviewer_of_merge_request(merge_request, current_user, [reviewer])
merge_request.reviewers.each { |reviewer| should_email(reviewer) }
should_email(merge_request.author)
should_email(@u_watcher)
should_email(@u_participant_mentioned)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_email(@u_guest_watcher)
should_email(@u_guest_custom)
should_email(@u_custom_global)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
it 'adds "review requested" reason for new reviewer' do
notification.changed_reviewer_of_merge_request(merge_request, current_user, [reviewer])
merge_request.reviewers.each do |assignee|
email = find_email_for(assignee)
expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::REVIEW_REQUESTED)
end
end
context 'participating notifications with reviewers' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { merge_request }
let(:notification_trigger) { notification.changed_reviewer_of_merge_request(merge_request, current_user, [reviewer]) }
it_behaves_like 'participating notifications'
it_behaves_like 'participating by reviewer notification'
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.changed_reviewer_of_merge_request(merge_request, current_user, [reviewer]) }
end
end
describe '#change_in_merge_request_draft_status' do
let(:merge_request) { create(:merge_request, author: author, source_project: project) }
let_it_be(:current_user) { create(:user) }
it 'sends emails to relevant users only', :aggregate_failures do
notification.change_in_merge_request_draft_status(merge_request, current_user)
merge_request.reviewers.each { |reviewer| should_email(reviewer) }
merge_request.assignees.each { |assignee| should_email(assignee) }
should_email(merge_request.author)
should_email(@u_watcher)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_email(@u_guest_watcher)
should_not_email(@u_participant_mentioned)
should_not_email(@u_guest_custom)
should_not_email(@u_custom_global)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
it_behaves_like 'participating notifications' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { merge_request }
let(:notification_trigger) { notification.change_in_merge_request_draft_status(merge_request, @u_disabled) }
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.change_in_merge_request_draft_status(merge_request, @u_disabled) }
end
end
describe '#push_to_merge_request' do
before do
update_custom_notification(:push_to_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:push_to_merge_request, @u_custom_global)
end
it do
notification.push_to_merge_request(merge_request, @u_disabled)
merge_request.assignees.each { |assignee| should_email(assignee) }
should_email(@u_guest_custom)
should_email(@u_custom_global)
should_email(@u_participant_mentioned)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_not_email(@u_watcher)
should_not_email(@u_guest_watcher)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
describe 'triggers push_to_merge_request_email with corresponding email' do
let_it_be(:merge_request) { create(:merge_request, author: author, source_project: project) }
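# Commit doubles carrying only short_id and title are enough here: the mailer
# expectations below compare commits via commit_to_hash, which reads just those two attributes.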
def mock_commits(length)
Array.new(length) { |i| double(:commit, short_id: SecureRandom.hex(4), title: "This is commit #{i}") }
end
def commit_to_hash(commit)
{ short_id: commit.short_id, title: commit.title }
end
let(:existing_commits) { mock_commits(50) }
let(:expected_existing_commits) { [commit_to_hash(existing_commits.first), commit_to_hash(existing_commits.last)] }
before do
allow(::Notify).to receive(:push_to_merge_request_email).and_call_original
end
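# Each row pairs the number of pushed commits with the number the email should list;
# the displayed commits are capped at NEW_COMMIT_EMAIL_DISPLAY_LIMIT.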
where(:number_of_new_commits, :number_of_new_commits_displayed) do
limit = described_class::NEW_COMMIT_EMAIL_DISPLAY_LIMIT
[
[0, 0],
[limit - 2, limit - 2],
[limit - 1, limit - 1],
[limit, limit],
[limit + 1, limit],
[limit + 2, limit]
]
end
with_them do
let(:new_commits) { mock_commits(number_of_new_commits) }
let(:expected_new_commits) { new_commits.first(number_of_new_commits_displayed).map(&method(:commit_to_hash)) }
it 'triggers the corresponding mailer method with list of stripped commits' do
notification.push_to_merge_request(
merge_request, merge_request.author,
new_commits: new_commits, existing_commits: existing_commits
)
expect(Notify).to have_received(:push_to_merge_request_email).at_least(:once).with(
@subscriber.id, merge_request.id, merge_request.author.id, "subscribed",
new_commits: expected_new_commits, total_new_commits_count: number_of_new_commits,
existing_commits: expected_existing_commits, total_existing_commits_count: 50
)
end
end
context 'there is only one existing commit' do
let(:new_commits) { mock_commits(10) }
let(:expected_new_commits) { new_commits.map(&method(:commit_to_hash)) }
it 'triggers corresponding mailer method with only one existing commit' do
notification.push_to_merge_request(merge_request, merge_request.author, new_commits: new_commits, existing_commits: existing_commits.first(1))
expect(Notify).to have_received(:push_to_merge_request_email).at_least(:once).with(
@subscriber.id, merge_request.id, merge_request.author.id, "subscribed",
new_commits: expected_new_commits, total_new_commits_count: 10,
existing_commits: expected_existing_commits.first(1), total_existing_commits_count: 1
)
end
end
end
it_behaves_like 'participating notifications' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { merge_request }
let(:notification_trigger) { notification.push_to_merge_request(merge_request, @u_disabled) }
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.push_to_merge_request(merge_request, @u_disabled) }
end
end
describe '#relabel_merge_request' do
let(:group_label_1) { create(:group_label, group: group, title: 'Group Label 1', merge_requests: [merge_request]) }
let(:group_label_2) { create(:group_label, group: group, title: 'Group Label 2') }
let(:label_1) { create(:label, project: project, title: 'Label 1', merge_requests: [merge_request]) }
let(:label_2) { create(:label, project: project, title: 'Label 2') }
let!(:subscriber_to_group_label_1) { create(:user) { |u| group_label_1.toggle_subscription(u, project) } }
let!(:subscriber_1_to_group_label_2) { create(:user) { |u| group_label_2.toggle_subscription(u, project) } }
let!(:subscriber_2_to_group_label_2) { create(:user) { |u| group_label_2.toggle_subscription(u) } }
let!(:subscriber_to_group_label_2_on_another_project) { create(:user) { |u| group_label_2.toggle_subscription(u, another_project) } }
let!(:subscriber_to_label_1) { create(:user) { |u| label_1.toggle_subscription(u, project) } }
let!(:subscriber_to_label_2) { create(:user) { |u| label_2.toggle_subscription(u, project) } }
it "doesn't send email to anyone but subscribers of the given labels" do
notification.relabeled_merge_request(merge_request, [group_label_2, label_2], @u_disabled)
should_not_email(subscriber_to_label_1)
should_not_email(subscriber_to_group_label_1)
should_not_email(subscriber_to_group_label_2_on_another_project)
should_email(subscriber_1_to_group_label_2)
should_email(subscriber_2_to_group_label_2)
should_email(subscriber_to_label_2)
merge_request.assignees.each { |assignee| should_not_email(assignee) }
should_not_email(merge_request.author)
should_not_email(@u_watcher)
should_not_email(@u_participant_mentioned)
should_not_email(@subscriber)
should_not_email(@watcher_and_subscriber)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_lazy_participant)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.relabeled_merge_request(merge_request, [group_label_2, label_2], @u_disabled) }
end
end
describe '#removed_milestone on MergeRequest' do
let(:milestone) { create(:milestone, project: project, merge_requests: [merge_request]) }
let!(:subscriber_to_new_milestone) { create(:user) { |u| merge_request.toggle_subscription(u, project) } }
it_behaves_like 'altered milestone notification on merge request' do
before do
notification.removed_milestone(merge_request, merge_request.author)
end
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.removed_milestone(merge_request, merge_request.author) }
end
end
describe '#changed_milestone on MergeRequest' do
let(:new_milestone) { create(:milestone, project: project, merge_requests: [merge_request]) }
let!(:subscriber_to_new_milestone) { create(:user) { |u| merge_request.toggle_subscription(u, project) } }
it_behaves_like 'altered milestone notification on merge request' do
before do
notification.changed_milestone(merge_request, new_milestone, merge_request.author)
end
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.changed_milestone(merge_request, new_milestone, merge_request.author) }
end
end
describe '#merge_request_unmergeable' do
it "sends email to merge request author" do
notification.merge_request_unmergeable(merge_request)
should_email(merge_request.author)
expect(email_recipients.size).to eq(1)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.merge_request_unmergeable(merge_request) }
end
describe 'when merge_when_pipeline_succeeds is true' do
before do
merge_request.update!(
merge_when_pipeline_succeeds: true,
merge_user: create(:user)
)
end
it "sends email to merge request author and merge_user" do
notification.merge_request_unmergeable(merge_request)
should_email(merge_request.author)
should_email(merge_request.merge_user)
expect(email_recipients.size).to eq(2)
end
end
end
describe '#closed_merge_request' do
before do
update_custom_notification(:close_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:close_merge_request, @u_custom_global)
end
it do
notification.close_mr(merge_request, @u_disabled)
merge_request.assignees.each { |assignee| should_email(assignee) }
should_email(@u_watcher)
should_email(@u_guest_watcher)
should_email(@u_guest_custom)
should_email(@u_custom_global)
should_email(@u_participant_mentioned)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
it_behaves_like 'participating notifications' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { merge_request }
let(:notification_trigger) { notification.close_mr(merge_request, @u_disabled) }
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.close_mr(merge_request, @u_disabled) }
end
end
describe '#merged_merge_request' do
before do
update_custom_notification(:merge_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:merge_merge_request, @u_custom_global)
end
it do
notification.merge_mr(merge_request, @u_disabled)
merge_request.assignees.each { |assignee| should_email(assignee) }
should_email(@u_watcher)
should_email(@u_guest_watcher)
should_email(@u_guest_custom)
should_email(@u_custom_global)
should_email(@u_participant_mentioned)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
it "notifies the merger when the pipeline succeeds is true" do
merge_request.merge_when_pipeline_succeeds = true
notification.merge_mr(merge_request, @u_watcher)
should_email(@u_watcher)
end
it "does not notify the merger when the pipeline succeeds is false" do
merge_request.merge_when_pipeline_succeeds = false
notification.merge_mr(merge_request, @u_watcher)
should_not_email(@u_watcher)
end
it "notifies the merger when the pipeline succeeds is false but they've opted into notifications about their activity" do
merge_request.merge_when_pipeline_succeeds = false
@u_watcher.notified_of_own_activity = true
notification.merge_mr(merge_request, @u_watcher)
should_email(@u_watcher)
end
it_behaves_like 'participating notifications' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { merge_request }
let(:notification_trigger) { notification.merge_mr(merge_request, @u_disabled) }
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.merge_mr(merge_request, @u_disabled) }
end
end
describe '#reopen_merge_request' do
before do
update_custom_notification(:reopen_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:reopen_merge_request, @u_custom_global)
end
it do
notification.reopen_mr(merge_request, @u_disabled)
merge_request.assignees.each { |assignee| should_email(assignee) }
should_email(@u_watcher)
should_email(@u_participant_mentioned)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_email(@u_guest_watcher)
should_email(@u_guest_custom)
should_email(@u_custom_global)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
it_behaves_like 'participating notifications' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { merge_request }
let(:notification_trigger) { notification.reopen_mr(merge_request, @u_disabled) }
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.reopen_mr(merge_request, @u_disabled) }
end
end
describe "#resolve_all_discussions" do
it do
notification.resolve_all_discussions(merge_request, @u_disabled)
merge_request.assignees.each { |assignee| should_email(assignee) }
should_email(@u_watcher)
should_email(@u_participant_mentioned)
should_email(@subscriber)
should_email(@watcher_and_subscriber)
should_email(@u_guest_watcher)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
it_behaves_like 'participating notifications' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { merge_request }
let(:notification_trigger) { notification.resolve_all_discussions(merge_request, @u_disabled) }
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.resolve_all_discussions(merge_request, @u_disabled) }
end
end
describe '#merge_when_pipeline_succeeds' do
before do
update_custom_notification(:merge_when_pipeline_succeeds, @u_guest_custom, resource: project)
update_custom_notification(:merge_when_pipeline_succeeds, @u_custom_global)
end
it 'sends a notification that the merge will happen when the pipeline succeeds' do
notification.merge_when_pipeline_succeeds(merge_request, assignee)
should_email(merge_request.author)
should_email(@u_watcher)
should_email(@subscriber)
should_email(@u_guest_custom)
should_email(@u_custom_global)
should_not_email(@unsubscriber)
should_not_email(@u_disabled)
end
it 'does not send notification if the custom event is disabled' do
update_custom_notification(:merge_when_pipeline_succeeds, @u_guest_custom, resource: project, value: false)
update_custom_notification(:merge_when_pipeline_succeeds, @u_custom_global, resource: nil, value: false)
notification.merge_when_pipeline_succeeds(merge_request, assignee)
should_not_email(@u_guest_custom)
should_not_email(@u_custom_global)
end
it 'sends notification to participants even if the custom event is disabled' do
update_custom_notification(:merge_when_pipeline_succeeds, merge_request.author, resource: project, value: false)
update_custom_notification(:merge_when_pipeline_succeeds, @u_watcher, resource: project, value: false)
update_custom_notification(:merge_when_pipeline_succeeds, @subscriber, resource: project, value: false)
notification.merge_when_pipeline_succeeds(merge_request, assignee)
should_email(merge_request.author)
should_email(@u_watcher)
should_email(@subscriber)
end
it_behaves_like 'participating notifications' do
let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { merge_request }
let(:notification_trigger) { notification.merge_when_pipeline_succeeds(merge_request, @u_disabled) }
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.merge_when_pipeline_succeeds(merge_request, @u_disabled) }
end
end
describe '#review_requested_of_merge_request' do
let(:merge_request) { create(:merge_request, author: author, source_project: project, reviewers: [reviewer]) }
let(:mailer) { double }
let_it_be(:current_user) { create(:user) }
let_it_be(:reviewer) { create(:user) }
it 'sends email to reviewer', :aggregate_failures do
notification.review_requested_of_merge_request(merge_request, current_user, reviewer)
merge_request.reviewers.each { |reviewer| should_email(reviewer) }
should_not_email(merge_request.author)
should_not_email(@u_watcher)
should_not_email(@u_participant_mentioned)
should_not_email(@subscriber)
should_not_email(@watcher_and_subscriber)
should_not_email(@u_guest_watcher)
should_not_email(@u_guest_custom)
should_not_email(@u_custom_global)
should_not_email(@unsubscriber)
should_not_email(@u_participating)
should_not_email(@u_disabled)
should_not_email(@u_lazy_participant)
end
it 'delivers the email immediately' do
allow(Notify).to receive(:request_review_merge_request_email)
.with(Integer, Integer, Integer, String).and_return(mailer)
expect(mailer).to receive(:deliver_later).with({})
notification.review_requested_of_merge_request(merge_request, current_user, reviewer)
end
it 'adds "review requested" reason for new reviewer' do
notification.review_requested_of_merge_request(merge_request, current_user, reviewer)
merge_request.reviewers.each do |reviewer|
email = find_email_for(reviewer)
expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::REVIEW_REQUESTED)
end
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { merge_request }
let(:notification_trigger) { notification.review_requested_of_merge_request(merge_request, current_user, reviewer) }
end
end
end
describe 'Projects', :deliver_mails_inline do
before_all do
build_team(project)
reset_delivered_emails!
end
describe '#project_was_moved' do
context 'when notifications are disabled' do
before do
@u_custom_global.global_notification_setting.update!(moved_project: false)
end
it 'does not send a notification' do
notification.project_was_moved(project, "gitlab/gitlab")
should_not_email(@u_custom_global)
end
end
context 'with users at both project and group level' do
let(:maintainer) { create(:user) }
let(:developer) { create(:user) }
let(:group_owner) { create(:user) }
let(:group_maintainer) { create(:user) }
let(:group_developer) { create(:user) }
let(:blocked_user) { create(:user, :blocked) }
let(:invited_user) { create(:user) }
let!(:group) do
create(:group, :public) do |group|
project.group = group
project.save!
group.add_owner(group_owner)
group.add_maintainer(group_maintainer)
group.add_developer(group_developer)
# This is to check for dupes
group.add_maintainer(maintainer)
group.add_maintainer(blocked_user)
end
end
before do
project.add_maintainer(maintainer)
project.add_developer(developer)
project.add_maintainer(blocked_user)
reset_delivered_emails!
end
it 'notifies the expected users' do
notification.project_was_moved(project, "gitlab/gitlab")
should_email(@u_watcher)
should_email(@u_participating)
should_email(@u_lazy_participant)
should_email(@u_custom_global)
should_not_email(@u_guest_watcher)
should_not_email(@u_guest_custom)
should_not_email(@u_disabled)
should_email(maintainer)
should_email(group_owner)
should_email(group_maintainer)
should_not_email(group_developer)
should_not_email(developer)
should_not_email(blocked_user)
end
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { project }
let(:notification_trigger) { notification.project_was_moved(project, "gitlab/gitlab") }
end
context 'users not having access to the new location' do
it 'does not send email' do
old_user = create(:user)
ProjectAuthorization.create!(project: project, user: old_user, access_level: Gitlab::Access::GUEST)
build_group(project)
reset_delivered_emails!
notification.project_was_moved(project, "gitlab/gitlab")
should_email(@g_watcher)
should_email(@g_global_watcher)
should_email(project.creator)
should_not_email(old_user)
end
end
end
context 'user with notifications disabled' do
describe '#project_exported' do
it do
notification.project_exported(project, @u_disabled)
should_not_email_anyone
end
end
describe '#project_not_exported' do
it do
notification.project_not_exported(project, @u_disabled, ['error'])
should_not_email_anyone
end
end
end
context 'user with notifications enabled' do
describe '#project_exported' do
it do
notification.project_exported(project, @u_participating)
should_only_email(@u_participating)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { project }
let(:notification_trigger) { notification.project_exported(project, @u_participating) }
end
end
describe '#project_not_exported' do
it do
notification.project_not_exported(project, @u_participating, ['error'])
should_only_email(@u_participating)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { project }
let(:notification_trigger) { notification.project_not_exported(project, @u_participating, ['error']) }
end
end
end
end
describe '#invite_member_reminder' do
let_it_be(:group_member) { create(:group_member) }
subject { notification.invite_member_reminder(group_member, 'token', 0) }
it 'calls the Notify.invite_member_reminder method with the right params' do
expect(Notify).to receive(:member_invited_reminder_email).with('Group', group_member.id, 'token', 0).at_least(:once).and_call_original
subject
end
it 'sends exactly one email' do
subject
expect_delivery_jobs_count(1)
expect_enqueud_email('Group', group_member.id, 'token', 0, mail: 'member_invited_reminder_email')
end
end
describe '#new_instance_access_request', :deliver_mails_inline do
let_it_be(:user) { create(:user, :blocked_pending_approval) }
let_it_be(:admins) { create_list(:admin, 12, :with_sign_ins) }
subject { notification.new_instance_access_request(user) }
before do
reset_delivered_emails!
stub_application_setting(require_admin_approval_after_user_signup: true)
end
it 'sends notification only to a maximum of ten most recently active instance admins' do
ten_most_recently_active_instance_admins = User.admins.active.sort_by(&:current_sign_in_at).last(10)
subject
should_only_email(*ten_most_recently_active_instance_admins)
end
end
describe '#user_admin_rejection', :deliver_mails_inline do
let_it_be(:user) { create(:user, :blocked_pending_approval) }
before do
reset_delivered_emails!
end
it 'sends the user a rejection email' do
notification.user_admin_rejection(user.name, user.email)
should_only_email(user)
end
end
describe '#user_deactivated', :deliver_mails_inline do
let_it_be(:user) { create(:user) }
it 'sends the user an email' do
notification.user_deactivated(user.name, user.notification_email_or_default)
should_only_email(user)
end
end
describe 'GroupMember', :deliver_mails_inline do
let(:added_user) { create(:user) }
describe '#new_access_request' do
context 'recipients' do
let(:maintainer) { create(:user) }
let(:owner) { create(:user) }
let(:developer) { create(:user) }
let!(:group) do
create(:group, :public) do |group|
group.add_owner(owner)
group.add_maintainer(maintainer)
group.add_developer(developer)
end
end
before do
reset_delivered_emails!
end
it 'sends notification only to group owners' do
group.request_access(added_user)
should_email(owner)
should_not_email(maintainer)
should_not_email(developer)
end
it_behaves_like 'group emails are disabled' do
let(:notification_target) { group }
let(:notification_trigger) { group.request_access(added_user) }
end
end
it_behaves_like 'sends notification only to a maximum of ten, most recently active group owners' do
let(:group) { create(:group, :public) }
let(:notification_trigger) { group.request_access(added_user) }
end
end
describe '#decline_invite' do
let(:creator) { create(:user) }
let(:group) { create(:group) }
let(:member) { create(:user) }
before do
group.add_owner(creator)
group.add_developer(member, creator)
end
it_behaves_like 'declines the invite' do
let(:source) { group }
end
end
describe '#new_group_member' do
let(:group) { create(:group) }
it 'sends a notification' do
group.add_guest(added_user)
should_only_email(added_user)
end
describe 'when notifications are disabled' do
before do
create_global_setting_for(added_user, :disabled)
end
it 'does not send a notification' do
group.add_guest(added_user)
should_not_email_anyone
end
end
it_behaves_like 'group emails are disabled' do
let(:notification_target) { group }
let(:notification_trigger) { group.add_guest(added_user) }
end
end
describe '#updated_group_member_expiration' do
let_it_be(:group_member) { create(:group_member) }
it 'emails the user that their group membership expiry has changed' do
expect_next_instance_of(NotificationService) do |notification|
allow(notification).to receive(:updated_group_member_expiration).with(group_member)
end
group_member.update!(expires_at: 5.days.from_now)
end
end
end
describe 'ProjectMember', :deliver_mails_inline do
let(:added_user) { create(:user) }
describe '#new_access_request' do
context 'for a project in a user namespace' do
context 'recipients' do
let(:developer) { create(:user) }
let(:maintainer) { create(:user) }
let!(:project) do
create(:project, :public) do |project|
project.add_developer(developer)
project.add_maintainer(maintainer)
end
end
before do
reset_delivered_emails!
end
it 'sends notification only to project maintainers' do
project.request_access(added_user)
should_email(maintainer)
should_not_email(developer)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { project }
let(:notification_trigger) { project.request_access(added_user) }
end
end
it_behaves_like 'sends notification only to a maximum of ten, most recently active project maintainers' do
let(:notification_trigger) { project.request_access(added_user) }
end
end
context 'for a project in a group' do
let(:group_owner) { create(:user) }
let(:group) { create(:group).tap { |g| g.add_owner(group_owner) } }
context 'when the project has no maintainers' do
context 'when the group has at least one owner' do
let!(:project) { create(:project, :public, namespace: group) }
before do
reset_delivered_emails!
end
context 'recipients' do
it 'sends notifications to the group owners' do
project.request_access(added_user)
should_only_email(group_owner)
end
end
it_behaves_like 'sends notification only to a maximum of ten, most recently active group owners' do
let(:group) { create(:group, :public) }
let(:notification_trigger) { project.request_access(added_user) }
end
end
context 'when the group does not have any owners' do
let(:group) { create(:group) }
let!(:project) { create(:project, :public, namespace: group) }
context 'recipients' do
before do
reset_delivered_emails!
end
it 'does not send any notifications' do
project.request_access(added_user)
should_not_email_anyone
end
end
end
end
context 'when the project has maintainers' do
let(:maintainer) { create(:user) }
let(:developer) { create(:user) }
let!(:project) do
create(:project, :public, namespace: group) do |project|
project.add_maintainer(maintainer)
project.add_developer(developer)
end
end
before do
reset_delivered_emails!
end
context 'recipients' do
it 'sends notifications only to project maintainers' do
project.request_access(added_user)
should_email(maintainer)
should_not_email(developer)
should_not_email(group_owner)
end
end
it_behaves_like 'sends notification only to a maximum of ten, most recently active project maintainers' do
let(:project) { create(:project, :public, namespace: group) }
let(:notification_trigger) { project.request_access(added_user) }
end
end
end
end
describe '#decline_invite' do
let(:member) { create(:user) }
before do
project.add_developer(member, current_user: project.first_owner)
end
it_behaves_like 'declines the invite' do
let(:source) { project }
end
end
describe '#new_project_member' do
it do
create_member!
should_only_email(added_user)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { project }
let(:notification_trigger) { create_member! }
end
context 'when notifications are disabled' do
before do
create_global_setting_for(added_user, :disabled)
end
it do
create_member!
should_not_email_anyone
end
end
end
describe '#member_about_to_expire' do
let_it_be(:group_member) { create(:group_member, expires_at: 7.days.from_now.to_date) }
let_it_be(:project_member) { create(:project_member, expires_at: 7.days.from_now.to_date) }
context "with group member" do
it 'emails the user that their group membership will be expired' do
notification.member_about_to_expire(group_member)
should_email(group_member.user)
end
end
context "with project member" do
it 'emails the user that their project membership will be expired' do
notification.member_about_to_expire(project_member)
should_email(project_member.user)
end
end
end
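# Creating the membership record is itself what triggers the new-member
# notification exercised in '#new_project_member' above.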
def create_member!
create(:project_member, user: added_user, project: project)
end
end
context 'guest user in private project', :deliver_mails_inline do
let(:private_project) { create(:project, :private) }
let(:guest) { create(:user) }
let(:developer) { create(:user) }
let(:merge_request) { create(:merge_request, source_project: private_project, assignees: [assignee]) }
let(:merge_request1) { create(:merge_request, source_project: private_project, assignees: [assignee], description: "cc @#{guest.username}") }
let(:note) { create(:note, noteable: merge_request, project: private_project) }
before do
private_project.add_developer(assignee)
private_project.add_developer(developer)
private_project.add_guest(guest)
ActionMailer::Base.deliveries.clear
end
it 'filters out guests when new note is created' do
expect(SentNotification).to receive(:record).with(merge_request, any_args).once
notification.new_note(note)
should_not_email(guest)
should_email(assignee)
end
it 'filters out guests when new merge request is created' do
notification.new_merge_request(merge_request1, developer)
should_not_email(guest)
should_email(assignee)
end
it 'filters out guests when merge request is closed' do
notification.close_mr(merge_request, developer)
should_not_email(guest)
should_email(assignee)
end
it 'filters out guests when merge request is reopened' do
notification.reopen_mr(merge_request, developer)
should_not_email(guest)
should_email(assignee)
end
it 'filters out guests when merge request is merged' do
notification.merge_mr(merge_request, developer)
should_not_email(guest)
should_email(assignee)
end
end
describe 'Pipelines', :deliver_mails_inline do
describe '#pipeline_finished' do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:u_member) { create(:user) }
let_it_be(:u_watcher) { create_user_with_notification(:watch, 'watcher') }
let_it_be(:u_custom_notification_unset) do
create_user_with_notification(:custom, 'custom_unset')
end
let_it_be(:u_custom_notification_enabled) do
user = create_user_with_notification(:custom, 'custom_enabled')
update_custom_notification(:success_pipeline, user, resource: project)
update_custom_notification(:failed_pipeline, user, resource: project)
update_custom_notification(:fixed_pipeline, user, resource: project)
user
end
let_it_be(:u_custom_notification_disabled) do
user = create_user_with_notification(:custom, 'custom_disabled')
update_custom_notification(:success_pipeline, user, resource: project, value: false)
update_custom_notification(:failed_pipeline, user, resource: project, value: false)
update_custom_notification(:fixed_pipeline, user, resource: project, value: false)
user
end
let(:commit) { project.commit }
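# Builds a pipeline in the given status for this project, attributed to the
# given user and pointing at the project's HEAD commit.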
def create_pipeline(user, status)
create(
:ci_pipeline, status,
project: project,
user: user,
ref: 'refs/heads/master',
sha: commit.id,
before_sha: '00000000'
)
end
before_all do
project.add_maintainer(u_member)
project.add_maintainer(u_watcher)
project.add_maintainer(u_custom_notification_unset)
project.add_maintainer(u_custom_notification_enabled)
project.add_maintainer(u_custom_notification_disabled)
end
before do
reset_delivered_emails!
end
context 'with a successful pipeline' do
context 'when the creator has default settings' do
before do
pipeline = create_pipeline(u_member, :success)
notification.pipeline_finished(pipeline)
end
it 'notifies nobody' do
should_not_email_anyone
end
end
context 'when the creator has watch set' do
before do
pipeline = create_pipeline(u_watcher, :success)
notification.pipeline_finished(pipeline)
end
it 'notifies nobody' do
should_not_email_anyone
end
end
context 'when the creator has custom notifications, but without any set' do
before do
pipeline = create_pipeline(u_custom_notification_unset, :success)
notification.pipeline_finished(pipeline)
end
it 'notifies nobody' do
should_not_email_anyone
end
end
context 'when the creator has custom notifications disabled' do
before do
pipeline = create_pipeline(u_custom_notification_disabled, :success)
notification.pipeline_finished(pipeline)
end
it 'notifies nobody' do
should_not_email_anyone
end
end
context 'when the creator has custom notifications enabled' do
let(:pipeline) { create_pipeline(u_custom_notification_enabled, :success) }
it 'emails only the creator' do
notification.pipeline_finished(pipeline)
should_only_email(u_custom_notification_enabled)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { pipeline }
let(:notification_trigger) { notification.pipeline_finished(pipeline) }
end
context 'when the creator has group notification email set' do
let(:group_notification_email) { '[email protected]' }
before do
group = create(:group)
project.update!(group: group)
create(:email, :confirmed, user: u_custom_notification_enabled, email: group_notification_email)
create(:notification_setting, user: u_custom_notification_enabled, source: group, notification_email: group_notification_email)
end
it 'sends to group notification email' do
notification.pipeline_finished(pipeline)
expect(email_recipients.first).to eq(group_notification_email)
end
end
end
end
context 'with a failed pipeline' do
context 'when the creator has no custom notification set' do
let(:pipeline) { create_pipeline(u_member, :failed) }
it 'emails only the creator' do
notification.pipeline_finished(pipeline)
should_only_email(u_member)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { pipeline }
let(:notification_trigger) { notification.pipeline_finished(pipeline) }
end
context 'when the creator has group notification email set' do
let(:group_notification_email) { '[email protected]' }
before do
group = create(:group)
project.update!(group: group)
create(:email, :confirmed, user: u_member, email: group_notification_email)
create(:notification_setting, user: u_member, source: group, notification_email: group_notification_email)
end
it 'sends to group notification email' do
notification.pipeline_finished(pipeline)
expect(email_recipients.first).to eq(group_notification_email)
end
end
end
context 'when the creator has watch set' do
before do
pipeline = create_pipeline(u_watcher, :failed)
notification.pipeline_finished(pipeline)
end
it 'emails only the creator' do
should_only_email(u_watcher)
end
end
context 'when the creator has custom notifications, but without any set' do
before do
pipeline = create_pipeline(u_custom_notification_unset, :failed)
notification.pipeline_finished(pipeline)
end
it 'emails only the creator' do
should_only_email(u_custom_notification_unset)
end
end
context 'when the creator has custom notifications disabled' do
before do
pipeline = create_pipeline(u_custom_notification_disabled, :failed)
notification.pipeline_finished(pipeline)
end
it 'notifies nobody' do
should_not_email_anyone
end
end
context 'when the creator has custom notifications set' do
before do
pipeline = create_pipeline(u_custom_notification_enabled, :failed)
notification.pipeline_finished(pipeline)
end
it 'emails only the creator' do
should_only_email(u_custom_notification_enabled)
end
end
context 'when the creator has no read_build access' do
before do
pipeline = create_pipeline(u_member, :failed)
project.update!(public_builds: false)
project.team.truncate
notification.pipeline_finished(pipeline)
end
it 'does not send emails', :sidekiq_inline do
should_not_email_anyone
end
end
end
context 'with a fixed pipeline' do
let(:ref_status) { 'fixed' }
context 'when the creator has no custom notification set' do
let(:pipeline) { create_pipeline(u_member, :success) }
it 'emails only the creator' do
notification.pipeline_finished(pipeline, ref_status: ref_status)
should_only_email(u_member)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { pipeline }
let(:notification_trigger) { notification.pipeline_finished(pipeline, ref_status: ref_status) }
end
context 'when the creator has group notification email set' do
let(:group_notification_email) { '[email protected]' }
before do
group = create(:group)
project.update!(group: group)
create(:email, :confirmed, user: u_member, email: group_notification_email)
create(:notification_setting, user: u_member, source: group, notification_email: group_notification_email)
end
it 'sends to group notification email' do
notification.pipeline_finished(pipeline, ref_status: ref_status)
expect(email_recipients.first).to eq(group_notification_email)
end
end
end
context 'when the creator has watch set' do
before do
pipeline = create_pipeline(u_watcher, :success)
notification.pipeline_finished(pipeline, ref_status: ref_status)
end
it 'emails only the creator' do
should_only_email(u_watcher)
end
end
context 'when the creator has custom notifications, but without any set' do
before do
pipeline = create_pipeline(u_custom_notification_unset, :success)
notification.pipeline_finished(pipeline, ref_status: ref_status)
end
it 'emails only the creator' do
should_only_email(u_custom_notification_unset)
end
end
context 'when the creator has custom notifications disabled' do
before do
pipeline = create_pipeline(u_custom_notification_disabled, :success)
notification.pipeline_finished(pipeline, ref_status: ref_status)
end
it 'notifies nobody' do
should_not_email_anyone
end
end
context 'when the creator has custom notifications set' do
it 'emails only the creator' do
pipeline = create_pipeline(u_custom_notification_enabled, :success)
notification.pipeline_finished(pipeline, ref_status: ref_status)
should_only_email(u_custom_notification_enabled)
end
end
end
end
end
describe 'Pages domains', :deliver_mails_inline do
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:domain, reload: true) { create(:pages_domain, project: project) }
let_it_be(:u_blocked) { create(:user, :blocked) }
let_it_be(:u_silence) { create_user_with_notification(:disabled, 'silent', project) }
let_it_be(:u_owner) { project.first_owner }
let_it_be(:u_maintainer1) { create(:user) }
let_it_be(:u_maintainer2) { create(:user) }
let_it_be(:u_developer) { create(:user) }
before do
project.add_maintainer(u_blocked)
project.add_maintainer(u_silence)
project.add_maintainer(u_maintainer1)
project.add_maintainer(u_maintainer2)
project.add_developer(u_developer)
reset_delivered_emails!
end
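# Every Pages domain notification shares the same recipient rules, so
# identical examples are generated for each mailer method below.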
%i[
pages_domain_enabled
pages_domain_disabled
pages_domain_verification_succeeded
pages_domain_verification_failed
pages_domain_auto_ssl_failed
].each do |sym|
describe "##{sym}" do
subject(:notify!) { notification.send(sym, domain) }
it 'emails current watching maintainers and owners' do
expect(Notify).to receive(:"#{sym}_email").at_least(:once).and_call_original
notify!
should_only_email(u_maintainer1, u_maintainer2, u_owner)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { domain }
let(:notification_trigger) { notify! }
end
it 'emails nobody if the project is missing' do
domain.project = nil
notify!
should_not_email_anyone
end
end
end
end
context 'Auto DevOps notifications', :deliver_mails_inline do
describe '#autodevops_disabled' do
let(:owner) { create(:user) }
let(:namespace) { create(:namespace, owner: owner) }
let(:project) { create(:project, :repository, :auto_devops, namespace: namespace) }
let(:pipeline_user) { create(:user) }
let(:pipeline) { create(:ci_pipeline, :failed, project: project, user: pipeline_user) }
it 'emails project owner and user that triggered the pipeline' do
project.add_developer(pipeline_user)
notification.autodevops_disabled(pipeline, [owner.email, pipeline_user.email])
should_email(owner, times: 1) # Once for the Auto DevOps disabled notification.
should_email(pipeline_user, times: 2) # Once for being added as a developer, once for the Auto DevOps disabled notification.
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { project }
let(:notification_trigger) { notification.autodevops_disabled(pipeline, [owner.email, pipeline_user.email]) }
end
end
end
describe 'Repository cleanup', :deliver_mails_inline do
let(:user) { create(:user) }
describe '#repository_cleanup_success' do
it 'emails the specified user only' do
notification.repository_cleanup_success(project, user)
should_email(user)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { project }
let(:notification_trigger) { notification.repository_cleanup_success(project, user) }
end
end
describe '#repository_cleanup_failure' do
it 'emails the specified user only' do
notification.repository_cleanup_failure(project, user, 'Some error')
should_email(user)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { project }
let(:notification_trigger) { notification.repository_cleanup_failure(project, user, 'Some error') }
end
end
end
context 'Remote mirror notifications', :deliver_mails_inline do
describe '#remote_mirror_update_failed' do
let(:remote_mirror) { create(:remote_mirror, project: project) }
let(:u_blocked) { create(:user, :blocked) }
let(:u_silence) { create_user_with_notification(:disabled, 'silent-maintainer', project) }
let(:u_owner) { project.first_owner }
let(:u_maintainer1) { create(:user) }
let(:u_maintainer2) { create(:user) }
let(:u_developer) { create(:user) }
before do
project.add_maintainer(u_blocked)
project.add_maintainer(u_silence)
project.add_maintainer(u_maintainer1)
project.add_maintainer(u_maintainer2)
project.add_developer(u_developer)
reset_delivered_emails!
end
it 'emails current watching maintainers and owners' do
notification.remote_mirror_update_failed(remote_mirror)
should_only_email(u_maintainer1, u_maintainer2, u_owner)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { project }
let(:notification_trigger) { notification.remote_mirror_update_failed(remote_mirror) }
end
end
end
context 'with external authorization service', :deliver_mails_inline do
let(:issue) { create(:issue) }
let(:project) { issue.project }
let(:note) { create(:note, noteable: issue, project: project) }
let(:member) { create(:user) }
subject { described_class.new }
before do
project.add_maintainer(member)
member.global_notification_setting.update!(level: :watch)
end
it 'sends email when the service is not enabled' do
expect(Notify).to receive(:new_issue_email).at_least(:once).with(member.id, issue.id, nil).and_call_original
subject.new_issue(issue, member)
end
context 'when the service is enabled' do
before do
enable_external_authorization_service_check
end
it 'does not send an email' do
expect(Notify).not_to receive(:new_issue_email)
subject.new_issue(issue, member)
end
context 'with admin user' do
before do
member.update!(admin: true)
end
context 'when admin mode is enabled', :enable_admin_mode do
it 'still delivers email to admins' do
expect(Notify).to receive(:new_issue_email).at_least(:once).with(member.id, issue.id, nil).and_call_original
subject.new_issue(issue, member)
end
end
context 'when admin mode is disabled' do
it 'does not send an email' do
expect(Notify).not_to receive(:new_issue_email)
subject.new_issue(issue, member)
end
end
end
end
end
describe '#prometheus_alerts_fired' do
let_it_be(:project) { create(:project) }
let_it_be(:master) { create(:user) }
let_it_be(:developer) { create(:user) }
let_it_be(:alert) { create(:alert_management_alert, project: project) }
before do
project.add_maintainer(master)
end
it 'sends the email to owners and masters' do
expect(Notify).to receive(:prometheus_alert_fired_email).with(project, master, alert).and_call_original
expect(Notify).to receive(:prometheus_alert_fired_email).with(project, project.first_owner, alert).and_call_original
expect(Notify).not_to receive(:prometheus_alert_fired_email).with(project, developer, alert)
subject.prometheus_alerts_fired(project, [alert])
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { project }
let(:notification_trigger) { subject.prometheus_alerts_fired(project, [alert]) }
around do |example|
perform_enqueued_jobs { example.run }
end
end
end
describe '#new_review' do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:user2) { create(:user) }
let(:reviewer) { create(:user) }
let(:merge_request) { create(:merge_request, source_project: project, assignees: [user, user2], author: create(:user)) }
let(:review) { create(:review, merge_request: merge_request, project: project, author: reviewer) }
let(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, author: reviewer, review: review) }
before do
build_team(review.project)
add_users(review.project)
add_user_subscriptions(merge_request)
project.add_maintainer(merge_request.author)
project.add_maintainer(reviewer)
merge_request.assignees.each { |assignee| project.add_maintainer(assignee) }
create(
:diff_note_on_merge_request,
project: project,
noteable: merge_request,
author: reviewer,
review: review,
note: "cc @mention"
)
end
it 'sends emails' do
expect(Notify).not_to receive(:new_review_email).with(review.author.id, review.id)
expect(Notify).not_to receive(:new_review_email).with(@unsubscriber.id, review.id)
merge_request.assignee_ids.each do |assignee_id|
expect(Notify).to receive(:new_review_email).with(assignee_id, review.id).and_call_original
end
expect(Notify).to receive(:new_review_email).with(merge_request.author.id, review.id).and_call_original
expect(Notify).to receive(:new_review_email).with(@u_watcher.id, review.id).and_call_original
expect(Notify).to receive(:new_review_email).with(@u_mentioned.id, review.id).and_call_original
expect(Notify).to receive(:new_review_email).with(@subscriber.id, review.id).and_call_original
expect(Notify).to receive(:new_review_email).with(@watcher_and_subscriber.id, review.id).and_call_original
expect(Notify).to receive(:new_review_email).with(@subscribed_participant.id, review.id).and_call_original
subject.new_review(review)
end
it_behaves_like 'project emails are disabled' do
let(:notification_target) { review }
let(:notification_trigger) { subject.new_review(review) }
around do |example|
perform_enqueued_jobs { example.run }
end
end
end
describe '#inactive_project_deletion_warning' do
let_it_be(:deletion_date) { Date.current }
let_it_be(:project) { create(:project) }
let_it_be(:maintainer) { create(:user) }
let_it_be(:developer) { create(:user) }
before do
project.add_maintainer(maintainer)
end
subject { notification.inactive_project_deletion_warning(project, deletion_date) }
it "sends email to project owners and maintainers" do
expect { subject }.to have_enqueued_email(
project,
maintainer,
deletion_date,
mail: "inactive_project_deletion_warning_email"
)
expect { subject }.not_to have_enqueued_email(
project,
developer,
deletion_date,
mail: "inactive_project_deletion_warning_email"
)
end
end
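# Shared setup helpers for the examples above.
# build_team seeds the project with maintainers covering every global notification level
# (watch, participating, disabled, mention, custom), plus the guest watcher/custom users
# created via create_user_with_notification.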
def build_team(project)
@u_watcher = create_global_setting_for(create(:user), :watch)
@u_participating = create_global_setting_for(create(:user), :participating)
@u_participant_mentioned = create_global_setting_for(create(:user, username: 'participant'), :participating)
@u_disabled = create_global_setting_for(create(:user), :disabled)
@u_mentioned = create_global_setting_for(create(:user, username: 'mention'), :mention)
@u_committer = create(:user, username: 'committer')
@u_not_mentioned = create_global_setting_for(create(:user, username: 'regular'), :participating)
@u_outsider_mentioned = create(:user, username: 'outsider')
@u_custom_global = create_global_setting_for(create(:user, username: 'custom_global'), :custom)
# User who is a participant by default:
# this user has no record in the notification settings table
# and should be treated with a :participating notification level
@u_lazy_participant = create(:user, username: 'lazy-participant')
@u_guest_watcher = create_user_with_notification(:watch, 'guest_watching')
@u_guest_custom = create_user_with_notification(:custom, 'guest_custom')
project.add_maintainer(@u_watcher)
project.add_maintainer(@u_participating)
project.add_maintainer(@u_participant_mentioned)
project.add_maintainer(@u_disabled)
project.add_maintainer(@u_mentioned)
project.add_maintainer(@u_committer)
project.add_maintainer(@u_not_mentioned)
project.add_maintainer(@u_lazy_participant)
project.add_maintainer(@u_custom_global)
end
# Users in the project's group but not part of project's team
# with different notification settings
def build_group(project, visibility: :public)
group = create_nested_group(visibility)
project.update!(namespace_id: group.id)
# Group member: global=disabled, group=watch
@g_watcher ||= create_user_with_notification(:watch, 'group_watcher', project.group)
@g_watcher.notification_settings_for(nil).disabled!
# Group member: global=watch, group=global
@g_global_watcher ||= create_global_setting_for(create(:user), :watch)
group.add_members([@g_watcher, @g_global_watcher], :maintainer)
group
end
def create_nested_group(visibility)
parent_group = create(:group, visibility)
child_group = create(:group, visibility, parent: parent_group)
# Parent group member: global=disabled, parent_group=watch, child_group=global
@pg_watcher ||= create_user_with_notification(:watch, 'parent_group_watcher', parent_group)
@pg_watcher.notification_settings_for(nil).disabled!
# Parent group member: global=global, parent_group=disabled, child_group=global
@pg_disabled ||= create_user_with_notification(:disabled, 'parent_group_disabled', parent_group)
@pg_disabled.notification_settings_for(nil).global!
# Parent group member: global=global, parent_group=mention, child_group=global
@pg_mention ||= create_user_with_notification(:mention, 'parent_group_mention', parent_group)
@pg_mention.notification_settings_for(nil).global!
# Parent group member: global=global, parent_group=participating, child_group=global
@pg_participant ||= create_user_with_notification(:participating, 'parent_group_participant', parent_group)
@pg_participant.notification_settings_for(nil).global!
child_group
end
def add_member_for_parent_group(user, project)
project.reload
project.group.parent.add_maintainer(user)
end
def should_email_nested_group_user(user, times: 1, recipients: email_recipients)
should_email(user, times: times, recipients: recipients)
end
def should_not_email_nested_group_user(user, recipients: email_recipients)
should_not_email(user, recipients: recipients)
end
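# Project members with explicit subscription states; their subscriptions are created in #add_user_subscriptions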
def add_users(project)
@subscriber = create :user
@unsubscriber = create :user
@unsubscribed_mentioned = create :user, username: 'unsubscribed_mentioned'
@subscribed_participant = create_global_setting_for(create(:user, username: 'subscribed_participant'), :participating)
@watcher_and_subscriber = create_global_setting_for(create(:user), :watch)
project.add_maintainer(@subscribed_participant)
project.add_maintainer(@subscriber)
project.add_maintainer(@unsubscriber)
project.add_maintainer(@watcher_and_subscriber)
project.add_maintainer(@unsubscribed_mentioned)
end
def add_user_subscriptions(issuable)
issuable.subscriptions.create!(user: @unsubscribed_mentioned, project: project, subscribed: false)
issuable.subscriptions.create!(user: @subscriber, project: project, subscribed: true)
issuable.subscriptions.create!(user: @subscribed_participant, project: project, subscribed: true)
issuable.subscriptions.create!(user: @unsubscriber, project: project, subscribed: false)
# Make the watcher a subscriber to detect dupes
issuable.subscriptions.create!(user: @watcher_and_subscriber, project: project, subscribed: true)
end
def expectation_args_for_user(user)
[user, *anything_args]
end
end
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class ResetProjectCacheService < BaseService
def execute
@project.increment!(:jobs_cache_index)
end
end
```
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ResetProjectCacheService, feature_category: :groups_and_projects do
let(:project) { create(:project) }
let(:user) { create(:user) }
subject { described_class.new(project, user).execute }
context 'when project cache_index is nil' do
before do
project.jobs_cache_index = nil
end
it 'sets project cache_index to one' do
expect { subject }.to change { project.reload.jobs_cache_index }.from(nil).to(1)
end
end
context 'when project cache_index is a numeric value' do
before do
project.update!(jobs_cache_index: 1)
end
it 'increments project cache index' do
expect { subject }.to change { project.reload.jobs_cache_index }.by(1)
end
end
end