Account archive download (#6460)
* Fix #201: Account archive download
* Export actor and private key in the archive
* Optimize BackupService
  - Add conversation to the cached associations of status, because it was forgotten and is a source of N+1 queries
  - Explicitly call GC between batches of fetched records (model class allocations are the worst offender)
  - Stream media files into the tar in 1 MB chunks (do not allocate a media file of up to 8 MB as a string in memory)
  - Use #bytesize instead of #size to calculate the file size for JSON (fixes a FileOverflow error; illustrated in the sketch below)
  - Segment media into subfolders by status ID, because GIF-to-MP4 media are all named "media.mp4"
* Keep the uniquely generated filename in Paperclip::GifTranscoder
* Ensure dumped files do not overwrite each other by maintaining directory partitions
* Give tar archives a good name
* Add a scheduler to remove week-old backups
* Fix code style issues
parent 9ff2739fb5
commit f6884555d7
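A note on the #bytesize bullet above: Gem::Package::TarWriter#add_file_simple is told the entry size up front, and String#size counts characters while String#bytesize counts bytes, so a JSON document containing multi-byte UTF-8 (statuses with emoji or non-Latin text) declared with #size is smaller than what actually gets written and overflows the tar entry. A minimal illustration, not part of the commit:

# Character count vs. byte count for a UTF-8 JSON string.
json = '{"content":"héllo wörld 🎉"}'

json.size     # => 27 characters
json.bytesize # => 32 bytes, the value TarWriter's size argument actually needs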
@@ -1,11 +1,23 @@
# frozen_string_literal: true

class Settings::ExportsController < ApplicationController
  include Authorization

  layout 'admin'

  before_action :authenticate_user!

  def show
    @export  = Export.new(current_account)
    @backups = current_user.backups
  end

  def create
    authorize :backup, :create?

    backup = current_user.backups.create!
    BackupWorker.perform_async(backup.id)

    redirect_to settings_export_path
  end
end
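A hypothetical controller spec for the new create action (not part of this commit; it assumes Devise's sign_in test helper, Sidekiq's fake testing mode, and a user fabricator already present in the codebase):

require 'rails_helper'
require 'sidekiq/testing'

RSpec.describe Settings::ExportsController, type: :controller do
  it 'queues a backup and redirects back to the export page' do
    user = Fabricate(:user) # assumed existing fabricator
    sign_in user, scope: :user

    Sidekiq::Testing.fake! do
      # create! persists a Backup row and enqueues BackupWorker with its id
      expect { post :create }.to change(BackupWorker.jobs, :size).by(1)
    end

    expect(user.backups.count).to eq 1
    expect(response).to redirect_to(settings_export_path)
  end
end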
(binary diff: two new image assets, 205 B and 271 B)
@@ -0,0 +1,22 @@
# frozen_string_literal: true
# == Schema Information
#
# Table name: backups
#
#  id                :integer          not null, primary key
#  user_id           :integer
#  dump_file_name    :string
#  dump_content_type :string
#  dump_file_size    :integer
#  dump_updated_at   :datetime
#  processed         :boolean          default(FALSE), not null
#  created_at        :datetime         not null
#  updated_at        :datetime         not null
#

class Backup < ApplicationRecord
  belongs_to :user, inverse_of: :backups

  has_attached_file :dump
  do_not_validate_attachment_file_type :dump
end
@@ -0,0 +1,9 @@
# frozen_string_literal: true

class BackupPolicy < ApplicationPolicy
  MIN_AGE = 1.week

  def create?
    user_signed_in? && current_user.backups.where('created_at >= ?', MIN_AGE.ago).count.zero?
  end
end
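The policy allows at most one archive request per week. A hypothetical console session illustrating the check it performs (names and results are illustrative, not from the commit):

user = User.first # any signed-in user

user.backups.where('created_at >= ?', 1.week.ago).count.zero?
# => true    no backup requested in the last week, so create? passes

user.backups.create!
user.backups.where('created_at >= ?', 1.week.ago).count.zero?
# => false   a second request within the same week fails authorize :backup, :create?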
@@ -0,0 +1,128 @@
# frozen_string_literal: true

require 'rubygems/package'

class BackupService < BaseService
  attr_reader :account, :backup, :collection

  def call(backup)
    @backup  = backup
    @account = backup.user.account

    build_json!
    build_archive!
  end

  private

  def build_json!
    @collection = serialize(collection_presenter, ActivityPub::CollectionSerializer)

    account.statuses.with_includes.find_in_batches do |statuses|
      statuses.each do |status|
        item = serialize(status, ActivityPub::ActivitySerializer)
        item.delete(:'@context')

        unless item[:type] == 'Announce' || item[:object][:attachment].blank?
          item[:object][:attachment].each do |attachment|
            attachment[:url] = Addressable::URI.parse(attachment[:url]).path.gsub(/\A\/system\//, '')
          end
        end

        @collection[:orderedItems] << item
      end

      GC.start
    end
  end

  def build_archive!
    tmp_file = Tempfile.new(%w(archive .tar.gz))

    File.open(tmp_file, 'wb') do |file|
      Zlib::GzipWriter.wrap(file) do |gz|
        Gem::Package::TarWriter.new(gz) do |tar|
          dump_media_attachments!(tar)
          dump_outbox!(tar)
          dump_actor!(tar)
        end
      end
    end

    archive_filename = ['archive', Time.now.utc.strftime('%Y%m%d%H%M%S'), SecureRandom.hex(2)].join('-') + '.tar.gz'

    @backup.dump      = ActionDispatch::Http::UploadedFile.new(tempfile: tmp_file, filename: archive_filename)
    @backup.processed = true
    @backup.save!
  ensure
    tmp_file.close
    tmp_file.unlink
  end

  def dump_media_attachments!(tar)
    MediaAttachment.attached.where(account: account).find_in_batches do |media_attachments|
      media_attachments.each do |m|
        download_to_tar(tar, m.file, m.file.path)
      end

      GC.start
    end
  end

  def dump_outbox!(tar)
    json = Oj.dump(collection)

    tar.add_file_simple('outbox.json', 0o444, json.bytesize) do |io|
      io.write(json)
    end
  end

  def dump_actor!(tar)
    actor = serialize(account, ActivityPub::ActorSerializer)

    actor[:icon][:url]  = 'avatar' + File.extname(actor[:icon][:url])  if actor[:icon]
    actor[:image][:url] = 'header' + File.extname(actor[:image][:url]) if actor[:image]

    download_to_tar(tar, account.avatar, 'avatar' + File.extname(account.avatar.path)) if account.avatar.exists?
    download_to_tar(tar, account.header, 'header' + File.extname(account.header.path)) if account.header.exists?

    json = Oj.dump(actor)

    tar.add_file_simple('actor.json', 0o444, json.bytesize) do |io|
      io.write(json)
    end

    tar.add_file_simple('key.pem', 0o444, account.private_key.bytesize) do |io|
      io.write(account.private_key)
    end
  end

  def collection_presenter
    ActivityPub::CollectionPresenter.new(
      id: account_outbox_url(account),
      type: :ordered,
      size: account.statuses_count,
      items: []
    )
  end

  def serialize(object, serializer)
    ActiveModelSerializers::SerializableResource.new(
      object,
      serializer: serializer,
      adapter: ActivityPub::Adapter
    ).as_json
  end

  CHUNK_SIZE = 1.megabyte

  def download_to_tar(tar, attachment, filename)
    adapter = Paperclip.io_adapters.for(attachment)

    tar.add_file_simple(filename, 0o444, adapter.size) do |io|
      while (buffer = adapter.read(CHUNK_SIZE))
        io.write(buffer)
      end
    end
  end
end
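The chunked download_to_tar above is what keeps memory flat: each media file is copied into the gzipped tar 1 MB at a time instead of being read into one large Ruby string. A self-contained sketch of the same technique with plain files (stdlib only, not part of the commit):

# frozen_string_literal: true
# Stream files into a .tar.gz in fixed-size chunks, as BackupService does for media.
require 'rubygems/package'
require 'zlib'

CHUNK_SIZE = 1024 * 1024 # 1 MB

def stream_into_tar_gz(paths, archive_path)
  File.open(archive_path, 'wb') do |file|
    Zlib::GzipWriter.wrap(file) do |gz|
      Gem::Package::TarWriter.new(gz) do |tar|
        paths.each do |path|
          # add_file_simple needs the entry size up front, so use the on-disk byte size
          tar.add_file_simple(File.basename(path), 0o444, File.size(path)) do |io|
            File.open(path, 'rb') do |source|
              while (buffer = source.read(CHUNK_SIZE))
                io.write(buffer)
              end
            end
          end
        end
      end
    end
  end
end

stream_into_tar_gz(Dir['/tmp/media/*'], '/tmp/archive.tar.gz') # example paths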
@@ -0,0 +1,59 @@
%table.email-table{ cellspacing: 0, cellpadding: 0 }
  %tbody
    %tr
      %td.email-body
        .email-container
          %table.content-section{ cellspacing: 0, cellpadding: 0 }
            %tbody
              %tr
                %td.content-cell.hero
                  .email-row
                    .col-6
                      %table.column{ cellspacing: 0, cellpadding: 0 }
                        %tbody
                          %tr
                            %td.column-cell.text-center.padded
                              %table.hero-icon{ align: 'center', cellspacing: 0, cellpadding: 0 }
                                %tbody
                                  %tr
                                    %td
                                      = image_tag full_pack_url('icon_file_download.png'), alt: ''

                              %h1= t 'user_mailer.backup_ready.title'

%table.email-table{ cellspacing: 0, cellpadding: 0 }
  %tbody
    %tr
      %td.email-body
        .email-container
          %table.content-section{ cellspacing: 0, cellpadding: 0 }
            %tbody
              %tr
                %td.content-cell.content-start
                  .email-row
                    .col-6
                      %table.column{ cellspacing: 0, cellpadding: 0 }
                        %tbody
                          %tr
                            %td.column-cell.text-center
                              %p= t 'user_mailer.backup_ready.explanation'

%table.email-table{ cellspacing: 0, cellpadding: 0 }
  %tbody
    %tr
      %td.email-body
        .email-container
          %table.content-section{ cellspacing: 0, cellpadding: 0 }
            %tbody
              %tr
                %td.content-cell
                  %table.column{ cellspacing: 0, cellpadding: 0 }
                    %tbody
                      %tr
                        %td.column-cell.button-cell
                          %table.button{ align: 'center', cellspacing: 0, cellpadding: 0 }
                            %tbody
                              %tr
                                %td.button-primary
                                  = link_to full_asset_url(@backup.dump.url) do
                                    %span= t 'exports.archive_takeout.download'
@@ -0,0 +1,7 @@
<%= t 'user_mailer.backup_ready.title' %>

===

<%= t 'user_mailer.backup_ready.explanation' %>

=> <%= full_asset_url(@backup.dump.url) %>
@@ -0,0 +1,17 @@
# frozen_string_literal: true

class BackupWorker
  include Sidekiq::Worker

  sidekiq_options queue: 'pull'

  def perform(backup_id)
    backup = Backup.find(backup_id)
    user   = backup.user

    BackupService.new.call(backup)

    user.backups.where.not(id: backup.id).destroy_all
    UserMailer.backup_ready(user, backup).deliver_later
  end
end
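A hypothetical way to exercise the whole pipeline from a test or console, using Sidekiq's inline testing mode and the backup fabricator added further down (a sketch, not part of the commit):

require 'sidekiq/testing'

Sidekiq::Testing.inline! do
  backup = Fabricate(:backup) # fabricator from this commit
  BackupWorker.perform_async(backup.id)

  backup.reload
  backup.processed    # => true once BackupService has built the archive
  backup.dump.exists? # => true, the .tar.gz is attached via Paperclip
end
# UserMailer.backup_ready(user, backup) is queued with deliver_later as the final step.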
@@ -0,0 +1,16 @@
# frozen_string_literal: true
require 'sidekiq-scheduler'

class Scheduler::BackupCleanupScheduler
  include Sidekiq::Worker

  def perform
    old_backups.find_each(&:destroy!)
  end

  private

  def old_backups
    Backup.where('created_at < ?', 7.days.ago)
  end
end
@@ -0,0 +1,11 @@
class CreateBackups < ActiveRecord::Migration[5.1]
  def change
    create_table :backups do |t|
      t.references :user, foreign_key: { on_delete: :nullify }
      t.attachment :dump
      t.boolean :processed, null: false, default: false

      t.timestamps
    end
  end
end
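The t.attachment :dump call is Paperclip's migration helper; it expands to the four dump_* columns listed in the Backup model's schema annotation above. Roughly equivalent explicit columns would look like this (a sketch for clarity, not the actual migration):

create_table :backups do |t|
  # what Paperclip generates for an attachment named :dump
  t.string   :dump_file_name
  t.string   :dump_content_type
  t.integer  :dump_file_size
  t.datetime :dump_updated_at
end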
@@ -0,0 +1,3 @@
Fabricator(:backup) do
  user
end
@@ -0,0 +1,5 @@
require 'rails_helper'

RSpec.describe Backup, type: :model do

end
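The generated model spec is left empty in the commit. A minimal example of what could go inside it, using the fabricator above (hypothetical, not part of the commit):

RSpec.describe Backup, type: :model do
  it 'belongs to a user' do
    backup = Fabricate(:backup)
    expect(backup.user).to be_present
  end
end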