-
-
Save nicksellen/a70ff34960ab39388b59b8c41de4bfe6 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Run this with:
#
#   docker exec -i <idofcontainer> rails runner - < local.rb >/tmp/localfiles
#
# The first line will be a warning message, discard this, then you have a
# nice list of local files! It only takes the original attachments, not the
# small size.

# Reduce a storage URL to its bucket-relative path by dropping everything
# up to and including the "social-coop-live/" bucket segment.
strip_prefix = ->(url) { url.gsub(/.*\/social-coop-live\//, '') }

# Original-size files for every locally-stored media attachment.
MediaAttachment.local.find_in_batches(batch_size: 200) do |batch|
  batch.each { |attachment| puts strip_prefix.call(attachment.file.url(:original)) }
end

# Avatar and header images for local accounts, skipping the default
# "missing" placeholder paths so only real uploads are listed.
Account.local.find_in_batches(batch_size: 200) do |batch|
  batch.each do |account|
    avatar_path = strip_prefix.call(account.avatar_original_url)
    puts avatar_path unless avatar_path == '/avatars/original/missing.png'

    header_path = strip_prefix.call(account.header_original_url)
    puts header_path unless header_path == '/headers/original/missing.png'
  end
end
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Make every object in the bucket public-read, in parallel.
#
# Reads a list of already-processed keys from ./aclprocessed (one key per
# line) so an interrupted run can be resumed, then fans the remaining keys
# out to a pool of worker processes over a bounded Queue.
from multiprocessing import Process, Queue
import os
import boto3

opts = {
    'service_name': 's3',
    'aws_access_key_id': 'FOO',
    'aws_secret_access_key': 'BAR',
    'endpoint_url': 'https://ams3.digitaloceanspaces.com',
}
bucket_name = 'social-coop-media'
process_count = 10

session = boto3.session.Session()
s3_client = session.client(**opts)
s3 = session.resource(**opts)
bucket = s3.Bucket(bucket_name)

# Sentinel value telling a worker there is no more work.
DONE = '___DONE___'

# Keys handled on a previous run. The file is optional: on a first run
# (no aclprocessed present) nothing is skipped. The original crashed here
# with FileNotFoundError when the file was absent.
processed = dict()
if os.path.exists('aclprocessed'):
    for item in open('aclprocessed').read().splitlines():
        processed[item] = True

def process(q):
    # Worker loop: pull keys off the queue until the DONE sentinel arrives,
    # setting each object's ACL to public-read.
    # NOTE(review): s3_client is created in the parent before fork; boto3
    # recommends one session/client per process — confirm this is safe on
    # the deployment platform or build the client inside this function.
    while True:
        key = q.get()
        if key == DONE:
            break
        s3_client.put_object_acl(Bucket=bucket_name, Key=key, ACL='public-read')
        print('processed', key)

if __name__ == '__main__':
    # Bounded queue so the producer cannot race arbitrarily far ahead of
    # the workers.
    q = Queue(process_count)
    processes = []
    for n in range(process_count):
        p = Process(target=process, args=(q,))
        p.start()
        processes.append(p)
    for obj in bucket.objects.all():
        if obj.key in processed:
            print('skipped', obj.key)
        else:
            q.put(obj.key)
    # Bug fix: the original enqueued the undefined name END_KEY here, which
    # raised NameError after all keys were queued and left the workers
    # blocked forever. DONE is the sentinel the workers actually check for.
    for n in range(process_count):
        q.put(DONE)
    for p in processes:
        p.join()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment