Mirror of https://github.com/bellingcat/auto-archiver
Works with multiple sheets (hardcoded)
parent
f6abb0fb3e
commit
e55a4d55dc
1 changed file: update.py (142 lines changed)
@@ -11,95 +11,101 @@ from botocore.errorfactory import ClientError
 load_dotenv()
 
 gc = gspread.service_account()
-sh = gc.open("Bellingcat media archiver")
-wks = sh.sheet1
-values = wks.get_all_values()
+sh = gc.open("Media Sheet (January 16-20 + People)")
 
-ydl_opts = {'outtmpl': 'tmp/%(id)s.%(ext)s', 'quiet': False}
-ydl = youtube_dl.YoutubeDL(ydl_opts)
+for ii in range(5):
+    wks = sh.get_worksheet(ii)
+    values = wks.get_all_values()
 
-s3_client = boto3.client('s3',
-        region_name=os.getenv('DO_SPACES_REGION'),
-        endpoint_url='https://{}.digitaloceanspaces.com'.format(os.getenv('DO_SPACES_REGION')),
-        aws_access_key_id=os.getenv('DO_SPACES_KEY'),
-        aws_secret_access_key=os.getenv('DO_SPACES_SECRET'))
+    ydl_opts = {'outtmpl': 'tmp/%(id)s.%(ext)s', 'quiet': False}
+    ydl = youtube_dl.YoutubeDL(ydl_opts)
 
-for i in range(2, len(values)+1):
-    v = values[i-1]
+    s3_client = boto3.client('s3',
+            region_name=os.getenv('DO_SPACES_REGION'),
+            endpoint_url='https://{}.digitaloceanspaces.com'.format(os.getenv('DO_SPACES_REGION')),
+            aws_access_key_id=os.getenv('DO_SPACES_KEY'),
+            aws_secret_access_key=os.getenv('DO_SPACES_SECRET'))
 
-    if v[2] == "":
-        print(v[0])
+    for i in range(2, len(values)+1):
+        v = values[i-1]
 
-        try:
-            info = ydl.extract_info(v[0], download=False)
+        if v[1] != "" and v[10] == "":
+            print(v[1])
 
-            if 'entries' in info:
-                if len(info['entries']) > 1:
-                    raise Exception('ERROR: Cannot archive channels or pages with multiple videos')
+            try:
+                info = ydl.extract_info(v[1], download=False)
 
-                filename = ydl.prepare_filename(info['entries'][0])
-            else:
-                filename = ydl.prepare_filename(info)
+                if 'entries' in info:
+                    if len(info['entries']) > 1:
+                        raise Exception('ERROR: Cannot archive channels or pages with multiple videos')
 
-            print(filename)
-            key = filename.split('/')[1]
-            cdn_url = 'https://{}.{}.cdn.digitaloceanspaces.com/{}'.format(os.getenv('DO_BUCKET'), os.getenv('DO_SPACES_REGION'), key)
+                    filename = ydl.prepare_filename(info['entries'][0])
+                else:
+                    filename = ydl.prepare_filename(info)
 
-            try:
-                s3_client.head_object(Bucket=os.getenv('DO_BUCKET'), Key=key)
+                print(filename)
+                key = filename.split('/')[1]
+                cdn_url = 'https://{}.{}.cdn.digitaloceanspaces.com/{}'.format(os.getenv('DO_BUCKET'), os.getenv('DO_SPACES_REGION'), key)
 
-                # file exists
+                try:
+                    s3_client.head_object(Bucket=os.getenv('DO_BUCKET'), Key=key)
 
-                update = [{
-                    'range': 'C' + str(i),
-                    'values': [['already archived']]
-                }, {
-                    'range': 'D' + str(i),
-                    'values': [[cdn_url]]
-                }]
+                    # file exists
 
-                wks.batch_update(update)
+                    update = [{
+                        'range': 'K' + str(i),
+                        'values': [['already archived']]
+                    }, {
+                        'range': 'M' + str(i),
+                        'values': [[cdn_url]]
+                    }]
 
-            except ClientError:
-                # Not found
+                    wks.batch_update(update)
 
-                # sometimes this results in a different filename, so do this again
-                info = ydl.extract_info(v[0], download=True)
-                if 'entries' in info:
-                    filename = ydl.prepare_filename(info['entries'][0])
-                else:
-                    filename = ydl.prepare_filename(info)
+                except ClientError:
+                    # Not found
 
-                print(filename)
-                key = filename.split('/')[1]
-                cdn_url = 'https://{}.{}.cdn.digitaloceanspaces.com/{}'.format(os.getenv('DO_BUCKET'), os.getenv('DO_SPACES_REGION'), key)
+                    # sometimes this results in a different filename, so do this again
+                    info = ydl.extract_info(v[1], download=True)
+                    if 'entries' in info:
+                        filename = ydl.prepare_filename(info['entries'][0])
+                    else:
+                        filename = ydl.prepare_filename(info)
 
-                # with open(filename, 'rb') as f:
-                # s3_client.upload_fileobj(f, Bucket=os.getenv('DO_BUCKET'), Key=key, ExtraArgs={'ACL': 'public-read'})
+
+                    if not os.path.exists(filename):
+                        filename = filename.split('.')[0] + '.mkv'
 
-                os.remove(filename)
+                    print(filename)
+                    key = filename.split('/')[1]
+                    cdn_url = 'https://{}.{}.cdn.digitaloceanspaces.com/{}'.format(os.getenv('DO_BUCKET'), os.getenv('DO_SPACES_REGION'), key)
 
-                update = [{
-                    'range': 'C' + str(i),
-                    'values': [['successful-desktop']]
-                }, {
-                    'range': 'B' + str(i),
-                    'values': [[datetime.datetime.now().isoformat()]]
-                }, {
-                    'range': 'D' + str(i),
-                    'values': [[cdn_url]]
-                }]
+                    with open(filename, 'rb') as f:
+                        s3_client.upload_fileobj(f, Bucket=os.getenv('DO_BUCKET'), Key=key, ExtraArgs={'ACL': 'public-read'})
 
-                wks.batch_update(update)
-        except:
-            t, value, traceback = sys.exc_info()
+                    os.remove(filename)
 
-            update = [{
-                'range': 'C' + str(i),
-                'values': [[str(value)]]
-            }, {
-                'range': 'B' + str(i),
-                'values': [[datetime.datetime.now().isoformat()]]
-            }]
+                    update = [{
+                        'range': 'K' + str(i),
+                        'values': [['successful']]
+                    }, {
+                        'range': 'L' + str(i),
+                        'values': [[datetime.datetime.now().isoformat()]]
+                    }, {
+                        'range': 'M' + str(i),
+                        'values': [[cdn_url]]
+                    }]
 
-            wks.batch_update(update)
+                    wks.batch_update(update)
+            except:
+                t, value, traceback = sys.exc_info()
+
+                update = [{
+                    'range': 'K' + str(i),
+                    'values': [[str(value)]]
+                }, {
+                    'range': 'L' + str(i),
+                    'values': [[datetime.datetime.now().isoformat()]]
+                }]
+
+                wks.batch_update(update)
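For context, the pattern this commit hardcodes is roughly the sketch below: open the spreadsheet, walk the first five tabs with gspread, and write the status columns back one row at a time with a single batch_update call. The sheet name, the tab count of 5 and the K/L column letters are taken from the diff above; the skipped-row logic is condensed and the archiving step is elided, so treat it as an illustration rather than the script itself.

# Minimal sketch of the hardcoded multi-sheet loop (assumes a gspread
# service-account credential is configured; sheet name and columns as in the diff).
import datetime
import gspread

gc = gspread.service_account()
sh = gc.open("Media Sheet (January 16-20 + People)")

for ii in range(5):                      # hardcoded number of worksheets, as in the commit
    wks = sh.get_worksheet(ii)           # 0-based tab index
    values = wks.get_all_values()

    for i in range(2, len(values) + 1):  # row 1 is the header; sheet rows are 1-based
        v = values[i - 1]
        if v[1] == "" or v[10] != "":    # no link, or status column K already filled
            continue

        # ... download and upload the media for v[1] here ...

        wks.batch_update([
            {'range': 'K' + str(i), 'values': [['successful']]},
            {'range': 'L' + str(i), 'values': [[datetime.datetime.now().isoformat()]]},
        ])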
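The upload path that this commit un-comments follows the usual boto3 flow against DigitalOcean Spaces: HEAD the object first and only upload when it is missing. Below is a rough sketch using the same environment variables as the diff (DO_SPACES_REGION, DO_SPACES_KEY, DO_SPACES_SECRET, DO_BUCKET); the archive_file helper name is hypothetical, and the key derivation assumes the tmp/<id>.<ext> output template used by the script.

# Hedged sketch of the check-then-upload step; not part of the commit itself.
import os
import boto3
from botocore.errorfactory import ClientError

s3_client = boto3.client('s3',
    region_name=os.getenv('DO_SPACES_REGION'),
    endpoint_url='https://{}.digitaloceanspaces.com'.format(os.getenv('DO_SPACES_REGION')),
    aws_access_key_id=os.getenv('DO_SPACES_KEY'),
    aws_secret_access_key=os.getenv('DO_SPACES_SECRET'))

def archive_file(filename):
    # hypothetical helper: filename is expected to look like 'tmp/<id>.<ext>'
    key = filename.split('/')[1]
    cdn_url = 'https://{}.{}.cdn.digitaloceanspaces.com/{}'.format(
        os.getenv('DO_BUCKET'), os.getenv('DO_SPACES_REGION'), key)
    try:
        # raises ClientError when the object is not in the bucket yet
        s3_client.head_object(Bucket=os.getenv('DO_BUCKET'), Key=key)
    except ClientError:
        with open(filename, 'rb') as f:
            s3_client.upload_fileobj(f, Bucket=os.getenv('DO_BUCKET'), Key=key,
                                     ExtraArgs={'ACL': 'public-read'})
    return cdn_url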