Combine streaming/non-streaming into one script with CLI arguments

pull/9/head
Logan Williams 2021-02-09 14:55:26 +01:00
rodzic 853e018ace
commit d6cb20dace
5 zmienionych plików z 393 dodań i 216 usunięć

16
Pipfile 100644
Wyświetl plik

@@ -0,0 +1,16 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[packages]
gspread = "*"
boto3 = "*"
python-dotenv = "*"
youtube_dl = "*"
argparse = "*"
[dev-packages]
[requires]
python_version = "3.9"

223
Pipfile.lock wygenerowano 100644
Wyświetl plik

@@ -0,0 +1,223 @@
{
"_meta": {
"hash": {
"sha256": "f8d5bda536d37e5fca14f05a2b6102b002ee5846e84298175ba319d20bd41d60"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.9"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"argparse": {
"hashes": [
"sha256:62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4",
"sha256:c31647edb69fd3d465a847ea3157d37bed1f95f19760b11a47aa91c04b666314"
],
"index": "pypi",
"version": "==1.4.0"
},
"boto3": {
"hashes": [
"sha256:65514427f5f849245c9a272fa06a5a014ae3945333f4f407489d034fb99dc61f",
"sha256:af87efaa772f95de67f72ed91aed2feef63593b5290696f669799202bc484b99"
],
"index": "pypi",
"version": "==1.17.4"
},
"botocore": {
"hashes": [
"sha256:61657a1e4b3cdda9627084184bdf9dca4637c1523daead31a36974be0d51686d",
"sha256:96f9e0920ac91b6caae3039e5de09b80648ad57b4a97fc7d81a369afae34fb10"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
"version": "==1.20.4"
},
"cachetools": {
"hashes": [
"sha256:1d9d5f567be80f7c07d765e21b814326d78c61eb0c3a637dffc0e5d1796cb2e2",
"sha256:f469e29e7aa4cff64d8de4aad95ce76de8ea1125a16c68e0d93f65c3c3dc92e9"
],
"markers": "python_version ~= '3.5'",
"version": "==4.2.1"
},
"certifi": {
"hashes": [
"sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c",
"sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"
],
"version": "==2020.12.5"
},
"chardet": {
"hashes": [
"sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa",
"sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==4.0.0"
},
"google-auth": {
"hashes": [
"sha256:008e23ed080674f69f9d2d7d80db4c2591b9bb307d136cea7b3bc129771d211d",
"sha256:514e39f4190ca972200ba33876da5a8857c5665f2b4ccc36c8b8ee21228aae80"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
"version": "==1.25.0"
},
"google-auth-oauthlib": {
"hashes": [
"sha256:65b65bc39ad8cab15039b35e5898455d3d66296d0584d96fe0e79d67d04c51d9",
"sha256:d4d98c831ea21d574699978827490a41b94f05d565c617fe1b420e88f1fc8d8d"
],
"markers": "python_version >= '3.6'",
"version": "==0.4.2"
},
"gspread": {
"hashes": [
"sha256:273da28275eb8dc664b1ca944e59255949d75ac3cac62d65797003dbb419a2cd",
"sha256:e04f1a6267b3929fc1600424c5ec83906d439672cafdd61a9d5b916a139f841c"
],
"index": "pypi",
"version": "==3.6.0"
},
"idna": {
"hashes": [
"sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
"sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.10"
},
"jmespath": {
"hashes": [
"sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9",
"sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.10.0"
},
"oauthlib": {
"hashes": [
"sha256:bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889",
"sha256:df884cd6cbe20e32633f1db1072e9356f53638e4361bef4e8b03c9127c9328ea"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==3.1.0"
},
"pyasn1": {
"hashes": [
"sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359",
"sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576",
"sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf",
"sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7",
"sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d",
"sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00",
"sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8",
"sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86",
"sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12",
"sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776",
"sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba",
"sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2",
"sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"
],
"version": "==0.4.8"
},
"pyasn1-modules": {
"hashes": [
"sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8",
"sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199",
"sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811",
"sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed",
"sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4",
"sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e",
"sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74",
"sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb",
"sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45",
"sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd",
"sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0",
"sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d",
"sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405"
],
"version": "==0.2.8"
},
"python-dateutil": {
"hashes": [
"sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
"sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.8.1"
},
"python-dotenv": {
"hashes": [
"sha256:0c8d1b80d1a1e91717ea7d526178e3882732420b03f08afea0406db6402e220e",
"sha256:587825ed60b1711daea4832cf37524dfd404325b7db5e25ebe88c495c9f807a0"
],
"index": "pypi",
"version": "==0.15.0"
},
"requests": {
"hashes": [
"sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804",
"sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==2.25.1"
},
"requests-oauthlib": {
"hashes": [
"sha256:7f71572defaecd16372f9006f33c2ec8c077c3cfa6f5911a9a90202beb513f3d",
"sha256:b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a",
"sha256:fa6c47b933f01060936d87ae9327fead68768b69c6c9ea2109c48be30f2d4dbc"
],
"version": "==1.3.0"
},
"rsa": {
"hashes": [
"sha256:69805d6b69f56eb05b62daea3a7dbd7aa44324ad1306445e05da8060232d00f4",
"sha256:a8774e55b59fd9fc893b0d05e9bfc6f47081f46ff5b46f39ccf24631b7be356b"
],
"markers": "python_version >= '3.6'",
"version": "==4.7"
},
"s3transfer": {
"hashes": [
"sha256:1e28620e5b444652ed752cf87c7e0cb15b0e578972568c6609f0f18212f259ed",
"sha256:7fdddb4f22275cf1d32129e21f056337fd2a80b6ccef1664528145b72c49e6d2"
],
"version": "==0.3.4"
},
"six": {
"hashes": [
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.15.0"
},
"urllib3": {
"hashes": [
"sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80",
"sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",
"version": "==1.26.3"
},
"youtube-dl": {
"hashes": [
"sha256:831a29b2d34493ef8181ff288f403135bb4b00df1cd201eb8cbe80b5b5425760",
"sha256:b337f20563094decc6b3c16e6fcad14ec3df9d99519344d6e95e40878b2c8075"
],
"index": "pypi",
"version": "==2021.2.4.1"
}
},
"develop": {}
}

154
auto-archive.py 100644
Wyświetl plik

@@ -0,0 +1,154 @@
import gspread
import youtube_dl
from pathlib import Path
import sys
import datetime
import boto3
import os
from dotenv import load_dotenv
from botocore.errorfactory import ClientError
import argparse
load_dotenv()
def col_to_index(col):
    """Convert a spreadsheet column letter to a 0-based column index.

    Treats the column name as a bijective base-26 numeral, matching
    Google Sheets: 'A' -> 0, 'Z' -> 25, 'AA' -> 26, 'AZ' -> 51.

    Parameters:
        col: column letters, e.g. 'B' or 'AA'. Case-insensitive.

    Returns:
        Zero-based integer column index.

    Raises:
        ValueError: if `col` contains a non-letter character (the previous
            implementation silently produced a wrong negative index because
            str.find returned -1).
    """
    alphabet = ' ABCDEFGHIJKLMNOPQRSTUVWXYZ'
    v = 0
    for digit in col.upper():
        index = alphabet.find(digit)
        if index < 1:  # 0 would be the space sentinel; -1 means not found
            raise ValueError('invalid column letters: {!r}'.format(col))
        v = v * 26 + index
    return v - 1
def _resolve_filename(ydl, info):
    """Return the local output filename for an extracted info dict.

    A single-entry playlist resolves to its only entry; multi-entry
    playlists/channels are rejected because one row maps to one object.
    """
    if 'entries' in info:
        if len(info['entries']) > 1:
            raise Exception('ERROR: Cannot archive channels or pages with multiple videos')
        return ydl.prepare_filename(info['entries'][0])
    return ydl.prepare_filename(info)


def _cdn_url(key):
    """Build the public CDN URL for an object key in the configured Space."""
    return 'https://{}.{}.cdn.digitaloceanspaces.com/{}'.format(
        os.getenv('DO_BUCKET'), os.getenv('DO_SPACES_REGION'), key)


def download_vid(url, s3_client, check_if_exists=False):
    """Download the media at `url` with youtube-dl and upload it to the Space.

    Parameters:
        url: page or media URL understood by youtube-dl.
        s3_client: boto3 S3 client configured for the DigitalOcean Space.
        check_if_exists: when True, probe the bucket first and skip the
            download/upload if the object is already there.

    Returns:
        (cdn_url, status) where status is 'success' or 'already archived'.

    Raises:
        Exception: if the URL resolves to multiple videos.
    """
    ydl = youtube_dl.YoutubeDL({'outtmpl': 'tmp/%(id)s.%(ext)s', 'quiet': False})

    if check_if_exists:
        info = ydl.extract_info(url, download=False)
        key = os.path.basename(_resolve_filename(ydl, info))
        try:
            s3_client.head_object(Bucket=os.getenv('DO_BUCKET'), Key=key)
            # Object already present: nothing to do.
            return (_cdn_url(key), 'already archived')
        except ClientError:
            pass  # not found -> fall through and download

    # Extract again with download=True: the real filename can differ from the
    # dry-run prediction (e.g. after youtube-dl merges formats).
    info = ydl.extract_info(url, download=True)
    filename = _resolve_filename(ydl, info)
    if not os.path.exists(filename):
        # When merging incompatible streams youtube-dl writes an .mkv.
        # Use splitext so a dot inside the video id does not truncate the
        # path (the old `filename.split('.')[0]` did exactly that).
        filename = os.path.splitext(filename)[0] + '.mkv'

    key = os.path.basename(filename)
    with open(filename, 'rb') as f:
        s3_client.upload_fileobj(f, Bucket=os.getenv('DO_BUCKET'), Key=key,
                                 ExtraArgs={'ACL': 'public-read'})
    os.remove(filename)
    return (_cdn_url(key), 'success')
def main():
    """Parse CLI arguments and archive media linked from a Google Sheet.

    For each worksheet row whose URL column is filled and status column is
    empty, download the media with youtube-dl, upload it to the Space, and
    write status, timestamp and CDN URL back into the sheet. With
    --streaming only live streams are captured (at most one per run);
    without it, live streams are skipped.
    """
    parser = argparse.ArgumentParser(
        description="Automatically use youtube-dl to download media from a Google Sheet")
    # Column/sheet arguments are dereferenced unconditionally below, so make
    # argparse reject a missing value instead of crashing later on None.
    parser.add_argument("--sheet", action="store", dest="sheet", required=True)
    parser.add_argument('--streaming', dest='streaming', action='store_true')
    parser.add_argument('--all-worksheets', dest='all_worksheets', action='store_true')
    parser.add_argument('--url-col', dest='url', action='store', required=True)
    parser.add_argument('--archive-col', dest='archive', action='store', required=True)
    parser.add_argument('--date-col', dest='date', action='store', required=True)
    parser.add_argument('--status-col', dest='status', action='store', required=True)
    args = parser.parse_args()

    def update_sheet(wks, row, status, url):
        """Write status and timestamp (plus the archive URL unless None) to `row`."""
        update = [{
            'range': args.status + str(row),
            'values': [[status]]
        }, {
            'range': args.date + str(row),
            'values': [[datetime.datetime.now().isoformat()]]
        }, {
            'range': args.archive + str(row),
            'values': [[url]]
        }]
        if url is None:
            update = update[:-1]
        wks.batch_update(update)

    print("Opening document " + args.sheet)
    gc = gspread.service_account()
    sh = gc.open(args.sheet)
    n_worksheets = len(sh.worksheets()) if args.all_worksheets else 1

    s3_client = boto3.client('s3',
                             region_name=os.getenv('DO_SPACES_REGION'),
                             endpoint_url='https://{}.digitaloceanspaces.com'.format(os.getenv('DO_SPACES_REGION')),
                             aws_access_key_id=os.getenv('DO_SPACES_KEY'),
                             aws_secret_access_key=os.getenv('DO_SPACES_SECRET'))

    # loop through worksheets to check
    for ii in range(n_worksheets):
        print("Opening worksheet " + str(ii))
        wks = sh.get_worksheet(ii)
        values = wks.get_all_values()
        # column indices are loop-invariant; compute them once per worksheet
        url_index = col_to_index(args.url)
        status_index = col_to_index(args.status)
        # rows are 1-indexed in Sheets; row 1 is the header, data starts at 2
        for i in range(2, len(values) + 1):
            v = values[i - 1]
            if v[url_index] != "" and v[status_index] == "":
                print(v[url_index])
                try:
                    ydl = youtube_dl.YoutubeDL({'outtmpl': 'tmp/%(id)s.%(ext)s', 'quiet': False})
                    info = ydl.extract_info(v[url_index], download=False)
                    if args.streaming and 'is_live' in info and info['is_live']:
                        wks.update(args.status + str(i), 'Recording stream')
                        cdn_url, status = download_vid(v[url_index], s3_client)
                        update_sheet(wks, i, status, cdn_url)
                        # recording a live stream blocks until it ends; stop after one
                        sys.exit()
                    elif not args.streaming and ('is_live' not in info or not info['is_live']):
                        cdn_url, status = download_vid(v[url_index], s3_client, check_if_exists=True)
                        update_sheet(wks, i, status, cdn_url)
                except Exception:
                    # If any unexpected errors occurred, log them into the sheet.
                    # NOTE: this was a bare `except:`, which also swallowed the
                    # SystemExit raised by sys.exit() above, so streaming runs
                    # wrote "SystemExit" to the sheet and never terminated.
                    _, value, _ = sys.exc_info()
                    update_sheet(wks, i, str(value), None)


if __name__ == "__main__":
    main()

Wyświetl plik

@@ -1,101 +0,0 @@
# Legacy streaming-capture script (deleted by this commit; superseded by
# auto-archive.py with the --streaming flag).
#
# Scans the first 5 worksheets of a hard-coded Google Sheet for rows with a
# URL in column B (index 1) and an empty status in column K (index 10),
# records at most ONE live stream per run, uploads it to a DigitalOcean
# Space, and writes the result back into columns K/L/M.
#
# NOTE(review): indentation was lost when this page was scraped, so block
# structure below is inferred from the original diff, not preserved.
import gspread
import youtube_dl
from pathlib import Path
import sys
import datetime
import boto3
import os
from dotenv import load_dotenv
from botocore.errorfactory import ClientError
# Credentials (DO_SPACES_*, DO_BUCKET) come from a .env file.
load_dotenv()
gc = gspread.service_account()
# Hard-coded document name — the reason this script was replaced by a CLI.
sh = gc.open("Media Sheet (January 16-20 + People)")
# Once a live stream has been captured, stop scanning entirely:
# recording blocks until the stream ends, so one per run is the limit.
found_live = False
# loop through worksheets to check
for ii in range(5):
# only capture one video if its a livestream
if found_live:
break
wks = sh.get_worksheet(ii)
values = wks.get_all_values()
ydl_opts = {'outtmpl': 'tmp/%(id)s.%(ext)s', 'quiet': False}
ydl = youtube_dl.YoutubeDL(ydl_opts)
# S3-compatible client pointed at a DigitalOcean Space (rebuilt per worksheet).
s3_client = boto3.client('s3',
region_name=os.getenv('DO_SPACES_REGION'),
endpoint_url='https://{}.digitaloceanspaces.com'.format(os.getenv('DO_SPACES_REGION')),
aws_access_key_id=os.getenv('DO_SPACES_KEY'),
aws_secret_access_key=os.getenv('DO_SPACES_SECRET'))
# loop through rows in worksheet
for i in range(2, len(values)+1):
# only capture one video if its a livestream
if found_live:
break
v = values[i-1]
# v[1] = URL column (B), v[10] = status column (K): process rows that
# have a URL but no status yet.
if v[1] != "" and v[10] == "":
print(v[1])
try:
info = ydl.extract_info(v[1], download=False)
# skip if live
if 'is_live' in info and info['is_live']:
found_live = True
wks.update('K' + str(i), 'Recording stream')
# sometimes this results in a different filename, so do this again
info = ydl.extract_info(v[1], download=True)
if 'entries' in info:
filename = ydl.prepare_filename(info['entries'][0])
else:
filename = ydl.prepare_filename(info)
if not os.path.exists(filename):
# presumably youtube-dl merged streams into an .mkv — TODO confirm
filename = filename.split('.')[0] + '.mkv'
print(filename)
# object key = filename without the leading 'tmp/' directory
key = filename.split('/')[1]
cdn_url = 'https://{}.{}.cdn.digitaloceanspaces.com/{}'.format(os.getenv('DO_BUCKET'), os.getenv('DO_SPACES_REGION'), key)
with open(filename, 'rb') as f:
s3_client.upload_fileobj(f, Bucket=os.getenv('DO_BUCKET'), Key=key, ExtraArgs={'ACL': 'public-read'})
os.remove(filename)
# Write status (K), timestamp (L) and CDN URL (M) back in one batch.
update = [{
'range': 'K' + str(i),
'values': [['successful']]
}, {
'range': 'L' + str(i),
'values': [[datetime.datetime.now().isoformat()]]
}, {
'range': 'M' + str(i),
'values': [[cdn_url]]
}]
wks.batch_update(update)
break
except:
# On any error, log the exception message and a timestamp to the sheet.
t, value, traceback = sys.exc_info()
update = [{
'range': 'K' + str(i),
'values': [[str(value)]]
}, {
'range': 'L' + str(i),
'values': [[datetime.datetime.now().isoformat()]]
}]
wks.batch_update(update)

115
update.py
Wyświetl plik

@@ -1,115 +0,0 @@
# Legacy non-streaming archiver (update.py, deleted by this commit;
# superseded by auto-archive.py without the --streaming flag).
#
# Scans the first 5 worksheets of a hard-coded Google Sheet for rows with a
# URL in column B (index 1) and an empty status in column K (index 10),
# skips live streams, skips objects already present in the bucket, and
# otherwise downloads + uploads the media and records the result in K/L/M.
#
# NOTE(review): indentation was lost when this page was scraped, so block
# structure below is inferred from the original diff, not preserved.
import gspread
import youtube_dl
from pathlib import Path
import sys
import datetime
import boto3
import os
from dotenv import load_dotenv
from botocore.errorfactory import ClientError
# Credentials (DO_SPACES_*, DO_BUCKET) come from a .env file.
load_dotenv()
gc = gspread.service_account()
# Hard-coded document name — the reason this script was replaced by a CLI.
sh = gc.open("Media Sheet (January 16-20 + People)")
# loop through worksheets to check
for ii in range(5):
wks = sh.get_worksheet(ii)
values = wks.get_all_values()
ydl_opts = {'outtmpl': 'tmp/%(id)s.%(ext)s', 'quiet': False}
ydl = youtube_dl.YoutubeDL(ydl_opts)
# S3-compatible client pointed at a DigitalOcean Space (rebuilt per worksheet).
s3_client = boto3.client('s3',
region_name=os.getenv('DO_SPACES_REGION'),
endpoint_url='https://{}.digitaloceanspaces.com'.format(os.getenv('DO_SPACES_REGION')),
aws_access_key_id=os.getenv('DO_SPACES_KEY'),
aws_secret_access_key=os.getenv('DO_SPACES_SECRET'))
# loop through rows in worksheet
for i in range(2, len(values)+1):
v = values[i-1]
# v[1] = URL column (B), v[10] = status column (K): process rows that
# have a URL but no status yet.
if v[1] != "" and v[10] == "":
print(v[1])
try:
info = ydl.extract_info(v[1], download=False)
# skip if live
if 'is_live' not in info or not info['is_live']:
# Playlists/channels with multiple entries map to many files,
# but each row holds one archive URL, so reject them.
if 'entries' in info:
if len(info['entries']) > 1:
raise Exception('ERROR: Cannot archive channels or pages with multiple videos')
filename = ydl.prepare_filename(info['entries'][0])
else:
filename = ydl.prepare_filename(info)
print(filename)
# object key = filename without the leading 'tmp/' directory
key = filename.split('/')[1]
cdn_url = 'https://{}.{}.cdn.digitaloceanspaces.com/{}'.format(os.getenv('DO_BUCKET'), os.getenv('DO_SPACES_REGION'), key)
try:
# EAFP existence probe: head_object raises ClientError when absent.
s3_client.head_object(Bucket=os.getenv('DO_BUCKET'), Key=key)
# file exists
update = [{
'range': 'K' + str(i),
'values': [['already archived']]
}, {
'range': 'M' + str(i),
'values': [[cdn_url]]
}]
wks.batch_update(update)
except ClientError:
# Not found
# sometimes this results in a different filename, so do this again
info = ydl.extract_info(v[1], download=True)
if 'entries' in info:
filename = ydl.prepare_filename(info['entries'][0])
else:
filename = ydl.prepare_filename(info)
if not os.path.exists(filename):
# presumably youtube-dl merged streams into an .mkv — TODO confirm
filename = filename.split('.')[0] + '.mkv'
print(filename)
key = filename.split('/')[1]
cdn_url = 'https://{}.{}.cdn.digitaloceanspaces.com/{}'.format(os.getenv('DO_BUCKET'), os.getenv('DO_SPACES_REGION'), key)
with open(filename, 'rb') as f:
s3_client.upload_fileobj(f, Bucket=os.getenv('DO_BUCKET'), Key=key, ExtraArgs={'ACL': 'public-read'})
os.remove(filename)
# Write status (K), timestamp (L) and CDN URL (M) back in one batch.
update = [{
'range': 'K' + str(i),
'values': [['successful']]
}, {
'range': 'L' + str(i),
'values': [[datetime.datetime.now().isoformat()]]
}, {
'range': 'M' + str(i),
'values': [[cdn_url]]
}]
wks.batch_update(update)
except:
# On any error, log the exception message and a timestamp to the sheet.
t, value, traceback = sys.exc_info()
update = [{
'range': 'K' + str(i),
'values': [[str(value)]]
}, {
'range': 'L' + str(i),
'values': [[datetime.datetime.now().isoformat()]]
}]
wks.batch_update(update)