Downloads all public videos of a user
# 2019-11-23 - Added PowerShell script. See the other file.
# Download all PlaysTv videos of a user.
# To find the user id, navigate to your profile while logged in (IMPORTANT!).
# View the page source: the <html> tag has a data-conf attribute whose JSON
# holds the user id under [login_user.id].
from re import sub
from json import load
from urllib.request import urlretrieve, urlopen


def safe_title(index, title):
    # Turn the video description into a filesystem-safe name, capped at 30 chars.
    only_chars = sub(r'[^\w]+', '_', title).strip("_")
    return f"{index} - {only_chars[:30]}.mp4"


def get_playstv_videos(user_id):
    last_id = ""
    items = []
    # Page through the upload feed; the API returns no lastId on the final page.
    while last_id is not None:
        batch = load(urlopen(
            f"https://plays.tv/playsapi/feedsys/v1/userfeed/{user_id}/uploaded?limit=200&filter=&lastId={last_id}"))
        items.extend(batch["items"])
        last_id = batch["lastId"]
    print(len(items))
    for index, item in enumerate(items, start=1):
        # Build the name and URL outside the try block so the error message below
        # always has them available.
        filename, url = safe_title(index, item["description"]), item["downloadUrl"]
        try:
            print(f"Downloading {filename} from {url}")
            urlretrieve(url, filename)
        except Exception as e:
            print(f"Error downloading {filename} from {url}")
            print(e)


if __name__ == "__main__":
    get_playstv_videos("<playstv userid>")
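The header comments describe pulling the user id out of the profile page by hand: view the source while logged in, find the data-conf attribute on the <html> tag, and read login_user.id from its JSON. Below is a minimal sketch of that lookup run against a locally saved copy of the page; the regex, the profile.html filename, and the assumption that the attribute value is HTML-escaped JSON with a nested login_user object are mine, not part of the original script.

# Hedged helper: extract your PlaysTv user id from a saved copy of your
# profile page (View Source while logged in, save as profile.html).
# Assumes the <html> tag carries a double-quoted data-conf attribute whose
# value is HTML-escaped JSON containing login_user.id, per the comments above.
from html import unescape
from json import loads
from re import search


def extract_user_id(html_path):
    with open(html_path, encoding="utf-8") as f:
        page = f.read()
    match = search(r'data-conf="([^"]+)"', page)
    if match is None:
        raise ValueError("No data-conf attribute found - was the page saved while logged in?")
    conf = loads(unescape(match.group(1)))
    return conf["login_user"]["id"]


# Example usage: get_playstv_videos(extract_user_id("profile.html"))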
# Copy and paste this into a PowerShell window and enter the user id at the prompt.
# Follow the instructions below to get your user id.
# Download all PlaysTv videos of a user.
# To find the user id, navigate to your profile while logged in (IMPORTANT!).
# View the page source: the <html> tag has a data-conf attribute whose JSON
# holds the user id under [login_user.id].
function Safe-Title {
    param (
        $Description,
        $EpochMilliseconds
    )
    # Convert the epoch-milliseconds timestamp to a date and build a safe filename.
    $CreatedDate = (Get-Date "1970-01-01 00:00:00.000Z") + ([TimeSpan]::FromSeconds($EpochMilliseconds / 1000))
    $Title = $Description -replace "[^\w]+", "_"
    $Title = $Title.Substring(0, [System.Math]::Min(30, $Title.Length))
    return (Get-Date $CreatedDate -UFormat "%Y-%m-%d") + " - " + $Title + ".mp4"
}

$userId = Read-Host -Prompt "Enter UserID"
$lastId = ""
# Page through the upload feed; lastId comes back $null on the final page.
while ($null -ne $lastId) {
    $page = Invoke-RestMethod "https://plays.tv/playsapi/feedsys/v1/userfeed/$userId/uploaded?limit=20&filter=&lastId=$lastId"
    $videos = $page.items | Select-Object downloadUrl, @{Name = 'fileName'; Expression = { Safe-Title $_.description $_.created } }
    $videos | ForEach-Object {
        Write-Host "Downloading $($_.fileName)"
        Invoke-WebRequest -Uri $_.downloadUrl -OutFile $_.fileName
        Write-Host "Downloaded $($_.fileName)"
    }
    $lastId = $page.lastId
}
Ah not sure. Sorry. The API I used didn't return the unlisted videos either.
Unlisted videos shouldn't be returned unless you're logged in, since they're private. I was talking about the "Videos of you", which are public.
I'm aware. I was trying to say the API I'm using in the script is limited.
For example, I have 212 videos that I uploaded and another 47 videos that other users uploaded that I'm in. Is there an easy way to get the 47? The API URL in the script only pulls videos uploaded by the person whose user id we use.
This is my id: 362ba04c2955a3820a9f4bf2abfb3ab0
I've been working on a script using Selenium that scrolls the page and pulls the video IDs to download them, but I haven't quite gotten the video names yet.