Last active
July 3, 2017 20:57
-
-
Save xybydy/a21e4f797e45d2d912d30760899f5f81 to your computer and use it in GitHub Desktop.
puhu
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
*.log |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import logging | |
import os | |
import re | |
from logging.handlers import RotatingFileHandler | |
from urllib.parse import urljoin | |
import requests | |
# TODO python has those type-hint things — add them
# TODO ts_video merger
# TODO the "cocuk" and "puhu" sections need dedicated checks
# TODO no naming format for saved files and no max-selection limits
# TODO try to make downloads resumable — the header is known after all
# TODO if batch downloading happens, add threading as well
# TODO while at it, push this to Kodi too
# TODO add search, back, skip-to-main-menu and the like
# TODO a cache is needed as well
# todo multiple download, batch download
# todo add arguments, including an output path
__all__ = ['root', 'ana_menu', 'ara_menu', 'df_menu', 'seasons', 'episode', 'quality', 'download', 'pretty_size', | |
'json_yaz'] | |
HEADERS = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8', | |
'Accept-Encoding': 'gzip, deflate, sdch', | |
'Accept-Language': 'en-US,en;q=0.8', | |
'Connection': 'keep-alive', | |
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36', | |
'Referer': 'https://puhutv.com'} | |
RESUME_HEADER = {'Range': 'bytes={:d}-'} | |
MAIN_URL = 'https://puhutv.com' | |
GROUPS = '/api/title_groups' # kategoriler hepsi burada | |
SLUG_PART = '/api/slug/' # pathler buradan beslenecek | |
TITLES_PART = '/api/titles/{}/seasons' # sonuna params ekleniyor. | |
SEASONS_PART = '/api/seasons/{}/episodes' | |
VIDEOS_PART = '/api/assets/{}/videos' | |
REC = re.compile('\/mp4\/(\d+p).mp4') | |
REC_EPISODE = re.compile('^(\d+). Bölüm$') | |
TYPES = ('144p', '240p', '360p', '480p', '720p', '1080p') | |
KATEGORI_URL = urljoin(MAIN_URL, GROUPS) | |
SLUG_URL = urljoin(MAIN_URL, SLUG_PART) | |
# s_page pagination ve s_per ise sayfa basi kac item donecegi | |
GET_PARAMS = {'s_page': '{:d}', 's_per': '{:d}'} | |
SEASON_PARAMS = {'s_page': '1', 's_per': '5000'} | |
EPISODE_PARAMS = {'e_page': '1', 'e_per': '5000'} | |
CHUNK_SIZE = 8192 | |
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
# Rotating file log: up to 5 backups of 10 MiB each; delay=True postpones
# opening the file until the first record is actually emitted.
log_hand = RotatingFileHandler(
    "puhutv.log", backupCount=5, maxBytes=10485760, encoding='utf-8', delay=True)
log_format = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s: %(message)s")
log_hand.setFormatter(log_format)
log.addHandler(log_hand)
# log.addHandler(logging.StreamHandler())

# One shared session so TCP connections and cookies are reused across calls.
# Session() is the documented constructor; requests.session() is a legacy alias.
s = requests.Session()
def return_fname(name='fatih', tip='movie', season=None, episode=None, ext='mp4'):
    """Build the output filename for a movie or a TV episode.

    name: title to use as the file stem.
    tip: 'movie' or 'tv'; any other value yields None (implicitly).
    season/episode: 1-based numbers, zero-padded to two digits for 'tv'.
    ext: file extension without the dot.
    """
    if tip == 'movie':
        return f'{name}.{ext}'
    elif tip == 'tv':
        # Bug fix: the original dropped the extension on the tv branch.
        return f'{name}.S{season:02}.E{episode:02}.{ext}'
def json_yaz(data, name):
    """Dump *data* as pretty-printed JSON to ``<name>.json`` (debug helper).

    data: any json-serializable object.
    name: output file stem; the '.json' suffix is appended.
    """
    import json
    with open(f'{name}.json', 'w') as f:
        # ensure_ascii=False keeps Turkish characters readable in the dump;
        # utf-8 makes that safe regardless of the platform default encoding.
        json.dump(data, f, indent=4, ensure_ascii=False)
def pretty_size(size):
    """Convert a byte count into a human-readable (value, unit) pair.

    size: number of bytes (int or float).
    Returns (value, unit) where unit is one of bytes/KB/MB/GB/TB/PB and
    value is scaled to that unit. Values that do not fit a smaller unit
    are reported in PB instead of returning None (the original fell off
    the loop for sizes >= 1024 PB).
    """
    step = 1024.0
    units = ['bytes', 'KB', 'MB', 'GB', 'TB', 'PB']
    for unit in units[:-1]:
        if size < step:
            return size, unit
        size /= step
    return size, units[-1]
def root(data=None):
    """Fetch and print the top-level category menu of puhutv.

    data: pre-fetched category JSON (dict); when None it is requested
        from KATEGORI_URL through the module-level session.
    Returns ('ana_menu', category_list, count) driving the main loop.
    """
    log.debug("Puhutv bağlanıyor.")
    if data is None:
        try:
            data = s.get(KATEGORI_URL, headers=HEADERS).json()
        except Exception:
            # NOTE(review): on failure data stays None and data['data'] below
            # raises TypeError — only the request error is logged here.
            log.error("", exc_info=True)
    log.debug("Puhutv bağlandı.")
    os.system('clear')  # assumes a POSIX terminal; no Windows fallback
    # json_yaz(data, 'root')
    baslik = 'MENU'
    print(f'{baslik:-^30}')  # title centered and padded with dashes to 30 cols
    log.debug(f"Başlangıç noktası")
    data = data['data']
    try:
        for i, y in enumerate(data, 1):
            print(f'{i:04d}. {y["display_name"]:.24}')  # name truncated to 24 chars
    except Exception:
        log.error(f"Ana menü oluşurken hata oluştu", exc_info=True)
        log.error(f"Hata yaratan ana menü json dosyası: \n {data}")
    # print('{:04d}. {:24.24s}'.format(i, y['display_name']))
    print('-' * 30)
    log.debug("Ana menü geri döndürülüyor.")
    return 'ana_menu', data, len(data)
def originals(data, select='main'):
    """List the seasons of a puhutv "originals" title.

    data: title list produced by ana_menu; select: index into it
        (default 'main' — presumably a dict key; verify against callers).
    Returns ('seasons', season_list, count), or None when parsing fails.
    """
    log.debug(f"originals asamasi")
    # Resolve the selected title's slug to its full season listing.
    data = s.get(urljoin(SLUG_URL, data[select]['slug_path']), headers=HEADERS,
                 params=SEASON_PARAMS).json()
    log.debug("Seçimin dizi olduğu anlaşıldı")
    try:
        data = data['data']['seasons']
        for i, y in enumerate(data, 1):
            print(f'{i:04d}. Season {y["position"]}'[:40])
        return 'seasons', data, len(data)
    except Exception:
        # NOTE(review): falling through here returns None, which breaks the
        # tuple unpacking in the main loop — confirm whether intended.
        log.error(f"Hata oluştu", exc_info=True)
        log.error(f"Hata yaratan ara menü json dosyası: \n {data}")
    print('-' * 30)
def ana_menu(data, select):
    """Handle a top-level category pick.

    The 'originals' category has no genre layer, so its titles are listed
    directly; every other category lists its genres first.

    data: category list from root(); select: 0-based index into it.
    Returns ('originals', titles, 1) or ('ara_menu', genres, count).
    """
    log.debug(f"ana_menu aşaması")
    if data[select]['name'] == 'originals':
        data = s.get(urljoin(SLUG_URL, data[select]['slug_path']), headers=HEADERS,
                     params=SEASON_PARAMS).json()
        try:
            data = data['data']["titles"]
            for i, y in enumerate(data, 1):
                print(f'{i:04d}. {y["name"]:.24}')  # name truncated to 24 chars
        except Exception:
            log.error(f"Hata oluştu", exc_info=True)
            log.error(f"Hata yaratan ara menü json dosyası: \n {data}")
        # NOTE(review): length is hard-coded to 1, not len(data) — confirm.
        return 'originals', data, 1
    else:
        data = data[select]['genres']
        try:
            for i, y in enumerate(data, 1):
                print(f'{i:04d}. {y["name"]:.24}')
            # print('{:04d}. {:24.24s}'.format(i, y['name']))
        except Exception:
            log.error(f"Menü oluşurken hata oluştu", exc_info=True)
            log.error(f"Hata yaratan ara menü json dosyası: \n {data}")
        # json_yaz(data, 'ana_menu')
        print('-' * 30)
        log.debug("Menü geri döndürülüyor.")
        return 'ara_menu', data, len(data)
def ara_menu(data, select):
    """List the titles belonging to the selected genre.

    data: genre list from ana_menu; select: 0-based index into it.
    Returns ('df_menu', titles, count).
    """
    log.debug(f"ara_menu aşaması")
    log.debug(
        f"{data[select]['slug_path']} adresine bağlanılıyor.")
    # json_yaz(data, 'ara_menu')
    try:
        data = s.get(urljoin(SLUG_URL, data[select]['slug_path']), headers=HEADERS,
                     params=SEASON_PARAMS).json()
    except Exception:
        # NOTE(review): on request failure `data` keeps its pre-request value
        # and the lookup below likely raises — only this error is logged.
        log.error("Hata olustu:", exc_info=True)
    log.debug(f"Seçim başlıkları aktarılıyor.")
    data = data["data"]["titles"]
    try:
        for i, y in enumerate(data, 1):
            print(f'{i:04d}. {y["name"]:.24}')  # name truncated to 24 chars
        # print('{:04d}. {:24.24s}'.format(i, y['name']))
    except Exception:
        log.error(f"Hata oluştu", exc_info=True)
        log.error(f"Hata yaratan ara menü json dosyası: \n {data}")
    print('-' * 30)
    # json_yaz(data, 'ara_menu2')
    return 'df_menu', data, len(data)
def df_menu(data, select):
    """Resolve a selected title: list its seasons for a series, or jump
    straight to quality selection for a movie.

    data: title list from ara_menu; select: 0-based index into it.
    Returns ('seasons', season_list, count) for a series, the return of
    quality(data, 'movie') for a movie, or None if season parsing fails.
    """
    log.debug(f"df_menu aşaması")
    log.debug(
        f"{data[select]['slug_path']} adresine bağlanılıyor.")
    try:
        data = s.get(urljoin(SLUG_URL, data[select]['slug_path']), headers=HEADERS,
                     params=SEASON_PARAMS).json()
    except Exception:
        log.error("Hata olustu:", exc_info=True)
    # json_yaz(data, 'df_menu2')
    # Truthiness covers both a missing 'seasons' key (.get -> None) and an
    # empty list; the original `len(...) == 0` raised TypeError on len(None)
    # whenever the payload had no 'seasons' key at all.
    if data['data'].get('seasons'):
        log.debug("Seçimin dizi olduğu anlaşıldı")
        try:
            data = data['data']['seasons']
            for i, y in enumerate(data, 1):
                print(f'{i:04d}. Season {y["position"]}'[:40])
            return 'seasons', data, len(data)
        except Exception:
            log.error(f"Hata oluştu", exc_info=True)
            log.error(f"Hata yaratan ara menü json dosyası: \n {data}")
    else:
        log.debug("Seçimin film olduğu anlaşıldı")
        return quality(data, 'movie')
def seasons(data, select):
    """Fetch and list the episodes of the selected season.

    data: season list of a title; select: 0-based index into it.
    Episodes are sorted by 'position' and filtered to those whose first
    asset has content_type 'main' (skipping trailers/extras).
    Returns ('episode', episode_list, count).
    """
    log.debug("seasons aşaması")
    log.debug(f"{data[select]['id']} adresine bağlanılıyor.")
    season = SEASONS_PART.format(data[select]['id'])
    # json_yaz(data, 'seasons')
    try:
        data = s.get(urljoin(MAIN_URL, season),
                     headers=HEADERS, params=EPISODE_PARAMS).json()
    except Exception:
        log.error("Hata olustu:", exc_info=True)
    log.debug("Bölümler aktarılıyor.")
    # json_yaz(data, 'seasons2')
    try:
        data = [x for x in sorted(data['data']['episodes'], key=lambda x: x['position']) if
                x['assets'][0]['content']['content_type'] == 'main']
        for i, y in enumerate(data, 1):
            # Episode number is extracted from names shaped like "5. Bölüm".
            print(f'{i:04d}. Episode {REC_EPISODE.search(y["name"])[1]}'[:40])
        # print('{:04d}. {:24.24s}'.format(i, 'Episode {:d}'.format(y['position'])))
    except Exception:
        log.error(f"Hata oluştu", exc_info=True)
        log.error(f"Hata yaratan ara menü json dosyası: \n {data}")
    return 'episode', data, len(data)
def episode(data, select):
    """Resolve the selected episode's first asset and delegate to quality().

    data: episode list from seasons(); select: 0-based index into it.
    Returns quality()'s ('download', formats, count) tuple.
    """
    log.debug("episode aşaması")
    log.debug(
        f"{data[select]['assets'][0]['slug_path']} adresine bağlanılıyor.")
    path = urljoin(SLUG_URL, data[select]['assets'][0]['slug_path'])
    # json_yaz(data, 'episode')
    try:
        data = s.get(path, headers=HEADERS).json()
    except Exception:
        # NOTE(review): on failure `data` is the stale episode list and
        # quality() below operates on the wrong payload — only logged here.
        log.error("Hata olustu:", exc_info=True)
    # json_yaz(data, 'episode2')
    return quality(data, 'tv')
def quality(data, type='tv'):
    """Resolve the downloadable video formats of a movie or an episode.

    data: asset JSON from df_menu (movie) or episode (tv).
    type: 'movie' or 'tv' — selects where the asset id and name live.
        (Shadows the builtin `type`; name kept for interface compatibility.)
    Returns ('download', formats, count) where formats is a list of
    {'url', 'type'} dicts sorted by resolution, ascending.
    """
    log.debug("quality aşaması")
    log.debug("Download id alınıyor.")
    try:
        # `==` instead of the original `is`: identity comparison against a
        # string literal is implementation-defined (SyntaxWarning on 3.8+).
        if type == 'movie':
            m_id = data['data']['to_watch_asset_id']
            name = data['data']['name']
        else:
            m_id = data['data']['id']
            name = data['data']['title']['name']
            # `name`/`ep` are currently unused; presumably reserved for the
            # filename-format TODO — confirm before removing.
            ep = REC_EPISODE.search(data['data']['name'])[1]
    except Exception:
        log.error("Hata olustu:", exc_info=True)
    log.debug("Download url oluşturuluyor.")
    video_path = VIDEOS_PART.format(m_id)
    log.debug(f"{video_path} url oluşturuldu.")
    # json_yaz(data, 'quality')
    try:
        data = s.get(urljoin(MAIN_URL, video_path), headers=HEADERS).json()
    except Exception:
        log.error("Hata olustu:", exc_info=True)
    # json_yaz(data, 'quality2')
    log.debug("Vidyo formatları düzenleniyor.")
    # Pre-bind so the return below cannot raise NameError when the try fails.
    list_dl = []
    try:
        list_dl = sorted([{'url': i['url'], 'type': REC.search(i['url'])[1]} for i in data['data']['videos'] if
                          REC.search(i['url'])], key=lambda i: int(i['type'].split('p')[0]))
        log.debug("Vidyo formatları gösteriliyor.")
        for i, y in enumerate(list_dl, 1):
            print(f'{i:04d}. {y["type"]}'[:40])
    except Exception:
        log.error("Hata olustu:", exc_info=True)
    log.debug("Vidyo formatları gösterildi.")
    return 'download', list_dl, len(list_dl)
def download(data, select):
    """Stream the chosen format to disk, printing a live progress line.

    data: format list from quality(); select: 0-based index of the format.
    NOTE: the output name is hard-coded to 'file' (see the naming TODO).
    """
    # json_yaz(data, 'download')
    url = data[select]['url']
    log.debug(f"{url} indirilecek.")
    try:
        r = s.get(url, stream=True)
    except Exception:
        log.error(f"{url} indirilirken hata olustu.", exc_info=True)
        # Without this return, `r` below raised NameError on a failed request.
        return
    log.debug(f"Dosya indiriliyor.")
    try:
        with open('file', 'wb') as f:
            i = 0
            total_size = pretty_size(int(r.headers['Content-Length']))
            os.system('clear')
            log.debug(f"Dosya boyutu {total_size}")
            for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
                if not chunk:  # skip keep-alive chunks
                    continue
                f.write(chunk)
                # Count actual bytes: the final chunk is usually shorter than
                # CHUNK_SIZE, so the original `i += CHUNK_SIZE` overstated
                # progress past 100%.
                i += len(chunk)
                pp = pretty_size(i)
                print(f'{pp[0]:4.2f} {pp[1]} downloaded of {total_size[0]:4.2f} {total_size[1]}'[
                      :40], end='\r', flush=True)
        log.debug(f"{total_size} indirildi.")
        # print('{:4.2f} {} downloaded of {:4.2f} {}'.format(*pretty_size(i), *total_size)[:40], end='\r', flush=True)
    except Exception:
        log.error("Aşağıdaki hata oluştu", exc_info=True)
if __name__ == '__main__':
    # Simple REPL: each handler returns (next_step, data, menu_length); the
    # user's 1-based menu choice is converted to a 0-based index.
    selection = None
    data = None
    step = None
    length = 0
    while 1:
        try:
            if selection is None:
                step, data, length = root()
            # `==` instead of the original `is`: string identity depends on
            # CPython interning and is not a reliable equality test.
            elif step == 'ana_menu':
                step, data, length = ana_menu(data, selection)
            elif step == 'ara_menu':
                step, data, length = ara_menu(data, selection)
            elif step == 'df_menu':
                step, data, length = df_menu(data, selection)
            elif step == 'seasons':
                step, data, length = seasons(data, selection)
            elif step == 'episode':
                step, data, length = episode(data, selection)
            elif step == 'originals':
                step, data, length = originals(data, selection)
            elif step == 'download':
                download(data, selection)
            selection = int(input(f'Choice [1 - {length}]: ')) - 1
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # can still terminate the loop (Ctrl-C used to be swallowed here).
            log.error("", exc_info=True)
            selection = int(input(f'Enter a valid number [1 - {length}]: ')) - 1
    # back = show(select=selection, data=back)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment