Skip to content

Instantly share code, notes, and snippets.

@sash13
Created September 18, 2011 10:01
Show Gist options
  • Save sash13/1224938 to your computer and use it in GitHub Desktop.
Save sash13/1224938 to your computer and use it in GitHub Desktop.
Open Player music grabber
# -*- coding: utf-8 -*-
import urllib
import urllib2
import re
import os
import json
from BeautifulSoup import BeautifulSoup
# Install a process-wide opener with a cookie processor so the session
# cookie obtained by the "login" request below is sent on every later
# urllib2.urlopen call (getSong / reloadPL would otherwise be anonymous).
urllib2.install_opener(urllib2.build_opener(urllib2.HTTPCookieProcessor))
def get_soung(attr):
    """Download one song to mu/<artist>-<name>.mp3 unless it already exists.

    attr is a 4-item sequence: [artist, name, url, song_id], where artist
    and name are UTF-8 encoded byte strings supplied by the caller.
    Performs a POST to the site's ajax endpoint and writes the raw
    response body to disk.  Returns None.
    """
    namei = 'mu/' + attr[0] + '-' + attr[1] + '.mp3'
    # Skip songs already downloaded on a previous run.
    if os.path.isfile(namei):
        return
    params = urllib.urlencode({
        'app': "ajax",
        'query': "getSong",
        'artist': attr[0],
        'id': attr[3],
        'name': attr[1],
        'url': attr[2],
    })
    f = urllib2.urlopen("http://music.bonart.org.ua", params)
    try:
        data = f.read()
    finally:
        # Ensure the HTTP response is closed even if read() fails.
        f.close()
    # Original shadowed the builtin 'file' and leaked the handle on error;
    # a with-block closes it in all cases.
    with open(namei, 'wb') as out:
        out.write(data)
params = urllib.urlencode({
'app' : "ajax",
'query': "login",
'user': "login=login&password=pass",
})
f = urllib2.urlopen(
"http://music.bonart.org.ua",
params)
params = urllib.urlencode({
'app' : "ajax",
'query': "reloadPL"
})
f = urllib2.urlopen(
"http://music.bonart.org.ua",
params)
soup = BeautifulSoup(''.join(f.read()))
habahaba = soup.findAll('div', "op-playlist")
for data in habahaba:
'''print data.find('span', id="opLinkPlaylistName477")'''
print data.find('span', id=re.compile('^opLinkPlaylistName.*')).string
lyla = data.findAll('div', "op-song")
for lists in lyla:
print lists['data-artist']+' '+lists['data-name']+' '+lists['data-url']
get_soung([lists['data-artist'].encode('utf-8'), lists['data-name'].encode('utf-8'), lists['data-url'], lists['data-id'] ])
'''os.system('wget '+lists['data-url'])'''
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment