#coding: utf-8
# Scrapes JR West service-status pages (trafficinfo.westjr.co.jp) and returns
# delayed lines marked with ANSI colour codes. Python 2 / urllib2 / lxml.
import urllib2, lxml.html
from datetime import datetime


def GET_info(user_agent):
    # Nationwide summary page (list.html).
    headers = {'User-Agent': user_agent}
    root = lxml.html.fromstring(urllib2.urlopen(urllib2.Request("http://trafficinfo.westjr.co.jp/list.html", None, headers)).read())
    list_data = []
    # First entry is the page heading.
    list_data.append("[{}]".format("".join([el.text for el in root.xpath('//*[@id="contents"]/div[1]/h1')]).encode("utf-8")))
    for i in range(1, 30):
        path_status = '//*[@id="contents"]/div[2]/ul/li[%d]/span[3]' % i
        path_name = '//*[@id="contents"]/div[2]/ul/li[%d]/span[1]' % i
        status = "".join([el.text for el in root.xpath(path_status)]).encode("utf-8")
        if not status:
            continue
        name = "".join([el.text for el in root.xpath(path_name)]).encode("utf-8")
        # Disabled branch: also list lines with no delay, in blue.
        # if "遅れの情報はありません。" in status:
        #     list_data.append("\033[94m*\033[0m{}: \033[94m{}\033[0m".format(name, status))
        # Report only lines whose status is not "遅れの情報はありません。" ("no delay information"), in red.
        if "遅れの情報はありません。" not in status:
            list_data.append("\033[91m*\033[0m{}: \033[91m{}\033[0m".format(name, status))
    if len(list_data) == 1:
        # Only the heading was collected, so no delays were found.
        list_data.append("\033[94m*遅れの情報はありません。\033[0m")
    return list_data
def kinki(user_agent):
    # Same scrape as GET_info, but for the Kinki-area page (kinki.html).
    headers = {'User-Agent': user_agent}
    root = lxml.html.fromstring(urllib2.urlopen(urllib2.Request("http://trafficinfo.westjr.co.jp/kinki.html", None, headers)).read())
    list_data = []
    list_data.append("[{}]".format("".join([el.text for el in root.xpath('//*[@id="contents"]/div[1]/h1')]).encode("utf-8")))
    for i in range(1, 30):
        path_status = '//*[@id="contents"]/div[2]/ul/li[%d]/span[3]' % i
        path_name = '//*[@id="contents"]/div[2]/ul/li[%d]/span[1]' % i
        status = "".join([el.text for el in root.xpath(path_status)]).encode("utf-8")
        if not status:
            continue
        name = "".join([el.text for el in root.xpath(path_name)]).encode("utf-8")
        # Disabled branch: also list lines with no delay, in blue.
        # if "遅れの情報はありません。" in status:
        #     list_data.append("\033[94m*\033[0m{}: \033[94m{}\033[0m".format(name, status))
        if "遅れの情報はありません。" not in status:
            list_data.append("\033[91m*\033[0m{}: \033[91m{}\033[0m".format(name, status))
    if len(list_data) == 1:
        list_data.append("\033[94m*遅れの情報はありません。\033[0m")
    return list_data
def chugoku(user_agent):
    # Same scrape as GET_info, but for the Chugoku-area page (chugoku.html).
    headers = {'User-Agent': user_agent}
    root = lxml.html.fromstring(urllib2.urlopen(urllib2.Request("http://trafficinfo.westjr.co.jp/chugoku.html", None, headers)).read())
    list_data = []
    list_data.append("[{}]".format("".join([el.text for el in root.xpath('//*[@id="contents"]/div[1]/h1')]).encode("utf-8")))
    for i in range(1, 30):
        path_status = '//*[@id="contents"]/div[2]/ul/li[%d]/span[3]' % i
        path_name = '//*[@id="contents"]/div[2]/ul/li[%d]/span[1]' % i
        status = "".join([el.text for el in root.xpath(path_status)]).encode("utf-8")
        if not status:
            continue
        name = "".join([el.text for el in root.xpath(path_name)]).encode("utf-8")
        # Disabled branch: also list lines with no delay, in blue.
        # if "遅れの情報はありません。" in status:
        #     list_data.append("\033[94m*\033[0m{}: \033[94m{}\033[0m".format(name, status))
        if "遅れの情報はありません。" not in status:
            list_data.append("\033[91m*\033[0m{}: \033[91m{}\033[0m".format(name, status))
    if len(list_data) == 1:
        list_data.append("\033[94m*遅れの情報はありません。\033[0m")
    return list_data
def ex_index(user_agent):
    # Same scrape as GET_info, but for the ex_index.html page.
    headers = {'User-Agent': user_agent}
    root = lxml.html.fromstring(urllib2.urlopen(urllib2.Request("http://trafficinfo.westjr.co.jp/ex_index.html", None, headers)).read())
    list_data = []
    list_data.append("[{}]".format("".join([el.text for el in root.xpath('//*[@id="contents"]/div[1]/h1')]).encode("utf-8")))
    for i in range(1, 30):
        path_status = '//*[@id="contents"]/div[2]/ul/li[%d]/span[3]' % i
        path_name = '//*[@id="contents"]/div[2]/ul/li[%d]/span[1]' % i
        status = "".join([el.text for el in root.xpath(path_status)]).encode("utf-8")
        if not status:
            continue
        name = "".join([el.text for el in root.xpath(path_name)]).encode("utf-8")
        # Disabled branch: also list lines with no delay, in blue.
        # if "遅れの情報はありません。" in status:
        #     list_data.append("\033[94m*\033[0m{}: \033[94m{}\033[0m".format(name, status))
        if "遅れの情報はありません。" not in status:
            list_data.append("\033[91m*\033[0m{}: \033[91m{}\033[0m".format(name, status))
    if len(list_data) == 1:
        list_data.append("\033[94m*遅れの情報はありません。\033[0m")
    return list_data
def Bulletin_board():
    # Poll the nationwide page forever and redraw each status entry in place
    # on a single terminal line (carriage return, no newline).
    import sys, time
    while True:
        data = GET_info(user_agent="Mozilla/5.0 (iPhone; CPU iPhone OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A403 Safari/8536.25")
        for i in data:
            sys.stdout.write("\r{}".format(i))
            sys.stdout.flush()
            time.sleep(1)
if __name__ == "__main__":
    user_agent = "Mozilla/5.0 (iPhone; CPU iPhone OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A403 Safari/8536.25"
    # Print all four reports, one entry per line.
    print "\n".join(GET_info(user_agent)) + "\n" + "\n".join(kinki(user_agent)) + "\n" + "\n".join(chugoku(user_agent)) + "\n" + "\n".join(ex_index(user_agent))
    # Disabled: print only the regional reports instead.
    # print "\n".join(kinki(user_agent))
    # print "\n".join(chugoku(user_agent))
    # print "\n".join(ex_index(user_agent))