@scrapehero
Last active August 14, 2022 03:03
from lxml import html
import csv
import os
import requests
from time import sleep
from random import randint


def parse(url):
    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36'
    }
    # Retrying for failed requests
    for i in range(20):
        try:
            # Generating random delays
            sleep(randint(1, 3))
            # Adding verify=False to avoid SSL related issues
            response = requests.get(url, headers=headers, verify=False)
            if response.status_code == 200:
                doc = html.fromstring(response.content)
                XPATH_NAME = '//h1[@id="title"]//text()'
                XPATH_SALE_PRICE = '//span[contains(@id,"ourprice") or contains(@id,"saleprice")]/text()'
                XPATH_ORIGINAL_PRICE = '//td[contains(text(),"List Price") or contains(text(),"M.R.P") or contains(text(),"Price")]/following-sibling::td/text()'
                XPATH_CATEGORY = '//a[@class="a-link-normal a-color-tertiary"]//text()'
                XPATH_AVAILABILITY = '//div[@id="availability"]//text()'

                RAW_NAME = doc.xpath(XPATH_NAME)
                RAW_SALE_PRICE = doc.xpath(XPATH_SALE_PRICE)
                RAW_CATEGORY = doc.xpath(XPATH_CATEGORY)
                RAW_ORIGINAL_PRICE = doc.xpath(XPATH_ORIGINAL_PRICE)
                RAW_AVAILABILITY = doc.xpath(XPATH_AVAILABILITY)

                # Joining the raw text nodes and collapsing whitespace
                NAME = ' '.join(''.join(RAW_NAME).split()) if RAW_NAME else None
                SALE_PRICE = ' '.join(''.join(RAW_SALE_PRICE).split()).strip() if RAW_SALE_PRICE else None
                CATEGORY = ' > '.join([i.strip() for i in RAW_CATEGORY]) if RAW_CATEGORY else None
                ORIGINAL_PRICE = ''.join(RAW_ORIGINAL_PRICE).strip() if RAW_ORIGINAL_PRICE else None
                AVAILABILITY = ''.join(RAW_AVAILABILITY).strip() if RAW_AVAILABILITY else None

                if not ORIGINAL_PRICE:
                    ORIGINAL_PRICE = SALE_PRICE

                # Retrying in case of captcha (a captcha page has no product title)
                if not NAME:
                    raise ValueError('captcha')

                data = {
                    'NAME': NAME,
                    'SALE_PRICE': SALE_PRICE,
                    'CATEGORY': CATEGORY,
                    'ORIGINAL_PRICE': ORIGINAL_PRICE,
                    'AVAILABILITY': AVAILABILITY,
                    'URL': url,
                }
                return data
            elif response.status_code == 404:
                # Product page does not exist; no point retrying
                break
        except Exception as e:
            print(e)


def ReadAsin():
    # AsinList = csv.DictReader(open(os.path.join(os.path.dirname(__file__), "Asinfeed.csv")))
    AsinList = ['B075QLRSPK',
                'B0731KJVGG',
                'B075KQ622T',
                'B071YC3G5V',
                'B01MDMJGYT',
                'B071VZKDDP',
                'B075FLG6MV',
                'B075TZQRHN',
                'B01MQUB0QN',
                'B075XKFMQC']
    extracted_data = []
    for i in AsinList:
        url = "http://www.amazon.com/dp/" + i
        print("Processing: " + url)
        # Calling the parser
        parsed_data = parse(url)
        if parsed_data:
            extracted_data.append(parsed_data)
    # Writing scraped data to csv file
    with open('scraped_data.csv', 'w', newline='') as csvfile:
        fieldnames = ['NAME', 'SALE_PRICE', 'CATEGORY', 'ORIGINAL_PRICE', 'AVAILABILITY', 'URL']
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames, quoting=csv.QUOTE_ALL)
        writer.writeheader()
        for data in extracted_data:
            writer.writerow(data)


if __name__ == "__main__":
    ReadAsin()
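The commented-out csv.DictReader line in ReadAsin() hints at loading ASINs from an Asinfeed.csv file instead of hard-coding them. A minimal sketch of that variant, assuming the feed file sits next to the script and has a column named asin (the helper name and the column name are assumptions, not part of the original gist):

import csv
import os

def read_asins_from_csv(filename="Asinfeed.csv"):
    # Hypothetical helper: expects a CSV with a header row containing an "asin" column, e.g.
    #   asin
    #   B075QLRSPK
    #   B0731KJVGG
    path = os.path.join(os.path.dirname(__file__), filename)
    with open(path, newline='') as feed:
        return [row['asin'].strip() for row in csv.DictReader(feed) if row.get('asin')]

ReadAsin() could then start with AsinList = read_asins_from_csv() in place of the hard-coded list.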
@3bner

3bner commented Jun 18, 2018

Getting this error when run... (screenshot of the error attached; not reproduced here)
Can someone help?

@MoMoMoMoMoMoMoMoMoMoMo

MoMoMoMoMoMoMoMoMoMoMo commented Oct 12, 2018

C:\WINDOWS\system32>C:\Users\snyde\Documents\amazonscraper\Amazon_Scraper\amazon_product.py
File "C:\Users\snyde\Documents\amazonscraper\Amazon_Scraper\amazon_product.py", line 52
AsinList = [''B077XM396G',
^
SyntaxError: invalid syntax

C:\WINDOWS\system32>"
The error is with one of the ASINs for a product.... Anyone have any tips to correct this?
If I were smarter I'd actually error check my code.... I see what I did now. Whoops.
Thank you for the code! Hopefully this makes my job a little bit easier.
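For anyone else hitting that SyntaxError: the doubled quote before B077XM396G in the pasted line is the culprit; each ASIN entry needs exactly one quote on each side, for example:

AsinList = ['B077XM396G',   # one opening quote, not two
            'B075QLRSPK']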

@karthikeyan622

How can I change this script to work for the amazon.in site?

Thanks in advance.
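A minimal sketch of that change: the product URL is built in ReadAsin(), so pointing it at the Indian storefront should be enough to fetch amazon.in pages. The existing XPATH_ORIGINAL_PRICE already looks for "M.R.P", so price extraction may work unchanged, though that is not verified here.

# Hypothetical tweak inside ReadAsin(): build amazon.in product URLs instead of amazon.com.
# Assumes amazon.in uses the same /dp/<ASIN> URL pattern.
for i in AsinList:
    url = "https://www.amazon.in/dp/" + i   # was "http://www.amazon.com/dp/" + i
    parsed_data = parse(url)
    if parsed_data:
        extracted_data.append(parsed_data)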

@tchow25

tchow25 commented Aug 15, 2019

Getting this error: (screenshot of the error attached; not reproduced here)
