Skip to content

Instantly share code, notes, and snippets.

@d0n601
Last active October 27, 2017 18:22
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save d0n601/f130b4e0d9e7c8535274708d9c89d074 to your computer and use it in GitHub Desktop.
Leafly.com Scraper Proof of Concept
import json
import requests
#from pymongo import MongoClient
class LeaflyHarvest(object):
    """
    Leafly.com proof-of-concept data harvester.
    """

    def scrape_dispensaries(self):
        """
        Scrape all dispensaries from Leafly.com and print their names.

        :return: list of dispensary result dicts from the API response
                 (empty list if the response carries no "Results" key)
        :raises requests.HTTPError: if the endpoint returns a 4xx/5xx status
        """
        # API endpoint to search for stuff on Leafly
        url = "https://www.leafly.com/finder/searchnext"
        # Body of the request: a bounding box plus a very large "Take"
        # so a single request returns everything in one page.
        payload = {
            "NorthwestLatitude": 57.70487627437739,
            "NorthwestLongitude": -138.49595341516113,
            "Page": 0,
            "PremiumLocation": 84720,
            "PremiumLocationType": "ZipCode",
            "SoutheastLatitude": 12.898819268131966,
            "SoutheastLongitude": -33.027203415161125,
            "Take": 9000,
        }
        # json= serializes the payload and sets the Content-Type header
        # for us (replaces the manual json.dumps + headers dict).
        # A timeout is mandatory: without one, requests can hang forever.
        response = requests.post(url, json=payload, timeout=30)
        # Fail loudly on HTTP errors instead of choking on non-JSON bodies.
        response.raise_for_status()
        results = response.json().get("Results", [])
        # Within this loop you could filter/store data...
        for dispensary in results:
            print(dispensary["Name"])
        print(len(results))
        return results
if __name__ == "__main__":
    # Script entry point: run the proof-of-concept harvest once.
    harvester = LeaflyHarvest()
    harvester.scrape_dispensaries()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment