Patrick Harris (RootPat)

@RootPat
RootPat / TCP Client
Created December 15, 2017 16:52
TCP client in Python
import socket

# The request below targets google.com over plain HTTP (port 80)
target_host = 'google.com'
target_port = 80

# Create a TCP socket and connect; connect() takes a single (host, port) tuple
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect((target_host, target_port))

# send() requires bytes in Python 3; issue a minimal HTTP GET request
client.send(b"GET / HTTP/1.1\r\nHost: google.com\r\n\r\n")

# Read up to 4096 bytes of the response
response = client.recv(4096)
print(response)
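A single recv(4096) can return only part of the reply. Below is a minimal follow-up sketch, not part of the original gist: it asks the server to close the connection so the read loop has a clean stopping point, and uses sendall() so the whole request is written.

import socket

# Variant that requests connection close and reads the full reply
host, port = 'google.com', 80
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((host, port))
sock.sendall(b"GET / HTTP/1.1\r\nHost: google.com\r\nConnection: close\r\n\r\n")

chunks = []
while True:
    chunk = sock.recv(4096)
    if not chunk:  # recv returns b'' once the server closes the socket
        break
    chunks.append(chunk)
sock.close()

print(b''.join(chunks).decode('utf-8', errors='replace'))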
@RootPat
RootPat / Linkpuller
Last active November 11, 2017 02:27
Grabs links off a given URL
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from bs4 import BeautifulSoup

# Launch Chrome and load the page to scrape (placeholders as in the original gist)
driver = webdriver.Chrome('PATH_TO_CHROMEDRIVER')
driver.get('URL_to_scrape')
driver.maximize_window()

# Collect every anchor element that carries an href attribute
linkpuller = driver.find_elements_by_xpath('//a[@href]')
for elem in linkpuller:
    # The gist preview ends here; printing each link is one plausible loop body
    print(elem.get_attribute('href'))
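BeautifulSoup is imported but never used in the preview. A short sketch of one way it could pull the same links from the rendered page source, assuming the driver from the snippet above is still open:

from bs4 import BeautifulSoup

# Parse the rendered page and collect the href value of every <a> tag
soup = BeautifulSoup(driver.page_source, 'html.parser')
links = [a['href'] for a in soup.find_all('a', href=True)]
print(links)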
# Set variables for a Fibonacci-style counter
a = 0
b = 1
start_at = 0
stop_at = 1001

# Program logic: while start_at < stop_at, execute the indented code below,
# counting up from start_at and setting each new value to the sum of the two preceding numbers
while start_at < stop_at:
    start_at = start_at + 1
    old_a = a
    # The original snippet ends at old_a = a; the rest is one plausible completion
    a = b
    b = old_a + b
    print(old_a)
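As a design note, Python's tuple assignment removes the need for the old_a temporary. A compact equivalent of the loop above:

# Same sequence using tuple assignment, so no temporary variable is needed
a, b = 0, 1
for _ in range(1001):
    print(a)
    a, b = b, a + b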
# Separate snippet: an HTML table of classes to parse with BeautifulSoup and pandas
from bs4 import BeautifulSoup
import pandas as pd

# The preview cuts off inside the literal; the rows and closing tags below are placeholders
html_string = '''
<html>
<head>
<title>Classes</title>
</head>
<body>
<table>
<tr><th>Class</th></tr>
<tr><td>Example</td></tr>
</table>
</body>
</html>
'''
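A minimal sketch of what the snippet appears to be building toward: extracting the table into a pandas DataFrame. The column and row values come from the placeholder markup above, not from the original gist.

# Pull header cells and data cells out of the table and build a DataFrame
soup = BeautifulSoup(html_string, 'html.parser')
rows = soup.find('table').find_all('tr')
header = [th.get_text(strip=True) for th in rows[0].find_all('th')]
data = [[td.get_text(strip=True) for td in tr.find_all('td')] for tr in rows[1:]]
df = pd.DataFrame(data, columns=header)
print(df)

pandas can also do this in one call with pd.read_html(html_string), provided a parser backend such as lxml is installed.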
# Separate snippet: opens the Upwork login page with Selenium; the preview ends before the login steps
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from bs4 import BeautifulSoup
import time
import pandas as pd

driver = webdriver.Chrome('/Users/patrickharris/Desktop/chromedriver/chromedriver.exe')
driver.get('https://www.upwork.com/ab/account-security/login')
driver.maximize_window()
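The preview stops after maximizing the window. A hedged sketch of how the login flow might continue; the locator and credential strings are placeholders in the style of the gists above, not taken from Upwork's actual page:

# Hypothetical continuation: wait for the page, then fill and submit the username field
time.sleep(3)
username_field = driver.find_element_by_name('USERNAME_FIELD_NAME')
username_field.send_keys('YOUR_USERNAME')
username_field.send_keys(Keys.RETURN)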