I hereby claim:

- I am gardart on github.
- I am gardart (https://keybase.io/gardart) on keybase.
- I have a public key ASCEaKt49AJeVMuAmMUN_-424hmrA0dC-3qtzaxIDR7LrQo

To claim this, I am signing this object:
#!/bin/bash
# Nagios plugin bootstrap: resolve this script's own name and directory,
# then source the standard Nagios utils.sh helpers (exit codes, print_revision, ...).
PROGNAME=$(/bin/basename "$0")
# Strip the trailing path component from $0 to get the plugin directory.
PROGPATH=$(echo "$0" | /bin/sed -e 's,[\\/][^\\/][^\\/]*$,,')
. "$PROGPATH/utils.sh"

# Environment for the Oracle 11.2 instant client — uncomment as needed:
#export PATH=$PATH:/usr/lib64/nagios/plugins:/usr/lib/nagios/plugins:/usr/lib/oracle/11.2/client64/bin
#export ORACLE_HOME=/usr/lib/oracle/11.2
#export TNS_ADMIN=/etc
#export LD_LIBRARY_PATH=/usr/lib/oracle/11.2/client64/lib
# Update the base system and make sure EPEL is available before adding
# any third-party repositories.
yum update -y
yum install -y epel-release
yum clean all

# Add repos: ConSol labs (monitoring plugins) and opensource.is (ok-release)
rpm -Uvh "https://labs.consol.de/repo/stable/rhel7/i386/labs-consol-stable.rhel7.noarch.rpm"
rpm -ihv http://opensource.is/repo/ok-release.rpm
yum update -y ok-release
#
# Export TOP X Performance statistics from VMM into a HTML table
$htmlfile = "c:\temp\status.html"   # path the HTML report is written to
$firstx = 10                        # number of objects to list in each table

# CSS style block prepended to the generated HTML report
$a = "<style>"
$a = $a + "BODY{background-color:peachpuff;}"
$a = $a + "TABLE{border-width: 1px;border-style: solid;border-color: black;border-collapse: collapse;}"
$a = $a + "TH{border-width: 1px;padding: 0px;border-style: solid;border-color: black;background-color:thistle}"
$a = $a + "TD{border-width: 1px;padding: 0px;border-style: solid;border-color: black;background-color:PaleGoldenrod}"
# Convert Icelandic weather HTML data (all stations) from an HTML table to CSV format
$ curl "http://brunnur.vedur.is/athuganir/athtafla/2015081210.html" 2>/dev/null | grep -i -e '</\?TABLE\|</\?TD\|</\?TR' | tr -d '\n' | sed 's/<\/TR[^>]*>/\n/Ig' | sed 's/<\/\?\(TABLE\|TR\)[^>]*>//Ig' | sed 's/^<T[DH][^>]*>\|<\/\?T[DH][^>]*>$//Ig' | sed 's/<\/T[DH][^>]*><T[DH][^>]*>/,/Ig' | sed 's/<[^>]\+>//Ig' | sed 's/^[ \t]*//g' | sed 's/^[ \t]*//g' | sed '/^\s*$/d' | sed 's/^/2015081210,/'
Output:
2015081210,33751,Siglufjarðarvegur_Herkonugil,-99,6.9,6.9,7.9,80,6.7,7.1,10.2,92,-99
2015081210,33643,Stafá,40,9.3,8.9,9.5,38,4.9,4.9,7.1,79,-99
2015081210,32474,Steingrímsfjarðarheiði,440,4.4,3.9,4.5,65,11.5,11.6,14.2,99,-99
2015081210,31950,Stórholt,70,9.9,9.3,9.9,81,6.7,6.7,8.5,82,-99
###
#
# This script runs robocopy jobs in parallel by increasing the number of outstanding I/Os to the VPSA. Even though you can
# change the number of threads using the "/mt:#" parameter, your backups will run faster by adding two or more jobs to your
# original set.
#
# To do this, you need to subdivide the work into directories. That is, each job will recurse into its directory until completed.
# The ideal case is to have hundreds of directories at the root of the backup. Simply change $src to get
# the list of folders to back up; that list is used to feed $ScriptBlock.
#
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Written as part of https://www.scrapehero.com/how-to-scrape-amazon-product-reviews-using-python/
# Standard library
import json
from json import dump, loads
from re import sub
from time import sleep

# Third-party
from dateutil import parser as dateparser
from lxml import html
from requests import get
{
  "_id":"amazon_pet",
  "startUrl":[
    "https://www.amazon.com/Best-Sellers-Pet-Supplies/zgbs/pet-supplies/ref=zg_bs_nav_0"
  ],
  "selectors":[
    {
      "id":"product",
      "type":"SelectorElement",
      "parentSelectors":[
# Standard library
import csv
import os
from random import randint
from time import sleep

# Third-party
import requests
from lxml import html

# NOTE(review): `exceptions` is a Python 2-only module; on Python 3,
# ValueError is a builtin and this import will fail — confirm target version.
from exceptions import ValueError
def parse(url): | |
headers = { |
# Debian | Raspbian
All steps on Debian require root privileges. To become root, simply run:
Debian
su
Raspbian