A list of common regular expressions for use in Django URL patterns.
Example Django URL patterns (old-style patterns() syntax, Django < 1.10):

urlpatterns = patterns('',
    # url() entries go here; a fuller sketch with common regexes follows below
)
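As a quick reference, here is a hedged sketch of some commonly used URL regexes. The app and view names ('myapp.views.*') are placeholders for illustration, not part of the original list, and the string view references match the old-style patterns() syntax above.

from django.conf.urls import patterns, url

# Hedged sketch of common Django URL regexes; view paths and names are placeholders.
urlpatterns = patterns('',
    url(r'^$', 'myapp.views.index', name='index'),                                    # site root
    url(r'^items/(?P<pk>\d+)/$', 'myapp.views.item_detail', name='item'),             # numeric primary key
    url(r'^items/(?P<slug>[-\w]+)/$', 'myapp.views.item_by_slug', name='item-slug'),  # slug
    url(r'^archive/(?P<year>\d{4})/(?P<month>\d{2})/$', 'myapp.views.archive', name='archive'),  # year/month
)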
Google Apps Script: look up a YouTube channel ID from a channel name or URL (uses the retired GData feed).

function ytChannelId(channelName) {
  if (channelName) {
    var name = getChannelFromUrl(channelName); // helper assumed to be defined elsewhere
    var url = "https://gdata.youtube.com/feeds/api/users/" + name + "?fields=id&alt=json";
    var result = UrlFetchApp.fetch(url);
    var data = Utilities.jsonParse(result.getContentText());
    if (typeof data['entry'] !== 'undefined' && typeof data['entry']['id']['$t'] !== 'undefined') {
      var id = "UC" + data['entry']['id']['$t'].split('/').pop();
      return id;
    }
  }
}
<!-- Custom form shown to the visitor -->
<form class='form-inline' id='my-custom-form'>
  <div class="form-group">
    <input type='email' class='form-control' placeholder='Your email address' required>
  </div>
  <button class="btn btn-primary" type='submit'>Sign up</button>
</form>

<!-- Actual form that gets submitted to HubSpot -->
<div class="hidden" id='hubspot-form'>
  <script charset="utf-8" src="//js.hsforms.net/forms/current.js"></script>
  <!-- hbspt.forms.create({...}) call with your own portalId/formId goes here -->
</div>
Just paste it into the browser console, and it will count contributions for you.
Array.from(document.querySelectorAll(".user-contrib-cell"))
  .map(el => parseInt(el.getAttribute("title"), 10) || 0) // "No contributions" parses to NaN -> 0
  .reduce((sum, n) => sum + n, 0);
# Use Python 3 for easy unicode handling
$ virtualenv -p python3 .env
$ source .env/bin/activate
$ pip install django

# Start a new Django project and app (run while the virtualenv is still active)
$ django-admin.py startproject mysite
$ cd mysite
$ ./manage.py startapp myapp    # "myapp" is an example name
$ ./manage.py migrate
$ ./manage.py createsuperuser

# Deactivate the virtualenv when you are finished
$ deactivate
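If you prefer to script the superuser instead of answering the interactive prompt, here is a minimal hedged sketch that can be pasted into ./manage.py shell (the username, email, and password are placeholders):

# Hedged sketch: create a superuser programmatically from ./manage.py shell.
from django.contrib.auth import get_user_model

User = get_user_model()
User.objects.create_superuser("admin", "admin@example.com", "change-me")  # placeholder credentials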
<?php
// Shorten a URL with the TinyURL API and print the result.
echo file_get_contents('http://tinyurl.com/api-create.php?url='.'http://www.example.com/');
/* For example
http://tinyurl.com/api-create.php?url=http://www.fullondesign.co.uk/
Would return:
http://tinyurl.com/d4px9f
*/
?>
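For comparison, a hedged Python equivalent of the same TinyURL call (assumes the requests package; the example URL matches the PHP snippet above):

import requests

# Hedged sketch: shorten a URL via the same api-create.php endpoint used above.
short_url = requests.get(
    "http://tinyurl.com/api-create.php",
    params={"url": "http://www.example.com/"},
).text
print(short_url)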
/* WebKit-only: hide the AM/PM segment of <input type="time"> */
input[type=time]::-webkit-datetime-edit-ampm-field {
  display: none;
}
This goes with the Traversy Media Scrapy tutorial on YouTube. Install Scrapy first:

$ pip install scrapy
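To have something runnable alongside the tutorial, here is a minimal hedged spider sketch (the target site and CSS selectors are illustrative assumptions, not taken from the tutorial). Save it as quotes_spider.py and run it with: scrapy runspider quotes_spider.py -o quotes.json

import scrapy

# Minimal example spider; the site and selectors are illustrative assumptions.
class QuotesSpider(scrapy.Spider):
    name = "quotes"
    start_urls = ["http://quotes.toscrape.com/"]

    def parse(self, response):
        # Yield one item per quote block on the page.
        for quote in response.css("div.quote"):
            yield {
                "text": quote.css("span.text::text").get(),
                "author": quote.css("small.author::text").get(),
            }
        # Follow the "next page" link if there is one.
        next_page = response.css("li.next a::attr(href)").get()
        if next_page:
            yield response.follow(next_page, callback=self.parse)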