# Deny access to .git, .svn and .ht* files (e.g. .htpasswd)
location ~ /\.(git|svn|ht) {
return 404;
}
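A quick way to verify the rule after reloading nginx (the hostname is a placeholder for your own shop):
curl -I https://example.com/.git/config   # should now return HTTP 404
curl -I https://example.com/.htpasswd     # should now return HTTP 404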
// Redis cache backend for Magento 2, e.g. in app/etc/env.php:
'cache' => array(
    'frontend' => array(
        'default' => array(
            'backend' => 'Cm_Cache_Backend_Redis',
            'backend_options' => array(
                'server' => '127.0.0.1',
                'port' => '6379',
            ),
        ),
    ),
),
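With this backend active, Magento's cache writes go to the Redis instance configured above; a quick way to watch them arrive:
redis-cli -h 127.0.0.1 -p 6379 monitor   # stream every command the cache issues
redis-cli -h 127.0.0.1 -p 6379 dbsize    # key count should grow as pages are hit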
// Redis session storage for Magento 2, e.g. in app/etc/env.php:
'session' => array(
    'save' => 'redis',
    'redis' => array(
        'host' => 'redismaster',
        'port' => '6379',
        'password' => '',
        'timeout' => '2.5',
        'persistent_identifier' => '',
        'database' => '2',
        'compression_threshold' => '2048',
    ),
),
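Sessions then live in Redis database 2; assuming the usual sess_ key prefix of the Redis session handler, something like this lists them after a storefront visit:
redis-cli -h redismaster -p 6379 -n 2 --scan --pattern 'sess_*'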
#!/usr/bin/env python
"""
Warm the caches of your website by crawling each page defined in sitemap.xml.
To use, download this file and make it executable. Then run:
./cache-warmer.py --threads 4 --file /data/web/public/sitemap.xml -v
"""
import argparse
import multiprocessing.pool as mpool
import os.path
import re
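The embed cuts off after the imports; a minimal continuation consistent with the docstring and imports (a sketch, not the original script) could look like this:

import sys
import urllib.request  # assumed HTTP client; the original may use a different one

def crawl(url):
    # Fetch the page and discard the body; the request alone warms the cache.
    urllib.request.urlopen(url, timeout=30).read()
    return url

def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--threads', type=int, default=4)
    parser.add_argument('--file', required=True)
    parser.add_argument('-v', '--verbose', action='store_true')
    args = parser.parse_args()
    if not os.path.isfile(args.file):
        sys.exit('Sitemap file %s not found!' % args.file)
    with open(args.file) as f:
        urls = re.findall(r'<loc>(.+?)</loc>', f.read())
    # Crawl with a thread pool, since the work is network-bound.
    pool = mpool.ThreadPool(args.threads)
    for url in pool.imap_unordered(crawl, urls):
        if args.verbose:
            print(url)

if __name__ == '__main__':
    main()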
#!/bin/bash
if [ "$#" -ne 1 ] || [ "x$1" == "x" ]; then
    echo "Usage: $0 <sitemap.xml>"
    exit 1
fi
if [ ! -f "$1" ]; then
    echo "Sitemap file $1 not found! Exit!"
    exit 1
fi
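The embed truncates here; a minimal continuation in the same spirit as the Python warmer above (request every <loc> URL from the sitemap) might be:

# Extract each <loc> URL and fetch it, discarding the body (requires GNU grep for -P).
grep -oP '(?<=<loc>)[^<]+' "$1" | while read -r url; do
    curl -s -o /dev/null "$url"
done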
<?php
/*
This script tests whether your PHP installation is able to create TLS connections.
*/
// CURLOPT_SSLVERSION values: CURL_SSLVERSION_TLSv1_2 = 6, CURL_SSLVERSION_TLSv1_3 = 7.
foreach ([CURL_SSLVERSION_TLSv1_2, CURL_SSLVERSION_TLSv1_3] as $version) {
    $ch = curl_init();
    curl_setopt($ch, CURLOPT_URL, "https://tlstest.paypal.com/");
    curl_setopt($ch, CURLOPT_SSLVERSION, $version);
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, true); // return the body instead of echoing it
    var_dump(curl_exec($ch));
    var_dump(curl_error($ch));
    curl_close($ch);
}
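With CURLOPT_RETURNTRANSFER set, curl_exec() returns the response body as a string when the handshake succeeds and false when it fails, and curl_error() is empty on success. Run it as (filename is an assumption):
php tlstest.php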
<!-- Redis session storage for Magento 1, inside <global> in app/etc/local.xml: -->
<session_save>db</session_save>
<redis_session>
    <host>redismaster</host>
    <port>6379</port>
    <password></password>
    <timeout>2.5</timeout>
    <persistent></persistent>
    <db>2</db>
    <compression_threshold>2048</compression_threshold>
    <compression_lib>gzip</compression_lib>
</redis_session>
<DirectoryMatch "^/(.*/)*\.(git|svn)/">
    Order deny,allow
    Deny from all
</DirectoryMatch>
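Order/Deny is Apache 2.2 syntax; on Apache 2.4 the same restriction reads:
<DirectoryMatch "^/(.*/)*\.(git|svn)/">
    Require all denied
</DirectoryMatch>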
#!/bin/bash
# This script is a debug utility for cronjobs, as explained in:
# - https://support.hypernode.com/knowledgebase/configure-cronjobs-on-hypernode/
# It logs all output and timing to a log file.
#
# To use it, download the script, add the executable bit and put it in your cronjob:
# */5 * * * * /data/web/bin/debug-cron php -f /data/web/public/cron.php
LOGDIR="/data/web/public/var/log/crons"
TIMESTAMP="$( date '+%Y%m%d%H%M' )"
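The embed stops after the variable setup; a continuation consistent with the header comment (capture all output and timing of the wrapped command; a sketch, not the original) might be:

mkdir -p "$LOGDIR"
LOGFILE="$LOGDIR/cron-$TIMESTAMP.log"
echo "=== $(date) start: $* ===" >> "$LOGFILE"
START="$SECONDS"
# Run the actual cron command, capturing stdout and stderr.
"$@" >> "$LOGFILE" 2>&1
STATUS="$?"
echo "=== $(date) done: exit $STATUS after $(( SECONDS - START ))s ===" >> "$LOGFILE"
exit "$STATUS"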
location /robots.txt { return 200 "### Autogenerated robots.txt\n
# Sitemap
Sitemap: https://$http_host/sitemap.xml
# Crawlers Setup
User-agent: *
Crawl-delay: 20
"; }
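Once nginx is reloaded, verify the generated file (hostname is a placeholder):
curl https://example.com/robots.txt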