- Cross-Site Scripting (XSS)
- Cross-Site Request Forgery (CSRF)
Below are some packages, listed in suggested order, that implement this protocol:
sudo cat /var/log/nginx/access.log | cut -d '"' -f3 | cut -d ' ' -f2 | sort | uniq -c | sort -rn |
def __init__(self, *args, **kwargs): | |
super(Document, self).__init__(*args, **kwargs) | |
self.__status = self.status | |
self.__amount = self.amount | |
self.__pay_bill = self.pay_bill | |
def save(self, force_insert=False, force_update=False): | |
if self.__status != self.status: # El estado cambio |
:80 | |
root /usr/src/wordpress | |
gzip | |
fastcgi / wordpress:9000 php | |
rewrite { | |
if {path} not_match ^\/wp-admin | |
to {path} {path}/ /index.php?_url={uri} | |
} | |
log stdout | |
errors stderr |
Below are some packages, listed in suggested order, that implement this protocol:
For implementing HTTPS on your server
Below are some packages, listed in suggested order, that implement this protocol:
(pipeline)hadoop@ip-172-30-0-176:~$ remote-task ImportCountryWorkflow --n-reduce-tasks 1 --host localhost --remote-name analyticstack --user hadoop --repo https://github.com/codetigerco/edx-analytics-pipeline --branch stage_edxapp --local-scheduler --verbose --wait --overwrite --interval-start 2017-01-01 --overwrite-n-days 16 --geolocation-data hdfs://localhost:9000/data/GeoIP.dat --override-config $HOME/edx-analytics-pipeline/config/devstack.cfg --skip-setup | |
Parsed arguments = Namespace(branch='stage_edxapp', extra_repo=None, host='localhost', job_flow_id=None, job_flow_name=None, launch_task_arguments=['ImportCountryWorkflow', '--n-reduce-tasks', '1', '--local-scheduler', '--overwrite', '--interval-start', '2017-01-01', '--overwrite-n-days', '16', '--geolocation-data', 'hdfs://localhost:9000/data/GeoIP.dat'], log_path=None, override_config='/edx/app/hadoop/edx-analytics-pipeline/config/devstack.cfg', package=None, private_key=None, remote_name='analyticstack', repo='https://github.com/codetigerco/edx |
remote-task LastCountryOfUser --n-reduce-tasks 1 --host localhost --remote-name analyticstack --user hadoop --repo https://github.com/codetigerco/edx-analytics-pipeline --branch stage_edxapp --local-scheduler --verbose --wait --overwrite --interval-start 2017-01-01 --overwrite-n-days 16 --geolocation-data GeoIP.dat --override-config $HOME/edx-analytics-pipeline/config/devstack.cfg --skip-setup | |
Parsed arguments = Namespace(branch='stage_edxapp', extra_repo=None, host='localhost', job_flow_id=None, job_flow_name=None, launch_task_arguments=['LastCountryOfUser', '--n-reduce-tasks', '1', '--local-scheduler', '--overwrite', '--interval-start', '2017-01-01', '--overwrite-n-days', '16', '--geolocation-data', 'GeoIP.dat'], log_path=None, override_config='/edx/app/hadoop/edx-analytics-pipeline/config/devstack.cfg', package=None, private_key=None, remote_name='analyticstack', repo='https://github.com/codetigerco/edx-analytics-pipeline', secure_config=None, secure_config_branch=None, secure_config_repo=None, shel |
969637439385692816-temp-2017-01-19T16-33-41.818263 | |
2017-01-19 16:33:45,906 INFO 10701 [luigi-interface] hadoop.py:273 - 17/01/19 16:33:45 WARN streaming.StreamJob: -file option is deprecated, please use generic option -files instead. | |
2017-01-19 16:33:49,043 INFO 10701 [luigi-interface] hadoop.py:273 - 17/01/19 16:33:49 INFO client.RMProxy: Connecting to ResourceManager at /0.0.0.0:8032 | |
2017-01-19 16:33:49,544 INFO 10701 [luigi-interface] hadoop.py:273 - 17/01/19 16:33:49 INFO client.RMProxy: Connecting to ResourceManager at /0.0.0.0:8032 | |
2017-01-19 16:33:52,698 INFO 10701 [luigi-interface] hadoop.py:273 - 17/01/19 16:33:52 INFO mapred.FileInputFormat: Total input paths to process : 1 | |
2017-01-19 16:33:52,879 INFO 10701 [luigi-interface] hadoop.py:273 - 17/01/19 16:33:52 INFO mapreduce.JobSubmitter: number of splits:2 | |
2017-01-19 16:33:52,887 INFO 10701 [luigi-interface] hadoop.py:273 - 17/01/19 16:33:52 INFO Configuration.deprecation: mapred.job.name is deprecated. Instead, use mapreduce.job.name | |
2017-01-19 16: |
(pipeline) ubuntu@ip:~/edx-analytics-pipeline$ remote-task --host localhost --user ubuntu --remote-name analyticstack --skip-setup --wait ImportEnrollmentsIntoMysql --interval 2017-01-01-2017-01-10 --verbose --local-scheduler --overwrite-n-days 12 | |
Parsed arguments = Namespace(branch='release', extra_repo=None, host='localhost', job_flow_id=None, job_flow_name=None, launch_task_arguments=['ImportEnrollmentsIntoMysql', '--interval', '2017-01-01-2017-01-10', '--local-scheduler', '--overwrite-n-days', '12'], log_path=None, override_config=None, package=None, private_key=None, remote_name='analyticstack', repo=None, secure_config=None, secure_config_branch=None, secure_config_repo=None, shell=None, skip_setup=True, sudo_user='hadoop', user='ubuntu', vagrant_path=None, verbose=True, virtualenv_extra_args=None, wait=True, wheel_url=None, workflow_profiler=None) | |
Running commands from path = /home/ubuntu/pipeline/share/edx.analytics.tasks | |
Remote name = analyticstack | |
Running command = ['ssh', '-tt', '-o', 'ForwardAge |
TASK: [analytics_api | install application requirements] ********************** | |
ok: [localhost] => (item=base.txt) | |
ok: [localhost] => (item=production.txt) | |
ok: [localhost] => (item=optional.txt) | |
TASK: [analytics_api | migrate] *********************************************** | |
failed: [localhost] => {"changed": true, "cmd": "DB_MIGRATION_USER='????' DB_MIGRATION_PASS='???' /edx/app/analytics_api/venvs/analytics_api/bin/python ./manage.py migrate --noinput", "delta": "0:00:00.764840", "end": "2017-01-02 20:19:06.217027", "rc": 1, "start": "2017-01-02 20:19:05.452187", "warnings": []} | |
stderr: Traceback (most recent call last): | |
File "./manage.py", line 10, in <module> | |
execute_from_command_line(sys.argv) |