Save the Python file to your machine, replace the team ID with your own, and run:
python download_fpl_api.py
This will create a directory named "data"
containing all of the downloaded files in JSON format.
@ECHO OFF
REM Recursively delete every .mp3 file smaller than 1 MB (1,000,000 bytes)
REM under the current directory.
REM Fix: the original used `%%~xF EQU .mp3`, which does a case-sensitive
REM string comparison, so files named *.MP3 were skipped. `if /I` with a
REM quoted comparison matches the extension case-insensitively.
for /r %%F in (*) do if %%~zF LSS 1000000 if /I "%%~xF"==".mp3" del "%%F"
/* Collapse the site header to zero height and hide it from view. */
.header {
    max-height: 0;
    visibility: hidden;
}
/* Pin the page title 50px below the top of the viewport, capped at 720px wide.
   NOTE(review): this rule is truncated in this excerpt — the closing brace
   (and any remaining declarations) fall outside the visible lines. The
   trailing "| |" on each line looks like copy/paste table residue, not CSS. */
.fixed-title { | |
position: fixed; | |
display: block; | |
top: 50px; | |
max-width: 720px; |
# Fetch the Micro Center product page for the Ryzen 9 5900X; the returned
# HTML body is what gets handed to Beautiful Soup for parsing.
import requests

PRODUCT_URL = (
    'https://www.microcenter.com/product/630283/'
    'amd-ryzen-9-5900x-vermeer-37ghz-12-core-am4-boxed-processor?storeid=071'
)

response = requests.get(PRODUCT_URL)
print(response.status_code)  # 200 means the request succeeded
print(response.text)         # raw HTML, destined for Beautiful Soup
Save the Python file to your machine, replace the team ID with your own, and run:
python download_fpl_api.py
This will create a directory named "data"
containing all of the downloaded files in JSON format.
# "Punt" rule: a player transferred in during gameweek w must be kept for at
# least `punt_length` consecutive gameweeks before he may be transferred out.
if punt_length is not None:
    punt_length = int(punt_length)
    final_gw = gameweeks[-1]
    # For every player and every start week w whose full punt window fits
    # inside the horizon (w + punt_length - 1 <= final_gw): if the player is
    # bought in week w (transfer_in == 1), forbid any sell inside the window.
    model.add_constraints(
        (
            so.expr_sum(transfer_out[p, w2] for w2 in range(w, w + punt_length))
            <= 1 - transfer_in[p, w]
            for p in players
            for w in gameweeks
            if w <= final_gw - punt_length + 1
        ),
        name='f_punt_length',
    )
// Scrape per-gameweek FPL data out of the current page's DOM.
// NOTE(review): this function is truncated in this excerpt — its body
// continues past the last visible line. The trailing "| |" on each line
// looks like copy/paste table residue, not JavaScript.
let get_all_data = () => { | |
// Pull every "GW<n>" label off the page and parse out the week numbers.
let gws = Array.from(jQuery(".ptsshoq")).map(i => i.textContent).filter(i => i.includes('GW')).map(i => parseInt(i.split('GW')[1])) | |
let first_week = Math.min(...gws) | |
let last_week = Math.max(...gws) | |
// The #fplr_api element carries a base64-encoded JSON payload; decode it.
let jsondata= jQuery("#fplr_api").text() | |
let d=JSON.parse(decodeURIComponent(escape(window.atob(jsondata)))) | |
// Player attributes to keep from the decoded payload.
let main_keys = ['name', 'pos', 'team_abbrev', 'now_cost'] |
# Squad-structure constraints for an FPL optimization model (sasoptpy-style).
# NOTE(review): the trailing "| |" on each line looks like copy/paste table
# residue, not Python, and the final constraint is truncated mid-statement
# in this excerpt.

# Gameweek in which the Bench Boost chip is assumed to be played.
bb_gw = 29 | |
# Starting lineup is 11 players, or all 15 in the Bench Boost week.
model.add_constraints((so.expr_sum(lineup[p,w] for p in players) == 11 + (4 if bb_gw == w else 0) for w in gameweeks), name='lineup_count') | |
# Exactly one goalkeeper (player_type == 1) on the bench at slot 0,
# except in the Bench Boost week when the bench is empty.
model.add_constraints((so.expr_sum(bench[p,w,0] for p in players if player_type[p] == 1) == 1 - (1 if bb_gw == w else 0) for w in gameweeks), name='bench_gk') | |
# Exactly one player in each outfield bench slot (1-3), again empty on bb_gw.
model.add_constraints((so.expr_sum(bench[p,w,o] for p in players) == 1 - (1 if bb_gw == w else 0) for w in gameweeks for o in [1,2,3]), name='bench_count') | |
# One captain and one vice-captain per gameweek.
model.add_constraints((so.expr_sum(captain[p,w] for p in players) == 1 for w in gameweeks), name='captain_count') | |
model.add_constraints((so.expr_sum(vicecap[p,w] for p in players) == 1 for w in gameweeks), name='vicecap_count') | |
# Linking constraints: lineup and bench players must be in the squad.
model.add_constraints((lineup[p,w] <= squad[p,w] for p in players for w in gameweeks), name='lineup_squad_rel') | |
model.add_constraints((bench[p,w,o] <= squad[p,w] for p in players for w in gameweeks for o in order), name='bench_squad_rel') | |
# Truncated here: presumably captain must be in the lineup — confirm against
# the full source.
model.add_constraints((captain[p,w] <= lineup[p,w] for |
getMyRank = () => {let me = parseFloat(jQuery("#my_dataviz9 .bubbles[r=6]")[0].getAttribute("cy")); let others = jQuery("#my_dataviz9 .bubbles[r=2]").toArray().map(i => parseFloat(i.getAttribute('cy'))); return others.filter(i => i < me).length + 1}; getMyRank() |
from unicodedata import combining, normalize | |
import pandas as pd | |
import requests | |
from fuzzywuzzy import fuzz | |
import numpy as np | |
# To remove accents in names | |
def fix_name_dialect(name):
    """Return *name* with accents and diacritics stripped to plain ASCII letters.

    NFKD decomposition splits an accented letter (e.g. 'á' -> 'a' + combining
    acute) and the combining marks are dropped, which already handles 'ã', 'é',
    'ü', etc.  Letters that are standalone code points with no decomposition
    (e.g. 'ø', 'Ł', 'đ') are transliterated explicitly afterwards.
    """
    # Drop combining marks after NFKD decomposition.  (The original also did
    # .replace('ã', 'a'), but that was dead code: NFKD decomposes 'ã' first.)
    stripped = ''.join(c for c in normalize('NFKD', name) if not combining(c))
    # Non-decomposable letters common in player names, mapped explicitly.
    return stripped.translate(str.maketrans({
        'Ø': 'O', 'ø': 'o',
        'Đ': 'D', 'đ': 'd',
        'Ł': 'L', 'ł': 'l',
    }))
import pandas as pd | |
import pathlib | |
import os | |
import json | |
from unicodedata import combining, normalize | |
import requests | |
from fuzzywuzzy import fuzz | |
import numpy as np | |
from abc import ABC, abstractmethod |