Last active
January 18, 2023 18:40
-
-
Save sertalpbilal/9940312d4d96684ca2b858bd797bfe6f to your computer and use it in GitHub Desktop.
Review detailed data download
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Scrape per-player, per-gameweek projections from the FPLReview page and
// build a CSV string: one header row, then one row per player.
// Returns the CSV text; does no I/O itself.
let get_all_data = () => {
    // Gameweek numbers shown on the page (elements containing "GW<n>")
    // define the range of weeks to export.
    const gws = Array.from(jQuery(".ptsshoq"))
        .map((el) => el.textContent)
        .filter((t) => t.includes('GW'))
        .map((t) => parseInt(t.split('GW')[1], 10));
    const first_week = Math.min(...gws);
    const last_week = Math.max(...gws);

    // The projection payload is embedded base64-encoded; the
    // decodeURIComponent(escape(...)) pair is the legacy trick to decode
    // UTF-8 bytes coming out of atob (kept for identical behavior).
    const jsondata = jQuery("#fplr_api").text();
    const d = JSON.parse(decodeURIComponent(escape(window.atob(jsondata))));

    const main_keys = ['name', 'pos', 'team_abbrev', 'now_cost'];
    const gw_keys = ['opp', 'dmins', 'livpts', 'livxg', 'livxa', 'livcs', 'livbon'];

    let text_data = "ID,Name,Pos,Team,Price";
    for (let w = first_week; w <= last_week; w++) {
        text_data += `,${w}_fix,${w}_xmins,${w}_pts,${w}_g,${w}_a,${w}_cs,${w}_bon`;
    }

    // Entries whose value is 0 are placeholders, not players.
    const players = Object.entries(d).filter(([, p]) => p != 0);
    players.forEach(([pid, p]) => {
        let row_data = pid + "," + main_keys.map((k) => p[k]).join(",");
        for (let w = first_week; w <= last_week; w++) {
            const wk = p[w];
            gw_keys.forEach((k) => {
                // Players may lack data for some weeks; emit an empty cell
                // instead of dropping the column (the original try/catch
                // silently skipped the cell, misaligning the CSV).
                row_data += ',' + (wk && wk[k] !== undefined ? wk[k] : '');
            });
        }
        text_data += '\n' + row_data;
    });
    return text_data;
};
/**
 * Trigger a browser download of a CSV string.
 * @param {string} csv - CSV file contents.
 * @param {string} filename - Suggested name for the downloaded file.
 */
function download_csv_data(csv, filename) {
    const csvFile = new Blob([csv], { type: "text/csv" });
    const downloadLink = document.createElement("a");
    const url = window.URL.createObjectURL(csvFile);
    downloadLink.download = filename;
    downloadLink.href = url;
    downloadLink.style.display = "none";
    document.body.appendChild(downloadLink);
    downloadLink.click();
    // Clean up after the click has been dispatched: remove the temporary
    // anchor and release the object URL so the Blob can be collected
    // (the original leaked both on every call).
    setTimeout(() => {
        document.body.removeChild(downloadLink);
        window.URL.revokeObjectURL(url);
    }, 0);
}
// Script entry point: build the CSV and save it.
// `const` replaces the original implicit global assignment.
const text_data = get_all_data();
download_csv_data(text_data, "review_detailed.csv");
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Scrape per-player, per-gameweek projections (including per-fixture
// minutes/likelihood for double gameweeks) and build a CSV string.
// Returns the CSV text; does no I/O itself.
let get_all_data = () => {
    // Gameweek numbers shown on the page define the export range.
    const gws = Array.from(jQuery(".ptsshoq"))
        .map((el) => el.textContent)
        .filter((t) => t.includes('GW'))
        .map((t) => parseInt(t.split('GW')[1], 10));
    const first_week = Math.min(...gws);
    const last_week = Math.max(...gws);

    // Base64-encoded JSON payload; decodeURIComponent(escape(...)) is the
    // legacy UTF-8 decode trick for atob output (kept for identical behavior).
    const jsondata = jQuery("#fplr_api").text();
    const d = JSON.parse(decodeURIComponent(escape(window.atob(jsondata))));

    const main_keys = ['name', 'pos', 'team_abbrev', 'now_cost'];
    const gw_keys = ['opp', 'dmins', 'livpts', 'livxg', 'livxa', 'livcs', 'livbon'];

    let text_data = "ID,Name,Pos,Team,Price";
    for (let w = first_week; w <= last_week; w++) {
        text_data += `,${w}_fix,${w}_xmins,${w}_pts,${w}_g,${w}_a,${w}_cs,${w}_bon,${w}_dmins,${w}_likelihood`;
    }

    // Entries whose value is 0 are placeholders, not players.
    const players = Object.entries(d).filter(([, p]) => p != 0);
    players.forEach(([pid, p]) => {
        let row_data = pid + "," + main_keys.map((k) => p[k]).join(",");
        for (let w = first_week; w <= last_week; w++) {
            const wk = p[w];
            gw_keys.forEach((k) => {
                // Missing weeks produce empty cells so columns stay aligned
                // (the original try/catch silently dropped the cell).
                row_data += ',' + (wk && wk[k] !== undefined ? wk[k] : '');
            });
            // `matchos` (presumably the fixture keys of a double gameweek —
            // TODO confirm) indexes per-fixture entries; join their values
            // with spaces inside a single CSV cell. The original accessed
            // p[w].matchos unguarded and crashed on missing weeks.
            if (wk && wk.matchos) {
                row_data += ',' + wk.matchos.map((m) => wk[m].dmins).join(' ');
                row_data += ',' + wk.matchos.map((m) => wk[m].likelihood).join(' ');
            } else {
                row_data += ',,';
            }
        }
        text_data += '\n' + row_data;
    });
    return text_data;
};
/**
 * Trigger a browser download of a CSV string.
 * @param {string} csv - CSV file contents.
 * @param {string} filename - Suggested name for the downloaded file.
 */
function download_csv_data(csv, filename) {
    const csvFile = new Blob([csv], { type: "text/csv" });
    const downloadLink = document.createElement("a");
    const url = window.URL.createObjectURL(csvFile);
    downloadLink.download = filename;
    downloadLink.href = url;
    downloadLink.style.display = "none";
    document.body.appendChild(downloadLink);
    downloadLink.click();
    // Clean up after the click has been dispatched: remove the temporary
    // anchor and release the object URL so the Blob can be collected
    // (the original leaked both on every call).
    setTimeout(() => {
        document.body.removeChild(downloadLink);
        window.URL.revokeObjectURL(url);
    }, 0);
}
// Script entry point: build the CSV and save it.
// `const` replaces the original implicit global assignment.
const text_data = get_all_data();
download_csv_data(text_data, "review_detailed.csv");
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Build a CSV of player projections for the gameweek range shown on the
// page, trigger a browser download of it, and log it to the console.
let get_all_data = () => {
    // Gameweek range comes from data-gwid attributes on the page.
    const gwids = Array.from(jQuery("*[data-gwid]")).map((el) => parseInt(el.dataset.gwid, 10));
    const first_week = Math.min(...gwids);
    const last_week = Math.max(...gwids);

    // In this page version the payload is embedded as plain JSON (no base64).
    const d = JSON.parse(jQuery("#fplr_api").html());

    const main_keys = ['name', 'pos', 'team_abbrev', 'now_cost'];
    const gw_keys = ['opp', 'dmins', 'livpts', 'livxg', 'livxa', 'livcs', 'livbon'];

    let text_data = "ID,Name,Pos,Team,Price";
    for (let w = first_week; w <= last_week; w++) {
        text_data += `,${w}_fix,${w}_xmins,${w}_pts,${w}_g,${w}_a,${w}_cs,${w}_bon`;
    }

    // Entries whose value is 0 are placeholders, not players.
    const players = Object.entries(d).filter(([, p]) => p != 0);
    players.forEach(([pid, p]) => {
        let row_data = pid + "," + main_keys.map((k) => p[k]).join(",");
        for (let w = first_week; w <= last_week; w++) {
            const wk = p[w];
            gw_keys.forEach((k) => {
                // Missing weeks produce empty cells so columns stay aligned
                // (the original indexed p[w][k] unguarded and crashed).
                row_data += ',' + (wk && wk[k] !== undefined ? wk[k] : '');
            });
        }
        text_data += '\n' + row_data;
    });

    // Trigger a browser download of `csv` under `filename`.
    function download_csv_data(csv, filename) {
        const csvFile = new Blob([csv], { type: "text/csv" });
        const downloadLink = document.createElement("a");
        const url = window.URL.createObjectURL(csvFile);
        downloadLink.download = filename;
        downloadLink.href = url;
        downloadLink.style.display = "none";
        document.body.appendChild(downloadLink);
        downloadLink.click();
        // Release the anchor and object URL after the click is dispatched
        // (the original leaked both).
        setTimeout(() => {
            document.body.removeChild(downloadLink);
            window.URL.revokeObjectURL(url);
        }, 0);
    }

    download_csv_data(text_data, "review_detailed.csv");
    console.log(text_data);
};
get_all_data();
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/**
 * Parse the on-page scout table into structured per-player data, download
 * the same data as a CSV, and return the structured form.
 * @returns {Array<{name: string, player_id: *, pdata: Object[]}>}
 */
function get_data() {
    // One header group per gameweek column in the table.
    const gw_names = Array.from(jQuery('#scout_table *[data-gwid]')).map((el) => el.innerText);
    // Explicit join(',') — the original appended the array directly to the
    // string, relying on accidental Array#toString coercion.
    let text_data = 'Name,ID,' + gw_names
        .map((gw) => [gw + "_xp", gw + "_fix", gw + "_g", gw + "_a", gw + "_cs", gw + "_bon"])
        .flat()
        .join(',');

    const rows = jQuery("#scout_table tbody tr");
    const data = Array.from(rows).map((tr) => {
        const row = jQuery(tr);
        const player = row.find(".fancyname");
        const name = player.text();
        const player_id = player.data()['id'];
        const cols = row.find(".multTotal.squad");
        let row_data = name + "," + player_id;

        const pdata = Array.from(cols).map((col, order) => {
            const gw_tag = gw_names[order];
            // Assumes cell text shaped like "4.2(H): ARSG: 0.3A: 0.1CS: 0.2Bon: 0.4"
            // — TODO confirm against the live page; a non-matching cell makes
            // `c` null and throws, as in the original.
            const c = col.textContent.match(/([0-9\.]*)(\([HA]\): [A-Z]{3})G: ([0-9\.]*)A: ([0-9\.]*)CS: ([0-9\.]*)Bon: ([0-9\.]*)/);
            // Insertion order matches the header column order above.
            const d = {
                [gw_tag + '_xp']: parseFloat(c[1]),
                [gw_tag + '_fix']: c[2],
                [gw_tag + '_g']: parseFloat(c[3]),
                [gw_tag + '_a']: parseFloat(c[4]),
                [gw_tag + '_cs']: parseFloat(c[5]),
                [gw_tag + '_bon']: parseFloat(c[6]),
            };
            row_data += "," + Object.values(d).join(",");
            return d;
        });

        text_data += "\n" + row_data;
        return { name, player_id, pdata };
    });

    console.log(data);
    console.log(text_data);

    // Trigger a browser download of `csv` under `filename`.
    function download_csv_data(csv, filename) {
        const csvFile = new Blob([csv], { type: "text/csv" });
        const downloadLink = document.createElement("a");
        const url = window.URL.createObjectURL(csvFile);
        downloadLink.download = filename;
        downloadLink.href = url;
        downloadLink.style.display = "none";
        document.body.appendChild(downloadLink);
        downloadLink.click();
        // Release the anchor and object URL after the click is dispatched
        // (the original leaked both).
        setTimeout(() => {
            document.body.removeChild(downloadLink);
            window.URL.revokeObjectURL(url);
        }, 0);
    }

    download_csv_data(text_data, "review_detailed.csv");
    return data;
}
get_data();
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment