Created
December 3, 2025 04:38
-
-
Save EncodeTheCode/c82ffecaf978c532c8dea9b6e4f3ef90 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| <!doctype html> | |
| <html lang="en"> | |
| <head> | |
| <meta charset="utf-8" /> | |
| <meta name="viewport" content="width=device-width,initial-scale=1" /> | |
| <title>WoW Addon Version Checker — Wowhead & CurseForge (No-API + DBM scraper)</title> | |
| <style> | |
| :root{--bg:#0b1020;--card:#0f1724;--accent:#6ee7b7;--muted:#9aa4b2} | |
| body{font-family:Inter, system-ui, -apple-system,Segoe UI, Roboto, "Helvetica Neue", Arial; background:linear-gradient(180deg,#071026 0%, #071422 100%); color:#e6eef6; margin:0; padding:22px} | |
| h1{font-size:20px;margin:0 0 12px} | |
| .card{background:rgba(255,255,255,0.02); border:1px solid rgba(255,255,255,0.03); padding:16px; border-radius:12px; box-shadow:0 6px 18px rgba(2,6,23,0.6)} | |
| label{display:block;font-size:13px;color:var(--muted);margin-top:8px} | |
| input, select, button{width:100%;padding:8px 10px;margin-top:6px;border-radius:8px;border:1px solid rgba(255,255,255,0.06);background:transparent;color:inherit} | |
| .row{display:grid;grid-template-columns:1fr 1fr;gap:12px} | |
| .output{margin-top:12px} | |
| .result{padding:12px;border-radius:10px;background:rgba(0,0,0,0.25);margin-bottom:10px} | |
| .badge{display:inline-block;padding:4px 8px;border-radius:999px;background:rgba(255,255,255,0.03);font-size:13px;color:var(--accent); } | |
| .small{font-size:13px;color:var(--muted)} | |
| pre{white-space:pre-wrap;font-size:13px;color:#cfe7d9;background:rgba(0,0,0,0.12);padding:8px;border-radius:6px} | |
| footer{margin-top:12px;color:var(--muted);font-size:13px} | |
| table{width:100%;border-collapse:collapse;margin-top:8px} | |
| th,td{padding:8px;border-bottom:1px solid rgba(255,255,255,0.03);text-align:left} | |
| th{font-weight:600} | |
| .link{color:var(--accent)} | |
| </style> | |
| </head> | |
| <body> | |
| <h1>WoW Addon Version Checker — No-API + DBM files scraper</h1> | |
| <div class="card"> | |
| <div class="small">This single-file HTML + JavaScript tool scrapes the CurseForge DBM files page (no official APIs). Use a local CORS proxy for reliable results (instructions included below).</div> | |
| <label>DBM files list URL (defaulted to the page you gave)</label> | |
| <input id="dbmFilesUrl" value="https://www.curseforge.com/wow/addons/deadly-boss-mods/files/all?page=1&pageSize=20&gameVersionTypeId=67408&showAlphaFiles=hide" /> | |
| <label>Optional: CORS proxy prefix (for browser testing). Examples: | |
| <div class="small"><code>http://localhost:8080/proxy?url=</code> or <code>https://cors.bridged.cc/</code></div> | |
| </label> | |
| <input id="proxyPrefix" placeholder="leave blank to try direct (may hit CORS)" /> | |
| <div class="row"> | |
| <button id="scrapeDbmBtn">Scrape DBM files (page & files)</button> | |
| <button id="clearBtn">Clear results</button> | |
| </div> | |
| <div class="output" id="output"></div> | |
| <details style="margin-top:10px"><summary class="small">How the DBM scraper works</summary> | |
| <div class="small"> | |
| • Fetches the CurseForge files listing page you provided and parses all <code><a class="file-row-details"></code> items.<br> | |
| • Extracts the version from <code>span.name</code>, the file page link, upload date and file size (if visible).<br> | |
| • Visits each file page and looks for anchors with <code>/download</code> or direct <code>media.cursecdn.com</code> links.<br> | |
| • Attempts to follow the download redirect (so you get the final CDN URL) — this requires a proxy or running the local proxy below.<br> | |
| • Output: table of Version • Upload • Size • File page • Direct download (CDN if resolvable). | |
| </div> | |
| </details> | |
| <footer class="small"> | |
| <strong>Important:</strong> Browsers enforce CORS. Use the local proxy below (recommended). Set the proxy field to <code>http://localhost:8080/proxy?url=</code> after running it. | |
| </footer> | |
| </div> | |
| <script> | |
// --- tiny DOM helpers -------------------------------------------------
const $ = id => document.getElementById(id);
const out = $('output');

// Remove all previously rendered results/log lines from the output area.
function clearOut(){ out.innerHTML = ''; }

// Append a plain-text status line to the output area.
function logLine(msg){
  const d = document.createElement('div');
  d.className = 'small';
  d.textContent = msg;
  out.appendChild(d);
}

// Append a block of locally built HTML markup to the output area.
// FIX: append() was called throughout scrapeDbmFiles() but was never
// defined anywhere on the page, so every render call threw a
// ReferenceError. Defined here next to the other output helpers.
function append(html){ out.insertAdjacentHTML('beforeend', html); }

// Escape a value for safe interpolation into HTML markup/attributes.
// FIX: the previous implementation returned the string unescaped even
// though its results are inserted via innerHTML, so scraped text
// containing <, >, & or quotes could break the table markup (or inject
// markup into the page).
function esc(s){
  return String(s || '')
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');
}
// Route targetUrl through proxyPrefix (when provided) so the browser can
// bypass CORS. Two proxy shapes are supported:
//   * query-style (prefix contains '?'): prefix + encodeURIComponent(target)
//   * path-style  (cors-anywhere like):  prefix/host/path, scheme stripped
function buildFetchUrl(targetUrl, proxyPrefix){
  if(!proxyPrefix){
    return targetUrl; // no proxy configured — fetch the target directly
  }
  const isQueryStyle = proxyPrefix.includes('?');
  if(isQueryStyle){
    return proxyPrefix + encodeURIComponent(targetUrl);
  }
  const base = proxyPrefix.replace(/\/$/, '');
  const schemeless = targetUrl.replace(/^https?:\/\//, '');
  return `${base}/${schemeless}`;
}
// Fetch targetUrl (optionally via the proxy) and resolve with its body text.
// Resolves { text, response }; throws an Error on any non-2xx status.
async function fetchText(targetUrl, proxyPrefix){
  const fetchUrl = buildFetchUrl(targetUrl, proxyPrefix);
  logLine('Fetching: ' + fetchUrl);
  const response = await fetch(fetchUrl, { redirect: 'follow' });
  if(!response.ok){
    throw new Error('Fetch failed: ' + response.status + ' ' + response.statusText);
  }
  const text = await response.text();
  return { text, response };
}
// Follow a CurseForge "/download" link and report the final redirected URL
// (the CDN location). Requires a proxy that forwards redirects transparently.
// Resolves { finalUrl, status } on success, or { error } on any failure —
// this function never throws.
async function followDownloadAndGetFinal(downloadHref, proxyPrefix){
  if(!downloadHref){
    return { error: 'No download href' };
  }
  // Root-relative hrefs come from parsed CurseForge pages — absolutize them.
  const fullHref = downloadHref.startsWith('/')
    ? 'https://www.curseforge.com' + downloadHref
    : downloadHref;
  const proxied = buildFetchUrl(fullHref, proxyPrefix);
  logLine('Following download: ' + proxied);
  try{
    const resp = await fetch(proxied, { method: 'GET', redirect: 'follow' });
    if(!resp.ok){
      return { error: 'Follow failed: ' + resp.status };
    }
    // resp.url is the post-redirect URL — i.e. the resolved CDN link.
    return { finalUrl: resp.url, status: resp.status };
  }catch(err){
    return { error: String(err) };
  }
}
// Parse one file-row anchor from the listing page into a plain record:
// { version, filePageUrl, upload, size }. Missing fields fall back to
// the literal string '(unknown)'.
function parseFileRow(a){
  const href = a.getAttribute('href') || '';
  const filePageUrl = href.startsWith('http') ? href : ('https://www.curseforge.com' + href);
  const verSpan = a.querySelector('span.name');
  const version = verSpan ? verSpan.textContent.trim() : '(unknown)';
  // Upload date commonly sits in the 3rd nested div; fall back to the
  // first span inside a div when the layout differs.
  let upload = '(unknown)';
  try{
    const divs = a.querySelectorAll('div > div');
    if(divs && divs.length >= 3){
      upload = divs[2].textContent.trim();
    } else {
      const dateSpan = a.querySelector('div span');
      if(dateSpan) upload = dateSpan.textContent.trim();
    }
  }catch(e){}
  // File size: any descendant div whose text mentions KB/MB/GB.
  let size = '(unknown)';
  try{
    const sizeNode = Array.from(a.querySelectorAll('div')).find(d => /MB|KB|GB/i.test(d.textContent));
    if(sizeNode) size = sizeNode.textContent.trim();
  }catch(e){}
  return { version, filePageUrl, upload, size };
}

// Given a file page's parsed DOM (fdoc) and its raw HTML (fhtml), locate
// the download link and try to resolve it to the final CDN URL via the
// proxy. Returns { downloadHref, resolved } (either may be null).
async function resolveDownload(fdoc, fhtml, proxy){
  let downloadHref = null;
  let resolved = null;
  // Preferred: an explicit "/download" anchor; otherwise any anchor or
  // button whose href or visible text mentions "download".
  let a = fdoc.querySelector('a[href*="/download"]');
  if(!a){
    a = Array.from(fdoc.querySelectorAll('a, button')).find(el => {
      const href = el.getAttribute && el.getAttribute('href');
      const txt = (el.textContent || '');
      return (href && href.includes('/download')) || /download/i.test(txt);
    });
  }
  if(a){
    downloadHref = a.getAttribute('href') || a.getAttribute('data-href') || a.getAttribute('data-download-url') || null;
    if(downloadHref && downloadHref.startsWith('/')) downloadHref = 'https://www.curseforge.com' + downloadHref;
    // Follow the redirect to obtain the final CDN URL (proxy permitting).
    try{
      const final = await followDownloadAndGetFinal(downloadHref, proxy);
      if(final && final.finalUrl) resolved = final.finalUrl;
      else if(final && final.error) logLine('Could not resolve final CDN for ' + downloadHref + ': ' + final.error);
    }catch(e){ logLine('Error following download for ' + downloadHref + ': ' + e); }
    return { downloadHref, resolved };
  }
  // Fallback 1: a raw CDN URL embedded anywhere in the page source.
  const mediaMatch = fhtml.match(/https?:\/\/media\.cursecdn\.com\/[\w\/%.\-]+/gi);
  if(mediaMatch && mediaMatch.length){
    return { downloadHref: mediaMatch[0], resolved: mediaMatch[0] };
  }
  // Fallback 2: any anchor that merely mentions download/cursecdn in its href.
  const fallbackA = Array.from(fdoc.querySelectorAll('a')).find(el => {
    const href = el.getAttribute('href') || '';
    return /download/i.test(href) || /media\.cursecdn/.test(href);
  });
  if(fallbackA){
    downloadHref = fallbackA.getAttribute('href');
    if(downloadHref && downloadHref.startsWith('/')) downloadHref = 'https://www.curseforge.com' + downloadHref;
    try{
      const final = await followDownloadAndGetFinal(downloadHref, proxy);
      if(final && final.finalUrl) resolved = final.finalUrl;
    }catch(e){}
  }
  return { downloadHref, resolved };
}

// Main scraping routine: fetches the DBM files listing, parses each file
// row, then visits every file page to locate (and, via the proxy, resolve)
// the direct download link. Renders the results as a table into #output.
async function scrapeDbmFiles(){
  clearOut();
  // FIX: append() was called below but never defined anywhere on the page,
  // so every render call threw a ReferenceError. Defined locally so this
  // routine is self-contained.
  const append = html => out.insertAdjacentHTML('beforeend', html);
  const pageUrl = $('dbmFilesUrl').value.trim();
  const proxy = $('proxyPrefix').value.trim();
  if(!pageUrl){ logLine('No DBM files URL provided.'); return; }
  try{
    const { text } = await fetchText(pageUrl, proxy);
    const doc = new DOMParser().parseFromString(text, 'text/html');
    // Primary selector: the file-row anchors CurseForge renders on the
    // files listing; fall back to any "/files/" link if the markup changed.
    let anchors = Array.from(doc.querySelectorAll('a.file-row-details'));
    logLine('Found ' + anchors.length + ' file-row elements on the page');
    if(anchors.length === 0){
      anchors = Array.from(doc.querySelectorAll('a[href*="/files/"]'));
      logLine('Fallback anchors found: ' + anchors.length);
      if(anchors.length === 0) throw new Error('No file-row elements and no fallback anchors found — possible CORS or page structure change.');
    }
    const results = [];
    for(const a of anchors){
      try{
        results.push(parseFileRow(a));
      }catch(e){ logLine('Error parsing anchor: ' + e); }
    }
    if(results.length === 0){ append('<div class="result small">No results parsed from page</div>'); return; }
    append('<div class="result small">Parsed ' + results.length + ' files — now fetching each file page to find download links (may take a few seconds)...</div>');
    let table = '<table><thead><tr><th>Version</th><th>Upload</th><th>Size</th><th>File page</th><th>Direct download (resolved)</th></tr></thead><tbody>';
    for(const r of results){
      let downloadHref = null;
      let resolved = null;
      try{
        logLine('Fetching file page: ' + r.filePageUrl);
        const { text: fhtml } = await fetchText(r.filePageUrl, proxy);
        const fdoc = new DOMParser().parseFromString(fhtml, 'text/html');
        ({ downloadHref, resolved } = await resolveDownload(fdoc, fhtml, proxy));
      }catch(e){ logLine('Error handling file page ' + r.filePageUrl + ' -> ' + e); }
      table += '<tr>' +
        '<td>' + esc(r.version) + '</td>' +
        '<td>' + esc(r.upload) + '</td>' +
        '<td>' + esc(r.size) + '</td>' +
        '<td><a class="link" href="' + esc(r.filePageUrl) + '" target="_blank">Open</a></td>' +
        '<td>' + (resolved ? ('<a class="link" href="' + esc(resolved) + '" target="_blank">' + esc(resolved) + '</a>') : (downloadHref ? ('<a class="link" href="' + esc(downloadHref) + '" target="_blank">' + esc(downloadHref) + '</a>') : '<span class="small">Not found</span>')) + '</td>' +
        '</tr>';
    }
    table += '</tbody></table>';
    append('<div class="result"><strong>DBM files</strong>' + table + '</div>');
  }catch(err){
    append('<div class="result small">Error scraping DBM files: ' + esc(String(err)) + '</div>');
    console.error(err);
  }
}
// Wire up the toolbar buttons to their handlers.
$('scrapeDbmBtn').addEventListener('click', scrapeDbmFiles);
$('clearBtn').addEventListener('click', clearOut);
| </script> | |
| <!-- | |
| Local proxy example (recommended): | |
| // Quick Node/Express proxy (local testing only) | |
| const express = require('express'); | |
| const fetch = require('node-fetch'); // note: on Node 18+ you can omit this and use the built-in global fetch | |
| const app = express(); | |
| app.get('/proxy', async (req, res) => { | |
| const url = req.query.url; | |
| if(!url) return res.status(400).send('missing url'); | |
| try{ | |
| const r = await fetch(url, { headers: { 'User-Agent': 'AddonFinder/1.0' } }); | |
| const text = await r.text(); | |
| res.set('Access-Control-Allow-Origin','*'); | |
| res.set('Content-Type', r.headers.get('content-type') || 'text/html'); | |
| res.send(text); | |
| }catch(e){ res.status(500).send(String(e)); } | |
| }); | |
| app.listen(8080, ()=> console.log('proxy on 8080')); | |
| Use in the UI as: http://localhost:8080/proxy?url=<ENCODED_TARGET_URL> | |
| --> | |
| </body> | |
| </html> |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment