Skip to content

Instantly share code, notes, and snippets.

@hegerdes
Created January 5, 2024 00:07
Show Gist options
  • Save hegerdes/568b5581c786b277b08470894605d4bb to your computer and use it in GitHub Desktop.
Headless auto-download of SUSE OS images
"""Headless automated download of SUSE OS images.

Logs into the SUSE customer portal with headless Chrome (Selenium),
bounces through the SAML login so the download page sees an
authenticated session, then extracts the download URL of the
SLES 15 minimal-cloud image and its checksum link via XPath.

Requires the SUSE_USERNAME and SUSE_PASSWORD environment variables.
"""
import os
import sys
import time

from bs4 import BeautifulSoup
from lxml import etree
from selenium import webdriver
from selenium.webdriver.common.by import By

suse_username = os.environ.get('SUSE_USERNAME')
suse_password = os.environ.get('SUSE_PASSWORD')
if suse_username is None or suse_password is None:
    raise RuntimeError('Credentials not supplied')

# Headless Chrome flags suitable for CI/container environments.
options = webdriver.ChromeOptions()
options.add_argument('--incognito')
options.add_argument('--headless')
options.add_argument('--disable-extensions')
options.add_argument('--disable-gpu')
options.add_argument('--disable-dev-shm-usage')
options.add_argument('--no-sandbox')

driver = webdriver.Chrome(options=options)
try:
    # Fill in the Okta sign-in form and submit.
    driver.get('https://myaccount.suse.com/')
    driver.find_element(By.ID, 'okta-signin-username').send_keys(suse_username)
    driver.find_element(By.ID, 'okta-signin-password').send_keys(suse_password)
    driver.find_element(By.ID, 'okta-signin-submit').click()

    # Go to the download page; the detour through the SAML endpoint
    # propagates the login session. Fixed sleeps give each page time
    # to settle (no explicit waits were used in the original flow).
    time.sleep(15)
    driver.get('https://www.suse.com/download/sles/')
    time.sleep(5)
    driver.get('https://www.suse.com/saml2/login/')
    time.sleep(5)
    driver.get('https://www.suse.com/download/sles/')
    time.sleep(5)

    # Parse the rendered page and pull the image/checksum links.
    soup = BeautifulSoup(driver.page_source, 'lxml')
    dom = etree.HTML(str(soup))
    try:
        # Download SUSE v15 minimal-cloud.
        # Change the XPath expressions to target a different image.
        suse_15_download_item_sha = dom.xpath('//*[@id="productID_2465"]/div[2]/div[2]/div[1]/div[8]/span[3]/a')[0]
        suse_15_download_item = dom.xpath('/html/body/main/div[3]/div[8]/div[2]/div[2]/div[1]/div[8]/a')[0]
        suse_15_download_url = suse_15_download_item.get('data-href')
        suse_15_download_url_sha = suse_15_download_item_sha.get('href')
        print('Found image_sha: ' + suse_15_download_url_sha)
        print('Found image: ' + suse_15_download_url)
    except Exception as err:
        # Report immediately. (The original slept 600 s *before* printing,
        # delaying the error report by ten minutes.)
        print('Error getting suse links')
        print(err)
        sys.exit(1)
finally:
    # Always release the browser process; the original never quit the
    # driver, leaking a headless Chrome instance on every run.
    driver.quit()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment