Genesys WebRTC Softphone Page Object Model

This POM simplifies interacting with the UI of Genesys's WebRTC Softphone from testing tools like Puppeteer. It was created for the use case I posted about, but can easily be extended (see the sketch after the code below).

Usage example:

const softphone = await webRtcSoftphone(frame);
const audioInput = await softphone.getAudioInputFromWebRtcSettings();
console.log(`Audio Input: ${audioInput}`);

await softphone.waitForCallThenAnswer(30000); // Waits up to 30 seconds for a call, then answers it

Read my blog article Automated tests using Genesys Cloud's WebRTC softphone to see how I am using this.
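
The usage example above assumes you already have a Puppeteer Frame pointing at the softphone. How you obtain that frame depends on your environment; below is a minimal sketch of one way to do it. The URL, the frame-matching predicate and the Chromium flags are my own assumptions, not part of this gist.

import puppeteer, { Frame } from "puppeteer";

// Hypothetical sketch of obtaining the softphone frame - adjust the URL and
// the frame-matching predicate to your own Genesys Cloud environment.
async function getSoftphoneFrame(): Promise<Frame> {
  const browser = await puppeteer.launch({
    headless: false,
    // Fake media devices so Chromium doesn't prompt for microphone access
    args: ["--use-fake-ui-for-media-stream", "--use-fake-device-for-media-stream"],
  });
  const page = await browser.newPage();
  await page.goto("https://apps.mypurecloud.com"); // assumed region URL; log in first
  // The softphone may live in the top frame or an iframe - pick whichever matches
  return page.frames().find((f) => f.url().includes("softphone")) ?? page.mainFrame();
}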


WARNING: Genesys may change the underlying element selectors in their softphone, causing this to break. This is an inherent problem in driving a UI you don't own.
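
If the selectors do drift, a screenshot taken at the point of failure makes the breakage much easier to diagnose. A minimal sketch of a wrapper you could put around the POM calls (the helper name and screenshot path are mine, not part of this gist):

import { Frame } from "puppeteer";

// Hypothetical helper: run a softphone action and capture a screenshot of the
// containing page if it fails (e.g. a selector timed out), then rethrow.
export async function withFailureScreenshot<T>(
  frame: Frame,
  action: () => Promise<T>,
): Promise<T> {
  try {
    return await action();
  } catch (error) {
    await frame.page().screenshot({ path: `softphone-failure-${Date.now()}.png` });
    throw error;
  }
}

Usage: const audioInput = await withFailureScreenshot(frame, () => softphone.getAudioInputFromWebRtcSettings());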

// interactionsUI.ts
import { Frame } from "puppeteer";

export async function updateToOnQueue(frame: Frame) {
  console.debug("Clicking Status Arrow");
  // Open the status dropdown, then select the On Queue status
  const statusArrow = await frame.waitForSelector("#statusListArrow_test", {
    visible: true,
  });
  await statusArrow.click();

  console.debug("Clicking On Queue");
  const onQueueButton = await frame.waitForSelector("#ON_QUEUE", {
    visible: true,
  });
  await onQueueButton.click();
}

export async function pickupCall(frame: Frame, timeoutInMs: number) {
  console.debug("Clicking Pickup Alert");
  // Wait for the inbound call alert to appear, then click it to answer
  const pickupAlert = await frame.waitForSelector("div.pickup-alert", {
    timeout: timeoutInMs,
  });
  await pickupAlert.click();
}

// navigationUi.ts
import { Frame } from "puppeteer";

export async function clickInteractionsInSettings(frame: Frame) {
  console.debug("Clicking Interactions in Navigation");
  const interactionsNavItem = await frame.waitForSelector("#navInteractionList", {
    visible: true,
  });
  await interactionsNavItem.click();
}

export async function selectMoreInNavigation(frame: Frame) {
  console.debug("Clicking More in Navigation");
  const moreButtonInNavigation = await frame.waitForSelector(
    "#navBar li.item-with-more > a",
    {
      visible: true,
    },
  );
  await moreButtonInNavigation.click();
}

export async function openNavigation(frame: Frame) {
  console.debug("Clicking Navigation button");
  const navigationButton = await frame.waitForSelector("button.navigation-main", {
    visible: true,
  });
  await navigationButton.click();
}

// settingsUi.ts
import { Frame } from "puppeteer";

export async function getAudioInputName(frame: Frame) {
  console.debug("Extracting Audio Input name from WebRTC Settings");
  const audioInput = await frame.waitForSelector("#audioInput", {
    visible: true,
  });
  // Read the text content directly from the element handle
  const audioInputText = await audioInput.evaluate((el) => el.textContent);
  console.debug(`Extracted Audio Input name: ${audioInputText}`);

  if (typeof audioInputText !== "string") {
    throw new Error("Audio Input name is not text");
  }
  return audioInputText.trim();
}

export async function clickRtcSettings(frame: Frame) {
  console.debug("Clicking Settings in Navigation");
  const settingsInNavigation = await frame.waitForSelector("#navSettings", {
    visible: true,
  });
  await settingsInNavigation.click();

  console.debug("Clicking WebRTC header in Settings");
  const expandWebRtcInSettings = await frame.waitForSelector("#headingWebRTC a", {
    visible: true,
  });
  await expandWebRtcInSettings.click();
}

// Main POM module (referred to below as webRtcSoftphone.ts)
import { Frame } from "puppeteer";
import {
  clickInteractionsInSettings,
  openNavigation,
  selectMoreInNavigation,
} from "./navigationUi";
import { pickupCall, updateToOnQueue } from "./interactionsUI";
import { clickRtcSettings, getAudioInputName } from "./settingsUi";

/**
 * The Page Object Model for Genesys's WebRTC Softphone.
 *
 * This relies on HTML IDs/elements that are owned by Genesys, which may change without warning.
 *
 * @param {Frame} frame - The Puppeteer frame object representing the Genesys WebRTC softphone page
 * @example
 * // Usage example:
 * const softphone = await webRtcSoftphone(frame);
 * const audioInput = await softphone.getAudioInputFromWebRtcSettings();
 * console.log(`Audio Input: ${audioInput}`);
 *
 * await softphone.waitForCallThenAnswer(30000); // Waits up to 30 seconds for a call, then answers it
 */
export async function webRtcSoftphone(frame: Frame) {
  return {
    async getAudioInputFromWebRtcSettings() {
      console.group("getAudioInputFromWebRtcSettings");
      // Navigate to the WebRTC settings, read the audio input, then return to the interaction list
      await openNavigation(frame);
      await selectMoreInNavigation(frame);
      await clickRtcSettings(frame);
      const audioInputName = await getAudioInputName(frame);
      await openNavigation(frame);
      await clickInteractionsInSettings(frame);
      console.groupEnd();
      return audioInputName;
    },
    async waitForCallThenAnswer(timeoutInMs: number) {
      console.group("waitForCallThenAnswer");
      // Go On Queue, then answer the next inbound call within the timeout
      await updateToOnQueue(frame);
      await pickupCall(frame, timeoutInMs);
      console.groupEnd();
    },
  };
}
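
As mentioned above, the POM is easy to extend because each UI action is an exported function. Here is a hedged sketch of adding a new action by composing the existing helpers; the wrapper name and the "./webRtcSoftphone" file name are my own assumptions.

import { Frame } from "puppeteer";
import { webRtcSoftphone } from "./webRtcSoftphone"; // assumed file name for the module above
import { clickInteractionsInSettings, openNavigation } from "./navigationUi";

// Hypothetical extension: reuse the exported navigation helpers to add an
// action without modifying the original POM.
export async function extendedWebRtcSoftphone(frame: Frame) {
  const base = await webRtcSoftphone(frame);
  return {
    ...base,
    async goToInteractionList() {
      await openNavigation(frame);
      await clickInteractionsInSettings(frame);
    },
  };
}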