Skip to content

Instantly share code, notes, and snippets.

// Module setup: Express router, Prisma database client, and AMQP client.
// NOTE(review): the original paste declared every one of these bindings twice;
// redeclaring a `const` in the same scope is a SyntaxError in Node, so the
// duplicate block has been removed.
const express = require('express');
const router = express.Router();
const { PrismaClient } = require('@prisma/client');
// Single shared Prisma client for this module (Prisma recommends one instance).
const prisma = new PrismaClient();
const amqplib = require('amqplib');
// NOTE(review): this fragment is truncated — the class body is never closed in
// the visible source. Verify the remainder against the original file.
import { Injectable } from '@nestjs/common';
import { PrismaService } from '../../../../prisma/prisma.service';
import { CreateAdminUserDto } from '../dto/CreateAdminUser.dto';
// Injectable NestJS service for admin-user operations, backed by Prisma.
@Injectable()
export class AdminService {
// NOTE(review): `saltRounds` and `password` are declared as untyped constructor
// parameter properties but then assigned below as bare identifiers (no `this.`),
// which assigns the parameters, not the fields. They look intended as plain
// class fields (`private readonly saltRounds = 10`). Confirm before fixing.
// SECURITY(review): a hard-coded default admin password ("Admin@123") is
// committed in source — this should come from configuration/secrets.
constructor(private readonly prisma: PrismaService, private readonly saltRounds, private readonly password) {
saltRounds = 10
password = "Admin@123"
# Streamlit UI fragment (truncated): sets the app title and begins injecting
# custom CSS for a two-column layout via st.markdown.
import streamlit as st
# Title
st.title('Transvoice App')
# Apply custom CSS for a two-column layout
# NOTE(review): the triple-quoted CSS string below is cut off mid-rule in this
# snippet — the closing quotes and the call's closing paren (and presumably
# unsafe_allow_html=True, required for raw <style> tags) are not visible.
st.markdown(
"""
<style>
.two-column-layout {
import streamlit as st
import matplotlib.pyplot as plt
# Create a square using Matplotlib
def draw_square(ax, width, height, fill_color, edge_color):
    """Draw a filled axis-aligned rectangle anchored at (0, 0) on `ax`.

    NOTE(review): the pasted original had its body indentation stripped
    (a SyntaxError as-is); this restores conventional formatting without
    changing any statement. Despite the name, this draws a rectangle of
    arbitrary width/height, not necessarily a square.

    Parameters:
        ax: matplotlib Axes to draw on (mutated in place).
        width, height: rectangle dimensions in data units.
        fill_color: face color passed to Rectangle as `fc`.
        edge_color: edge color passed to Rectangle as `ec`.

    Returns:
        None; the patch is added to `ax` as a side effect.
    """
    square = plt.Rectangle((0, 0), width, height, fc=fill_color, ec=edge_color)
    ax.add_patch(square)
    # Pad the view limits by 0.5 data units so the rectangle's edges
    # are not clipped at the axes boundary.
    ax.set_xlim(-0.5, width + 0.5)
    ax.set_ylim(-0.5, height + 0.5)
    # Equal aspect with 'box' adjustable so the shape is not distorted.
    ax.set_aspect('equal', 'box')
import subprocess
from config import config, preset
class VirtualMic():
    """Stub wrapper for a virtual microphone device.

    NOTE(review): the pasted original had its body indentation stripped
    (a SyntaxError as-is); this restores conventional formatting without
    changing any statement. Only this stub is visible — `__init__` does
    nothing, and the imported `subprocess`/`config`/`preset` are presumably
    used by methods outside this snippet; confirm against the full file.
    """

    # Index of the audio device this class targets.
    DEVICE_ID = 0

    def __init__(self) -> None:
        pass
import speech_recognition as sr
import sounddevice as sd
import wave
# Truncated snippet: sets up recording parameters for a sounddevice capture;
# the actual recording and speech_recognition transcription calls are cut off
# below. NOTE(review): body indentation appears stripped by the paste — as
# written this is not valid Python; restore indentation from the original.
def record_and_transcribe():
duration =10 # Recording duration in seconds
sample_rate = 44100 # Standard audio sample rate
channels = 2 # Stereo audio (set to 1 for mono) — original comment wrongly said "Mono"
# Record audio using sounddevice
import sounddevice as sd
import SpeechRecognition as sr # Note the capitalization
# Second, shorter variant of record_and_transcribe (5 s, mono); also truncated —
# only the parameter setup and the "Recording..." notice are visible.
# NOTE(review): body indentation appears stripped by the paste — as written this
# is not valid Python; restore indentation from the original. Also note the
# sibling import `import SpeechRecognition as sr` above uses the wrong module
# name (the package imports as `speech_recognition`).
def record_and_transcribe():
duration = 5 # Recording duration in seconds
sample_rate = 44100 # Standard audio sample rate
channels = 1 # Mono audio (change to 2 for stereo)
# Record audio using sounddevice
print("Recording...")
// Canvas free-draw mouse handler (truncated): strokes a white 2px rounded line
// at the cursor position, with what appears to be a zoom-region bounds check
// that is cut off mid-condition. NOTE(review): relies on outer-scope `drawing`,
// `canvas`, `ctx`, and `zoomRegionX`, none of which are visible here.
function drawLine(e) {
const video = document.querySelector("#myVideo"); // NOTE(review): queried but unused in the visible lines
if (!drawing) return; // only draw while the mouse button is held
// Convert the event's viewport coordinates to canvas-local coordinates.
const [x, y] = [e.clientX - canvas.offsetLeft, e.clientY - canvas.offsetTop];
ctx.strokeStyle = 'white';
ctx.lineWidth = 2;
ctx.lineJoin = 'round';
// Truncated: the condition continues past the visible snippet.
if (
x >= zoomRegionX &&
const puppeteer = require("puppeteer");
// Puppeteer scraping helper (truncated): launches a visible (non-headless)
// browser and opens a page; the code that fills `finalArr` is cut off below.
// NOTE(review): the commented-out local suggests `finalArr` was once local and
// was changed to a caller-supplied accumulator — confirm the caller passes one.
const grabfunc = async (finalArr) => {
//let finalArr = [];
const browser = await puppeteer.launch({
headless: false, // run with a visible browser window
args: ["--disable-features=EnableUserAgentClientHint"], // disable UA client-hints feature
});
const page = await browser.newPage();
const puppeteer = require("puppeteer");
// Script entry point (truncated async IIFE): mirrors the helper above —
// launches a visible browser with the same flags and opens a page; whatever
// populates `results` is cut off past the visible snippet.
(async () => {
const results = []; // Declare 'results' array in the wider scope
const browser = await puppeteer.launch({
headless: false, // run with a visible browser window
args: ["--disable-features=EnableUserAgentClientHint"], // disable UA client-hints feature
});
const page = await browser.newPage();