Skip to content

Instantly share code, notes, and snippets.

View Tony363's full-sized avatar
🎯
Focusing

Tony Siu Tony363

🎯
Focusing
View GitHub Profile
class A:
    """Base class in a small method-resolution-order demo.

    Subclasses override ``test`` and may chain back here via ``super()``.
    """

    def test(self):
        """Announce that A's implementation ran."""
        print("test of A called")
class B(A):
    """Subclass that announces itself before delegating to the parent."""

    def test(self):
        """Print B's marker, then run the next ``test`` in the MRO."""
        print('test of B called')
        super().test()
class C(A):
    """Subclass that replaces A's ``test`` outright (no super() call)."""

    def test(self):
        """Print C's marker only; A's implementation is not chained."""
        print("test of C called")
@Tony363
Tony363 / game.py
Last active November 6, 2019 15:11
import sys
import random
# matrix = [
# [1,2],
# [3,4],
# [5,6],
# ]
# tony = [[row[i] for row in matrix] for i in range(2)]
# print(tony)
# points = random.sample([range(1,7),range(1,7)],3)
import tkinter as tk
from random import randrange
import time
# Module-level store; nothing in this fragment appends to it yet.
database = []


def getorigin(eventorigin):
    """Read the click coordinates off a tkinter event object.

    The coordinates are bound to locals only — they are neither returned
    nor stored, so this callback currently has no observable effect.
    """
    click_x, click_y = eventorigin.x, eventorigin.y
## imports for Python 2.7, change as appropriate
from tkinter import *
import tkinter.simpledialog
import time, random, numpy, math
class PlayerSprite:
    """Player entity bound to a drawing surface.

    Holds a reference to the canvas it draws on and a flag that starts
    cleared; the visible fragment never sets ``endgame`` to True.
    """

    def __init__(self, canvas):
        # Keep the surface handle and start with the game-over flag down.
        self.canvas, self.endgame = canvas, False
import requests
import urllib
from bs4 import BeautifulSoup
# Fetch a Chartmetric artist page and print every <div class="left-col">.
# NOTE(review): runs a live HTTP GET at import time; Chartmetric renders
# client-side, so the static HTML fetched here may not contain the
# expected divs — confirm against the actual response.
url = requests.get('https://app.chartmetric.com/artist?id=4')
# Parse the raw response body with the stdlib 'html.parser' backend.
soup = BeautifulSoup(url.content, 'html.parser')
# Collect all divs carrying the "left-col" class.
message_box = soup.find_all("div",{"class":"left-col"})
# print(soup.find_all("div"))
print(message_box)
print('hello world')
Stitcher::Status Stitcher::composePanorama(InputArrayOfArrays images, OutputArray pano)
{
CV_INSTRUMENT_REGION();
LOGLN("Warping images (auxiliary)... ");
std::vector<UMat> imgs;
images.getUMatVector(imgs);
if (!imgs.empty())
{
# NB : cv2.UMat array is faster than np array
pano = cv2.UMat(np.asarray([]))
leftimage_gpu = cv2.cuda_GpuMat()
rightimage_gpu = cv2.cuda_GpuMat()
readFrame = 0
while self.running:
try:
# Initialize left and right frames
# CSI cameras frames works on 30 or 60 FPS but the sticher works under 3FPS (slower)
# Therefore it needs to store a frame for a longer period of time to be able to stitch
if cached is not None:
dst_sz,warper, cameras,corners,masks_warped = cached
blender.prepare(dst_sz)
for idx, name in enumerate(img_names):
corner, image_warped = warper.warp(name, cameras[idx].K().astype(np.float32), cameras[idx].R, cv2.INTER_LINEAR, cv2.BORDER_REFLECT)
p, mask_warped = warper.warp(255 * np.ones((name.shape[0], name.shape[1]), np.uint8), cameras[idx].K().astype(np.float32), cameras[idx].R, cv2.INTER_NEAREST, cv2.BORDER_CONSTANT)
compensator.apply(idx, corners[idx], image_warped, mask_warped)
mask_warped = cv2.bitwise_and(cv2.resize(cv2.dilate(masks_warped[idx], None), (mask_warped.shape[1], mask_warped.shape[0]), 0, 0, cv2.INTER_LINEAR_EXACT), mask_warped)
blender.feed(cv2.UMat(image_warped.astype(np.int16)), mask_warped, corners[idx])
result, result_mask = blender.blend(None, None)
masks = np.asarray([cv2.UMat(255 * np.ones((images[i].shape[0], images[i].shape[1]), np.uint8)) for i in range(img_names.shape[0])])
corners = np.asarray([warper.warp(images[idx], Kseam_work_aspect(cameras[idx].K().astype(np.float32),seam_work_aspect), cameras[idx].R, cv2.INTER_LINEAR, cv2.BORDER_REFLECT)[0] for idx in range(img_names.shape[0])])
masks_warped = np.asarray([warper.warp(masks[idx], Kseam_work_aspect(cameras[idx].K().astype(np.float32),seam_work_aspect), cameras[idx].R, cv2.INTER_NEAREST, cv2.BORDER_CONSTANT)[1].get() for idx in range(img_names.shape[0])])
images_warped = np.asarray([warper.warp(images[idx], Kseam_work_aspect(cameras[idx].K().astype(np.float32),seam_work_aspect), cameras[idx].R, cv2.INTER_LINEAR, cv2.BORDER_REFLECT)[1]for idx in range(img_names.shape[0])])
sizes = np.asarray([warper.warp(images[idx], Kseam_work_aspect(cameras[idx].K().astype(np.float32),seam_work_aspect), cameras[idx].R, cv2.INTER_LINEAR, cv2.BORDER_REFLECT)[1].shape[1::-1] for idx in range(img_names.shape[0])]
def Manual(
left_image,
right_image,
cached=None,
work_megapix=0.69,
seam_megapix=0.01,
ba_refine_mask='xxxxx',
finder = cv2.ORB.create(),
blender=cv2.detail.Blender_createDefault(cv2.detail.Blender_NO),
compensator=cv2.detail.ExposureCompensator_createDefault(cv2.detail.ExposureCompensator_GAIN_BLOCKS),