Anis AYARI anisayari
import React, { useEffect, useRef } from 'react';
import * as THREE from 'three';
import { EffectComposer } from 'three/examples/jsm/postprocessing/EffectComposer';
import { RenderPass } from 'three/examples/jsm/postprocessing/RenderPass';
import { UnrealBloomPass } from 'three/examples/jsm/postprocessing/UnrealBloomPass';
import { OutputPass } from 'three/examples/jsm/postprocessing/OutputPass';
import ShaderCode from './ShaderCode';
// React component rendering a Three.js audio visualizer with bloom post-processing
const AudioVisualizer3D = ({ audioStream, type }) => {
  const { vertexShader, fragmentShader } = ShaderCode();
import cv2
import dlib
import PIL.Image
import numpy as np
from imutils import face_utils
import argparse
from pathlib import Path
import os
import ntpath

print('[INFO] Starting System...')
print('[INFO] Importing pretrained model..')
# dlib models: 68-point and 5-point landmark predictors plus the ResNet face encoder
pose_predictor_68_point = dlib.shape_predictor("pretrained_model/shape_predictor_68_face_landmarks.dat")
pose_predictor_5_point = dlib.shape_predictor("pretrained_model/shape_predictor_5_face_landmarks.dat")
face_encoder = dlib.face_recognition_model_v1("pretrained_model/dlib_face_recognition_resnet_model_v1.dat")
face_detector = dlib.get_frontal_face_detector()
print('[INFO] Pretrained model imported')
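The preview above only loads the models. A minimal sketch of how these dlib objects are typically chained (detector, landmarks, 128-d encoding, Euclidean comparison) follows; the helper name encode_face, the image filenames and the 0.6 threshold are illustrative assumptions, not part of the gist.

# Hypothetical usage sketch: detect faces, compute landmarks, encode, compare.
def encode_face(image):
    """Return one 128-d dlib encoding per detected face in an RGB image."""
    encodings = []
    for rect in face_detector(image, 1):                  # upsample once to catch small faces
        shape = pose_predictor_68_point(image, rect)      # 68 facial landmarks
        descriptor = face_encoder.compute_face_descriptor(image, shape)
        encodings.append(np.array(descriptor))            # 128-d vector
    return encodings

known = encode_face(np.array(PIL.Image.open("known.jpg").convert("RGB")))[0]
probe = encode_face(np.array(PIL.Image.open("probe.jpg").convert("RGB")))[0]
# dlib encodings are usually compared with Euclidean distance; ~0.6 is the common threshold.
print("same person" if np.linalg.norm(known - probe) < 0.6 else "different person")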
anisayari / gist:1d2ada17ea4d7e09d93358e5705db781
Created September 18, 2019 11:39
Reduction memory_usage
def reduce_mem_usage(df):
    """ iterate through all the columns of a dataframe and modify the data type
        to reduce memory usage.
    """
    start_mem = df.memory_usage().sum() / 1024 ** 2
    print('Memory usage of dataframe is {:.2f} MB'.format(start_mem))
    for col in df.columns:
        col_type = df[col].dtype
anisayari / reduce_mem_usage.py
Created September 18, 2019 11:39
Reduction memory_usage
def reduce_mem_usage(df):
    """ iterate through all the columns of a dataframe and modify the data type
        to reduce memory usage.
    """
    start_mem = df.memory_usage().sum() / 1024 ** 2
    print('Memory usage of dataframe is {:.2f} MB'.format(start_mem))
    for col in df.columns:
        col_type = df[col].dtype
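Both previews cut off inside the column loop. A minimal sketch of how this well-known downcasting pattern typically continues is shown below; it is an assumption about the truncated part rather than the gist's exact code, and it uses pandas' built-in downcasting instead of explicit np.iinfo bounds.

import numpy as np
import pandas as pd

def reduce_mem_usage(df):
    """Downcast numeric columns to the smallest dtype that still holds their values."""
    start_mem = df.memory_usage().sum() / 1024 ** 2
    print('Memory usage of dataframe is {:.2f} MB'.format(start_mem))
    for col in df.columns:
        col_type = df[col].dtype
        if np.issubdtype(col_type, np.integer):
            df[col] = pd.to_numeric(df[col], downcast='integer')
        elif np.issubdtype(col_type, np.floating):
            df[col] = pd.to_numeric(df[col], downcast='float')
    end_mem = df.memory_usage().sum() / 1024 ** 2
    print('Memory usage after optimization is {:.2f} MB'.format(end_mem))
    print('Decreased by {:.1f}%'.format(100 * (start_mem - end_mem) / start_mem))
    return df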
anisayari / extract_features_from_songs.py
Created April 21, 2019 18:18
extract_features_from_songs.py
import librosa
import numpy as np
import pandas as pd
from tqdm import tqdm
def extract_features_from_a_song(x, sr):
    dict_features = {
        'zcr': np.mean(librosa.feature.zero_crossing_rate(x)),
        'chroma_stft': np.mean(librosa.feature.chroma_stft(x, sr=sr)),
        'mfcc': np.mean(librosa.feature.mfcc(x, sr=sr)),
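The feature dictionary is truncated in the preview. A minimal sketch of how such a function is typically driven over a set of files with librosa and tqdm follows; the 'path' column and the output layout are assumptions for illustration, not the gist's actual interface.

def extract_features_from_songs(df):
    """Load each audio file and collect its aggregated features into a dataframe."""
    rows = []
    for path in tqdm(df['path']):                  # 'path' column is an assumed layout
        x, sr = librosa.load(path)                 # decode audio at librosa's default sample rate
        features = extract_features_from_a_song(x, sr)
        features['path'] = path
        rows.append(features)
    return pd.DataFrame(rows)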
anisayari / get_youtube_music.py
Created April 21, 2019 17:34
get_youtube_music
import os
import youtube_dl
import pandas as pd

def get_youtube_music(row):
    path = 'data/music/{}/{}.mp3'.format(row['style'], str(row['uuid']))
    exists = os.path.isfile(path)
    if exists:
        print('Already exists {}'.format(row['uuid']))
        return
    if row['videoID_youtube'] != "missing":
        videoID = row['videoID_youtube']
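The preview stops right after the video ID is read. A minimal sketch of how youtube_dl is typically configured to fetch that video as an mp3 is shown below; the option values and the output template are assumptions rather than the gist's exact settings, and the mp3 post-processing requires ffmpeg to be installed.

        # Hypothetical continuation: download the best audio track and transcode it to mp3.
        ydl_opts = {
            'format': 'bestaudio/best',
            'outtmpl': 'data/music/{}/{}.%(ext)s'.format(row['style'], row['uuid']),
            'postprocessors': [{
                'key': 'FFmpegExtractAudio',
                'preferredcodec': 'mp3',
                'preferredquality': '192',
            }],
        }
        with youtube_dl.YoutubeDL(ydl_opts) as ydl:
            ydl.download(['https://www.youtube.com/watch?v={}'.format(videoID)])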
anisayari / get_video_id_from_youtube.py
Last active April 21, 2019 16:45
get_video_id_from_youtube
import csv
import pandas as pd
import googleapiclient
from tqdm import tqdm
DEVELOPER_KEY_GCP = "key_here"
def get_video_id_from_youtube(df):
    # Disable OAuthlib's HTTPS verification when running locally.
    # *DO NOT* leave this option enabled in production.
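The function body is cut off after the boilerplate comments. A minimal sketch of how the YouTube Data API v3 search endpoint is typically queried to recover a video ID follows; the helper search_video_id, the query built from 'artist' and 'title' columns, and the 'missing' fallback are assumptions chosen to mirror the other gists, not the original code.

# Hypothetical sketch built on the official client library.
import googleapiclient.discovery

def search_video_id(query):
    youtube = googleapiclient.discovery.build('youtube', 'v3', developerKey=DEVELOPER_KEY_GCP)
    response = youtube.search().list(part='id', q=query, type='video', maxResults=1).execute()
    items = response.get('items', [])
    return items[0]['id']['videoId'] if items else 'missing'

# Assumed driver: df carries 'artist' and 'title' columns.
# df['videoID_youtube'] = [search_video_id('{} {}'.format(a, t))
#                          for a, t in zip(tqdm(df['artist']), df['title'])]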
anisayari / build_dataset_by_tag.py
Last active April 21, 2019 16:28
dataset from lastfm by tag
import pylast
import pandas as pd
from tqdm import tqdm
API_KEY_lastfm = "key_here"
API_SECRET_lastfm = " key_here"
username_lastfm = "username"
def build_dataset_by_tag(tag_list, output_file):
    print('[INFO] Building Dataset by Tag....')
    network_lastfm = pylast.LastFMNetwork(api_key=API_KEY_lastfm, api_secret=API_SECRET_lastfm, username=username_lastfm)
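The preview ends right after the Last.fm client is created. A minimal sketch of how pylast is typically used to pull the top tracks per tag into a CSV follows; the 100-track limit, the uuid/style/artist/title columns (chosen to line up with the columns used by get_youtube_music above) and the extra uuid import are assumptions, not the gist's exact logic.

import uuid  # assumed module-level import, to generate the 'uuid' column used by the other gists

    # Hypothetical continuation: fetch top tracks for each tag and save them as CSV.
    rows = []
    for tag in tqdm(tag_list):
        for top in network_lastfm.get_tag(tag).get_top_tracks(limit=100):
            track = top.item
            rows.append({'uuid': uuid.uuid4(),
                         'style': tag,
                         'artist': track.artist.get_name(),
                         'title': track.get_name()})
    pd.DataFrame(rows).to_csv(output_file, index=False)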