Skip to content

Instantly share code, notes, and snippets.

View azelenets's full-sized avatar
:octocat:
Impossible is nothing

Andrii Zelenets azelenets

:octocat:
Impossible is nothing
View GitHub Profile
# Migration file
#------------------------------------------------------------
# Creates a PostgreSQL composite type `full_address` via raw SQL
# (Rails has no migration DSL for CREATE TYPE). The type definition and
# the rest of the migration are truncated below this view — the remaining
# fields, the closing of the heredoc, and any `down` method are not shown.
class CreateTableName < ActiveRecord::Migration[5.2]
def up
execute <<-SQL
CREATE TYPE full_address AS
(
google_place_id VARCHAR,
address VARCHAR,
lat NUMERIC,
@azelenets
azelenets / AWS Lambda to encode video using ElasticTranscoder HLS.js
Last active April 15, 2022 12:27
AWS Lambda to encode video using ElasticTranscoder HLS
//Author: Dennis Hills, Andrew Zelenets
//Date: July 26, 2018
//Purpose: HLS Audio/Video Streaming
//Requirements: An Elastic Transcoder Pipeline
// AWS Lambda handler dependencies: SDK client for Elastic Transcoder.
var aws = require('aws-sdk');
var elastictranscoder = new aws.ElasticTranscoder();
// return filename without extension
// NOTE(review): function body is truncated below this view.
function baseName(path) {
# frozen_string_literal: true
# More info:
# https://www.contentful.com/r/knowledgebase/searching-on-references/
# https://www.contentful.com/developers/docs/references/content-delivery-api/
# Concern for querying Contentful entries by reference ("find in relation").
# NOTE(review): module body is truncated below this view — the actual
# query helpers are not visible here.
module Contentful
module FindInRelation
extend ActiveSupport::Concern
# frozen_string_literal: true
# Mixin for models backed by a Contentful entry.
module BelongsToContentful
extend ActiveSupport::Concern
# Contentful content types this concern supports.
CONTENT_TYPES = %w[prayer passage teaching teaching_clip organization author].freeze
included do
# NOTE(review): ActiveSupport::Concern auto-extends a *nested*
# ClassMethods module on include; this explicit extend suggests
# ClassMethods is defined elsewhere (outside this view) — confirm.
extend ClassMethods
end
# One-off console snippet: download the first Contentful "teachingFull"
# entry's media file into tmp/contentful_import/ so it is cached locally
# before being uploaded to S3.
client = ContentfulSpace.first.client
teachings = client.entries(content_type: 'teachingFull', include: 3)
tf = teachings.first
attributes = Api::V1::Contentful::TeachingSerializer.new(tf).as_json
uri = URI.parse(attributes[:remote_media_file_url])
remote_path = uri.path
# Mirror the remote path under tmp/contentful_import/.
tmp_filepath = Rails.root.join(remote_path.split('/').reject(&:blank?).unshift('tmp', 'contentful_import').join('/'))
create_file(tmp_filepath) unless File.exist?(tmp_filepath)
# Use the list form of Kernel#system so the URL and path are passed as
# plain argv entries — the previous backtick form interpolated them into
# a shell command string, allowing command injection via a crafted
# remote_media_file_url.
system('wget', uri.to_s, '-O', tmp_filepath.to_s) # Cache file before upload to S3
# One-off console snippet (duplicate of the cache snippet above on the
# page): download the first Contentful "teachingFull" entry's media file
# into tmp/contentful_import/ so it is cached locally before S3 upload.
client = ContentfulSpace.first.client
teachings = client.entries(content_type: 'teachingFull', include: 3)
tf = teachings.first
attributes = Api::V1::Contentful::TeachingSerializer.new(tf).as_json
uri = URI.parse(attributes[:remote_media_file_url])
remote_path = uri.path
# Mirror the remote path under tmp/contentful_import/.
tmp_filepath = Rails.root.join(remote_path.split('/').reject(&:blank?).unshift('tmp', 'contentful_import').join('/'))
create_file(tmp_filepath) unless File.exist?(tmp_filepath)
# List form of Kernel#system passes the URL/path as argv entries instead
# of interpolating them into a shell string — the backtick form was
# vulnerable to command injection via a crafted remote_media_file_url.
system('wget', uri.to_s, '-O', tmp_filepath.to_s) # Cache file before upload to S3
@azelenets
azelenets / rubocop_pre_commit_hook
Last active March 23, 2018 11:07
Ruby style guide git pre-commit hook using Rubocop as the style guide checker. Only runs on staged ruby files that have been added and/or modified.
#!/usr/bin/env ruby
# Git pre-commit hook: runs RuboCop against staged Ruby files only
# (rest of the script is truncated below this view).
require 'english'
require 'rubocop'
# Matches `git status --porcelain` status codes for added/modified files.
# NOTE(review): in /A|AM|^M/ the `AM` alternative is unreachable (bare `A`
# matches first) and `A` is unanchored, so it also matches an `A` anywhere
# in the line, including file names; likely intended /^A|^M/ — confirm.
ADDED_OR_MODIFIED = /A|AM|^M/.freeze
# Collect staged file names whose porcelain status matches the pattern.
changed_files = `git status --porcelain`.split(/\n/).
select { |file_name_with_status|
file_name_with_status =~ ADDED_OR_MODIFIED
@azelenets
azelenets / custom_array_matchers.rb
Created December 18, 2017 14:25 — forked from bunnymatic/custom_array_matchers.rb
RSpec Matchers for increasing/decreasing array tests. Useful when sort may not be the easiest way to do things.
# Custom matcher: passes when every consecutive pair (x, y) in the array
# is non-decreasing, i.e. (y <=> x) >= 0 for all adjacent elements.
# The closing `end` of the define block is truncated below this view.
RSpec::Matchers.define :be_monotonically_increasing do
match do |actual|
# Pairwise "derivative": spaceship result for each adjacent pair.
derivative = actual.each_cons(2).map{|x, y| y <=> x}
derivative.all?{|v| v >= 0}
end
# NOTE(review): `failure_message_for_should` is the RSpec 2 API name;
# RSpec 3 renamed it to `failure_message` — confirm target RSpec version.
failure_message_for_should do |actual|
"expected array #{actual.inspect} to be monotonically increasing"
end
# 1. Clone https://github.com/churchio/open-bibles repo;
# 2. install https://github.com/churchio/bible_ref gem
# Imports verses from an open-bibles source file; the persistence step is
# truncated below this view.
class Importer
# Parses the bible file at +path+ and processes each verse for the given
# +translation_id+ (presumably inserting rows — code not visible here).
def import(path, translation_id)
# NOTE(review): File.open without a block never closes the handle;
# prefer the block form — flagged, not changed (block body truncated).
bible = BibleParser.new(File.open(path))
bible.each_verse do |verse|
data = verse.to_h
# Rename parser keys to the column names the importer expects.
data[:book] = data.delete(:book_title)
data[:chapter] = data.delete(:chapter_num)
-- Seed/fixture data: assignment rows for exam 250 across several students,
-- with mixed graded and ungraded records.
-- NOTE(review): `state` appears to be 5 for graded rows (grade present)
-- and 0 for ungraded (grade NULL) — confirm enum meanings against the app.
INSERT INTO assignments (exam_id, student_id, created_at, updated_at, grade, graded_by, comment, comment_by, lesson_segment_id, state) VALUES
(250, 7066, '2012-07-05 15:47:14', '2017-05-30 13:33:48', 85, null, null, null, null, 5),
(250, 7066, '2012-07-07 15:47:14', '2017-05-30 13:33:48', 90, null, null, null, null, 5),
(250, 10241, '2010-12-09 00:00:00', '2017-05-29 13:13:55', 99, null, null, null, null, 5),
(250, 10241, '2016-07-19 20:59:28', '2017-05-29 14:29:37', 80, 76153, null, null, null, 5),
(250, 10241, '2016-07-20 20:59:28', '2017-05-29 14:29:37', 81, 76153, null, null, null, 5),
(250, 456861, '2012-03-19 05:14:11', '2017-05-29 13:25:48', 75, null, null, null, null, 5),
(250, 456861, '2012-03-19 17:01:45', '2012-03-19 17:01:45', null, null, null, null, null, 0),
(250, 4, '2012-01-30 07:50:55', NULL, NULL, NULL, NULL, NULL, NULL, 0);