GitHub gists by Igor (cra)
import os

import pendulum
from airflow import DAG
from airflow.decorators import dag
from airflow.operators.empty import EmptyOperator
from airflow.operators.bash import BashOperator

# Resolve the 'YM_logs' directory that sits next to this DAG file
base_path = os.path.dirname(os.path.abspath(__file__))
file_path = os.path.join(base_path, 'YM_logs')
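The preview cuts off after the path setup; a minimal sketch of how a DAG could be declared around these imports and paths (the dag_id, schedule, start date, and bash command are assumptions, not from the gist):

@dag(
    dag_id='ym_logs_example',                      # assumed name
    schedule_interval=None,
    start_date=pendulum.datetime(2023, 1, 1),      # assumed start date
    catchup=False,
)
def ym_logs_example():
    start = EmptyOperator(task_id='start')
    # Hypothetical task: list whatever is sitting in the YM_logs directory
    list_logs = BashOperator(task_id='list_logs', bash_command=f'ls {file_path}')
    start >> list_logs

ym_logs_example()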
cra / popup.py
Created February 22, 2023 09:04
import random

from plyer import notification

# Candidate reminder texts for the popup notification
todo = (
    "pip install plyer",
    "save this as a script that is auto-run every 24h",
    "you can also write the reminders in Russian",
    "or load them from a file via open('path').readlines()",
)
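The preview stops at the tuple; a minimal sketch of the popup itself using plyer's notification.notify (the title and timeout are assumptions):

# Pick one reminder at random and show it as a desktop notification
notification.notify(
    title="todo",                    # assumed title, not from the gist
    message=random.choice(todo),
    timeout=10,                      # seconds the popup stays visible
)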
cra / resume.json
Last active January 12, 2023 20:50
{
  "$schema": "https://raw.githubusercontent.com/jsonresume/resume-schema/v1.0.0/schema.json",
  "basics": {
    "name": "Dr. Igor Mosyagin",
    "label": "Pragmatic engineer with strong academic background",
    "image": "",
    "email": "igor@mosyag.in",
    "phone": "",
    "url": "",
    "summary": "Pragmatic engineer with strong academic background.\nComputer Science -> Science -> Data Science -> Data",
from datetime import date
from faker import Faker

class User(Faker):
    def __init__(self):
        super().__init__(['pl_PL'])  # fake data in the Polish locale

    @property
    def age(self):
        # Body cut off in the preview; an assumed example: age from a fake birth date.
        return date.today().year - self.date_of_birth().year
from airflow.models import DAG
from airflow.utils.dates import days_ago
from airflow.operators.python import PythonOperator
from airflow.operators.empty import EmptyOperator

# Callable for a PythonOperator; the positional arguments arrive via op_args
def ohai(raz, dva, tri, chetyre):
    print("Whoa, holy shit:", raz, dva)
    print("And this also works:", tri, chetyre)
import pendulum
from airflow import DAG
from airflow.operators.trigger_dagrun import TriggerDagRunOperator

# Unscheduled DAG meant to be run by hand and kick off another DAG
dag = DAG(
    dag_id='i_will_trigger',
    schedule_interval=None,
    start_date=pendulum.parse('2022-07-13'),
)
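The preview cuts off before the operator the import points at; a minimal sketch of how TriggerDagRunOperator could be attached to this DAG (task_id and trigger_dag_id are assumptions):

trigger = TriggerDagRunOperator(
    task_id='trigger_downstream',
    trigger_dag_id='i_will_be_triggered',  # assumed target dag_id, not from the gist
    dag=dag,
)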
  1. Install rq: pip install rq
  2. Run redis: docker run -p 6379:6379 redis
  3. Run a worker: rq worker
  4. Run the submit script: python submit.py (a sketch of submit.py follows below)

More info: https://python-rq.org/docs/
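submit.py itself is not shown on this page; a minimal sketch of what such a script could look like with python-rq (the enqueued callable is a stand-in, since the worker has to be able to import whatever gets enqueued):

from redis import Redis
from rq import Queue

# Connect to the redis started in step 2 and put a job on the default queue.
# A real submit script would enqueue a function from its own importable module;
# a builtin is used here so the sketch runs as-is.
q = Queue(connection=Redis())
job = q.enqueue(len, "hello from rq")
print("enqueued job:", job.id)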

2022.05.05 00:47:02.330955 [ 261 ] {} <Trace> StorageKafka (lab4_kafka_queue): Polled batch of 599 messages. Offsets position: [ andrey_bratukhin_lab03_in[0:37533] ]
2022.05.05 00:47:02.331745 [ 261 ] {} <Trace> StorageKafka (lab4_kafka_queue): Re-joining claimed consumer after failure
2022.05.05 00:47:02.332298 [ 261 ] {} <Error> void DB::StorageKafka::threadFunc(size_t): Code: 33. DB::ParsingException: Unexpected end of stream while parsing JSONEachRow format: while parsing Kafka message (topic: andrey_bratukhin_lab03_in, partition: 0, offset: 36934)': While executing Kafka: (at row 2)
. (CANNOT_READ_ALL_DATA), Stack trace (when copying this message, always include the lines below):
0. DB::Exception::Exception(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, int, bool) @ 0xa82d07a in /usr/bin/clickhouse
1. DB::JSONEachRowRowInputFormat::advanceToNextKey(unsigned long) @ 0x14904954 in /usr/bin/clickhouse
2. DB::JSONEachRowRowInputFormat::readJSONObject(std::__1::vector<COW<DB::IColumn>::mutable_ptr<DB::IColumn>, std::__1::allocator<COW<DB::IColumn>::mutable_ptr<DB::IColumn> > >&) @ 0x149043e1 in /usr/bin/clickhouse
3. DB::JSONEachRowRowInputFormat::readRow(std::__1::vector<COW<DB::IColumn>::mutable_ptr<DB::IColumn>, std::__1::allocator<COW<DB::