- Install rq:
pip install rq
- Run Redis (here via Docker):
docker run -p 6379:6379 redis
- Run a worker:
rq worker
- Run the submit script (a sketch of what it might contain follows below):
python submit.py
More info: https://python-rq.org/docs/
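The submit script itself isn't shown; a minimal sketch of what submit.py could look like, assuming the job function lives in a tasks.py module the worker can import (both file names and the count_words function are assumptions):

# tasks.py -- must be importable by the worker process
def count_words(text):
    return len(text.split())

# submit.py
from redis import Redis
from rq import Queue

from tasks import count_words

q = Queue(connection=Redis())  # defaults to localhost:6379
job = q.enqueue(count_words, 'hello rq world')
print(job.id)  # the worker picks this job up and executes it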
import os

import pendulum
from airflow import DAG
from airflow.decorators import dag
from airflow.operators.empty import EmptyOperator
from airflow.operators.bash import BashOperator

base_path = os.path.dirname(os.path.abspath(__file__))
file_path = os.path.join(base_path, 'YM_logs')
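The snippet stops at the path setup; a minimal sketch of how the imported operators and file_path might be wired together (the dag_id, schedule, and bash command are assumptions, not from the original):

with DAG(
    dag_id='ym_logs',  # hypothetical dag_id
    start_date=pendulum.datetime(2022, 7, 13, tz='UTC'),
    schedule_interval='@daily',
) as dag:
    start = EmptyOperator(task_id='start')
    collect = BashOperator(
        task_id='collect_logs',
        bash_command=f'ls -la {file_path}',  # placeholder command
    )
    start >> collect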
import random

from plyer import notification

todo = (
    "pip install plyer",
    "schedule this to auto-run every 24h",
    "you can write these in Russian too",
    "or read them from a file via open('path').readlines()",
)
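The snippet defines the reminder list but cuts off before the notification call; a minimal sketch of the likely next step, using plyer's notification.notify (the title and timeout values are assumptions):

notification.notify(
    title='TODO',                 # hypothetical title
    message=random.choice(todo),  # pick one reminder at random
    timeout=10,                   # seconds the popup stays visible
)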
{
  "$schema": "https://raw.githubusercontent.com/jsonresume/resume-schema/v1.0.0/schema.json",
  "basics": {
    "name": "Dr. Igor Mosyagin",
    "label": "Pragmatic engineer with strong academic background",
    "image": "",
    "email": "igor@mosyag.in",
    "phone": "",
    "url": "",
    "summary": "Pragmatic engineer with strong academic background.\nComputer Science -> Science -> Data Science -> Data",
from datetime import date
from faker import Faker

class User(Faker):
    def __init__(self):
        super().__init__(['pl_PL'])  # Polish locale

    @property
    def birth_date(self) -> date:
        # plausible completion: the original cuts off after @property
        return self.date_of_birth()
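A quick usage check for the class above (output varies, since Faker generates random data):

user = User()
print(user.name())      # a random Polish-locale name
print(user.birth_date)  # a datetime.date from the property above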
from airflow.models import DAG
from airflow.utils.dates import days_ago
from airflow.operators.python import PythonOperator
from airflow.operators.empty import EmptyOperator

def ohai(raz, dva, tri, chetyre):
    print("Whoa, would you look at that:", raz, dva)
    print("And this works too:", tri, chetyre)
import pendulum

from airflow import DAG
from airflow.operators.trigger_dagrun import TriggerDagRunOperator

dag = DAG(
    dag_id='i_will_trigger',
    schedule_interval=None,
    start_date=pendulum.parse('2022-07-13'),
)
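TriggerDagRunOperator is imported but not yet used; a minimal sketch of the task that presumably follows (the task_id and the target dag_id are assumptions):

trigger = TriggerDagRunOperator(
    task_id='trigger_downstream',
    trigger_dag_id='i_am_triggered',  # hypothetical target DAG id
    dag=dag,
)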
2022.05.05 00:47:02.330955 [ 261 ] {} <Trace> StorageKafka (lab4_kafka_queue): Polled batch of 599 messages. Offsets position: [ andrey_bratukhin_lab03_in[0:37533] ]
2022.05.05 00:47:02.331745 [ 261 ] {} <Trace> StorageKafka (lab4_kafka_queue): Re-joining claimed consumer after failure
2022.05.05 00:47:02.332298 [ 261 ] {} <Error> void DB::StorageKafka::threadFunc(size_t): Code: 33. DB::ParsingException: Unexpected end of stream while parsing JSONEachRow format: while parsing Kafka message (topic: andrey_bratukhin_lab03_in, partition: 0, offset: 36934)': While executing Kafka: (at row 2)
. (CANNOT_READ_ALL_DATA), Stack trace (when copying this message, always include the lines below):
0. DB::Exception::Exception(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, int, bool) @ 0xa82d07a in /usr/bin/clickhouse
1. DB::JSONEachRowRowInputFormat::advanceToNextKey(unsigned long) @ 0x14904954 in /usr/bin/clickhouse
2. DB::JSONEachRowRowInputFormat::readJSONObject(std::__1::vector<COW<DB::IColumn>::mutable_ptr<DB::IColumn>, std::__1::allocator<COW<DB::IColumn>::mutable_ptr<DB::IColumn> > >&) @ 0x149043e1 in /usr/bin/clickhouse
3. DB::JSONEachRowRowInputFormat::readRow(std::__1::vector<COW<DB::IColumn>::mutable_ptr<DB::IColumn>, std::__1::allocator<COW<DB::