Artur Komaristych S0PEX

/home/s0pex/anaconda3/envs/bert_377/bin/python /home/s0pex/Git/UzK/ICL/bertese/BERTese/seq2seq_experiment.py --output_dir ./output --model_type bert_emb_identity_seq2seq --model_name bert-large-uncased --num_train_epochs 100 --evaluate_during_training --do_eval_test
Traceback (most recent call last):
File "/home/s0pex/Git/UzK/ICL/bertese/BERTese/seq2seq_experiment.py", line 142, in <module>
_run_identity_exp(args)
File "/home/s0pex/Git/UzK/ICL/bertese/BERTese/seq2seq_experiment.py", line 94, in _run_identity_exp
model_type=args.model_type)
File "/home/s0pex/Git/UzK/ICL/bertese/BERTese/seq2seq_experiment.py", line 52, in lama_seq2seq_experiment
all_data = readers.read_all_data(args, data_path, datasets_file_name)
File "/home/s0pex/Git/UzK/ICL/bertese/BERTese/readers.py", line 184, in read_all_data
pickle.dump(all_data, open(os.path.join(data_path, datasets_file_name), "wb"))
/home/s0pex/anaconda3/envs/bert_377/bin/python /home/s0pex/Git/UzK/ICL/bertese/BERTese/seq2seq_experiment.py --output_dir ./output --model_type bert_emb_identity_seq2seq --model_name bert-large-uncased --num_train_epochs 100 --evaluate_during_training --do_eval_test
Traceback (most recent call last):
File "/home/s0pex/Git/UzK/ICL/bertese/BERTese/seq2seq_experiment.py", line 142, in <module>
_run_identity_exp(args)
File "/home/s0pex/Git/UzK/ICL/bertese/BERTese/seq2seq_experiment.py", line 94, in _run_identity_exp
model_type=args.model_type)
File "/home/s0pex/Git/UzK/ICL/bertese/BERTese/seq2seq_experiment.py", line 52, in lama_seq2seq_experiment
all_data = readers.read_all_data(args, data_path, datasets_file_name)
File "/home/s0pex/Git/UzK/ICL/bertese/BERTese/readers.py", line 177, in read_all_data
trex_data = get_trex_data(args)
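Both tracebacks are truncated in the preview, so the actual exception text is missing; the last frames shown sit inside readers.read_all_data, once at the pickle.dump of the combined dataset and once while loading the T-REx split. As a rough guard for the first failure mode, here is a sketch of a defensive dump (the dump_all_data helper and the directory check are my additions, not part of the BERTese code; data_path and datasets_file_name are the names from the traceback):

import os
import pickle

def dump_all_data(all_data, data_path, datasets_file_name):
    # Hypothetical wrapper around the pickle.dump call from readers.py:
    # open(..., "wb") raises FileNotFoundError when data_path does not exist,
    # so create the directory first and close the file handle properly.
    os.makedirs(data_path, exist_ok=True)
    with open(os.path.join(data_path, datasets_file_name), "wb") as fh:
        pickle.dump(all_data, fh)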
@S0PEX
S0PEX / log.txt
Created June 7, 2024 06:41
pip install bertese requirements
(bertese) s0pex@um790-pro:~/Git/UzK/ICL/bertese$ pip install -r requirements.txt
Collecting absl-py==0.9.0 (from -r requirements.txt (line 1))
Downloading absl-py-0.9.0.tar.gz (104 kB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 104.0/104.0 kB 553.9 kB/s eta 0:00:00
Preparing metadata (setup.py) ... done
Collecting blis==0.4.1 (from -r requirements.txt (line 2))
Downloading blis-0.4.1.tar.gz (1.8 MB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 1.8/1.8 MB 3.9 MB/s eta 0:00:00
Preparing metadata (setup.py) ... done
Collecting boto3==1.12.39 (from -r requirements.txt (line 3))
rce>run-test DefaultTestSuite releases/10.4.0
> Starting test scenario "Verifying accessibility of console command auth for role "developer""
Setting up installation "releases_10_4_0" using build "releases/10.4.0"
*** Error in test scenario "Verifying accessibility of console command auth for role "developer""; dumping any captured StdOut/StdErr output
> Starting test scenario "Verifying accessibility of console command auth create for role "developer""
Setting up installation "releases_10_4_0" using build "releases/10.4.0"
*** Error in test scenario "Verifying accessibility of console command auth create for role "developer""; dumping any captured StdOut/StdErr output
> Starting test scenario "Verifying accessibility of console command auth delete for role "developer""
Setting up installation "releases_10_4_0" using build "releases/10.4.0"
*** Error in test scenario "Verifying accessibility of console command auth delete for role "developer""; dumping any captured StdOut/StdErr output
@S0PEX
S0PEX / tst.py
Created November 17, 2023 09:03
def test_all_Steps_of_pipeline(
dummy_pipeline: PipelineTuple, expected_dronology_ypred_ytrue
):
"""
Test loading predictions from a CSV file into the pipeline, calculating the
metrics, and exporting the results.
We aim to validate that the pipeline can successfully load prediction results from a CSV
file. The metrics are checked through the exporter, because it computes them
using the methods from the Metrics class.
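The preview stops inside the docstring, but the pattern it describes, load stored predictions from CSV, compute the metrics, and check the exported numbers, can be sketched roughly as follows (the CSV columns, the load_predictions helper, and the metric choices are assumptions for illustration, not the project's actual PipelineTuple API):

import csv

import pytest
from sklearn.metrics import f1_score, precision_score, recall_score

def load_predictions(path):
    # Read hypothetical y_pred / y_true columns from a results CSV.
    y_pred, y_true = [], []
    with open(path, newline="") as fh:
        for row in csv.DictReader(fh):
            y_pred.append(int(row["y_pred"]))
            y_true.append(int(row["y_true"]))
    return y_pred, y_true

def test_metrics_from_csv(tmp_path):
    # Write a tiny fixture, reload it, and verify the computed metrics.
    fixture = tmp_path / "predictions.csv"
    fixture.write_text("y_pred,y_true\n1,1\n0,1\n1,0\n1,1\n")
    y_pred, y_true = load_predictions(fixture)
    assert precision_score(y_true, y_pred) == pytest.approx(2 / 3)
    assert recall_score(y_true, y_pred) == pytest.approx(2 / 3)
    assert f1_score(y_true, y_pred) == pytest.approx(2 / 3)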
@S0PEX
S0PEX / bot.py
Created July 2, 2023 19:48
Bot.py
import logging
import requests
import argparse
import schedule
def send_register_request(course_id: int):
data = {
'state': 'studentAnmelden',
'type': 'student',
# Enter the matching ID here; not sure what the offerId=36 was for
import requests
data = {
'state': 'studentAnmelden',
'type': 'student',
'offerCourseID': ' 169 ', # Enter the matching ID here; not sure what the offerId=36 was for
'vorname': 'Name',
'nachname': 'Nachname',
'telefon': '0176000000',
'matrikel': '74XXX21',
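The preview ends before the request is actually sent or scheduled. A minimal sketch of how the imports and the form data above could be wired together (the registration URL, the course ID, and the five-minute retry interval are placeholders, not values from the gist):

import logging
import time

import requests
import schedule

REGISTER_URL = "https://example.invalid/register"  # placeholder, not the real endpoint

def send_register_request(course_id: int) -> None:
    # Build the same form payload as above and submit it.
    data = {
        'state': 'studentAnmelden',
        'type': 'student',
        'offerCourseID': str(course_id),
        'vorname': 'Name',
        'nachname': 'Nachname',
        'telefon': '0176000000',
        'matrikel': '74XXX21',
    }
    response = requests.post(REGISTER_URL, data=data, timeout=10)
    logging.info("Registration attempt for course %s returned %s", course_id, response.status_code)

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    # Retry every 5 minutes until the slot opens (interval is an assumption).
    schedule.every(5).minutes.do(send_register_request, course_id=169)
    while True:
        schedule.run_pending()
        time.sleep(1)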
// SoldierBones
// Bones 48 8B C4 53 48 81 EC ? ? ? ? 44 0F 29 60
static constexpr auto kClientDiceAntAnimatableEntity = 0x1050;
static constexpr auto kAntAnimatableEntity = 0x38;
static constexpr auto kRenderSkeleton = 0x30;
static constexpr auto kSqtArray = 0x38;
// RenderSkeleton
static constexpr auto kServerBoneHierarchy = 0x48;
static constexpr auto kLocalBoneHierarchy = 0x60;
@S0PEX
S0PEX / argo.ts
Created August 15, 2022 20:36
Argo Workflow with volume claim
const workflow: HttpWorkflowsArgoprojIoWorkflowsJson = {
metadata: {
name: `${pipelineSchema.title.replace(/\s/g, '-').toLowerCase()}-${id}`,
namespace: 'default',
labels: {
id,
},
},
spec: {
entrypoint: 'workflow',
@S0PEX
S0PEX / CMakeLists.txt
Created March 16, 2022 20:45
CMake Driver CMakeLists.txt Example
# Set up the WDK; make sure to edit the path variable, as my FindWdk.cmake is at
# - root
# -- cmake
# ---- FindWdk.cmake
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/cmake")
find_package(WDK REQUIRED)
# Add the include directories for headers (include/) and sources (src/)
include_directories(
${CMAKE_CURRENT_SOURCE_DIR}/include