Skip to content

Instantly share code, notes, and snippets.

colvarsTrajFrequency 5000
colvarsRestartFrequency 5000
indexFile ../complex.ndx
colvar {
name RMSD
rmsd {
atoms {
indexGroup ligand
}
refpositionsfile ./complex_largeBox.xyz
@HanatoK
HanatoK / alad.abf1.czar.grad
Created September 12, 2022 08:30
Problematic gradient files
# 2
# -197.477 0.0345828 150 0
# -16.6357 0.0451627 150 0
-197.46 -16.6132 0 0
-197.46 -16.568 0 0
-197.46 -16.5228 0 0
-197.46 -16.4777 0 0
-197.46 -16.4325 0 0
-197.46 -16.3873 0 0
@HanatoK
HanatoK / gist:cc03687d55caa6c9a730a3a01ef1de06
Created August 11, 2022 15:57
CZAR grad file that cannot be integrated
This file has been truncated, but you can view the full file.
# 2
# -12.2151 0.0790927 200 0
# 1.49417 0.108057 200 0
-12.1755 1.54819 0 0
-12.1755 1.65625 0 0
-12.1755 1.76431 0 0
-12.1755 1.87237 0 0
-12.1755 1.98042 0 0
-12.1755 2.08848 0 0
@HanatoK
HanatoK / shared_from_this.cpp
Created March 14, 2022 20:44
Use dynamic_pointer_cast to avoid bad_weak_ptr
#include <memory>
#include <iostream>
#include <type_traits>
#include <typeinfo>
#include <string>
struct Executor;
struct Intepreter;
struct Executor1 {
Executor1(const std::shared_ptr<Executor>& executor,
@HanatoK
HanatoK / enum_to_type.cpp
Created March 7, 2022 17:55
C++ map strongly typed enum to type (C++17)
#include <iostream>
#include <string>
#include <vector>
#include <utility>
#include <type_traits>
// Strongly-typed (scoped) enum used as the example key for the
// enum-to-type mapping exercise; A1 and A2 are the two enumerators
// that presumably get mapped to distinct types further down the
// (truncated) gist — TODO confirm against the full file.
enum class Foo {
A1, A2
};
@HanatoK
HanatoK / observer_pattern.cpp
Created November 21, 2021 11:16
C++ observer pattern with smart pointers
#include <iostream>
#include <algorithm>
#include <vector>
#include <memory>
#include <string>
class SubjectBase;
class ObserverBase {
public:
@HanatoK
HanatoK / main_boost.cpp
Created June 8, 2021 11:11
Read .xz file via boost
// main_boost.cpp
// g++ main_boost.cpp -o main_boost -lboost_iostreams
#include <boost/iostreams/filtering_stream.hpp>
#include <boost/iostreams/filter/lzma.hpp>
#include <iostream>
#include <fstream>
#include <string>
int main() {
const std::string filename("test.dat.xz");
WARNING:tensorflow:AutoGraph could not transform <function WhileV2.__call__.<locals>.while_fn at 0x7f3eaa4711f0> and will run it as-is.
Please report this to the TensorFlow team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output.
Cause: module 'gast' has no attribute 'Index'
To silence this warning, decorate the function with @tf.autograph.experimental.do_not_convert
ERROR:tensorflow:Got error while pfor was converting op name: "custom_loss/loop_body/while"
op: "StatelessWhile"
input: "custom_loss/loop_body/while/loop_counter"
input: "custom_loss/loop_body/while/maximum_iterations"
input: "custom_loss/loop_body/GatherV2"
attr {
@HanatoK
HanatoK / main.cpp
Created October 24, 2020 09:45
Test for ELU and ReLU with Lepton
#include <iostream>
#include <string>
#include "Lepton.h"
void testElu(const double x) {
const std::string expression{"elu(x, 2.0)"};
std::cout << "Test expression: " << expression << "\n";
Lepton::ParsedExpression parsed_expression;
parsed_expression = Lepton::Parser::parse(expression);
@HanatoK
HanatoK / main.cu
Last active June 11, 2020 23:28
cuda copy to symbol, device to device
#include <cstdio>
#define N 3
__device__ int d_array[N] = {1, 2, 3};
__device__ __constant__ int d_array_constant[N];
// File-local helper: translate a CUDA runtime error code into its
// symbolic enumerator name (a string owned by the CUDA runtime) by
// delegating to cudaGetErrorName().
// NOTE(review): named with a leading underscore like the helper in the
// CUDA samples' helper_cuda.h — presumably modeled on it; confirm.
static const char *_cudaGetErrorEnum(cudaError_t error) {
return cudaGetErrorName(error);
}