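// Position-based dynamics (PBD) softbody simulation of a tetrahedral torus,
// with distance constraints on the mesh edges and volume constraints on the
// tetrahedra. Both a CPU reference path and a CUDA path are provided; the
// mesh data (torus.ele, torus.face, torus.node) is appended after the source.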
#include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <iostream>
#include <vector>
#include <fstream>
#include <string>
#include <sstream>
#include <cassert>
#include <set>
template <typename T>
struct Vec2 {
    T x, y;
    __host__ __device__ Vec2() : x(0), y(0) {}
    __host__ __device__ Vec2(T x, T y) : x(x), y(y) {}
    __host__ __device__ T operator[](int idx) { if (idx == 0) return x; else return y; }
    __host__ __device__ Vec2 operator+(const Vec2& v) const { return Vec2(x + v.x, y + v.y); }
    __host__ __device__ Vec2 operator-(const Vec2& v) const { return Vec2(x - v.x, y - v.y); }
    __host__ __device__ Vec2& operator+=(const Vec2& v) { x += v.x; y += v.y; return *this; }
    __host__ __device__ Vec2& operator-=(const Vec2& v) { x -= v.x; y -= v.y; return *this; }
    __host__ __device__ Vec2& operator*=(T s) { x *= s; y *= s; return *this; }
    __host__ __device__ Vec2& operator/=(T s) { x /= s; y /= s; return *this; }
    __host__ __device__ T dot(const Vec2& v) const { return x * v.x + y * v.y; }
    __host__ __device__ T cross(const Vec2& v) const { return x * v.y - y * v.x; }
    __host__ __device__ T norm() const { return sqrt(x * x + y * y); }
    __host__ __device__ T square_norm() const { return x * x + y * y; }
    __host__ __device__ Vec2 normalized() const { return *this / norm(); }
};
template <typename T>
__host__ __device__ Vec2<T> operator*(T s, const Vec2<T>& v) { return Vec2<T>(v.x * s, v.y * s); }
template <typename T>
__host__ __device__ Vec2<T> operator/(const Vec2<T>& v, T s) { return Vec2<T>(v.x / s, v.y / s); }
template <typename T>
struct Vec3 {
    T x, y, z;
    __host__ __device__ Vec3() : x(0), y(0), z(0) {}
    __host__ __device__ Vec3(T x, T y, T z) : x(x), y(y), z(z) {}
    __host__ __device__ T operator[](int idx) { if (idx == 0) return x; else if (idx == 1) return y; else return z; }
    __host__ __device__ Vec3 operator+(const Vec3& v) const { return Vec3(x + v.x, y + v.y, z + v.z); }
    __host__ __device__ Vec3 operator-(const Vec3& v) const { return Vec3(x - v.x, y - v.y, z - v.z); }
    __host__ __device__ Vec3& operator+=(const Vec3& v) { x += v.x; y += v.y; z += v.z; return *this; }
    __host__ __device__ Vec3& operator-=(const Vec3& v) { x -= v.x; y -= v.y; z -= v.z; return *this; }
    __host__ __device__ Vec3& operator*=(T s) { x *= s; y *= s; z *= s; return *this; }
    __host__ __device__ Vec3& operator/=(T s) { x /= s; y /= s; z /= s; return *this; }
    __host__ __device__ T dot(const Vec3& v) const { return x * v.x + y * v.y + z * v.z; }
    __host__ __device__ Vec3 cross(const Vec3& v) const { return Vec3(y * v.z - z * v.y, z * v.x - x * v.z, x * v.y - y * v.x); }
    __host__ __device__ T norm() const { return sqrt(x * x + y * y + z * z); }
    __host__ __device__ T square_norm() const { return x * x + y * y + z * z; }
    __host__ __device__ Vec3 normalized() const { return *this / norm(); }
};
template <typename T>
__host__ __device__ Vec3<T> operator*(const Vec3<T>& v, T s) { return Vec3<T>(v.x * s, v.y * s, v.z * s); }
template <typename T>
__host__ __device__ Vec3<T> operator/(const Vec3<T>& v, T s) { return Vec3<T>(v.x / s, v.y / s, v.z / s); }
template <typename T>
__host__ __device__ Vec3<T> operator*(T s, const Vec3<T>& v) { return Vec3<T>(v.x * s, v.y * s, v.z * s); }
template <typename T>
__host__ __device__ Vec3<T> operator/(T s, const Vec3<T>& v) { return Vec3<T>(s / v.x, s / v.y, s / v.z); }
template <typename T>
struct Vec4 {
    T x, y, z, w;
    __host__ __device__ Vec4() : x(0), y(0), z(0), w(0) {}
    __host__ __device__ Vec4(T x, T y, T z, T w) : x(x), y(y), z(z), w(w) {}
};
using Real = float;
using Vec4i = Vec4<int>;
using Vec3f = Vec3<Real>;
using Vec3i = Vec3<int>;
using Vec2i = Vec2<int>;
class Softbody {
private:
    // Host-side state; w_ holds inverse masses (0 marks a pinned vertex).
    std::vector<Vec3f> pos_;
    std::vector<Vec3f> old_pos_;
    std::vector<Vec3f> predict_pos_;
    std::vector<Vec3f> vel_;
    std::vector<Real> w_;
    std::vector<Vec3i> faces_;
    std::vector<Vec2i> edges_;
    std::vector<Vec4i> tets_;
    std::vector<Real> rest_length_;
    std::vector<Real> rest_volume_;
    int num_vertices_{ 0 };
    int num_edges_{ 0 };
    int num_faces_{ 0 };
    int num_tets_{ 0 };

public:
    // Device-side copies of the state, stored as flat component arrays.
    Real* d_pos;
    Real* d_old_pos;
    Real* d_predict_pos;
    Real* d_vels;
    Real* d_w;
    int* d_edges;
    int* d_tets;
    Real* d_rest_length;
    Real* d_rest_volume;
private:
    // Reads a TetGen-style .node file: a header line
    // "<#points> <dim> <#attributes> <#boundary markers>" followed by one
    // "<index> <x> <y> <z>" line per vertex.
    void read_node_file(const std::string node_file) {
        std::ifstream infile(node_file);
        std::string line;
        std::getline(infile, line);
        std::stringstream ss(line);
        int tmp1, tmp2, tmp3;
        ss >> num_vertices_ >> tmp1 >> tmp2 >> tmp3;
        while (std::getline(infile, line)) {
            if (line.empty() || line[0] == '#') continue;
            std::stringstream ss(line);
            int idx;
            Real a, b, c;
            ss >> idx >> a >> b >> c;
            pos_.push_back(Vec3f(a, b, c));
        }
        assert((int)pos_.size() == num_vertices_);
        infile.close();
    }
    // Reads a TetGen-style .ele file: "<#tets> <nodes/tet> <#attributes>"
    // followed by one "<index> <v0> <v1> <v2> <v3>" line per tetrahedron.
    void read_ele_file(const std::string ele_file) {
        std::ifstream infile(ele_file);
        std::string line;
        std::getline(infile, line);
        std::stringstream ss(line);
        int tmp1, tmp2;
        ss >> num_tets_ >> tmp1 >> tmp2;
        while (std::getline(infile, line)) {
            if (line.empty() || line[0] == '#') continue;
            std::stringstream ss(line);
            int idx, a, b, c, d;
            ss >> idx >> a >> b >> c >> d;
            tets_.push_back(Vec4i(a, b, c, d));
        }
        assert(num_tets_ == (int)tets_.size());
        infile.close();
    }
    // Reads a TetGen-style .face file: "<#faces> <nodes/face> <#markers>"
    // followed by one "<index> <v0> <v1> <v2>" line per surface triangle.
    void read_face_file(const std::string face_file) {
        std::ifstream infile(face_file);
        std::string line;
        std::getline(infile, line);
        std::stringstream ss(line);
        int tmp1, tmp2;
        ss >> num_faces_ >> tmp1 >> tmp2;
        while (std::getline(infile, line)) {
            if (line.empty() || line[0] == '#') continue;
            std::stringstream ss(line);
            int idx, a, b, c;
            ss >> idx >> a >> b >> c;
            faces_.push_back(Vec3i(a, b, c));
        }
        assert(num_faces_ == (int)faces_.size());
        infile.close();
    }
    // Collects the unique edges of all tetrahedra; each edge is stored once
    // as a (min, max) index pair via the ordered set.
    void extract_edges() {
        std::set<std::pair<int, int>> edges;
        for (int i = 0; i < num_tets_; i++) {
            int idx0 = tets_[i].x, idx1 = tets_[i].y, idx2 = tets_[i].z, idx3 = tets_[i].w;
            edges.insert(std::make_pair(std::min(idx0, idx1), std::max(idx0, idx1)));
            edges.insert(std::make_pair(std::min(idx0, idx2), std::max(idx0, idx2)));
            edges.insert(std::make_pair(std::min(idx0, idx3), std::max(idx0, idx3)));
            edges.insert(std::make_pair(std::min(idx1, idx2), std::max(idx1, idx2)));
            edges.insert(std::make_pair(std::min(idx1, idx3), std::max(idx1, idx3)));
            edges.insert(std::make_pair(std::min(idx2, idx3), std::max(idx2, idx3)));
        }
        for (auto e = edges.begin(); e != edges.end(); e++) {
            edges_.push_back(Vec2i(e->first, e->second));
        }
        num_edges_ = (int)edges.size();
    }
public:
    Softbody(const std::string node_file, const std::string ele_file, const std::string face_file) {
        read_node_file(node_file);
        read_ele_file(ele_file);
        read_face_file(face_file);
        extract_edges();
    }
    void init_physical_data() {
        old_pos_ = pos_;
        predict_pos_ = pos_;
        vel_.resize(num_vertices_, Vec3f(0.0f, 0.0f, 0.0f));
        w_.resize(num_vertices_, 1.0f);
    }
    void init_constraints() {
        rest_length_.resize(num_edges_);
        for (int i = 0; i < num_edges_; i++) {
            int idx0 = edges_[i].x, idx1 = edges_[i].y;
            rest_length_[i] = (pos_[idx0] - pos_[idx1]).norm();
        }
        rest_volume_.resize(num_tets_);
        for (int i = 0; i < num_tets_; i++) {
            int idx0 = tets_[i].x, idx1 = tets_[i].y, idx2 = tets_[i].z, idx3 = tets_[i].w;
            Vec3f p10 = pos_[idx1] - pos_[idx0];
            Vec3f p20 = pos_[idx2] - pos_[idx0];
            Vec3f p30 = pos_[idx3] - pos_[idx0];
            rest_volume_[i] = p30.dot(p10.cross(p20)) / 6.0f;
        }
    }
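    // Uploads the state to the GPU. Each std::vector of Vec2i/Vec3f/Vec4i is
    // copied as a flat array of its components, which relies on those structs
    // being tightly packed (they are: plain scalar members, no padding).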
    void init_gpu_data() {
        cudaMalloc((void**)&d_pos, sizeof(Real) * 3 * num_vertices_);
        cudaMalloc((void**)&d_old_pos, sizeof(Real) * 3 * num_vertices_);
        cudaMalloc((void**)&d_predict_pos, sizeof(Real) * 3 * num_vertices_);
        cudaMalloc((void**)&d_vels, sizeof(Real) * 3 * num_vertices_);
        cudaMalloc((void**)&d_w, sizeof(Real) * num_vertices_);
        cudaMalloc((void**)&d_edges, sizeof(int) * 2 * num_edges_);
        cudaMalloc((void**)&d_tets, sizeof(int) * 4 * num_tets_);
        cudaMalloc((void**)&d_rest_length, sizeof(Real) * num_edges_);
        cudaMalloc((void**)&d_rest_volume, sizeof(Real) * num_tets_);
        cudaMemcpy(d_pos, pos_.data(), sizeof(Real) * 3 * num_vertices_, cudaMemcpyHostToDevice);
        cudaMemcpy(d_old_pos, old_pos_.data(), sizeof(Real) * 3 * num_vertices_, cudaMemcpyHostToDevice);
        cudaMemcpy(d_predict_pos, predict_pos_.data(), sizeof(Real) * 3 * num_vertices_, cudaMemcpyHostToDevice);
        cudaMemcpy(d_vels, vel_.data(), sizeof(Real) * 3 * num_vertices_, cudaMemcpyHostToDevice);
        cudaMemcpy(d_w, w_.data(), sizeof(Real) * num_vertices_, cudaMemcpyHostToDevice);
        cudaMemcpy(d_edges, edges_.data(), sizeof(int) * 2 * num_edges_, cudaMemcpyHostToDevice);
        cudaMemcpy(d_tets, tets_.data(), sizeof(int) * 4 * num_tets_, cudaMemcpyHostToDevice);
        cudaMemcpy(d_rest_length, rest_length_.data(), sizeof(Real) * num_edges_, cudaMemcpyHostToDevice);
        cudaMemcpy(d_rest_volume, rest_volume_.data(), sizeof(Real) * num_tets_, cudaMemcpyHostToDevice);
    }
    void data2cpu() {
        cudaMemcpy(pos_.data(), d_pos, 3 * sizeof(Real) * num_vertices_, cudaMemcpyDeviceToHost);
    }
    void init_static_points(const std::vector<int>& static_points) {
        for (auto p : static_points)
            w_[p] = 0.0f;  // zero inverse mass pins the vertex in place
    }
    void semi_euler(Real h) {
        old_pos_ = pos_;
        for (int i = 0; i < num_vertices_; i++) {
            if (w_[i] != 0.0f) {
                vel_[i] += h * Vec3f(0.0f, -9.8f, 0.0f);
                pos_[i] += h * vel_[i];
            }
        }
        predict_pos_ = pos_;
    }
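    // PBD distance constraint: C(p0, p1) = |p0 - p1| - L0, with unit gradient
    // n = (p0 - p1) / |p0 - p1|. Each edge is projected by moving its
    // endpoints along +-n, weighted by inverse mass and scaled by a stiffness
    // factor (0.4 below):
    //   dp0 = -stiffness * w0 / (w0 + w1) * C * n
    //   dp1 = +stiffness * w1 / (w0 + w1) * C * n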
    void solve_distance_constraints() {
        for (int i = 0; i < num_edges_; i++) {
            int idx0 = edges_[i].x, idx1 = edges_[i].y;
            Real w0 = w_[idx0], w1 = w_[idx1];
            Real w_sum = w0 + w1;
            if (w_sum == 0.0f) continue;
            Vec3f p0p1 = predict_pos_[idx0] - predict_pos_[idx1];
            Real constraint = p0p1.norm() - rest_length_[i];
            Vec3f normal = p0p1.normalized();
            Real delta_lambda = constraint / w_sum;
            Vec3f corr = -0.4f * delta_lambda * normal;
            if (w0 != 0.0f)
                pos_[idx0] += w0 * corr;
            if (w1 != 0.0f)
                pos_[idx1] -= w1 * corr;
        }
    }
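    // PBD volume constraint: C = V - V0 with V = (1/6) (p1-p0)x(p2-p0).(p3-p0).
    // The gradients of 6V with respect to the four vertices are the
    // opposing-face cross products computed below, and each vertex receives
    //   dp_i = -stiffness * w_i * C / (sum_j w_j |grad_j|^2) * grad_i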
    void solve_volume_constraints() {
        for (int i = 0; i < num_tets_; i++) {
            int idx0 = tets_[i].x, idx1 = tets_[i].y, idx2 = tets_[i].z, idx3 = tets_[i].w;
            Vec3f p0 = pos_[idx0];
            Vec3f p1 = pos_[idx1];
            Vec3f p2 = pos_[idx2];
            Vec3f p3 = pos_[idx3];
            Real w0 = w_[idx0], w1 = w_[idx1], w2 = w_[idx2], w3 = w_[idx3];
            Real volume = (1.0f / 6.0f) * (p1 - p0).cross(p2 - p0).dot(p3 - p0);
            Vec3f grad0 = (p3 - p1).cross(p2 - p1);
            Vec3f grad1 = (p2 - p0).cross(p3 - p0);
            Vec3f grad2 = (p3 - p0).cross(p1 - p0);
            Vec3f grad3 = (p1 - p0).cross(p2 - p0);
            Real lambda = w0 * grad0.square_norm() + w1 * grad1.square_norm()
                        + w2 * grad2.square_norm() + w3 * grad3.square_norm();
            if (fabs(lambda) < 1e-6f) continue;
            lambda = (volume - rest_volume_[i]) / lambda;
            Real stiffness = 0.5f;
            if (w0 != 0.0f) pos_[idx0] -= stiffness * grad0 * lambda * w0;
            if (w1 != 0.0f) pos_[idx1] -= stiffness * grad1 * lambda * w1;
            if (w2 != 0.0f) pos_[idx2] -= stiffness * grad2 * lambda * w2;
            if (w3 != 0.0f) pos_[idx3] -= stiffness * grad3 * lambda * w3;
        }
    }
    void update_velocity(Real h) {
        for (int i = 0; i < num_vertices_; i++) {
            if (w_[i] != 0.0f) {
                vel_[i] = (pos_[i] - old_pos_[i]) / h;
            }
        }
    }
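    // One simulation step: predict positions with semi-implicit Euler, run
    // maxIte iterations of the distance and volume solvers (refreshing the
    // prediction after each pass), then recover velocities from the net
    // position change.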
    void update(Real h, int maxIte) {
        semi_euler(h);
        for (int ite = 0; ite < maxIte; ite++) {
            solve_distance_constraints();
            solve_volume_constraints();
            predict_pos_ = pos_;
        }
        update_velocity(h);
    }
    void write_obj(const std::string obj_file) {
        std::ofstream outfile(obj_file);
        for (int i = 0; i < num_vertices_; i++) {
            outfile << "v " << pos_[i].x << " " << pos_[i].y << " " << pos_[i].z << std::endl;
        }
        for (int i = 0; i < num_faces_; i++) {
            outfile << "f " << faces_[i].x + 1 << " " << faces_[i].y + 1 << " " << faces_[i].z + 1 << std::endl;
        }
        outfile.close();
    }
    int get_num_vertices() { return num_vertices_; }
    int get_num_edges() { return num_edges_; }
    int get_num_faces() { return num_faces_; }
    int get_num_tets() { return num_tets_; }
};
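// GPU path: the kernels below mirror the CPU solvers. One thread handles one
// vertex or one constraint; corrections are computed from the positions of
// the previous iteration (predict_pos) and accumulated into pos with
// atomicAdd, i.e. a Jacobi-style projection. Positions and velocities are
// flat arrays of 3 Reals per vertex.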
__global__ void semi_euler(int num_vertices, Real* w, Real h, Real* pos, Real* vels) {
    int idx = threadIdx.x + blockDim.x * blockIdx.x;
    if (idx < num_vertices) {
        if (w[idx] == 0.0f) return;
        vels[3 * idx + 1] += h * -9.8f;
        pos[3 * idx + 0] += h * vels[3 * idx + 0];
        pos[3 * idx + 1] += h * vels[3 * idx + 1];
        pos[3 * idx + 2] += h * vels[3 * idx + 2];
    }
}
__global__ void solve_distance_constraint(int num_edges, Real* w, Real* predict_pos, Real* pos, int* edges, Real* rest_length)
{
    int idx = threadIdx.x + blockDim.x * blockIdx.x;
    if (idx < num_edges) {
        int idx0 = edges[2 * idx], idx1 = edges[2 * idx + 1];
        Vec3f p0(predict_pos[3 * idx0 + 0], predict_pos[3 * idx0 + 1], predict_pos[3 * idx0 + 2]);
        Vec3f p1(predict_pos[3 * idx1 + 0], predict_pos[3 * idx1 + 1], predict_pos[3 * idx1 + 2]);
        Vec3f normal = p0 - p1;
        Real dis = normal.norm() - rest_length[idx];
        Vec3f n = normal.normalized();
        Real sum_inv_mass = w[idx0] + w[idx1];
        if (sum_inv_mass == 0.0f) return;
        Real deltaLambda = -dis / sum_inv_mass;
        Vec3f corr = 0.3f * deltaLambda * n;
        if (w[idx0] != 0.0f) {
            atomicAdd(&pos[3 * idx0 + 0], w[idx0] * corr.x);
            atomicAdd(&pos[3 * idx0 + 1], w[idx0] * corr.y);
            atomicAdd(&pos[3 * idx0 + 2], w[idx0] * corr.z);
        }
        if (w[idx1] != 0.0f) {
            atomicAdd(&pos[3 * idx1 + 0], -w[idx1] * corr.x);
            atomicAdd(&pos[3 * idx1 + 1], -w[idx1] * corr.y);
            atomicAdd(&pos[3 * idx1 + 2], -w[idx1] * corr.z);
        }
    }
}
__global__ void solve_volume_constraint(int num_tets, Real* w, Real* predict_pos, Real* pos, int* tets, Real* rest_volume)
{
    int idx = threadIdx.x + blockDim.x * blockIdx.x;
    if (idx < num_tets) {
        int idx0 = tets[4 * idx], idx1 = tets[4 * idx + 1], idx2 = tets[4 * idx + 2], idx3 = tets[4 * idx + 3];
        Vec3f p0(predict_pos[3 * idx0 + 0], predict_pos[3 * idx0 + 1], predict_pos[3 * idx0 + 2]);
        Vec3f p1(predict_pos[3 * idx1 + 0], predict_pos[3 * idx1 + 1], predict_pos[3 * idx1 + 2]);
        Vec3f p2(predict_pos[3 * idx2 + 0], predict_pos[3 * idx2 + 1], predict_pos[3 * idx2 + 2]);
        Vec3f p3(predict_pos[3 * idx3 + 0], predict_pos[3 * idx3 + 1], predict_pos[3 * idx3 + 2]);
        Real w0 = w[idx0], w1 = w[idx1], w2 = w[idx2], w3 = w[idx3];
        Real volume = (1.0f / 6.0f) * (p1 - p0).cross(p2 - p0).dot(p3 - p0);
        Vec3f grad0 = (p3 - p1).cross(p2 - p1);
        Vec3f grad1 = (p2 - p0).cross(p3 - p0);
        Vec3f grad2 = (p3 - p0).cross(p1 - p0);
        Vec3f grad3 = (p1 - p0).cross(p2 - p0);
        Real lambda = w0 * grad0.square_norm() + w1 * grad1.square_norm()
                    + w2 * grad2.square_norm() + w3 * grad3.square_norm();
        if (fabs(lambda) < 1e-6f) return;
        lambda = (volume - rest_volume[idx]) / lambda;
        Real stiffness = 0.5f;
        if (w0 != 0.0f) {
            atomicAdd(&pos[3 * idx0 + 0], -stiffness * lambda * w0 * grad0.x);
            atomicAdd(&pos[3 * idx0 + 1], -stiffness * lambda * w0 * grad0.y);
            atomicAdd(&pos[3 * idx0 + 2], -stiffness * lambda * w0 * grad0.z);
        }
        if (w1 != 0.0f) {
            atomicAdd(&pos[3 * idx1 + 0], -stiffness * lambda * w1 * grad1.x);
            atomicAdd(&pos[3 * idx1 + 1], -stiffness * lambda * w1 * grad1.y);
            atomicAdd(&pos[3 * idx1 + 2], -stiffness * lambda * w1 * grad1.z);
        }
        if (w2 != 0.0f) {
            atomicAdd(&pos[3 * idx2 + 0], -stiffness * lambda * w2 * grad2.x);
            atomicAdd(&pos[3 * idx2 + 1], -stiffness * lambda * w2 * grad2.y);
            atomicAdd(&pos[3 * idx2 + 2], -stiffness * lambda * w2 * grad2.z);
        }
        if (w3 != 0.0f) {
            atomicAdd(&pos[3 * idx3 + 0], -stiffness * lambda * w3 * grad3.x);
            atomicAdd(&pos[3 * idx3 + 1], -stiffness * lambda * w3 * grad3.y);
            atomicAdd(&pos[3 * idx3 + 2], -stiffness * lambda * w3 * grad3.z);
        }
    }
}
__global__ void update_vel(int num_vertices, Real h, Real* pos, Real* old_pos, Real* vel, Real* w)
{
    int idx = threadIdx.x + blockDim.x * blockIdx.x;
    if (idx < num_vertices) {
        if (w[idx] != 0.0f) {
            for (int i = 0; i < 3; i++)
                vel[3 * idx + i] = (pos[3 * idx + i] - old_pos[3 * idx + i]) / h;
        }
    }
}
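// Per-frame GPU pipeline: save the previous positions, integrate, seed the
// prediction buffer, alternate the two constraint kernels for maxIte rounds
// (refreshing the prediction from the corrected positions after each round),
// derive velocities, then copy positions back to the host for OBJ export.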
void simulate_on_gpu() {
    Softbody softbody("../model/torus.node", "../model/torus.ele", "../model/torus.face");
    softbody.init_physical_data();
    softbody.init_static_points({ 0, 1, 2 });
    softbody.init_constraints();
    softbody.init_gpu_data();
    Real h = 0.01f;
    int maxIte = 10;
    int max_frame = 100;
    int nv = softbody.get_num_vertices();
    int ne = softbody.get_num_edges();
    int nt = softbody.get_num_tets();
    int nthreads = 256;
    int nblocks = (nv + nthreads - 1) / nthreads;
    int ndcblocks = (ne + nthreads - 1) / nthreads;
    int ntcblocks = (nt + nthreads - 1) / nthreads;
    for (int frame = 0; frame < max_frame; frame++) {
        cudaMemcpy(softbody.d_old_pos, softbody.d_pos, sizeof(Real) * 3 * nv, cudaMemcpyDeviceToDevice);
        semi_euler<<<nblocks, nthreads>>>(nv, softbody.d_w, h, softbody.d_pos, softbody.d_vels);
        cudaError_t error = cudaGetLastError();
        if (error != cudaSuccess)
            std::cout << "Failed to run semi-Euler integrator" << std::endl;
        cudaMemcpy(softbody.d_predict_pos, softbody.d_pos, sizeof(Real) * 3 * nv, cudaMemcpyDeviceToDevice);
        for (int i = 0; i < maxIte; i++) {
            solve_distance_constraint<<<ndcblocks, nthreads>>>(ne, softbody.d_w, softbody.d_predict_pos, softbody.d_pos, softbody.d_edges, softbody.d_rest_length);
            solve_volume_constraint<<<ntcblocks, nthreads>>>(nt, softbody.d_w, softbody.d_predict_pos, softbody.d_pos, softbody.d_tets, softbody.d_rest_volume);
            cudaMemcpy(softbody.d_predict_pos, softbody.d_pos, sizeof(Real) * 3 * nv, cudaMemcpyDeviceToDevice);
        }
        update_vel<<<nblocks, nthreads>>>(nv, h, softbody.d_pos, softbody.d_old_pos, softbody.d_vels, softbody.d_w);
        softbody.data2cpu();
        softbody.write_obj("../output/frame" + std::to_string(frame) + ".obj");
    }
}
void simulate_on_cpu() {
    Softbody softbody("../model/torus.node", "../model/torus.ele", "../model/torus.face");
    softbody.init_physical_data();
    softbody.init_static_points({ 0, 1, 2 });
    softbody.init_constraints();
    int max_frame = 100;
    for (int frame = 0; frame < max_frame; frame++) {
        softbody.update(0.01f, 10);
        softbody.write_obj("../output/frame" + std::to_string(frame) + ".obj");
    }
}
int main() {
    simulate_on_gpu();
    return 0;
}
torus.ele:
491 4 0
0 188 187 189 190
1 27 37 36 38
2 114 116 126 127
3 79 66 78 60
4 102 111 114 112
5 72 60 63 73
6 114 111 123 112
7 134 121 132 124
8 60 72 71 73
9 114 115 125 116
10 142 141 131 140
11 29 27 16 26
12 184 181 172 180
13 145 144 157 155
14 128 137 134 132
15 86 83 96 84
16 114 124 123 121
17 130 138 128 137
18 92 94 83 93
19 138 140 147 137
20 33 48 38 47
21 140 144 147 137
22 134 128 132 121
23 89 81 70 80
24 150 144 140 142
25 131 128 130 121
26 157 155 153 164
27 145 147 136 146
28 3 16 2 0
29 78 60 67 68
30 150 142 151 152
31 86 99 90 89
32 73 71 82 72
33 16 3 5 0
34 46 47 57 48
35 132 142 131 140
36 181 184 172 183
37 66 63 74 65
38 3 195 194 5
39 39 20 30 31
40 110 118 109 108
41 178 186 176 185
42 88 86 98 87
43 111 114 123 121
44 82 81 71 70
45 142 144 132 143
46 72 60 71 62
47 26 24 38 25
48 191 3 2 0
49 46 57 56 55
50 71 60 73 70
51 96 92 90 100
52 102 94 96 100
53 26 14 24 25
54 49 48 40 50
55 187 170 180 172
56 60 61 71 62
57 99 109 108 100
58 63 66 53 65
59 60 72 63 62
60 71 73 82 70
61 38 23 33 35
62 38 35 46 36
63 99 96 90 100
64 121 129 130 120
65 8 198 9 0
66 111 123 112 121
67 147 140 150 144
68 83 75 82 73
69 110 114 107 108
70 94 92 96 100
71 107 96 108 100
72 94 93 92 103
73 102 94 92 103
74 114 123 113 112
75 123 112 121 122
76 60 79 73 70
77 145 147 146 156
78 178 165 164 177
79 102 114 103 112
80 103 114 113 112
81 98 107 96 108
82 103 96 105 100
83 134 137 144 132
84 127 124 135 126
85 190 195 184 194
86 111 114 121 110
87 137 132 140 144
88 138 130 140 137
89 129 121 110 120
90 147 144 157 145
91 123 124 133 132
92 124 134 133 132
93 107 97 98 96
94 134 143 133 132
95 134 144 143 132
96 121 123 132 124
97 123 121 132 122
98 134 128 124 135
99 78 86 77 76
100 102 111 101 110
101 111 102 114 110
102 110 128 129 118
103 130 129 128 139
104 125 114 116 126
105 131 137 128 132
106 110 129 119 118
107 97 87 98 96
108 147 145 157 156
109 116 127 118 117
110 114 107 108 100
111 84 94 96 85
112 81 82 92 90
113 78 60 66 67
114 184 186 178 185
115 138 140 139 148
116 157 167 156 165
117 101 102 110 100
118 124 110 114 127
119 82 81 83 90
120 167 166 156 165
121 128 136 137 127
122 144 142 132 140
123 140 147 150 148
124 138 130 128 139
125 131 128 121 132
126 86 88 89 78
127 190 187 198 188
128 140 138 147 148
129 102 114 110 100
130 135 124 125 126
131 176 166 167 165
132 129 121 130 128
133 79 66 60 73
134 131 137 132 140
135 150 147 158 148
136 136 134 128 137
137 134 136 128 135
138 137 134 144 136
139 137 131 128 130
140 130 138 140 139
141 140 139 148 149
142 110 114 108 100
143 86 78 75 76
144 147 144 136 137
145 124 110 121 114
146 124 114 126 127
147 124 127 135 128
148 144 145 147 136
149 144 135 145 136
150 144 134 135 136
151 73 79 82 70
152 94 103 96 105
153 159 168 158 150
154 124 114 125 126
155 86 84 96 85
156 131 137 140 130
157 81 83 75 82
158 54 43 44 45
159 110 116 127 118
160 75 81 79 78
161 89 81 80 90
162 92 91 90 100
163 81 75 79 82
164 110 107 118 108
165 82 83 92 90
166 84 86 75 85
167 135 136 127 126
168 96 92 83 90
169 21 33 22 32
170 107 114 105 100
171 102 94 103 96
172 86 98 96 99
173 128 124 110 121
174 114 110 116 127
175 128 121 110 129
176 14 3 2 12
177 83 75 73 84
178 124 128 110 127
179 23 12 21 22
180 96 107 105 100
181 119 110 118 109
182 96 99 108 100
183 110 114 116 107
184 92 96 83 94
185 83 81 89 90
186 92 102 101 100
187 29 27 17 16
188 86 87 88 78
189 57 46 48 58
190 110 128 118 127
191 103 102 96 100
192 84 94 83 96
193 86 99 96 90
194 94 102 92 100
195 98 96 99 108
196 107 110 118 116
197 81 78 89 79
198 107 116 118 117
199 81 92 91 90
200 60 61 62 50
201 86 88 98 89
202 103 94 104 105
203 73 74 84 75
204 82 75 79 73
205 81 79 89 70
206 83 81 75 89
207 83 89 75 86
208 89 98 86 99
209 86 98 87 96
210 81 79 70 82
211 83 86 96 90
212 91 92 101 100
213 187 189 179 188
214 109 110 108 100
215 114 103 104 105
216 96 107 106 105
217 114 115 116 105
218 63 64 74 65
219 60 63 50 62
220 48 59 58 50
221 114 116 107 105
222 107 116 106 105
223 75 74 65 66
224 66 73 63 60
225 172 184 174 183
226 96 94 105 95
227 29 27 28 17
228 96 94 95 85
229 52 62 51 50
230 114 102 103 100
231 114 103 105 100
232 79 75 78 66
233 66 73 74 63
234 83 86 75 84
235 66 78 67 76
236 67 78 77 76
237 62 61 51 50
238 63 64 65 54
239 81 75 89 78
240 89 86 78 75
241 39 29 20 21
242 23 21 38 22
243 73 75 66 74
244 59 68 58 50
245 86 87 78 77
246 27 38 29 28
247 198 199 9 0
248 65 66 53 55
249 16 18 19 8
250 67 60 50 68
251 60 79 69 68
252 79 60 78 68
253 38 46 47 36
254 31 33 21 32
255 75 78 66 76
256 23 26 38 29
257 37 47 36 38
258 2 14 12 10
259 3 192 191 2
260 52 46 57 58
261 39 33 21 31
262 186 184 187 197
263 196 186 197 195
264 12 14 23 10
265 168 169 178 160
266 66 52 57 58
267 66 67 58 57
268 199 198 189 190
269 42 51 41 50
270 42 52 46 53
271 35 23 33 34
272 29 16 18 19
273 195 190 184 198
274 1 191 2 0
275 187 179 180 170
276 29 16 19 10
277 36 38 35 25
278 17 29 16 18
279 28 29 17 18
280 36 27 38 26
281 52 46 58 50
282 2 16 10 0
283 184 178 187 172
284 43 54 53 45
285 178 186 187 176
286 16 8 10 0
287 16 18 8 7
288 186 184 178 187
289 198 184 187 190
290 150 147 144 158
291 148 159 158 150
292 150 160 162 161
293 16 17 18 7
294 79 75 66 73
295 198 188 189 190
296 187 189 190 180
297 157 165 155 164
298 176 187 178 177
299 168 167 157 165
300 176 175 165 164
301 198 184 197 187
302 177 165 176 167
303 144 157 155 153
304 157 168 165 164
305 195 198 184 197
306 187 179 170 178
307 128 134 124 121
308 147 144 158 157
309 156 157 165 155
310 148 149 159 150
311 163 164 152 153
312 145 157 156 155
313 168 169 160 150
314 186 184 197 195
315 144 142 152 153
316 179 178 160 170
317 172 170 180 171
318 144 142 150 152
319 178 169 179 160
320 178 165 177 167
321 177 165 164 176
322 190 3 0 195
323 6 196 197 195
324 189 187 179 180
325 165 168 178 164
326 184 192 181 191
327 16 6 7 5
328 8 9 10 0
329 197 198 7 195
330 6 197 7 195
331 142 141 150 151
332 188 179 187 178
333 186 184 195 185
334 198 8 7 195
335 19 16 8 10
336 159 169 168 150
337 153 155 154 164
338 187 170 172 178
339 142 141 140 150
340 148 140 149 150
341 160 170 172 171
342 172 180 181 171
343 162 150 161 151
344 164 160 178 172
345 142 144 143 153
346 180 184 190 187
347 184 180 190 181
348 157 144 152 153
349 178 160 170 172
350 160 172 162 161
351 172 160 171 161
352 187 184 172 180
353 184 192 183 181
354 155 144 153 154
355 178 176 164 172
356 163 172 174 173
357 172 176 185 178
358 163 160 150 164
359 164 172 176 174
360 172 160 162 163
361 176 185 175 174
362 181 172 182 183
363 162 150 151 152
364 21 33 38 22
365 86 90 83 89
366 157 144 158 152
367 158 144 150 152
368 184 185 172 174
369 176 175 164 174
370 163 172 164 174
371 172 174 173 183
372 184 178 172 185
373 11 1 12 10
374 176 172 185 174
375 176 178 164 177
376 194 3 193 192
377 181 192 183 182
378 164 160 172 163
379 160 164 178 168
380 152 157 153 164
381 168 165 178 167
382 160 163 150 162
383 150 164 160 168
384 150 163 164 152
385 168 157 158 164
386 158 157 152 164
387 168 158 150 164
388 150 158 152 164
389 65 63 54 53
390 14 16 15 25
391 38 29 39 21
392 190 184 181 191
393 192 184 183 193
394 192 194 184 193
395 184 190 194 191
396 194 192 184 191
397 194 190 3 191
398 63 50 52 66
399 194 3 192 191
400 195 0 190 198
401 26 14 23 24
402 35 38 46 33
403 42 52 51 50
404 29 26 38 27
405 14 3 12 13
406 199 198 190 0
407 1 2 10 0
408 14 3 4 5
409 16 29 26 23
410 14 16 26 23
411 38 24 35 25
412 16 29 23 10
413 8 16 7 0
414 19 8 9 10
415 14 16 23 10
416 14 12 23 13
417 1 2 12 10
418 48 42 40 50
419 16 14 26 25
420 24 26 38 23
421 38 36 26 25
422 190 194 3 195
423 7 6 195 5
424 16 14 15 5
425 3 14 16 5
426 4 3 194 5
427 2 14 16 3
428 14 2 16 10
429 39 20 31 21
430 24 23 35 34
431 69 68 59 50
432 23 38 33 22
433 46 35 34 45
434 44 43 34 45
435 52 42 46 50
436 49 59 48 50
437 41 42 40 31
438 7 16 5 0
439 195 3 0 5
440 198 8 195 0
441 190 3 191 0
442 8 7 195 0
443 195 7 5 0
444 57 66 56 55
445 46 43 53 45
446 42 32 40 31
447 49 48 39 40
448 43 46 34 45
449 53 54 55 45
450 43 46 53 42
451 66 52 63 53
452 43 46 33 34
453 63 50 62 52
454 46 52 57 53
455 35 33 46 34
456 69 60 68 50
457 38 46 33 47
458 42 41 40 50
459 40 39 30 31
460 66 57 53 55
461 42 32 33 40
462 57 46 53 55
463 54 65 53 55
464 52 66 57 53
465 46 53 55 45
466 48 38 39 40
467 33 48 40 38
468 39 33 40 38
469 48 42 33 40
470 46 33 42 43
471 42 48 33 46
472 46 48 33 47
473 46 48 58 50
474 46 42 48 50
475 66 52 58 50
476 50 67 68 58
477 63 60 50 66
478 60 67 50 66
479 21 23 38 29
480 39 33 31 40
481 31 33 32 40
482 38 24 23 35
483 33 39 21 38
484 12 11 10 21
485 23 12 10 21
486 29 23 10 21
487 29 10 20 21
488 128 136 127 135
489 50 67 58 66
490 150 163 152 162
torus.face:
400 3 0
0 1 0 10
1 9 10 0
2 11 1 10
3 1 12 2
4 3 2 12
5 11 12 1
6 3 12 13
7 14 4 3
8 14 5 4
9 14 3 13
10 14 15 5
11 16 6 5
12 16 7 6
13 16 5 15
14 16 17 7
15 18 8 7
16 17 18 7
17 19 9 8
18 19 10 9
19 18 19 8
20 11 10 21
21 12 11 21
22 10 20 21
23 12 21 22
24 12 23 13
25 14 13 23
26 23 12 22
27 14 23 24
28 14 25 15
29 16 15 25
30 14 24 25
31 16 25 26
32 27 17 16
33 27 16 26
34 28 18 17
35 27 28 17
36 29 10 19
37 29 19 18
38 29 20 10
39 28 29 18
40 20 31 21
41 20 30 31
42 21 32 22
43 31 32 21
44 23 22 33
45 33 22 32
46 24 23 34
47 23 33 34
48 24 35 25
49 24 34 35
50 36 26 25
51 36 27 26
52 36 25 35
53 27 36 37
54 27 38 28
55 38 29 28
56 27 37 38
57 39 20 29
58 39 30 20
59 38 39 29
60 40 31 30
61 40 30 39
62 41 31 40
63 42 32 31
64 42 33 32
65 41 42 31
66 43 34 33
67 33 42 43
68 44 34 43
69 35 34 45
70 44 45 34
71 35 46 36
72 46 35 45
73 37 36 47
74 37 47 38
75 46 47 36
76 48 39 38
77 48 38 47
78 49 40 39
79 49 39 48
80 41 40 50
81 49 50 40
82 42 41 51
83 51 41 50
84 42 51 52
85 43 42 53
86 42 52 53
87 54 44 43
88 54 45 44
89 43 53 54
90 46 45 55
91 54 55 45
92 46 55 56
93 46 57 47
94 47 57 48
95 46 56 57
96 57 58 48
97 49 48 59
98 49 59 50
99 48 58 59
100 61 51 50
101 60 61 50
102 52 51 62
103 62 51 61
104 52 63 53
105 63 54 53
106 63 52 62
107 63 64 54
108 54 65 55
109 64 65 54
110 66 56 55
111 57 56 66
112 65 66 55
113 67 58 57
114 66 67 57
115 59 58 68
116 67 68 58
117 69 50 59
118 69 60 50
119 69 59 68
120 60 71 61
121 61 71 62
122 71 60 70
123 72 63 62
124 72 62 71
125 72 73 63
126 63 74 64
127 64 74 65
128 73 74 63
129 75 66 65
130 75 65 74
131 66 76 67
132 75 76 66
133 67 76 77
134 78 68 67
135 67 77 78
136 60 69 79
137 79 69 68
138 60 79 70
139 79 68 78
140 81 71 70
141 81 70 80
142 71 82 72
143 73 72 82
144 82 71 81
145 83 73 82
146 73 84 74
147 74 84 75
148 83 84 73
149 84 85 75
150 86 76 75
151 86 77 76
152 86 75 85
153 87 78 77
154 86 87 77
155 87 88 78
156 79 89 70
157 78 89 79
158 89 80 70
159 88 89 78
160 81 80 90
161 89 90 80
162 81 90 91
163 81 92 82
164 82 92 83
165 81 91 92
166 92 93 83
167 84 83 94
168 84 94 85
169 94 83 93
170 94 95 85
171 86 85 96
172 86 96 87
173 96 85 95
174 97 87 96
175 88 87 98
176 88 98 89
177 97 98 87
178 99 90 89
179 89 98 99
180 91 90 100
181 99 100 90
182 91 101 92
183 91 100 101
184 92 101 102
185 93 92 103
186 94 93 103
187 102 103 92
188 103 104 94
189 94 105 95
190 96 95 105
191 94 104 105
192 96 105 106
193 107 97 96
194 107 98 97
195 96 106 107
196 98 108 99
197 98 107 108
198 99 109 100
199 99 108 109
200 101 100 110
201 109 110 100
202 102 101 111
203 111 101 110
204 102 112 103
205 102 111 112
206 103 112 113
207 114 104 103
208 114 105 104
209 103 113 114
210 114 115 105
211 116 106 105
212 107 106 116
213 115 116 105
214 107 116 117
215 107 118 108
216 118 109 108
217 107 117 118
218 119 110 109
219 119 109 118
220 111 110 121
221 111 121 112
222 121 110 120
223 112 121 122
224 123 113 112
225 114 113 123
226 123 112 122
227 114 123 124
228 114 125 115
229 115 125 116
230 124 125 114
231 125 126 116
232 116 127 117
233 127 118 117
234 116 126 127
235 128 118 127
236 110 119 129
237 129 119 118
238 129 120 110
239 128 129 118
240 121 120 130
241 129 130 120
242 131 121 130
243 121 132 122
244 123 122 132
245 131 132 121
246 123 133 124
247 123 132 133
248 124 133 134
249 135 125 124
250 135 126 125
251 134 135 124
252 136 127 126
253 135 136 126
254 128 127 137
255 136 137 127
256 138 128 137
257 129 128 139
258 130 129 139
259 138 139 128
260 131 130 140
261 130 139 140
262 141 131 140
263 132 131 142
264 142 131 141
265 143 133 132
266 134 133 143
267 142 143 132
268 144 135 134
269 134 143 144
270 135 145 136
271 144 145 135
272 145 146 136
273 147 137 136
274 138 137 147
275 147 136 146
276 138 148 139
277 138 147 148
278 140 139 149
279 139 148 149
280 141 140 150
281 140 149 150
282 142 141 151
283 141 150 151
284 142 151 152
285 142 153 143
286 144 143 153
287 142 152 153
288 144 153 154
289 145 144 155
290 155 144 154
291 145 156 146
292 147 146 156
293 145 155 156
294 147 156 157
295 147 158 148
296 147 157 158
297 148 159 149
298 149 159 150
299 148 158 159
300 150 161 151
301 150 160 161
302 162 152 151
303 162 151 161
304 163 153 152
305 163 152 162
306 153 164 154
307 155 154 164
308 163 164 153
309 156 155 165
310 165 155 164
311 166 156 165
312 157 156 167
313 167 156 166
314 168 158 157
315 159 158 168
316 168 157 167
317 159 169 150
318 169 160 150
319 159 168 169
320 160 171 161
321 160 170 171
322 172 162 161
323 172 163 162
324 172 161 171
325 163 172 173
326 163 174 164
327 163 173 174
328 175 165 164
329 175 164 174
330 176 166 165
331 176 167 166
332 176 165 175
333 177 167 176
334 168 167 178
335 168 178 169
336 178 167 177
337 169 179 160
338 179 170 160
339 178 179 169
340 170 180 171
341 179 180 170
342 172 171 181
343 180 181 171
344 181 182 172
345 172 183 173
346 174 173 183
347 172 182 183
348 184 174 183
349 185 175 174
350 176 175 185
351 184 185 174
352 186 176 185
353 176 187 177
354 187 178 177
355 186 187 176
356 188 179 178
357 188 178 187
358 189 180 179
359 189 179 188
360 180 190 181
361 189 190 180
362 1 191 0
363 1 2 191
364 190 0 191
365 190 191 181
366 3 192 2
367 181 192 182
368 192 183 182
369 192 191 2
370 192 181 191
371 184 183 193
372 3 193 192
373 192 193 183
374 4 194 3
375 4 5 194
376 194 193 3
377 194 184 193
378 6 195 5
379 184 195 185
380 186 185 195
381 195 194 5
382 195 184 194
383 6 196 195
384 196 186 195
385 6 7 197
386 186 197 187
387 6 197 196
388 196 197 186
389 198 7 8
390 8 9 198
391 187 198 188
392 198 189 188
393 197 7 198
394 198 187 197
395 199 9 0
396 199 0 190
397 199 190 189
398 198 9 199
399 199 189 198
torus.node:
200 3 0 0
0 0.5 0.0 0.0
1 0.480902 0.0 -0.058779
2 0.430902 0.0 -0.095106
3 0.369098 0.0 -0.095106
4 0.319098 0.0 -0.058779
5 0.3 0.0 -0.0
6 0.319098 -0.0 0.058779
7 0.369098 -0.0 0.095106
8 0.430902 -0.0 0.095106
9 0.480902 -0.0 0.058779
10 0.475528 -0.154508 -0.0
11 0.457365 -0.148607 -0.058779
12 0.409812 -0.133156 -0.095106
13 0.351033 -0.114058 -0.095106
14 0.303481 -0.098607 -0.058779
15 0.285317 -0.092705 -0.0
16 0.303481 -0.098607 0.058778
17 0.351033 -0.114058 0.095106
18 0.409812 -0.133156 0.095106
19 0.457365 -0.148607 0.058778
20 0.404509 -0.293893 -0.0
21 0.389058 -0.282667 -0.058779
22 0.348607 -0.253278 -0.095106
23 0.298607 -0.21695 -0.095106
24 0.258156 -0.187561 -0.058779
25 0.242705 -0.176336 -0.0
26 0.258156 -0.187561 0.058778
27 0.298607 -0.216951 0.095106
28 0.348607 -0.253278 0.095106
29 0.389058 -0.282667 0.058778
30 0.293893 -0.404509 -0.0
31 0.282667 -0.389058 -0.058779
32 0.253278 -0.348607 -0.095106
33 0.21695 -0.298607 -0.095106
34 0.187561 -0.258156 -0.058779
35 0.176336 -0.242705 -0.0
36 0.187561 -0.258156 0.058778
37 0.21695 -0.298607 0.095106
38 0.253278 -0.348607 0.095105
39 0.282667 -0.389058 0.058778
40 0.154509 -0.475528 -0.0
41 0.148607 -0.457365 -0.058779
42 0.133156 -0.409812 -0.095106
43 0.114058 -0.351033 -0.095106
44 0.098607 -0.30348 -0.058779
45 0.092705 -0.285317 -0.0
46 0.098607 -0.303481 0.058778
47 0.114058 -0.351033 0.095105
48 0.133156 -0.409812 0.095105
49 0.148607 -0.457365 0.058778
50 0.0 -0.5 -0.0
51 0.0 -0.480902 -0.058779
52 0.0 -0.430902 -0.095106
53 0.0 -0.369098 -0.095106
54 0.0 -0.319098 -0.058779
55 0.0 -0.3 -0.0
56 0.0 -0.319098 0.058778
57 0.0 -0.369098 0.095105
58 0.0 -0.430902 0.095105
59 0.0 -0.480902 0.058778
60 -0.154509 -0.475528 -0.0
61 -0.148607 -0.457365 -0.058779
62 -0.133156 -0.409812 -0.095106
63 -0.114058 -0.351033 -0.095106
64 -0.098607 -0.30348 -0.058779
65 -0.092705 -0.285317 -0.0
66 -0.098607 -0.303481 0.058778
67 -0.114058 -0.351033 0.095105
68 -0.133156 -0.409812 0.095105
69 -0.148607 -0.457365 0.058778
70 -0.293893 -0.404508 -0.0
71 -0.282667 -0.389058 -0.058779
72 -0.253278 -0.348607 -0.095106
73 -0.216951 -0.298607 -0.095106
74 -0.187561 -0.258156 -0.058779
75 -0.176336 -0.242705 -0.0
76 -0.187561 -0.258156 0.058778
77 -0.216951 -0.298607 0.095106
78 -0.253278 -0.348607 0.095105
79 -0.282667 -0.389058 0.058778
80 -0.404509 -0.293893 -0.0
81 -0.389058 -0.282667 -0.058779
82 -0.348607 -0.253278 -0.095106
83 -0.298607 -0.21695 -0.095106
84 -0.258156 -0.187561 -0.058779
85 -0.242705 -0.176335 -0.0
86 -0.258156 -0.187561 0.058778
87 -0.298607 -0.21695 0.095106
88 -0.348607 -0.253278 0.095106
89 -0.389058 -0.282667 0.058778
90 -0.475528 -0.154509 -0.0
91 -0.457365 -0.148607 -0.058779
92 -0.409812 -0.133156 -0.095106
93 -0.351033 -0.114058 -0.095106
94 -0.303481 -0.098607 -0.058779
95 -0.285317 -0.092705 -0.0
96 -0.303481 -0.098607 0.058778
97 -0.351033 -0.114058 0.095106
98 -0.409812 -0.133156 0.095106
99 -0.457365 -0.148607 0.058778
100 -0.5 -0.0 -0.0
101 -0.480902 -0.0 -0.058779
102 -0.430902 0.0 -0.095106
103 -0.369098 0.0 -0.095106
104 -0.319098 0.0 -0.058779
105 -0.3 -0.0 -0.0
106 -0.319098 -0.0 0.058779
107 -0.369098 -0.0 0.095106
108 -0.430902 -0.0 0.095106
109 -0.480902 -0.0 0.058779
110 -0.475528 0.154508 0.0
111 -0.457365 0.148607 -0.058778
112 -0.409812 0.133156 -0.095106
113 -0.351033 0.114058 -0.095106
114 -0.303481 0.098607 -0.058778
115 -0.285317 0.092705 0.0
116 -0.303481 0.098607 0.058779
117 -0.351033 0.114057 0.095106
118 -0.409812 0.133156 0.095106
119 -0.457365 0.148607 0.058779
120 -0.404509 0.293893 0.0
121 -0.389058 0.282667 -0.058778
122 -0.348607 0.253278 -0.095106
123 -0.298607 0.21695 -0.095106
124 -0.258156 0.187561 -0.058778
125 -0.242705 0.176335 0.0
126 -0.258156 0.187561 0.058779
127 -0.298607 0.21695 0.095106
128 -0.348607 0.253278 0.095106
129 -0.389058 0.282667 0.058779
130 -0.293893 0.404508 0.0
131 -0.282667 0.389058 -0.058778
132 -0.253278 0.348607 -0.095105
133 -0.216951 0.298607 -0.095106
134 -0.187561 0.258156 -0.058778
135 -0.176336 0.242705 0.0
136 -0.187561 0.258156 0.058779
137 -0.216951 0.298607 0.095106
138 -0.253278 0.348607 0.095106
139 -0.282667 0.389058 0.058779
140 -0.154509 0.475528 0.0
141 -0.148607 0.457365 -0.058778
142 -0.133156 0.409812 -0.095105
143 -0.114058 0.351033 -0.095105
144 -0.098607 0.303481 -0.058778
145 -0.092705 0.285317 0.0
146 -0.098607 0.30348 0.058779
147 -0.114058 0.351033 0.095106
148 -0.133156 0.409812 0.095106
149 -0.148607 0.457365 0.058779
150 0.0 0.5 0.0
151 0.0 0.480902 -0.058778
152 0.0 0.430902 -0.095105
153 0.0 0.369098 -0.095105
154 0.0 0.319098 -0.058778
155 0.0 0.3 0.0
156 0.0 0.319098 0.058779
157 0.0 0.369098 0.095106
158 0.0 0.430902 0.095106
159 0.0 0.480902 0.058779
160 0.154509 0.475528 0.0
161 0.148607 0.457365 -0.058778
162 0.133156 0.409812 -0.095105
163 0.114058 0.351033 -0.095105
164 0.098607 0.303481 -0.058778
165 0.092705 0.285317 0.0
166 0.098607 0.30348 0.058779
167 0.114058 0.351033 0.095106
168 0.133156 0.409812 0.095106
169 0.148607 0.457365 0.058779
170 0.293892 0.404509 0.0
171 0.282667 0.389058 -0.058778
172 0.253277 0.348607 -0.095105
173 0.21695 0.298607 -0.095106
174 0.187561 0.258156 -0.058778
175 0.176335 0.242705 0.0
176 0.187561 0.258156 0.058779
177 0.21695 0.298607 0.095106
178 0.253277 0.348607 0.095106
179 0.282667 0.389058 0.058779
180 0.404508 0.293893 0.0
181 0.389058 0.282667 -0.058778
182 0.348607 0.253278 -0.095106
183 0.298607 0.216951 -0.095106
184 0.258156 0.187561 -0.058778
185 0.242705 0.176336 0.0
186 0.258156 0.187561 0.058779
187 0.298607 0.216951 0.095106
188 0.348607 0.253278 0.095106
189 0.389058 0.282667 0.058779
190 0.475528 0.154509 0.0
191 0.457365 0.148607 -0.058778
192 0.409812 0.133156 -0.095106
193 0.351033 0.114058 -0.095106
194 0.30348 0.098607 -0.058778
195 0.285317 0.092705 0.0
196 0.30348 0.098607 0.058779
197 0.351033 0.114058 0.095106
198 0.409812 0.133156 0.095106
199 0.457365 0.148607 0.058779