Jose RF Junior JoseRFJuniorLLMs

# Set the node's hostname and persist it across reboots.
hostname elliot-01
echo elliot-01 > /etc/hostname
bash
# Declare the kernel modules Kubernetes needs loaded at boot.
vim /etc/modules-load.d/k8s.conf
# Install Docker via the convenience script and confirm it is running.
curl -fsSL https://get.docker.com | bash
docker version
docker ps
# Add the Kubernetes apt repository.
apt-get update && apt-get install -y apt-transport-https
curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
echo "deb http://apt.kubernetes.io/ kubernetes-xenial main" > /etc/apt/sources.list.d/kubernetes.list
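# A typical next step after adding the repository (assumption, not shown in the
# listing above): install the Kubernetes node components.
apt-get update && apt-get install -y kubelet kubeadm kubectl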
<?xml version="1.0" encoding="utf-8" standalone="no"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    android:installLocation="auto">

    <!-- Request the headset DoF (head-tracking) mode -->
    <uses-feature android:name="android.hardware.vr.headtracking" android:version="1" android:required="true" />

    <!-- Request the headset hand-tracking mode -->
    <uses-feature android:name="oculus.software.handtracking" android:required="true" />
    <uses-permission android:name="oculus.permission.handtracking" />

    <application
        android:label="@string/app_name"
        android:icon="@mipmap/app_icon">
        <activity
            android:theme="@android:style/Theme.Black.NoTitleBar.Fullscreen"
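            android:name=".MainActivity"
            android:exported="true">
            <!-- Continuation sketch (assumption): the manifest is truncated
                 above; the activity name and the entries below are illustrative
                 of how such a VR activity declaration typically finishes. -->
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>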
@cleuton
cleuton / StreamDemo.java
Created September 23, 2019 17:39
Java Stream API demo
package com.obomprogramador.stream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class StreamDemo {
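    // Continuation sketch (assumption): the gist is truncated here. The main
    // method below is an illustrative Stream API demo consistent with the
    // imports above; the sample data is made up.
    public static void main(String[] args) {
        List<String> names = new ArrayList<>(Arrays.asList("Ana", "Bruno", "Carla", "Daniel"));

        // Filter, transform and collect in a single pipeline.
        List<String> upper = names.stream()
                .filter(n -> n.length() > 3)
                .map(String::toUpperCase)
                .collect(Collectors.toList());
        System.out.println(upper);

        // A Supplier lets us rebuild the stream, since a Stream can only be consumed once.
        Supplier<Stream<String>> streamSupplier = names::stream;
        long count = streamSupplier.get().filter(n -> n.startsWith("C")).count();
        System.out.println(count);
    }
}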
@mkaranasou
mkaranasou / pyspark_simple_file_read_short.py
Last active October 4, 2019 13:48
Read a txt file with pyspark
from pyspark import SparkConf
from pyspark.sql import SparkSession, functions as F
conf = SparkConf()
# Optional, but it is good to give the driver a reasonable amount of RAM
# (relative to the size of the file we want to read) so we don't get an OOM exception.
conf.set('spark.driver.memory', '6G')
spark = SparkSession.builder \
    .config(conf=conf) \
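    .appName('read_txt_example') \
    .getOrCreate()

# Continuation sketch (assumption): the gist is truncated above; the file name
# is illustrative. Each line of the text file becomes a row with a single
# string column named `value`.
df = spark.read.text('example.txt')
df.show(10, truncate=False)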
@mkaranasou
mkaranasou / pyspark_simple_read_text_file.py
Last active October 4, 2019 14:16
Use pyspark to read a text file and identify a term
from pyspark import SparkConf
from pyspark.sql import SparkSession, functions as F
conf = SparkConf()
# Optional, but it is good to give the driver a reasonable amount of RAM
# (relative to the size of the file we want to read) so we don't get an OOM exception.
conf.set('spark.driver.memory', '6G')
# create a spark session - nothing can be done without this:
spark = SparkSession.builder \
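    .config(conf=conf) \
    .appName('find_term_example') \
    .getOrCreate()

# Continuation sketch (assumption): the gist is truncated above; the file name
# and the search term below are illustrative.
df = spark.read.text('example.txt')

# Keep only the lines that contain the term, then count them.
matches = df.filter(F.col('value').contains('spark'))
print(matches.count())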
package main

import (
    "context"
    "fmt"
    "mime"
    "net/http"
    "os"
    "os/signal"
    "path/filepath"
import numpy as np
import matplotlib.pyplot as plt


class MotionModel():
    def __init__(self, A, Q):
        self.A = A
        self.Q = Q
        (m, _) = Q.shape
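        # Continuation sketch (assumption): the gist is truncated here; the
        # lines below illustrate a typical linear-Gaussian motion model.
        self.zero_mean = np.zeros(m)

    def __call__(self, x):
        # Propagate the state with the linear model and add Gaussian process
        # noise with covariance Q: x' = A x + w, where w ~ N(0, Q).
        return self.A @ x + np.random.multivariate_normal(self.zero_mean, self.Q)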
<html>
<head>
<title>API Example</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<script src="http://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
<script type="text/javascript">
var accessToken = "<your agent's client access token>";
var baseUrl = "https://api.api.ai/v1/";
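// Continuation sketch (assumption): the page is truncated here. A typical
// api.ai v1 query call looks roughly like this; the version, language and
// session values are illustrative.
function sendQuery(text) {
  $.ajax({
    type: "POST",
    url: baseUrl + "query?v=20150910",
    contentType: "application/json; charset=utf-8",
    headers: { "Authorization": "Bearer " + accessToken },
    data: JSON.stringify({ query: text, lang: "en", sessionId: "example-session" }),
    success: function (data) {
      console.log(JSON.stringify(data, null, 2));
    },
    error: function () {
      console.log("Request failed");
    }
  });
}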
@fabiogoll
fabiogoll / bo.ts
Last active December 1, 2018 01:31
[Angular2] Pagination implementation in the project
import {Paginacao, PaginacaoRequest} from "../utils/paginacao";

buscarClientePorFilial(filialId: number, pr: PaginacaoRequest): Promise<Paginacao> {
    var self = this;
    return new Promise<Paginacao>((resolve, reject) => {
        var resposta: Resposta = new Resposta(TipoErro.ERROR, "Erro na busca de clientes!");
        if (filialId === 0) {
            resposta.motivo.push("Código da filial não pode ser nula!");
            reject(resposta);
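            return;
        }
        // Continuation sketch (assumption): the gist is truncated above. The
        // helper below is hypothetical and stands in for the paged HTTP lookup
        // that resolves the Promise with a Paginacao.
        self.buscarPagina(filialId, pr)
            .then((paginacao: Paginacao) => resolve(paginacao))
            .catch(() => reject(resposta));
    });
}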
// taken from Tutorial: Creating an Angular2 Autocomplete by Leonardo Jines
// http://4dev.tech/2016/03/tutorial-creating-an-angular2-autocomplete/#comment-1609
import {Component, ElementRef} from 'angular2/core';

@Component({
    selector: 'my-app',
    host: {
        '(document:click)': 'handleClick($event)',
    },
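    template: `
        <input type="text" placeholder="Search">
        <ul>
            <li *ngFor="#item of filteredList">{{item}}</li>
        </ul>
    `
})
export class AppComponent {
    // Continuation sketch (assumption): the gist is truncated above. The body
    // below is illustrative; it shows why the host listens for document:click,
    // namely closing the suggestion list when the user clicks outside.
    public filteredList: string[] = [];

    constructor(private elementRef: ElementRef) {}

    handleClick(event: any) {
        let clicked = event.target;
        let inside = false;
        while (clicked) {
            if (clicked === this.elementRef.nativeElement) {
                inside = true;
                break;
            }
            clicked = clicked.parentNode;
        }
        if (!inside) {
            this.filteredList = [];
        }
    }
}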