Skip to content

Instantly share code, notes, and snippets.

View sebge2emasphere's full-sized avatar

Sébastien Gérard sebge2emasphere

  • emasphere
View GitHub Profile
#!/bin/bash
function usage()
{
echo "Export Nexus Repositories."
echo ""
echo "./nexus-export"
echo -e "\t--help"
echo -e "\t--localNexusUrl"
#!/usr/bin/perl
# Preamble of a CSV-processing script (rest of script not visible in this view).
use strict;
use warnings;
use Text::CSV;
# Index of the tenant column in the input CSV — presumably 0-based per
# Text::CSV field arrays; TODO confirm against the (unseen) processing loop.
my $tenantColumn = 1;
# Path the processed CSV is written to.
my $outputFile = "output.csv";
import java.io.BufferedReader;
import java.io.InputStreamReader;
public class MyClass {
public static void main(String[] args) throws Exception {
ProcessBuilder processBuilder = new ProcessBuilder(args);
processBuilder.redirectOutput(ProcessBuilder.Redirect.PIPE);
processBuilder.command("/home/sgerard/script/script.pl", args[0]);
package com.emasphere.poc.hbase.sample;
import com.emasphere.data.executor.common.DataFormatUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.FilterList;
package com.emasphere.poc.parquetspark.sample;
import com.emasphere.poc.parquetspark.ParquetCsvImporter;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.functions;
import java.math.BigDecimal;
package com.emasphere.poc.parquetspark;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
import org.apache.commons.io.IOUtils;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SaveMode;
create table analysecom
(
id numeric(19,2),
third_party text,
compte numeric(19,2),
code_vendeur text,
vendeur text,
annee timestamp,
periode numeric(19,2),
montant numeric(19,2),
package com.emasphere.poc.hbase;
import com.emasphere.data.executor.common.DataFormatUtils;
import com.emasphere.data.executor.common.utils.FlowExecutorUtils;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Mutation;
package com.emasphere.poc.hbase.sample;
import com.emasphere.data.executor.common.DataFormatUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.FilterList;
{
"columns": [
{
"code": "a",
"defaultLabel": "ID"
},
{
"code": "b",
"defaultLabel": "categorie_produit-productnr"
},