Navigation Menu

Skip to content

Instantly share code, notes, and snippets.

# AWS provider configuration. Credentials are supplied through the input
# variables declared later in this gist so that secrets never appear in
# version-controlled source.
provider "aws" {
# NOTE(review): "${var.x}" quoting is Terraform 0.11 style; in TF >= 0.12 the
# idiomatic form is a bare `var.x` expression (behavior is identical here).
access_key = "${var.access_key}"
secret_key = "${var.secret_key}"
region = "us-west-2"   # hard-coded deployment region
}
# NOTE(review): this fragment is truncated and duplicated — almost certainly a
# copy/scrape artifact. The `terraform`/`backend "s3"` blocks below are never
# closed, a `provider` block is (illegally) nested inside the backend block,
# and lines 16-24 repeat lines 7-15 verbatim. Before use: restore the missing
# closing braces, remove the duplicate, and add the required `region` argument
# to the S3 backend configuration.
terraform {
backend "s3" {
bucket = "terraform-remote-state"
key = "terraform/dev/terraform.tfstate"
provider "aws" {
access_key = "${var.access_key}"
secret_key = "${var.secret_key}"
region = "us-west-2"
}
terraform {
backend "s3" {
bucket = "terraform-remote-state"
key = "terraform/dev/terraform.tfstate"
# Expose the provisioned EC2 instance's ID after `terraform apply`.
output "instance_id" {
value = "${aws_instance.awsInstance.id}"
}
# Expose the instance's public DNS name, e.g. for SSH access.
output "public_dns" {
value = "${aws_instance.awsInstance.public_dns}"
}
# Fill in all the variables below before executing the deployment.
# (terraform.tfvars fragment — replace every "<< ... >>" placeholder with a
# real value; never commit the populated file to version control.)
access_key = "<< Access Key >>"
secret_key = "<<Secret Key>>"
private_key_path = "<<Private Key Path>>"
# Input variables. The three empty declarations have no default, so Terraform
# requires them at plan/apply time (see the tfvars placeholders above).
variable "access_key" {}
variable "secret_key" {}
variable "private_key_path" {}
# EC2 key-pair name used by the instance; overridable, defaults to "dummy".
variable "key_name" {
default = "dummy"
}
# NOTE(review): this provider block duplicates the one at the top of the gist
# verbatim — presumably a scrape/paste artifact. Terraform allows only one
# default (un-aliased) provider configuration per provider; drop one copy.
provider "aws" {
access_key = "${var.access_key}"
secret_key = "${var.secret_key}"
region = "us-west-2"
}
# t2.micro EC2 instance from a fixed AMI, addressed elsewhere in the gist as
# aws_instance.awsInstance (see the output blocks above).
# NOTE(review): this block is truncated by the page scrape — its closing brace
# (and possibly further arguments such as provisioners using
# var.private_key_path) is missing; recover the full resource before use.
resource "aws_instance" "awsInstance" {
ami = "ami-c0f936b8"   # region-specific AMI ID (us-west-2) — verify it still exists
instance_type = "t2.micro"
key_name = "${var.key_name}"
@Ayush-Singhal28
Ayush-Singhal28 / sparkAssignment2.scala
Created February 25, 2018 21:08
SparkAssignment-2
// Load a local file - file1
scala> val file1 = sc.textFile("/home/knoldus/Desktop/file1.txt")
file1: org.apache.spark.rdd.RDD[String] = /home/knoldus/Desktop/file1.txt MapPartitionsRDD[1] at textFile at <console>:24
// Creating an RDD by splitting each line on '#'
scala> val fileRDD1 = file1.map(x => x.split('#'))
fileRDD1: org.apache.spark.rdd.RDD[Array[String]] = MapPartitionsRDD[2] at map at <console>:26
@Ayush-Singhal28
Ayush-Singhal28 / sparkAssignment.scala
Last active February 22, 2018 05:30
Spark-Assignment-1
// Solution-1
scala> val line = "Hello, world"
line: String = Hello, world
scala> val rdd = sc.parallelize(List(line))
rdd: org.apache.spark.rdd.RDD[String] = ParallelCollectionRDD[1] at parallelize at <console>:26
scala> rdd.collect()
res1: Array[String] = Array(Hello, world)