Biju Kunjummen (bijukunjummen)

bijukunjummen / CachingUtils.java
Last active November 27, 2023 05:27
Demonstration of caching with Project Reactor and Caffeine
import com.github.benmanes.caffeine.cache.AsyncLoadingCache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;
import org.jetbrains.annotations.NotNull;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
import java.time.Duration;
import java.util.function.Function;
import java.util.function.Supplier;
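
Only the imports of CachingUtils.java survive in this preview. A minimal sketch of the technique the description names, caching Mono-producing lookups behind a Caffeine AsyncLoadingCache, might look like the following; the method name, key and value types, and the five-minute TTL are illustrative assumptions, not the gist's actual code:

import com.github.benmanes.caffeine.cache.AsyncLoadingCache;
import com.github.benmanes.caffeine.cache.Caffeine;
import reactor.core.publisher.Mono;
import java.time.Duration;
import java.util.function.Function;

public class CachingUtils {
    // Wrap a Mono-returning lookup in a Caffeine async cache: the loader
    // subscribes to the Mono by converting it to a CompletableFuture, and
    // cached reads convert the stored future back into a Mono.
    public static <K, V> Function<K, Mono<V>> cached(Function<K, Mono<V>> lookup) {
        AsyncLoadingCache<K, V> cache = Caffeine.newBuilder()
                .expireAfterWrite(Duration.ofMinutes(5)) // illustrative TTL
                .buildAsync((key, executor) -> lookup.apply(key).toFuture());
        return key -> Mono.fromFuture(() -> cache.get(key));
    }
}

Going through buildAsync has the nice property that concurrent subscribers for an uncached key share a single in-flight CompletableFuture rather than each triggering its own load.
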
bijukunjummen / sample-deployment.yaml
Created September 22, 2022 23:56
Sample Deployment manifest
apiVersion: apps/v1
kind: Deployment
metadata:
  name: hello-skaffold-gke-deployment
spec:
  replicas: 1
  selector:
    matchLabels:
      app: hello-skaffold-gke
  template:
    metadata:
      labels:
        app: hello-skaffold-gke
    spec:
      containers:
        # the preview was truncated at "template:"; the pod spec below is a
        # sketch, and the image name is an assumed placeholder
        - name: hello-skaffold-gke
          image: gcr.io/sampleproject/hello-skaffold-gke
bijukunjummen / gcp-to-aws-s3-list.py
Created August 31, 2023 02:57
Use federated identity to make a call from GCP to AWS
import boto3
import google.auth
import google.auth.transport.requests
import requests
import google.oauth2.id_token
auth_req = google.auth.transport.requests.Request()
# Get a GCP ID token with an audience of 'gcp-aws-access'
credentials = google.oauth2.id_token.fetch_id_token_credentials('gcp-aws-access', request=auth_req)
credentials.refresh(auth_req)  # populates credentials.token with the ID token
# Preview truncated above; sketch of the rest: trade the ID token for temporary AWS credentials (RoleArn is a placeholder), then list S3 buckets
aws = boto3.client('sts').assume_role_with_web_identity(
    RoleArn='arn:aws:iam::123456789012:role/gcp-aws-access-role',
    RoleSessionName='gcp-federated-session', WebIdentityToken=credentials.token)['Credentials']
s3 = boto3.client('s3', aws_access_key_id=aws['AccessKeyId'],
    aws_secret_access_key=aws['SecretAccessKey'], aws_session_token=aws['SessionToken'])
print(s3.list_buckets())

// Obtains a GCS resumable-upload WriteChannel, then uses reflection to point
// its uploadId at an internal host. Preview truncated; the closing lines are
// a sketch (changeHost and storage are defined elsewhere in the gist).
public void upload() throws Exception {
    byte[] buffer = new byte[4096];
    BlobInfo blobInfo = BlobInfo.newBuilder("some-bucket", "samplefile1.txt").build();
    Field uploadIdField = BaseWriteChannel.class.getDeclaredField("uploadId");
    ReflectionUtils.makeAccessible(uploadIdField);
    try (WriteChannel writer = storage.writer(blobInfo)) {
        String uploadId = (String) ReflectionUtils.getField(uploadIdField, writer);
        String changedHostUploadId = changeHost(uploadId, "myinternalhost.test.com");
        ReflectionUtils.setField(uploadIdField, writer, changedHostUploadId);
        writer.write(ByteBuffer.wrap(buffer));
    }
}
package org.bk.gcs;
import com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.Test;
import java.net.URI;
import java.util.List;
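
This preview also stops at the imports. A minimal sketch of a test that exercises GoogleHadoopFileSystem directly, assuming a placeholder bucket name and ambient GCP credentials, could look like this:

import com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.Test;
import java.net.URI;

public class GcsWithHadoopTest {
    @Test
    void writeToGcsBucket() throws Exception {
        Configuration conf = new Configuration();
        GoogleHadoopFileSystem fs = new GoogleHadoopFileSystem();
        // "some-bucket" is a placeholder; credentials are picked up from the environment
        fs.initialize(URI.create("gs://some-bucket"), conf);
        try (FSDataOutputStream out = fs.create(new Path("gs://some-bucket/samplefile.txt"))) {
            out.writeBytes("hello gcs via hadoop\n");
        }
    }
}
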
import com.google.cloud.bigtable.data.v2.BigtableDataClient;
import com.google.cloud.bigtable.data.v2.BigtableDataSettings;
import com.google.cloud.bigtable.data.v2.models.Mutation;
import com.google.cloud.bigtable.data.v2.models.Row;
import com.google.cloud.bigtable.data.v2.models.RowCell;
import static com.google.cloud.bigtable.data.v2.models.Filters.FILTERS;
...
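
The Bigtable snippet is likewise cut off at the imports. Going only by what those imports pull in, a small read-and-write sketch (project, instance, and table ids are placeholders, and RowMutation is an extra import the preview does not show) might be:

import com.google.cloud.bigtable.data.v2.BigtableDataClient;
import com.google.cloud.bigtable.data.v2.BigtableDataSettings;
import com.google.cloud.bigtable.data.v2.models.Mutation;
import com.google.cloud.bigtable.data.v2.models.Row;
import com.google.cloud.bigtable.data.v2.models.RowCell;
import com.google.cloud.bigtable.data.v2.models.RowMutation;
import static com.google.cloud.bigtable.data.v2.models.Filters.FILTERS;

public class BigtableSample {
    public static void main(String[] args) throws Exception {
        BigtableDataSettings settings = BigtableDataSettings.newBuilder()
                .setProjectId("sampleproject")    // placeholder
                .setInstanceId("sample-instance") // placeholder
                .build();
        try (BigtableDataClient client = BigtableDataClient.create(settings)) {
            // Write a single cell, then read the row back filtered to that family
            client.mutateRow(RowMutation.create("sample-table", "key-1",
                    Mutation.create().setCell("cf", "greeting", "hello")));
            Row row = client.readRow("sample-table", "key-1",
                    FILTERS.family().exactMatch("cf"));
            for (RowCell cell : row.getCells()) {
                System.out.println(cell.getQualifier().toStringUtf8()
                        + " = " + cell.getValue().toStringUtf8());
            }
        }
    }
}
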
apiVersion: deploy.cloud.google.com/v1
kind: DeliveryPipeline
metadata:
  name: clouddeploy-cloudrun-sample
description: Delivery Pipeline for a sample java app
serialPipeline:
  stages:
    - targetId: dev
      profiles: [dev]
    - targetId: prod
---
apiVersion: deploy.cloud.google.com/v1
kind: Target
metadata:
  name: dev
description: Cloud Run Dev environment
run:
  location: projects/sampleproject/locations/us-west1
---
apiVersion: skaffold/v3alpha1
kind: Config
metadata:
  name: clouddeploy-cloudrun-skaffold
manifests:
  kustomize:
    paths:
      - manifests/base
build:
  artifacts:
  # preview truncated here

# likely a separate kustomization.yaml overlay; the file boundary was lost in the preview
namePrefix: dev-
resources:
  - ../../base