One Paragraph of project description goes here
These instructions will get you a copy of the project up and running on your local machine for development and testing purposes. See deployment for notes on how to deploy the project on a live system.
#!/bin/bash
repositories=$(aws ecr describe-repositories | jq -r '.repositories[].repositoryName')
for repo in $repositories
do
  # most recently pushed image that carries a version tag (v*)
  latest_image=$(aws ecr describe-images --repository-name "$repo" --query 'sort_by(imageDetails, &imagePushedAt)[*]' | jq -r '.[] | select(.imageTags[] | startswith("v")) | "imageDigest=\(.imageDigest),imageTag=\(.imageTags[0])"' 2>/dev/null | tail -n 1)
  if [ -z "$latest_image" ]; then
    # no versioned image in this repository: skip it
    continue
  fi
  echo "$repo: $latest_image"
done
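For reference, a rough Python/boto3 sketch of the same lookup (pagination and error handling omitted; it assumes default AWS credentials and region, and keeps the script's convention that version tags start with "v"):

import boto3

ecr = boto3.client("ecr")
for repo in ecr.describe_repositories()["repositories"]:
    name = repo["repositoryName"]
    images = ecr.describe_images(repositoryName=name)["imageDetails"]
    # keep only images that carry at least one tag starting with "v"
    versioned = [img for img in images
                 if any(tag.startswith("v") for tag in img.get("imageTags", []))]
    if not versioned:
        continue  # nothing versioned in this repository
    latest = max(versioned, key=lambda img: img["imagePushedAt"])
    print(f"{name}: imageDigest={latest['imageDigest']},imageTag={latest['imageTags'][0]}")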
import functools

def async_cache(func):
    """
    Decorator meant to build singletons, designed for async coroutines.
    :param func: async factory coroutine
    :return: wrapper that awaits func once per argument set and caches the result
    """
    instances = {}

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        key = (args, tuple(sorted(kwargs.items())))
        if key not in instances:  # first call: build and cache the instance
            instances[key] = await func(*args, **kwargs)
        return instances[key]

    return wrapper
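A minimal usage sketch, assuming the decorator above; the get_pool coroutine is made up purely for illustration, and the point is that every await after the first returns the same cached object:

import asyncio

@async_cache
async def get_pool():
    await asyncio.sleep(0.1)  # stand-in for expensive one-time setup
    return object()

async def main():
    first = await get_pool()
    second = await get_pool()
    assert first is second  # the second await hit the cache

asyncio.run(main())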
// click the label of every checked control inside the [aria-labelledby=detail-header] section
// (e.g. to untick all the checked checkboxes there)
Array.from(document.querySelectorAll("[aria-labelledby=detail-header] label"))
  .filter(label => label.control && label.control.checked)
  .forEach(label => label.click())
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ssp

import java.nio.charset.Charset
import java.nio.file.{Files, Paths}

import org.apache.spark.sql.functions.broadcast
import org.apache.spark.sql.streaming.{OutputMode, Trigger}
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{Dataset, SparkSession}
import org.apache.spark.storage.StorageLevel
// This code is related to PR https://github.com/apache/spark/pull/17461
// I show how to use the setInitialModel() param of LDA to build a model incrementally,
// and I compare the performance (perplexity) with a model built in one shot.
import scala.collection.mutable

import org.apache.spark.ml.{Pipeline, PipelineModel}
import org.apache.spark.ml.clustering.{LDA, LDAModel}
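Since setInitialModel() is only available in a Spark build patched with that PR, here is a minimal stock-Spark sketch, written in PySpark with toy data and arbitrary hyperparameters, of just the perplexity comparison; an incrementally built model (via setInitialModel()) would take the place of one of the two models compared below:

from pyspark.sql import SparkSession
from pyspark.ml.feature import Tokenizer, CountVectorizer
from pyspark.ml.clustering import LDA

spark = SparkSession.builder.appName("lda-perplexity-sketch").getOrCreate()

# toy corpus; any DataFrame with an id and a text column would do
docs = spark.createDataFrame(
    [(0, "spark streaming structured kafka source"),
     (1, "lda topic model perplexity likelihood"),
     (2, "broadcast join dataset storage level"),
     (3, "kafka topic stream trigger output mode")],
    ["id", "text"])

words = Tokenizer(inputCol="text", outputCol="words").transform(docs)
features = CountVectorizer(inputCol="words", outputCol="features", vocabSize=64) \
    .fit(words).transform(words)

# compare a model trained on part of the corpus with one trained on all of it
for label, train in [("partial", features.limit(2)), ("full", features)]:
    model = LDA(k=2, maxIter=20, seed=13).fit(train)
    # lower log-perplexity on the full corpus means a better fit
    print(label, model.logPerplexity(features))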
ssh edge_node "hdfs dfs -cat /some/path/part-*" | cat > file
cat file | ssh edge_node "hdfs dfs -put - /target/path"
# think of using a named pipe (mkfifo) to stream application output directly,
# without staging it in a local file first
rm -f stream
mkfifo stream
some_application > stream &   # hypothetical producer writing into the pipe
cat stream | ssh edge_node "hdfs dfs -put - /target/path"
# grab the kernel module (driver) name of the wireless interface
lshw -C network 2>&1 | grep wireless | grep driver
# reload that module to reset the interface (ath9k in this example)
sudo modprobe -r ath9k && sudo modprobe ath9k