zhaoawd / usual-query.sql
Last active June 10, 2020 08:42
[MySQL common queries] #mysql
# Show every client connection and the full statement it is currently running
show full PROCESSLIST;
# Query and set the maximum number of connections
show variables like '%max_connections%';
set global max_connections=1000;
# Count connections grouped by client IP
select SUBSTRING_INDEX(host,':',1) as ip, count(*) from information_schema.processlist group by ip;
# Count TCP connections to MySQL grouped by PID
zhaoawd / Map 相关
Created April 29, 2019 06:43
[Java Map operations] A few common operations #java
java.util.Iterator — an iterator over the entries of a collection.
java.util.Iterator it = map.entrySet().iterator();
while (it.hasNext()) {
      java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
      entry.getKey();      // returns the corresponding key
      entry.getValue();    // returns the corresponding value
}
Map<String,String> testData = new HashMap<String, String>();
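For comparison, a self-contained sketch (not part of the gist) of the same traversal with generics, the for-each loop, and the Java 8 forEach method:
import java.util.HashMap;
import java.util.Map;

public class MapIterationExample {
    public static void main(String[] args) {
        Map<String, String> testData = new HashMap<>();
        testData.put("k1", "v1");
        testData.put("k2", "v2");

        // typed entry iteration, no raw types and no explicit Iterator
        for (Map.Entry<String, String> entry : testData.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }

        // Java 8+: iterate with a lambda
        testData.forEach((key, value) -> System.out.println(key + " -> " + value));
    }
}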
zhaoawd / TimeCountTrigger
Created April 10, 2019 02:17
[TimeCountTrigger] Processes time windows; fires and purges (Fire&Purge) when the window's time or element-count limit is exceeded #flink
import org.apache.flink.api.common.functions.ReduceFunction
import org.apache.flink.api.common.state.{ReducingState, ReducingStateDescriptor}
import org.apache.flink.api.common.typeutils.base.LongSerializer
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.windowing.triggers.Trigger.TriggerContext
import org.apache.flink.streaming.api.windowing.triggers._
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.hadoop.hbase.util.Bytes
import org.slf4j.{Logger, LoggerFactory}
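The preview above stops at the imports. For reference, a minimal Java sketch of a trigger with the described behavior (the gist itself is Scala; the class name CountOrTimeTrigger and its fields are assumptions, not the gist's code): it keeps the element count in a ReducingState and fires-and-purges either when that count reaches a limit or when the window's event-time end is reached.
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.streaming.api.windowing.triggers.Trigger;
import org.apache.flink.streaming.api.windowing.triggers.TriggerResult;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

// Hypothetical sketch, not the gist's actual implementation.
public class CountOrTimeTrigger extends Trigger<Object, TimeWindow> {

    private final long maxCount;
    private final ReducingStateDescriptor<Long> countDescriptor =
            new ReducingStateDescriptor<>("element-count", new Sum(), LongSerializer.INSTANCE);

    public CountOrTimeTrigger(long maxCount) {
        this.maxCount = maxCount;
    }

    @Override
    public TriggerResult onElement(Object element, long timestamp, TimeWindow window, TriggerContext ctx) throws Exception {
        // also fire when the window's event time runs out
        ctx.registerEventTimeTimer(window.maxTimestamp());
        ReducingState<Long> count = ctx.getPartitionedState(countDescriptor);
        count.add(1L);
        if (count.get() >= maxCount) {
            count.clear();
            return TriggerResult.FIRE_AND_PURGE;  // count limit reached
        }
        return TriggerResult.CONTINUE;
    }

    @Override
    public TriggerResult onEventTime(long time, TimeWindow window, TriggerContext ctx) {
        // window end reached: emit and purge whatever has accumulated
        return time == window.maxTimestamp() ? TriggerResult.FIRE_AND_PURGE : TriggerResult.CONTINUE;
    }

    @Override
    public TriggerResult onProcessingTime(long time, TimeWindow window, TriggerContext ctx) {
        return TriggerResult.CONTINUE;
    }

    @Override
    public void clear(TimeWindow window, TriggerContext ctx) throws Exception {
        ctx.getPartitionedState(countDescriptor).clear();
        ctx.deleteEventTimeTimer(window.maxTimestamp());
    }

    private static class Sum implements ReduceFunction<Long> {
        @Override
        public Long reduce(Long a, Long b) {
            return a + b;
        }
    }
}
With the classic windowed-stream API such a trigger would be attached via something like stream.keyBy(...).window(...).trigger(new CountOrTimeTrigger(1000)).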
zhaoawd / SealedPut
Created April 10, 2019 02:16
[SealedPut] A wrapper around HBase Put #flink #hbase
import com.google.common.collect.Maps;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Map;
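Only the imports are visible above. A hedged sketch of what such a Put wrapper could look like (the fields and method names below are assumptions, not the gist's API): it buffers family/qualifier/value cells for one row key and builds the actual org.apache.hadoop.hbase.client.Put on demand.
import com.google.common.collect.Maps;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import java.util.Map;

// Hypothetical sketch of a Put wrapper, not the gist's actual code.
public class SealedPut {

    private final byte[] rowKey;
    // column family -> (qualifier -> value)
    private final Map<String, Map<String, byte[]>> cells = Maps.newHashMap();

    public SealedPut(byte[] rowKey) {
        this.rowKey = rowKey;
    }

    public void addColumn(String family, String qualifier, byte[] value) {
        Map<String, byte[]> familyCells = cells.get(family);
        if (familyCells == null) {
            familyCells = Maps.newHashMap();
            cells.put(family, familyCells);
        }
        familyCells.put(qualifier, value);
    }

    // materialise the buffered cells as a single HBase Put
    public Put toPut() {
        Put put = new Put(rowKey);
        for (Map.Entry<String, Map<String, byte[]>> family : cells.entrySet()) {
            for (Map.Entry<String, byte[]> column : family.getValue().entrySet()) {
                put.addColumn(Bytes.toBytes(family.getKey()),
                        Bytes.toBytes(column.getKey()),
                        column.getValue());
            }
        }
        return put;
    }
}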
zhaoawd / PutCollection
Last active April 10, 2019 02:05
[Put collection] #flink #hbase
import com.google.common.collect.Maps;
import org.apache.hadoop.hbase.util.Bytes;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/**
 * A collection of Puts of various kinds, including the table information they belong to
 */
public class PutCollection {
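The preview cuts off at the class declaration. A minimal sketch of what the rest could look like, under the assumption that the "table information" is a mapping from table name to the Puts destined for that table (the method names are hypothetical):
import com.google.common.collect.Maps;
import org.apache.hadoop.hbase.client.Put;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;

// Hypothetical sketch, not the gist's actual code.
public class PutCollection {

    // table name -> Puts to be written to that table
    private final Map<String, Collection<Put>> puts = Maps.newHashMap();

    public void add(String tableName, Put put) {
        Collection<Put> tablePuts = puts.get(tableName);
        if (tablePuts == null) {
            tablePuts = new ArrayList<>();
            puts.put(tableName, tablePuts);
        }
        tablePuts.add(put);
    }

    public Map<String, Collection<Put>> getPuts() {
        return puts;
    }
}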
zhaoawd / HBaseRichOutputFormat
Last active April 10, 2019 02:06
[HBase data persistence] #flink #hbase
import com.google.common.collect.Lists;
import org.apache.flink.api.common.io.RichOutputFormat;
import org.apache.flink.configuration.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
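Again only the imports survive in the preview. A hedged sketch of a RichOutputFormat that persists such a collection to HBase, assuming the PutCollection shape sketched above and the classic Flink 1.x open(int, int) signature; HBase settings are taken from hbase-site.xml on the classpath:
import com.google.common.collect.Lists;
import org.apache.flink.api.common.io.RichOutputFormat;
import org.apache.flink.configuration.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;

// Hypothetical sketch, not the gist's actual code.
public class HBaseRichOutputFormat extends RichOutputFormat<PutCollection> {

    private transient Connection connection;

    @Override
    public void configure(Configuration parameters) {
        // nothing to configure here; HBase settings come from hbase-site.xml
    }

    @Override
    public void open(int taskNumber, int numTasks) throws IOException {
        // one HBase connection per parallel sink instance
        connection = ConnectionFactory.createConnection();
    }

    @Override
    public void writeRecord(PutCollection record) throws IOException {
        // write each table's batch of Puts to the corresponding HBase table
        for (Map.Entry<String, Collection<Put>> entry : record.getPuts().entrySet()) {
            try (Table table = connection.getTable(TableName.valueOf(entry.getKey()))) {
                table.put(Lists.newArrayList(entry.getValue()));
            }
        }
    }

    @Override
    public void close() throws IOException {
        if (connection != null) {
            connection.close();
        }
    }
}
Such a format could then be attached to a stream with something like dataStream.writeUsingOutputFormat(new HBaseRichOutputFormat()).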
zhaoawd / mysql 批量插入或更新
Last active January 22, 2019 03:19
[MySQL: insert if the row does not exist, update if it does] #mysql
on duplicate key update file_name = values(file_name)
file_name must be covered by a unique index (or primary key) for the duplicate-key branch to apply
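As a usage illustration (not part of the gist), a JDBC sketch against a hypothetical table t_file whose file_name column has a UNIQUE index; here the duplicate branch updates file_size, while the gist's variant file_name = VALUES(file_name) effectively leaves the existing row unchanged:
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

public class UpsertExample {
    public static void main(String[] args) throws Exception {
        // hypothetical schema: CREATE TABLE t_file (file_name VARCHAR(255) NOT NULL, file_size BIGINT, UNIQUE KEY uk_file_name (file_name))
        String sql = "INSERT INTO t_file (file_name, file_size) VALUES (?, ?) "
                + "ON DUPLICATE KEY UPDATE file_size = VALUES(file_size)";
        try (Connection conn = DriverManager.getConnection(
                     "jdbc:mysql://localhost:3306/test", "user", "password");  // hypothetical connection details
             PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, "report-2019-01.csv");
            ps.setLong(2, 10240L);
            // inserts a new row, or updates file_size when file_name already exists
            ps.executeUpdate();
        }
    }
}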
zhaoawd / FixedSqlSessionFactory.java
Last active January 22, 2019 03:20
[Fix MyBatis not reporting configuration errors automatically] #mybatis
import java.io.IOException;
import org.apache.ibatis.executor.ErrorContext;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;

public class FixedSqlSessionFactory extends SqlSessionFactoryBean {
    @Override
    protected SqlSessionFactory buildSqlSessionFactory() throws IOException {
        try {
            return super.buildSqlSessionFactory();
        } catch (Exception e) {
            e.printStackTrace();
            // rethrow so mapper/configuration errors fail fast instead of being silently swallowed
            throw new IllegalStateException(e);
        } finally {
            ErrorContext.instance().reset();
        }
    }
}
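A hedged example of wiring this factory into a Spring Java configuration (the bean method and the mapper location pattern below are assumptions, not part of the gist):
import javax.sql.DataSource;
import org.apache.ibatis.session.SqlSessionFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;

@Configuration
public class MyBatisConfig {

    @Bean
    public SqlSessionFactory sqlSessionFactory(DataSource dataSource) throws Exception {
        // use the fixed factory bean so mapper XML parse errors surface at startup
        FixedSqlSessionFactory factoryBean = new FixedSqlSessionFactory();
        factoryBean.setDataSource(dataSource);
        // hypothetical mapper location; adjust to the project's layout
        factoryBean.setMapperLocations(
                new PathMatchingResourcePatternResolver().getResources("classpath*:mappers/*.xml"));
        return factoryBean.getObject();
    }
}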