start = 0
size = 1000000
while true do
    insert into tbl select * from tbl_old limit start, size
    start = start + size  -- LIMIT offsets are zero-based, so no extra +1 is needed (a +1 would skip one row per batch)
    if (start > XXXXXXXX) exit
end
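
A minimal MySQL sketch of the same batched copy as a stored procedure. The tables tbl and tbl_old and the 1,000,000-row batch size come from the pseudocode above; the procedure name and the max_rows stop bound (standing in for XXXXXXXX) are assumptions for illustration.

DELIMITER $$
CREATE PROCEDURE copy_in_batches(IN max_rows BIGINT)
BEGIN
    -- hypothetical implementation of the loop above
    DECLARE start BIGINT DEFAULT 0;
    DECLARE size BIGINT DEFAULT 1000000;
    WHILE start <= max_rows DO
        INSERT INTO tbl SELECT * FROM tbl_old LIMIT start, size;
        SET start = start + size;
    END WHILE;
END$$
DELIMITER ;

Note that LIMIT with a growing offset re-scans the skipped rows on every pass; on very large tables a keyset condition (WHERE pk > last_copied_pk) scales better.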

-module(auth_test).
-export([get_default_page/0, do_auth/0]).

request(Data) ->
    io:format("do login~n", []),
    case httpc:request(post,

DELIMITER $$
DROP PROCEDURE IF EXISTS move_to_history$$
CREATE PROCEDURE `move_to_history`(
    IN bu_id int
)
BEGIN
    DECLARE done INT DEFAULT FALSE;
    DECLARE i int;

-- @-variables capture the raw CSV fields so empty first/last names can be converted to NULL; @c1 is read and discarded
LOAD DATA LOCAL INFILE '/home/ubuntu/dev/5016/csv/customers.csv' INTO TABLE tmp_5016
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n'
(client_id, name, @first_name, @last_name, title, phone1, @c1, fax1, email,
 address1, address2, city, state, zip, phone2, fax2)
SET
    first_name = if(length(@first_name) = 0, null, @first_name),
    last_name = if(length(@last_name) = 0, null, @last_name),
    location_id = null;

-- if you have all {Child, Parents, Keys}
-- a procedure (not a function) so the dynamic query can be PREPAREd
CREATE PROCEDURE check_relation_type(child_table varchar(64), child_column varchar(64), parent_table varchar(64), parent_column varchar(64))
BEGIN
    DECLARE child_cardinality DOUBLE;
    DECLARE parent_cardinality DOUBLE;
    DECLARE q VARCHAR(255);
    -- ratio of distinct to total values: 1.0 means the child column is unique
    SET q = CONCAT('select count(distinct ', child_column, ') / count(', child_column, ') into @val from ', child_table, ' where ', child_column, ' is not null');
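    -- Assumed continuation (not in the original snippet): the dynamic query in q would
    -- presumably be executed as a prepared statement and the resulting ratio kept for
    -- comparison with the parent side. @val comes from the SELECT ... INTO above;
    -- everything else in this sketch is hypothetical.
    SET @q = q;
    PREPARE stmt FROM @q;
    EXECUTE stmt;
    DEALLOCATE PREPARE stmt;
    SET child_cardinality = @val;  -- 1.0 means one row per value (unique); below 1.0 means values repeat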

parse_logfile_line_by_line(Device, Accum, State) ->
    case io:get_line(Device, "") of
        eof ->
            ok;
        Line ->
            %% hand the accumulated buffer plus the position of the first '.' in the new
            %% line to the parser; any unparsed tail is carried into the next call
            {Term, UnhandledData} = parse_raw_data(Accum ++ Line, string:chr(Line, $.)),
            reinsert_message(State, Term),
            parse_logfile_line_by_line(Device, UnhandledData, State)
    end.

-module(log_parser).
-behaviour(gen_server).

-export([start_link/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-export([parse/1]).
-export([do/0]).