@manpages
Created March 5, 2012 12:28
ЛЯМ-search module used at memorici.de
<erl>
% TODO: Add page compiler
% TODO: RSS for blog
% TODO: Re-write category.php using sed instead of php
%%%% BEGIN CONFIG %%%%
% chroot:
root(A) ->
    A#arg.docroot. %++ "/newRoot".
% html appending order (queue/stack):
%% either Current ++ New or vice-versa
glue(NewHTML, CurrentHTML) ->
    NewHTML ++ CurrentHTML.
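%% For illustration: with the definition above new notes are prepended, e.g.
%%   glue("<p>new</p>", "<p>old</p>") =:= "<p>new</p><p>old</p>";
%% swapping the operands would append instead (the "queue/stack" toggle above).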
% how notes are wrapped:
note(start) ->
"
<div class=\"note\">
<div class=\"noteText\">
";
note(finish) ->
"
</div>
</div>
".
% which dynamic content is displayed (returns HTML):
body(A) ->
    io:format("<REQUEST>~n~p~n</REQUEST>~n~n", [A]),
    %DataPath = "/mnt/ramdisk/data/",
    DataPath = "/home/sweater/coding/scripting/lyam/data/",
    case erlang:element(2, A#arg.req) of
        'GET' ->
            get_notes_html(A);
        'POST' ->
            io:format("POST: ~p~n", [yaws_api:parse_post(A)]),
            % the search form is expected to post exactly one field, named "wat"
            [{"wat", POST}] = yaws_api:parse_post(A),
            os:cmd("echo '" ++ POST ++ "' >> post.log"),
            SearchTerms = parse_search_string(POST),
            io:format("POST: ~p~n", [SearchTerms]),
            % getting notes, building body
            NotesHTML = get_notes_html(A),
            CmdHTML = case perform_search(POST, DataPath) of
                [] -> "<hr />No results<hr />";
                XX -> XX
            end,
            %CmdHTML = "",
            NotesHTML ++ CmdHTML
    end.
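%% For illustration: a search submitted as "wat=foo%2Bbar" reaches this handler as
%% yaws_api:parse_post(A) =:= [{"wat", "foo+bar"}], binding POST to "foo+bar";
%% any other form layout fails the one-element match above.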
perform_search(POST, DataPath) ->
    AwkCmd = get_awk_cmd(POST),
    io:format("for x in `cat " ++ DataPath ++ "cache.dat`; do echo \"<strong>***</strong>\"; " ++ AwkCmd ++ "; done"),
    os:cmd("for x in `cat " ++ DataPath ++ "cache.dat`; do " ++ AwkCmd ++ " && echo \"<strong>***</strong><br \\/>\"; done").
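%% For illustration: with POST = "foo+bar" the os:cmd/1 call above runs roughly
%%   for x in `cat .../cache.dat`; do grep "foo" $x | grep "bar" && echo "<strong>***</strong><br \/>"; done
%% so cache.dat is read as a whitespace-separated list of note file paths, the matching
%% lines become the result HTML, and a *** separator is appended for each file that
%% contains a line matching every term.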
get_awk_cmd({State, [H|T], Acc}) ->
    io:format("DBG_THERE terms:~p;~nAccumulator: ~p~n~n", [[H|T], Acc]),
    AwkCmd = "grep \"" ++ H ++ "\"",
    TmpAcc = case State of
        go_on -> Acc ++ AwkCmd;
        start -> Acc ++ AwkCmd ++ " $x"
    end,
    NewAcc = case T of
        [_|_] -> TmpAcc ++ " | ";
        []    -> TmpAcc
    end,
    get_awk_cmd({go_on, T, NewAcc});
get_awk_cmd({go_on, [], Acc}) ->
    io:format("DBG_LAST terms:~p;~nAccumulator: ~p~n~n", [[], Acc]),
    Acc;
get_awk_cmd(POST) ->
    io:format("DBG_HERE~n"),
    get_awk_cmd({start, parse_search_string(POST), ""}).
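%% For illustration (traced through the clauses above):
%%   get_awk_cmd("foo+bar") =:= "grep \"foo\" $x | grep \"bar\""
%% i.e. one grep per search term, chained with pipes; the shell variable $x (the
%% current note file in perform_search/2) is attached to the first grep only.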
parse_search_string(POST) ->
    Reserved = "+*.&%/\\()|'\"",
    BNFTerm = "([^" ++ Reserved ++ "]+)",
    BNFRest = "(.*)",
    Opt = [{capture, all_but_first}],
    M = case re:run(POST, "\\(?" ++ BNFTerm ++ "+" ++ BNFRest ++ "\\)?", Opt) of
        {match, Positions} ->
            {go_on, matches_to_strings(POST, Positions)};
        nomatch ->
            case re:run(POST, "\\(?" ++ BNFTerm ++ "\\)?", Opt) of
                {match, Positions} ->
                    {terminate, matches_to_strings(POST, Positions)};
                nomatch ->
                    {terminate, [""]}
            end
    end,
    case M of
        {terminate, [SearchTerm]} -> SearchTerm;
        {go_on, [X, SearchTerms]} -> [X | parse_search_string(SearchTerms)]
    end.
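%% For illustration: the Reserved characters act as term separators, so
%%   parse_search_string("foo+bar") =:= ["foo", "bar"]
%% while a single-term query such as "foo" comes back as ["foo"].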
matches_to_strings(String, Positions) ->
    lists:map(fun({O, L}) -> string:substr(String, O + 1, L) end, Positions).
%%%%% END CONFIG %%%%%
% source code:
get_notes_html(A) ->
    Dir = string:substr(A#arg.fullpath, 1,
                        string:rchr(A#arg.fullpath, $/)),
    find_notes(Dir).
% get the list of all .note files
find_notes(Path) ->
    case file:list_dir(Path) of
        {ok, Files} ->
            find_notes(
                lists:map(fun(X) -> Path ++ X end, lists:sort(Files)),
                fun(X) -> note(start) ++ readlines(X) ++ note(finish) end,
                []
            );
        {error, _} -> false
    end.
find_notes([], _F, HTML) -> HTML;
find_notes([H|T], F, HTML) ->
    NHTML = case string:rstr(H, "note") > 0 of
        true  -> glue(F(H), HTML);
        false -> HTML
    end,
    find_notes(T, F, NHTML).
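%% For illustration: every directory entry whose name contains "note" (e.g. "01.note")
%% is read, wrapped with note(start)/note(finish) and glued onto the accumulated HTML;
%% entries are visited in lists:sort/1 order, so note file names double as ordering.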
% yeah, recursion and list re-building. fucked up for huge files
readlines(FileName) ->
    {ok, Device} = file:open(FileName, [read]),
    try get_all_lines(Device)
    after file:close(Device)
    end.
get_all_lines(Device) ->
    case io:get_line(Device, "") of
        eof -> [];
        Line -> Line ++ get_all_lines(Device)
    end.
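%% A minimal alternative sketch (not in the original): slurping the whole file at once
%% sidesteps the line-by-line list rebuilding the comment above complains about:
%%   readlines(FileName) ->
%%       {ok, Bin} = file:read_file(FileName),
%%       binary_to_list(Bin).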
% here's your stuff :)
out(A) ->
    Root = root(A),
    % getting header and footer
    Header = readlines(Root ++ "/header.html"),
    Footer = readlines(Root ++ "/footer.html"),
    % getting body
    Body = body(A),
    % giving out content
    {html, Header ++ Body ++ Footer}.
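%% out/1 is the callback Yaws invokes for this <erl> page on every request; A is the
%% #arg{} record from yaws_api.hrl, and returning {html, IoList} tells Yaws to splice
%% the generated markup into the response at the position of the <erl> block.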
</erl>