@ELLIOTTCABLE
Created March 12, 2019 10:04
Hello - from /Users/ec/.vscode/extensions/jaredly.reason-vscode-1.5.2/bin.native
Previous log location: /var/folders/xt/wm01x2h50nv993t_csb1t5qm0000gn/T/lsp.log
Sending notification {"jsonrpc": "2.0", "method": "client/registerCapability", "params": {"registrations": [{"id": "watching", "method": "workspace/didChangeWatchedFiles", "registerOptions": {"watchers": [{"globPattern": "**/bsconfig.json", "globPattern": "**/.merlin"}]}}]}}
Sending response {"id": 0, "jsonrpc": "2.0", "result": {"capabilities": {"textDocumentSync": 1, "hoverProvider": true, "completionProvider": {"resolveProvider": true, "triggerCharacters": ["."]}, "signatureHelpProvider": {"triggerCharacters": ["("]}, "definitionProvider": true, "typeDefinitionProvider": true, "referencesProvider": true, "documentSymbolProvider": true, "codeActionProvider": true, "executeCommandProvider": {"commands": ["reason-language-server.add_to_interface_inner"]}, "codeLensProvider": {"resolveProvider": true}, "documentHighlightProvider": true, "documentRangeFormattingProvider": true, "documentFormattingProvider": true, "documentFormattingProvider": true, "renameProvider": true}}}
Read message
{"jsonrpc":"2.0","method":"initialized","params":{}}
Read message
{"jsonrpc":"2.0","method":"workspace/didChangeConfiguration","params":{"settings":{"reason_language_server":{"location":"","build_system_override_by_root":{"file:///Users/ec/Sync/Code/excmd":"bsb:4.0.7"},"refmt":"","lispRefmt":"","format_width":90,"per_value_codelens":true,"dependencies_codelens":true,"opens_codelens":true,"show_module_path_on_hover":true,"reloadOnChange":false,"show_debug_errors":false,"autoRebuild":true}}}}
Read message
{"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{"uri":"file:///Users/ec/Sync/Code/excmd/src/lexer.body.ml","languageId":"ocaml","version":1,"text":"(* This file is appended to uAX31.ml, the output of the Unicode classifier in pkg/. *)\n\nopen Tokens\nopen Sedlexing\n\ntype mode = Main | Immediate | BlockComment of int | String | RunningURL\n\ntype buffer = {sedlex : Sedlexing.lexbuf; mutable mode : mode}\n\ntype 'a located = 'a * Lexing.position * Lexing.position\n\ntype 'a gen = unit -> 'a option\n\nexception LexError of Lexing.position * string\n\nexception ParseError of token located\n\nlet sedlex_of_buffer buf = buf.sedlex\n\nlet buffer_of_sedlex sedlex = {sedlex; mode = Main}\n\n(* {2 Constructors } *)\nlet buffer_of_string str = buffer_of_sedlex (Sedlexing.Utf8.from_string str)\n\n(* {2 Helpers } *)\nlet locate buf tok =\n let start, curr = sedlex_of_buffer |> lexing_positions in\n (tok, start, curr)\n\n\nlet utf8 buf = sedlex_of_buffer |> Utf8.lexeme\n\n(* {2 Accessors } *)\nlet token (tok : token located) =\n let tok, _loc, _end = tok in\n tok\n\n\nlet start_lnum (tok : token located) =\n let _tok, loc, _end = tok in\n loc.pos_lnum\n\n\nlet start_cnum (tok : token located) =\n let _tok, loc, _end = tok in\n loc.pos_cnum - loc.pos_bol\n\n\nlet end_lnum (tok : token located) =\n let _tok, _start, loc = tok in\n loc.pos_lnum\n\n\nlet end_cnum (tok : token located) =\n let _tok, _start, loc = tok in\n loc.pos_cnum - loc.pos_bol\n\n\n(* FIXME: I really need ppx_deriving or something to DRY all this mess up. Sigh, bsb. *)\nlet example_tokens =\n [| COLON\n ; COMMENT_CHUNK \"ARBITRARY\"\n ; COMMENT_LINE \"ARBITRARY\"\n ; COUNT \"123\"\n ; EOF\n ; EQUALS\n ; IDENTIFIER \"ARBITRARY\"\n ; LEFT_COMMENT_DELIM\n ; LEFT_PAREN\n ; LONG_FLAG \"ARBITRARY\"\n ; PIPE\n ; RIGHT_COMMENT_DELIM\n ; RIGHT_PAREN\n ; SEMICOLON\n ; SHORT_FLAGS \"ABC\"\n ; URL \"https://a.testing.url\" |]\n\n\nlet show_token tok =\n match tok with\n | COLON -> \"COLON\"\n | COMMENT_CHUNK _ -> \"COMMENT_CHUNK\"\n | COMMENT_LINE _ -> \"COMMENT_LINE\"\n | COUNT _ -> \"COUNT\"\n | EOF -> \"EOF\"\n | EQUALS -> \"EQUALS\"\n | IDENTIFIER _ -> \"IDENTIFIER\"\n | LEFT_COMMENT_DELIM -> \"LEFT_COMMENT_DELIM\"\n | LEFT_PAREN -> \"LEFT_PAREN\"\n | LONG_FLAG _ -> \"LONG_FLAG\"\n | PIPE -> \"PIPE\"\n | RIGHT_COMMENT_DELIM -> \"RIGHT_COMMENT_DELIM\"\n | RIGHT_PAREN -> \"RIGHT_PAREN\"\n | SEMICOLON -> \"SEMICOLON\"\n | SHORT_FLAGS _ -> \"SHORT_FLAGS\"\n | URL _ -> \"URL\"\n\n\nlet example_of_token tok =\n match tok with\n | COLON -> \":\"\n | COMMENT_CHUNK _ -> \"comment body\"\n | COMMENT_LINE _ -> \"// comment\"\n | COUNT _ -> \"2\"\n | EOF -> \"\"\n | EQUALS -> \"=\"\n | IDENTIFIER str -> str\n | LEFT_COMMENT_DELIM -> \"/*\"\n | LEFT_PAREN -> \"(\"\n | LONG_FLAG flag -> \"--\" ^ flag\n | PIPE -> \"|\"\n | RIGHT_COMMENT_DELIM -> \"*/\"\n | RIGHT_PAREN -> \")\"\n | SEMICOLON -> \";\"\n | SHORT_FLAGS flags -> \"-\" ^ flags\n | URL url -> url\n\n\nlet compare_token a b =\n if a = b then true\n else\n match (a, b) with\n | COUNT _, COUNT _\n |COMMENT_CHUNK _, COMMENT_CHUNK _\n |COMMENT_LINE _, COMMENT_LINE _\n |IDENTIFIER _, IDENTIFIER _\n |LONG_FLAG _, LONG_FLAG _\n |SHORT_FLAGS _, SHORT_FLAGS _\n |URL _, URL _ ->\n true\n | _ -> false\n\n\nlet token_body tok =\n match tok with\n | COUNT s\n |COMMENT_CHUNK s\n |COMMENT_LINE s\n |IDENTIFIER s\n |LONG_FLAG s\n |SHORT_FLAGS s\n |URL s ->\n Some s\n | _ -> None\n\n\n(* {2 Errors } *)\nlet lexfail buf s =\n let _start, curr = sedlex_of_buffer buf |> lexing_positions in\n 
raise (LexError (curr, s))\n\n\nlet illegal buf c =\n Uchar.to_int c\n |> Printf.sprintf \"unexpected character in expression: 'U+%04X'\"\n |> lexfail buf\n\n\nlet unreachable str = failwith (Printf.sprintf \"Unreachable: %s\" str)\n\n(* {2 Regular expressions } *)\nlet newline_char = [%sedlex.regexp? '\\r' | '\\n']\n\nlet newline = [%sedlex.regexp? \"\\r\\n\" | newline_char]\n\n(* FIXME: Expand definition of whitespace to follow TR31 *)\nlet space_char = [%sedlex.regexp? white_space]\n\nlet space = [%sedlex.regexp? Sub (space_char, newline_char) | newline]\n\nlet zero = [%sedlex.regexp? '0']\n\nlet nonzero = [%sedlex.regexp? '1' .. '9']\n\nlet digit = [%sedlex.regexp? zero | nonzero]\n\nlet count = [%sedlex.regexp? nonzero, Star digit]\n\n(* FIXME: Add U+200C/D? *)\nlet start_char = [%sedlex.regexp? Sub (xid_start, digit)]\n\nlet continue_char = [%sedlex.regexp? xid_continue]\n\nlet medial_char =\n [%sedlex.regexp?\n ( 0x002D (* '-' HYPHEN-MINUS *) | 0x002E (* '.' FULL STOP *)\n | 0x00B7\n (* '·' MIDDLE DOT *) | 0x058A (* '֊' ARMENIAN HYPHEN *)\n | 0x05F4\n (* '״' HEBREW PUNCTUATION GERSHAYIM *)\n | 0x0F0B\n (* '་' TIBETAN MARK INTERSYLLABIC TSHEG *) | 0x2027 (* '‧' HYPHENATION POINT *)\n | 0x30FB (* '・' KATAKANA MIDDLE DOT *) )]\n\n\n(* UAX31-D1:\n *\n * <Identifier> := <Start> <Continue>* (<Medial> <Continue>+)*\n*)\nlet identifier =\n [%sedlex.regexp?\n start_char, Star continue_char, Star (medial_char, Plus continue_char)]\n\n\n(* A lot of the details of this URL-parsing architecture comes from RFC3986. See also,\n ELLIOTTCABLE/excmd.js#4. <https://tools.ietf.org/html/rfc3986> *)\nlet ascii_letter = [%sedlex.regexp? 'a' .. 'z' | 'A' .. 'Z']\n\nlet url_scheme_char = [%sedlex.regexp? ascii_letter | '+' | '-' | '.']\n\n(* Of note, this doesn't restrict which URL schemes we can *support*, just which we can\n support users typing without adding quote-marks around them. *)\nlet url_known_scheme =\n [%sedlex.regexp?\n ( \"http\" | \"https\" | \"file\" | \"ftp\" | \"dat\" | \"gopher\" | \"ldap\" | \"mailto\" | \"news\"\n | \"telnet\" )]\n\nlet url_doubleslash_scheme =\n [%sedlex.regexp? Star url_scheme_char, \"://\"]\n\nlet url_scheme =\n [%sedlex.regexp? url_known_scheme, ':' | url_doubleslash_scheme]\n\n\n(* TLDs represented with >=1,000 domains in the Alexa top-1M. Messily extracted from\n here: <https://www.hayksaakian.com/most-popular-tlds/> *)\nlet url_known_tld =\n [%sedlex.regexp?\n ( \"ar\" | \"at\" | \"au\" | \"az\" | \"be\" | \"bg\" | \"biz\" | \"br\" | \"by\" | \"ca\" | \"cc\" | \"ch\"\n | \"cl\" | \"club\" | \"cn\" | \"co\" | \"com\" | \"cz\" | \"de\" | \"dk\" | \"edu\" | \"es\" | \"eu\"\n | \"fi\" | \"fr\" | \"gov\" | \"gr\" | \"hk\" | \"hr\" | \"hu\" | \"id\" | \"ie\" | \"il\" | \"in\"\n | \"info\" | \"io\" | \"ir\" | \"it\" | \"jp\" | \"kr\" | \"kz\" | \"lt\" | \"me\" | \"mil\" | \"mx\"\n | \"my\" | \"net\" | \"nl\" | \"no\" | \"nz\" | \"online\" | \"org\" | \"pl\" | \"pro\" | \"pt\" | \"ro\"\n | \"ru\" | \"se\" | \"site\" | \"sk\" | \"su\" | \"th\" | \"tr\" | \"tv\" | \"tw\" | \"ua\" | \"uk\" | \"us\"\n | \"vn\" | \"xyz\" | \"za\" )]\n\n(* FIXME: The IDENTIFIER rules probably don't fully capture the allowable IDN construction. Generate\n this from the IDNA tables directly, like a Good Internet Citizen:\n <https://www.iana.org/assignments/idna-tables-6.3.0/idna-tables-6.3.0.xhtml#idna-tables-properties> *)\nlet url_2nd_level_domain = [%sedlex.regexp? 
identifier]\n\nlet url_start = [%sedlex.regexp?\n url_scheme | url_2nd_level_domain, '.', url_known_tld\n]\n\n(* {2 Lexer body } *)\n\n(* Validate incoming delimiter. As a special case, if there's no more delimiters to be matched, an\n incoming closing delimiter returns [None]. *)\nlet pop_delim buf opening closing xs =\n match xs with\n | [] -> None\n | hd :: tl ->\n if hd != opening then\n lexfail buf (String.concat\n [\"Unmatched opening `\", opening, \"`. (Did you forget a `\", closinig, \"`?)\"])\n else\n Some tl\n\n(* Swallow and discard whitespace; produces no tokens. *)\nlet rec swallow_atmosphere ?(saw_whitespace = false) buf =\n let slbuf = sedlex_of_buffer buf in\n match%sedlex slbuf with\n | Plus space -> swallow_atmosphere ~saw_whitespace:true buf\n | _ -> saw_whitespace\n\n\n(* Produces a single line of comment, wholesale, as a token. *)\nand comment buf =\n let slbuf = sedlex_of_buffer buf in\n match%sedlex slbuf with\n | Star (Compl (newline_char | eof)) -> COMMENT_LINE (utf8 buf) |> locate buf\n | _ -> unreachable \"comment\"\n\n\n(* Wow. This is a monstrosity. *)\nand block_comment depth buf =\n (* Js.log \"token (mode: BlockComment)\"; *)\n let slbuf = sedlex_of_buffer buf in\n match%sedlex slbuf with\n | \"*/\" ->\n buf.mode <- (if depth = 1 then Main else BlockComment (depth - 1)) ;\n RIGHT_COMMENT_DELIM |> locate buf\n | \"/*\" ->\n buf.mode <- BlockComment (depth + 1) ;\n LEFT_COMMENT_DELIM |> locate buf\n | '/', Compl '*' | '*', Compl '/' | Plus (Compl ('*' | '/')) ->\n let start, _curr = lexing_positions slbuf\n and acc = Buffer.create 256 (* 3 lines of 80 chars = ~240 bytes *) in\n Buffer.add_string acc (utf8 buf) ;\n continuing_block_comment buf start acc\n | eof ->\n lexfail buf\n \"Reached end-of-file without finding a matching block-comment end-delimiter\"\n | _ -> unreachable \"block_comment\"\n\n\nand continuing_block_comment buf start acc =\n let slbuf = sedlex_of_buffer buf in\n let _start, curr = lexing_positions slbuf in\n match%sedlex slbuf with\n | \"*/\" | \"/*\" ->\n rollback slbuf ;\n (COMMENT_CHUNK (Buffer.contents acc), start, curr)\n | '/', Compl '*' | '*', Compl '/' | Plus (Compl ('*' | '/')) ->\n Buffer.add_string acc (utf8 buf) ;\n continuing_block_comment buf start acc\n | eof ->\n lexfail buf\n \"Reached end-of-file without finding a matching block-comment end-delimiter\"\n | _ -> unreachable \"continuing_block_comment\"\n\nand continuing_url buf start acc delims =\n let last_delim = match delims with\n | hd :: _tl -> Some hd\n | []\nlet slbuf = sedlex_of_buffer buf in\nlet _start, curr = lexing_positions slbuf in\nmatch%sedlex slbuf with\n| space ->\n rollback slbuf;\n (URL (Buffer.contents acc), start, curr)\n\nand immediate ?(saw_whitespace = false) buf =\n (* Js.log \"token (mode: Immediate)\"; *)\n buf.mode <- Main ;\n let slbuf = sedlex_of_buffer buf in\n match%sedlex slbuf with\n | eof -> EOF |> locate buf\n (* One-line comments are lexed as a single token ... *)\n | \"//\" -> comment buf\n (* ... while block-comments swap into a custom lexing-mode to handle proper nesting. 
*)\n | \"/*\" ->\n buf.mode <- BlockComment 1 ;\n LEFT_COMMENT_DELIM |> locate buf\n | \"*/\" -> lexfail buf \"Unmatched block-comment end-delimiter\"\n | ':' -> COLON |> locate buf\n | '|' -> PIPE |> locate buf\n | ';' -> SEMICOLON |> locate buf\n | count -> COUNT (utf8 buf) |> locate buf\n | url_start ->\n let start, _curr = lexing_positions slbuf\n (* 99.5th% confidence interval for URLs is 218 chars.\n <https://stackoverflow.com/a/31758386/31897> *)\n and acc = Buffer.create 256 in\n Buffer.add_string acc (utf8 buf) ;\n continuing_url buf start acc []\n\n | identifier -> IDENTIFIER (utf8 buf) |> locate buf\n | \"--\", identifier ->\n let whole = utf8 buf in\n let flag = String.sub whole 2 (String.length whole - 2) in\n LONG_FLAG flag |> locate buf\n | \"-\", identifier ->\n let whole = utf8 buf in\n let flags = String.sub whole 1 (String.length whole - 1) in\n SHORT_FLAGS flags |> locate buf\n | '=' ->\n if saw_whitespace then\n lexfail buf\n \"Unexpected whitespace before '='; try attaching explicit parameters directly \\\n after their flag\"\n else buf.mode <- Immediate ;\n EQUALS |> locate buf\n | '(' -> LEFT_PAREN |> locate buf\n | ')' -> RIGHT_PAREN |> locate buf\n | _ -> (\n match next buf.sedlex with Some c -> illegal buf c | None -> unreachable \"main\" )\n\n\nand main buf =\n let saw_whitespace = swallow_atmosphere buf in\n immediate ~saw_whitespace buf\n\n\n(** Return the next token, with location information. *)\nlet next_loc buf =\n match buf.mode with\n | Main -> main buf\n | Immediate -> immediate buf\n | BlockComment depth -> block_comment depth buf\n | String -> failwith \"NYI\"\n\n\n(** Return *just* the next token, discarding location information. *)\nlet next buf =\n let tok, _, _ = next_loc buf in\n tok\n\n\nlet gen_loc buf () =\n match next_loc buf with EOF, _, _ -> None | _ as tuple -> Some tuple\n\n\nlet gen buf () = match next_loc buf with EOF, _, _ -> None | tok, _, _ -> Some tok\n\nlet tokens_loc buf = gen_loc buf |> Gen.to_list |> Array.of_list\n\nlet tokens buf = gen buf |> Gen.to_list |> Array.of_list\n\nlet mode buf = buf.mode\n"}}}
Sending notification {"jsonrpc": "2.0", "method": "window/showMessage", "params": {"type": 1, "message": "Unable to read file:///Users/ec/Sync/Code/excmd/bsconfig.json"}}
Read message
{"jsonrpc":"2.0","id":1,"method":"textDocument/documentSymbol","params":{"textDocument":{"uri":"file:///Users/ec/Sync/Code/excmd/src/lexer.body.ml"}}}
[server] Got a method textDocument/documentSymbol
[server] processing took 0.00596046447754ms
Sending response {"id": 1, "jsonrpc": "2.0", "error": {"code": -32603, "message": "Unable to read file:///Users/ec/Sync/Code/excmd/bsconfig.json"}}
Read message
{"jsonrpc":"2.0","id":2,"method":"textDocument/codeAction","params":{"textDocument":{"uri":"file:///Users/ec/Sync/Code/excmd/src/lexer.body.ml"},"range":{"start":{"line":0,"character":0},"end":{"line":0,"character":0}},"context":{"diagnostics":[]}}}
[server] Got a method textDocument/codeAction
[server] processing took 0.00596046447754ms
Sending response {"id": 2, "jsonrpc": "2.0", "error": {"code": -32603, "message": "Unable to read file:///Users/ec/Sync/Code/excmd/bsconfig.json"}}
Read message
{"jsonrpc":"2.0","id":3,"method":"textDocument/codeLens","params":{"textDocument":{"uri":"file:///Users/ec/Sync/Code/excmd/src/lexer.body.ml"}}}
[server] Got a method textDocument/codeLens
[server] processing took 0.00596046447754ms
Sending response {"id": 3, "jsonrpc": "2.0", "result": [{"range": {"start": {"line": 0, "character": 0}, "end": {"line": 0, "character": 0}}, "command": {"title": "Unable to load compilation data: Unable to read file:///Users/ec/Sync/Code/excmd/bsconfig.json", "command": ""}}]}
Read message
{"jsonrpc":"2.0","id":4,"method":"textDocument/documentSymbol","params":{"textDocument":{"uri":"file:///Users/ec/Sync/Code/excmd/src/lexer.body.ml"}}}
[server] Got a method textDocument/documentSymbol
[server] processing took 0.00596046447754ms
Sending response {"id": 4, "jsonrpc": "2.0", "error": {"code": -32603, "message": "Unable to read file:///Users/ec/Sync/Code/excmd/bsconfig.json"}}
Read message
{"jsonrpc":"2.0","id":5,"method":"textDocument/hover","params":{"textDocument":{"uri":"file:///Users/ec/Sync/Code/excmd/src/lexer.body.ml"},"position":{"line":5,"character":30}}}
[server] Got a method textDocument/hover
[server] processing took 0.0100135803223ms
Sending response {"id": 5, "jsonrpc": "2.0", "error": {"code": -32603, "message": "Unable to read file:///Users/ec/Sync/Code/excmd/bsconfig.json"}}
Read message
{"jsonrpc":"2.0","id":6,"method":"textDocument/hover","params":{"textDocument":{"uri":"file:///Users/ec/Sync/Code/excmd/src/lexer.body.ml"},"position":{"line":5,"character":30}}}
[server] Got a method textDocument/hover
[server] processing took 0.018835067749ms
Sending response {"id": 6, "jsonrpc": "2.0", "error": {"code": -32603, "message": "Unable to read file:///Users/ec/Sync/Code/excmd/bsconfig.json"}}
Read message
{"jsonrpc":"2.0","id":7,"method":"textDocument/hover","params":{"textDocument":{"uri":"file:///Users/ec/Sync/Code/excmd/src/lexer.body.ml"},"position":{"line":0,"character":65}}}
[server] Got a method textDocument/hover
[server] processing took 0.0109672546387ms
Sending response {"id": 7, "jsonrpc": "2.0", "error": {"code": -32603, "message": "Unable to read file:///Users/ec/Sync/Code/excmd/bsconfig.json"}}