Created
March 14, 2011 15:36
-
-
Save jpf91/869324 to your computer and use it in GitHub Desktop.
jpf.http.formatter
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/** | |
* Contains functions to format the jpf.http.types types | |
* into the HTTP format (formatValue, formatHeader). | |
* | |
* TODO: | |
* Remove trailing zeros with floating point numbers | |
* Spec allows this, but it wastes bandwidth | |
* | |
* Not implemented headers: | |
* Authorization | |
* Proxy-Authorization | |
* Cache-Control | |
* Via | |
* Warning | |
* | |
* Because of the rewrite of the datetime code in Phobos the | |
* following headers are not implemented: | |
* If-Modified-Since | |
* If-Range | |
* If-Unmodified-Since | |
* Date | |
* Expires | |
* Last-Modified | |
* | |
* Tips: | |
* From; use an email address string | |
* Host; use an host:port string | |
* Max-Forwards; use to!string(uint) from std.conv | |
* Content-Length; use to!string(ulong) from std.conv | |
* Referer: Use an absolute or relative URI string | |
* Content-Location: Use an absolute or relative URI string | |
* User-Agent: Use a properly formatted string | |
*/ | |
module jpf.http.formatter; | |
import std.range; | |
import std.array; //Appender | |
import std.stdio; | |
import std.format; | |
import std.ctype; | |
import std.conv; | |
import std.base64; //encode | |
import jpf.http.types; | |
import jpf.http.parser; //isToken | |
/**
 * Returns true if val already looks like an HTTP quoted-string,
 * i.e. it is at least two characters long and wrapped in
 * double quotes.
 */
bool isQuotedString(string val)
{
    return val.length >= 2 && val[0] == '"' && val[$ - 1] == '"';
}
/**
 * Wraps val in double quotes, escaping embedded '"' and '\'
 * characters as quoted-pairs (RFC 2616 section 2.2).
 *
 * Fix: the backslash itself must be escaped as well, otherwise a
 * value containing '\' cannot be round-tripped through
 * quote()/unquote().
 */
string quote(string val)
{
    auto result = appender!string;
    foreach(c; val)
    {
        //'"' and '\' must be sent as quoted-pairs
        if(c == '"' || c == '\\')
            result.put('\\');
        result.put(c);
    }
    return '"' ~ result.data ~ '"';
}
/**
 * Writes a single ';key[=value]' parameter. The key must be a valid
 * HTTP token; the value is written as-is when it is a token and
 * quoted otherwise. An empty value writes just ';key'.
 *
 * Fix: accept any string output range instead of hard-coding
 * Appender!string — every formatValue template passes its generic
 * writer T straight through, which previously only compiled when
 * T happened to be Appender!string.
 */
void writeParameter(T)(T writer, string key, string value)
    if (isOutputRange!(T, string))
{
    writer.put(";");
    assert(isToken(key));
    writer.put(key);
    if(value != "")
    {
        writer.put("=");
        if(isToken(value))
            writer.put(value);
        else
            writer.put(quote(value));
    }
}
/**
 * Accept header
 *
 * Writes a comma separated list of media ranges. Each range is
 * emitted as 'type/subtype' followed by its ';key=value' media
 * parameters. When AcceptParam (the quality factor) is non-negative
 * it is appended as ';q=x.xxx', followed by the accept extensions —
 * extensions are only ever written together with a q value because
 * the RFC 2616 14.1 grammar attaches them to the q parameter.
 */
void formatValue(T)(MediaRange[] mr, T writer)
    if (isOutputRange!(T, string))
{
    foreach(i, range; mr)
    {
        debug
        {
            //'type/*' is valid, but '*/subtype' is not
            if(range.Type.SubType != "*")
                assert(range.Type.Type != "*");
        }
        assert(isToken(range.Type.Type));
        assert(isToken(range.Type.SubType));
        formattedWrite(writer, "%s/%s", range.Type.Type, range.Type.SubType);
        foreach(key, value; range.Type.Parameters)
        {
            writeParameter(writer, key, value);
        }
        //a negative AcceptParam means 'no q value given'
        if(range.AcceptParam >= 0)
        {
            assert(range.AcceptParam <= 1);
            formattedWrite(writer, ";q=%.3f", range.AcceptParam);
            foreach(key, value; range.AcceptExtension)
            {
                writeParameter(writer, key, value);
            }
        }
        //', ' separator between entries, not after the last one
        if(i != mr.length - 1)
            writer.put(", ");
    }
}
/**
 * Accept-Charset header
 *
 * Writes the charsets as a comma separated list; a quality factor
 * is appended as ';q=x.xxx' when Q is non-negative.
 */
void formatValue(T)(Charset[] cs, T writer)
    if (isOutputRange!(T, string))
{
    foreach(i, entry; cs)
    {
        //separator before every entry except the first
        if(i != 0)
            writer.put(", ");
        assert(isToken(entry.Name));
        writer.put(entry.Name);
        if(entry.Q >= 0)
        {
            assert(entry.Q <= 1);
            formattedWrite(writer, ";q=%.3f", entry.Q);
        }
    }
}
/**
 * Accept-Encoding header
 *
 * Writes a comma separated list of content codings, each optionally
 * followed by a quality factor ';q=x.xxx'.
 */
void formatValue(T)(AcceptEncoding[] list, T writer)
    if (isOutputRange!(T, string))
{
    //Maps a content coding to the token that goes on the wire.
    static string codingName(C)(C coding)
    {
        final switch(coding.Type)
        {
            case ContentCodingType.Compress: return "compress";
            case ContentCodingType.Deflate:  return "deflate";
            case ContentCodingType.Gzip:     return "gzip";
            case ContentCodingType.Identity: return "identity";
            case ContentCodingType.Other:
                //extension codings must be valid HTTP tokens
                assert(isToken(coding.Other));
                return coding.Other;
        }
    }
    foreach(i, entry; list)
    {
        if(i != 0)
            writer.put(", ");
        writer.put(codingName(entry.Coding));
        if(entry.Q >= 0)
        {
            assert(entry.Q <= 1);
            formattedWrite(writer, ";q=%.3f", entry.Q);
        }
    }
}
/**
 * Accept-Language header
 *
 * Writes a comma separated list of language ranges in the form
 * 'main[-sub][;q=x.xxx]'. RFC 2616 3.10 limits each tag part to
 * 8 characters; the wildcard '*' must not carry a subtag.
 */
void formatValue(T)(AcceptLanguage[] list, T writer)
    if (isOutputRange!(T, string))
{
    foreach(i, entry; list)
    {
        //RFC 2616 3.10: at most 8 characters per tag part
        assert(entry.Tag.Main.length <= 8);
        assert(entry.Tag.Sub.length <= 8);
        debug
        {
            if(entry.Tag.Main != "*")
            {
                //NOTE(review): alpha-only checks are disabled;
                //isalpha works per character, so these would need
                //a loop over the tag to be enabled
                //assert(isalpha(entry.Sub));
                //assert(isalpha(entry.Main));
            }
            else
            {
                //the wildcard range may not carry a subtag
                assert(entry.Tag.Sub == "");
            }
        }
        writer.put(entry.Tag.Main);
        if(entry.Tag.Sub != "")
        {
            writer.put("-");
            writer.put(entry.Tag.Sub);
        }
        //negative Q means 'no quality factor given'
        if(entry.Q >= 0)
        {
            assert(entry.Q <= 1);
            formattedWrite(writer, ";q=%.3f", entry.Q);
        }
        if(i != list.length - 1)
            writer.put(", ");
    }
}
/**
 * Expect header
 *
 * Writes a comma separated list of expectations: either the standard
 * '100-continue' (which carries no extension data) or an extension
 * 'key[=value][;param...]' where the value is quoted when it is not
 * a valid token.
 */
void formatValue(T)(Expectation[] list, T writer)
    if (isOutputRange!(T, string))
{
    foreach(i, entry; list)
    {
        if(i != 0)
            writer.put(", ");
        if(entry.Type == ExpectationType.Continue)
        {
            //'100-continue' never carries extension data
            assert(entry.ExtensionKey == "");
            assert(entry.ExtensionValue == "");
            assert(entry.ExtensionParameters.length == 0);
            writer.put("100-continue");
            continue;
        }
        assert(entry.ExtensionKey != "");
        assert(isToken(entry.ExtensionKey));
        writer.put(entry.ExtensionKey);
        if(entry.ExtensionValue != "")
        {
            writer.put("=");
            //quote the value if it is not a valid token
            writer.put(isToken(entry.ExtensionValue)
                ? entry.ExtensionValue : quote(entry.ExtensionValue));
        }
        foreach(key, value; entry.ExtensionParameters)
            writeParameter(writer, key, value);
    }
}
/**
 * Formats a single entity tag as used by the If-Range, If-Match and
 * If-None-Match headers. Weak tags are prefixed with "W/", the tag
 * value itself is always written as a quoted-string.
 */
void formatValue(T)(ETag etag, T writer)
    if (isOutputRange!(T, string))
{
    assert(etag.Value != "");
    if(etag.Weak)
        writer.put("W/");
    writer.put(quote(etag.Value));
}
/**
 * If-Match header
 * If-None-Match header
 *
 * Writes '*' when the header matches everything (All set, no
 * specific tags allowed), otherwise a comma separated list of
 * entity tags.
 */
void formatValue(T)(IfMatchHeader header, T writer)
    if (isOutputRange!(T, string))
{
    if(header.All)
    {
        //'*' excludes specific tags
        assert(header.Specific.length == 0);
        writer.put("*");
        return;
    }
    assert(header.Specific.length > 0);
    foreach(i, entry; header.Specific)
    {
        if(i != 0)
            writer.put(", ");
        formatValue(entry, writer);
    }
}
/**
 * Range header
 *
 * Writes a byte-range-set: 'bytes=from-to,...'. A position of 0 is
 * treated as 'not given', which produces suffix ranges ('-1000')
 * and open ended ranges ('3000-').
 *
 * NOTE(review): because 0 doubles as the 'absent' marker, a range
 * that genuinely starts at byte 0 ('0-499') cannot be expressed.
 * If RangeEntry's positions carry an Unknown flag like
 * ContentRange's do, that flag should be checked here instead —
 * confirm against jpf.http.types.
 */
void formatValue(T)(RangeEntry[] ranges, T writer)
    if (isOutputRange!(T, string))
{
    writer.put("bytes=");
    foreach(i, entry; ranges)
    {
        if(entry.From.Position != 0)
        {
            writer.put(to!string(entry.From.Position));
        }
        writer.put("-");
        if(entry.To.Position != 0)
        {
            writer.put(to!string(entry.To.Position));
        }
        //range entries are separated with a plain ',' (no space)
        if(i != ranges.length - 1)
            writer.put(",");
    }
}
/**
 * TE header
 *
 * Writes the transfer codings accepted in the response. 'trailers'
 * is emitted first when te.Trailers is set. Each coding may carry
 * ';key=value' parameters, an optional quality factor ';q=x.xxx'
 * and — only together with a q value — accept parameters
 * (RFC 2616 14.39 attaches accept-params to the q parameter).
 */
void formatValue(T)(TE te, T writer)
    if (isOutputRange!(T, string))
{
    if(te.Trailers)
        writer.put("trailers, ");
    foreach(i, entry; te.Codings)
    {
        final switch(entry.Coding.Type)
        {
            case ContentCodingType.Compress:
                writer.put("compress");
                break;
            case ContentCodingType.Deflate:
                writer.put("deflate");
                break;
            case ContentCodingType.Gzip:
                writer.put("gzip");
                break;
            case ContentCodingType.Identity:
                writer.put("identity");
                break;
            case ContentCodingType.Other:
                //extension codings must be valid HTTP tokens
                assert(isToken(entry.Coding.Other));
                writer.put(entry.Coding.Other);
                break;
        }
        foreach(key, value; entry.Parameters)
        {
            writeParameter(writer, key, value);
        }
        //negative Q means 'no quality factor given'
        if(entry.Q >= 0)
        {
            assert(entry.Q <= 1);
            formattedWrite(writer, ";q=%.3f", entry.Q);
            //accept params are only valid after a q value
            foreach(key, value; entry.AcceptParameters)
            {
                writeParameter(writer, key, value);
            }
        }
        if(i != te.Codings.length - 1)
            writer.put(", ");
    }
}
/**
 * Connection header
 *
 * Writes a comma separated list of connection tokens; the standard
 * 'close' token carries no Other value, extension tokens must be
 * valid HTTP tokens.
 */
void formatValue(T)(ConnectionHeader head, T writer)
    if (isOutputRange!(T, string))
{
    foreach(i, entry; head.Fragments)
    {
        if(i != 0)
            writer.put(", ");
        if(entry.Type == ConnectionType.Close)
        {
            assert(entry.Other == "");
            writer.put("close");
        }
        else
        {
            assert(entry.Other != "");
            assert(isToken(entry.Other));
            writer.put(entry.Other);
        }
    }
}
/**
 * Pragma header
 *
 * Writes a comma separated list of pragma directives: either the
 * standard 'no-cache' or an extension 'key[=value]' where the value
 * is quoted when it is not a valid token.
 */
void formatValue(T)(PragmaHeader head, T writer)
    if (isOutputRange!(T, string))
{
    foreach(i, entry; head.Fragments)
    {
        if(entry.Type == PragmaType.No_cache)
        {
            writer.put("no-cache");
            //'no-cache' carries no extension data
            assert(entry.ExtensionKey == "");
            assert(entry.ExtensionValue == "");
        }
        else
        {
            assert(entry.ExtensionKey != "");
            assert(isToken(entry.ExtensionKey));
            writer.put(entry.ExtensionKey);
            if(entry.ExtensionValue != "")
            {
                writer.put("=");
                //quote the value if it is not a valid token
                if(isToken(entry.ExtensionValue))
                    writer.put(entry.ExtensionValue);
                else
                    writer.put(quote(entry.ExtensionValue));
            }
        }
        if(i != head.Fragments.length - 1)
            writer.put(", ");
    }
}
/**
 * Transfer-Encoding header
 *
 * Writes a comma separated list of transfer codings. 'chunked' never
 * carries extra data; custom codings may have ';key=value'
 * parameters attached.
 */
void formatValue(T)(TransferEncodingHeader head, T writer)
    if (isOutputRange!(T, string))
{
    foreach(i, entry; head.Fragments)
    {
        if(i != 0)
            writer.put(", ");
        if(entry.Type == TransferCodingType.Chunked)
        {
            assert(entry.Other == "");
            assert(entry.Parameters.length == 0);
            writer.put("chunked");
            continue;
        }
        assert(entry.Other != "");
        assert(isToken(entry.Other));
        writer.put(entry.Other);
        foreach(key, value; entry.Parameters)
            writeParameter(writer, key, value);
    }
}
/**
 * Upgrade header
 *
 * Writes a comma separated list of product tokens in the form
 * 'name[/version]'. Both parts must be valid HTTP tokens.
 */
void formatValue(T)(Product[] prod, T writer)
    if (isOutputRange!(T, string))
{
    foreach(i, entry; prod)
    {
        if(i != 0)
            writer.put(", ");
        assert(entry.Name != "");
        assert(isToken(entry.Name));
        writer.put(entry.Name);
        //the version part is optional
        if(entry.Version == "")
            continue;
        assert(isToken(entry.Version));
        writer.put("/");
        writer.put(entry.Version);
    }
}
/**
 * Allow header
 *
 * Writes a comma separated list of HTTP method names; extension
 * methods are written verbatim and must be valid HTTP tokens.
 */
void formatValue(T)(AllowHeader head, T writer)
    if (isOutputRange!(T, string))
{
    //Maps one fragment to the method token that goes on the wire.
    static string methodName(F)(F entry)
    {
        final switch(entry.Method)
        {
            case HTTPMethod.OPTIONS: return "OPTIONS";
            case HTTPMethod.GET:     return "GET";
            case HTTPMethod.HEAD:    return "HEAD";
            case HTTPMethod.POST:    return "POST";
            case HTTPMethod.PUT:     return "PUT";
            case HTTPMethod.DELETE:  return "DELETE";
            case HTTPMethod.TRACE:   return "TRACE";
            case HTTPMethod.CONNECT: return "CONNECT";
            case HTTPMethod.Other:
                assert(entry.OtherMethod != "");
                assert(isToken(entry.OtherMethod));
                return entry.OtherMethod;
        }
    }
    foreach(i, entry; head.Fragments)
    {
        if(i != 0)
            writer.put(", ");
        writer.put(methodName(entry));
    }
}
/**
 * Content-Encoding header
 *
 * Writes a comma separated list of content codings. Standard codings
 * must not carry an Other value; extension codings are written
 * verbatim and must be valid HTTP tokens.
 */
void formatValue(T)(ContentCoding[] coding, T writer)
    if (isOutputRange!(T, string))
{
    foreach(i, entry; coding)
    {
        final switch(entry.Type)
        {
            case ContentCodingType.Compress:
                assert(entry.Other == "");
                writer.put("compress");
                break;
            case ContentCodingType.Gzip:
                assert(entry.Other == "");
                writer.put("gzip");
                break;
            case ContentCodingType.Deflate:
                assert(entry.Other == "");
                writer.put("deflate");
                break;
            case ContentCodingType.Identity:
                assert(entry.Other == "");
                writer.put("identity");
                break;
            case ContentCodingType.Other:
                assert(entry.Other != "");
                assert(isToken(entry.Other));
                writer.put(entry.Other);
                break;
        }
        if(i != coding.length - 1)
            writer.put(", ");
    }
}
/**
 * Content-Range header
 *
 * Writes 'bytes from-to/length'. The byte range collapses to '*'
 * when either end is unknown; an unknown instance length is also
 * written as '*'.
 */
void formatValue(T)(ContentRange range, T writer)
    if (isOutputRange!(T, string))
{
    writer.put("bytes ");
    const rangeKnown = !range.From.Unknown && !range.To.Unknown;
    if(rangeKnown)
    {
        writer.put(to!string(range.From.Position));
        writer.put("-");
        writer.put(to!string(range.To.Position));
    }
    else
    {
        writer.put("*");
    }
    writer.put("/");
    writer.put(range.Length.Unknown ? "*" : to!string(range.Length.Position));
}
/**
 * Content-Type header
 *
 * Writes 'type/subtype' followed by the ';key=value' parameters.
 */
void formatValue(T)(MediaType media, T writer)
    if (isOutputRange!(T, string))
{
    assert(isToken(media.Type));
    assert(isToken(media.SubType));
    formattedWrite(writer, "%s/%s", media.Type, media.SubType);
    foreach(key, value; media.Parameters)
        writeParameter(writer, key, value);
}
/**
 * Trailer header
 *
 * Writes a comma separated list of header field names; every name
 * must be a valid HTTP token.
 */
void formatTrailerValue(T)(string[] fields, T writer)
    if (isOutputRange!(T, string))
{
    foreach(i, field; fields)
    {
        if(i != 0)
            writer.put(", ");
        assert(isToken(field));
        writer.put(field);
    }
}
/**
 * Convenience overload: returns the formatted Trailer value as a
 * newly allocated string.
 */
string formatTrailerValue()(string[] fields)
{
    auto result = appender!string();
    formatTrailerValue(fields, result);
    return result.data;
}
/**
 * Content-Language header
 *
 * Writes a comma separated list of language tags in the form
 * 'main[-sub]'; both parts must be valid HTTP tokens.
 */
void formatContentLanguageValue(T)(LanguageTag[] fields, T writer)
    if (isOutputRange!(T, string))
{
    foreach(i, tag; fields)
    {
        if(i != 0)
            writer.put(", ");
        assert(isToken(tag.Main));
        writer.put(tag.Main);
        //the subtag is optional
        if(tag.Sub == "")
            continue;
        assert(isToken(tag.Sub));
        writer.put("-");
        writer.put(tag.Sub);
    }
}
/**
 * Convenience overload: returns the formatted Content-Language
 * value as a newly allocated string.
 */
string formatContentLanguageValue()(LanguageTag[] fields)
{
    auto result = appender!string();
    formatContentLanguageValue(fields, result);
    return result.data;
}
/**
 * Content-MD5 header
 *
 * Writes the base64 encoding of the 16 byte MD5 digest
 * (24 characters including '=' padding).
 *
 * Fix: use the std.base64 Base64 API (as jpf.http.parser already
 * does) instead of the deprecated free encode() function, and encode
 * the raw bytes directly instead of casting them to string.
 */
void formatContentMD5Value(T)(ubyte[16] md5, T writer)
    if (isOutputRange!(T, string))
{
    char[24] base; //Base64.encodeLength(16) == 24
    writer.put(cast(string)Base64.encode(md5[], base[]));
}
/**
 * Convenience overload: returns the base64 encoded digest as a
 * newly allocated string.
 */
string formatContentMD5Value()(ubyte[16] md5)
{
    auto result = appender!string();
    formatContentMD5Value(md5, result);
    return result.data;
}
/**
 * Convenience overload: formats value into a newly allocated
 * string. Available for every type that has a matching
 * (value, writer) formatValue overload.
 */
string formatValue(T)(T value)
    if (__traits(compiles, formatValue(value, appender!string())))
{
    auto result = appender!string();
    formatValue(value, result);
    return result.data;
}
/**
 * Writes a complete 'Key: value\r\n' header line. String values are
 * written verbatim; everything else is formatted through the
 * matching formatValue overload.
 *
 * Fix: accept any string output range for the writer — the old
 * signature hard-coded Appender!string even though its constraint
 * already tested formatValue against the generic writer.
 */
void formatHeader(T, W)(string key, T value, W writer)
    if((is(T == string) || __traits(compiles, formatValue(value, writer)))
        && isOutputRange!(W, string))
{
    assert(isToken(key));
    writer.put(key);
    writer.put(": ");
    static if (is(T == string))
    {
        writer.put(value);
    }
    else
    {
        formatValue(value, writer);
    }
    writer.put("\r\n");
}
/**
 * Convenience overload: returns the complete header line as a
 * newly allocated string.
 */
string formatHeader(T)(string key, T value)
    if(is(T == string) || __traits(compiles, formatValue(value)))
{
    auto result = appender!string();
    formatHeader(key, value, result);
    return result.data;
}
//NOTE(review): several of these tests compare against a fixed
//ordering of associative array entries (parameters, extensions);
//AA iteration order is not specified by the language, so these
//asserts may break on a different runtime — confirm.

//Accept: media ranges with parameters, q value and extensions
unittest
{
    MediaRange[] mr;
    MediaRange m1;
    m1.Type.Type = "text";
    m1.Type.SubType = "html";
    m1.Type.Parameters["charset"] = "utf8";
    MediaRange m2;
    m2.Type.Type = "text";
    m2.Type.SubType = "json";
    m2.Type.Parameters["charset"] = "utf8";
    m2.Type.Parameters["secondparam"] = "test";
    m2.AcceptParam = 0.7f;
    m2.AcceptExtension["abcd"] = "def";
    m2.AcceptExtension["efg"] = "hijk";
    mr ~= m1;
    mr ~= m2;
    assert(formatValue(mr) == "text/html;charset=utf8, text/json;"
        "charset=utf8;secondparam=test;q=0.700;abcd=def;efg=hijk");
}
//Accept-Charset with quality factors
unittest
{
    Charset[] chars;
    Charset char1;
    char1.Name = "iso-8859-5";
    char1.Q = 0.7;
    Charset char2;
    char2.Name = "unicode-1-1";
    char2.Q = 0.999;
    chars ~= char1;
    chars ~= char2;
    assert(formatValue(chars) == "iso-8859-5;q=0.700, unicode-1-1;q=0.999");
}
//Accept-Encoding including the '*' wildcard coding
unittest
{
    AcceptEncoding[] list;
    AcceptEncoding enc1;
    AcceptEncoding enc2;
    AcceptEncoding enc3;
    enc1.Coding.Type = ContentCodingType.Gzip;
    enc1.Q = 1f;
    enc2.Coding.Type = ContentCodingType.Deflate;
    enc2.Q = 0.5f;
    enc3.Coding.Type = ContentCodingType.Other;
    enc3.Coding.Other = "*";
    enc3.Q = 0f;
    list ~= enc1;
    list ~= enc2;
    list ~= enc3;
    assert(formatValue(list) == "gzip;q=1.000, deflate;q=0.500, *;q=0.000");
}
//Accept-Language with and without subtags / q values
unittest
{
    AcceptLanguage[] list;
    AcceptLanguage lang1;
    AcceptLanguage lang2;
    AcceptLanguage lang3;
    lang1.Tag.Main = "de";
    lang2.Tag.Main = "en";
    lang2.Tag.Sub = "us";
    lang2.Q = 0.8f;
    lang3.Tag.Main = "en";
    lang3.Q = 0.7f;
    list ~= lang1;
    list ~= lang2;
    list ~= lang3;
    assert(formatValue(list) == "de, en-us;q=0.800, en;q=0.700");
}
//Expect: 100-continue plus extensions with quoted values
unittest
{
    Expectation[] list;
    Expectation e1;
    Expectation e2;
    Expectation e3;
    e1.Type = ExpectationType.Continue;
    e2.Type = ExpectationType.Other;
    e2.ExtensionKey = "test";
    e3.Type = ExpectationType.Other;
    e3.ExtensionKey = "test2";
    e3.ExtensionValue = "some:thing";
    e3.ExtensionParameters = ["a": "b", "c": "", "d": "e:f"];
    list ~= e1;
    list ~= e2;
    list ~= e3;
    assert(formatValue(list) == "100-continue, test, test2=\"some:thing\";a=b;c;d=\"e:f\"");
}
//If-Match: '*' and weak/strong entity tags
unittest
{
    IfMatchHeader header;
    header.All = true;
    assert(formatValue(header) == "*");
    header.All = false;
    ETag e1;
    ETag e2;
    ETag e3;
    e1.Weak = true;
    e1.Value = "test";
    e2.Weak = false;
    e2.Value = "test:test";
    e3.Weak = true;
    e3.Value = "test:abcd";
    header.Specific ~= e1;
    header.Specific ~= e2;
    header.Specific ~= e3;
    assert(formatValue(header) == "W/\"test\", \"test:test\", W/\"test:abcd\"");
}
//Range: closed, suffix and open ended byte ranges
unittest
{
    RangeEntry[] ranges;
    RangeEntry r1;
    RangeEntry r2;
    RangeEntry r3;
    r1.From.Position = 123;
    r1.To.Position = 345;
    r2.To.Position = 1000;
    r3.From.Position = 3000;
    ranges ~= r1;
    ranges ~= r2;
    ranges ~= r3;
    assert(formatValue(ranges) == "bytes=123-345,-1000,3000-");
}
//TE: trailers flag, extension coding, parameters and accept params
unittest
{
    TE te;
    te.Trailers = true;
    TEFragment tef;
    tef.Coding.Type = ContentCodingType.Gzip;
    te.Codings ~= tef;
    tef.Coding.Type = ContentCodingType.Other;
    tef.Coding.Other = "test";
    te.Codings ~= tef;
    tef.Coding.Type = ContentCodingType.Deflate;
    tef.Parameters = ["test":"nothing", "test2":"abc"];
    tef.Q = 0.7f;
    tef.AcceptParameters = ["test23":"nothing", "test32":"abc"];
    te.Codings ~= tef;
    assert(formatValue(te) == "trailers, gzip, test, deflate;"
        "test=nothing;test2=abc;q=0.700;test23=nothing;test32=abc");
}
//Connection: 'close' plus an extension token
unittest
{
    ConnectionHeader head;
    ConnectionHeaderFragment f1;
    f1.Type = ConnectionType.Close;
    head.Fragments ~= f1;
    f1.Type = ConnectionType.Other;
    f1.Other = "test";
    head.Fragments ~= f1;
    assert(formatValue(head) == "close, test");
}
//Pragma: no-cache plus extensions with and without values
unittest
{
    PragmaHeader head;
    PragmaHeaderFragment f1;
    f1.Type = PragmaType.No_cache;
    head.Fragments ~= f1;
    f1.Type = PragmaType.Extension;
    f1.ExtensionKey = "test";
    head.Fragments ~= f1;
    f1.Type = PragmaType.Extension;
    f1.ExtensionKey = "test2";
    f1.ExtensionValue = "abcd";
    head.Fragments ~= f1;
    assert(formatValue(head) == "no-cache, test, test2=abcd");
}
//Trailer: plain field name list
unittest
{
    assert(formatTrailerValue(["Content-Length", "Test", "ABCd"])
        == "Content-Length, Test, ABCd");
}
//Transfer-Encoding: chunked plus custom codings with parameters
unittest
{
    TransferEncodingHeader head;
    TransferCoding f1;
    f1.Type = TransferCodingType.Chunked;
    head.Fragments ~= f1;
    f1.Type = TransferCodingType.Custom;
    f1.Other = "test";
    head.Fragments ~= f1;
    f1.Type = TransferCodingType.Custom;
    f1.Other = "test";
    f1.Parameters = ["ab": "cd", "ef":"gh"];
    head.Fragments ~= f1;
    assert(formatValue(head) == "chunked, test, test;ef=gh;ab=cd");
}
//Upgrade: product tokens with and without versions
unittest
{
    Product[] prod;
    Product p1;
    p1.Name = "HTTP";
    p1.Version = "1.1";
    prod ~= p1;
    p1.Name = "SHTTP";
    p1.Version = "";
    prod ~= p1;
    p1.Name= "RTA";
    p1.Version = "x11";
    prod ~= p1;
    assert(formatValue(prod) == "HTTP/1.1, SHTTP, RTA/x11");
}
//Allow: standard and extension methods
unittest
{
    AllowHeader head;
    AllowHeaderFragment f1;
    f1.Method = HTTPMethod.GET;
    head.Fragments ~= f1;
    f1.Method = HTTPMethod.HEAD;
    head.Fragments ~= f1;
    f1.Method = HTTPMethod.PUT;
    head.Fragments ~= f1;
    f1.Method = HTTPMethod.Other;
    f1.OtherMethod = "TEST";
    head.Fragments ~= f1;
    assert(formatValue(head) == "GET, HEAD, PUT, TEST");
}
//Content-Encoding: standard and extension codings
unittest
{
    ContentCoding[] coding;
    ContentCoding cod;
    cod.Type = ContentCodingType.Gzip;
    coding ~= cod;
    cod.Type = ContentCodingType.Deflate;
    coding ~= cod;
    cod.Type = ContentCodingType.Other;
    cod.Other = "test";
    coding ~= cod;
    assert(formatValue(coding) == "gzip, deflate, test");
}
//Content-Language: tags with and without subtags
unittest
{
    LanguageTag[] tags;
    LanguageTag tag;
    tag.Main = "de";
    tags ~= tag;
    tag.Main = "en";
    tag.Sub = "us";
    tags ~= tag;
    tag.Main = "en";
    tag.Sub = "";
    tags ~= tag;
    assert(formatContentLanguageValue(tags) == "de, en-us, en");
}
//Content-MD5: base64 encoding of a known digest
unittest
{
    ubyte[16] md5 = [67, 104, 101, 99, 107, 32, 73, 110, 116, 101, 103, 114, 105, 116, 121, 33];
    assert(formatContentMD5Value(md5) == "Q2hlY2sgSW50ZWdyaXR5IQ==");
}
//Content-Range: unknown range and unknown instance length
unittest
{
    ContentRange range;
    range.From.Unknown = true;
    range.To.Unknown = true;
    range.Length.Position = 1000;
    assert(formatValue(range) == "bytes */1000");
    range.From.Unknown = false;
    range.To.Unknown = false;
    range.From.Position = 500;
    range.To.Position = 750;
    range.Length.Unknown = true;
    assert(formatValue(range) == "bytes 500-750/*");
}
//Content-Type: media type with a parameter
unittest
{
    MediaType media;
    media.Type = "text";
    media.SubType = "html";
    media.Parameters["charset"] = "utf8";
    assert(formatValue(media) == "text/html;charset=utf8");
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/** | |
* | |
* | |
* | |
* TODO: Parsing of HTTP Dates as soon as phobos has a new Date/Time | |
* API. Affected headers: | |
* Date | |
* Retry-After | |
* Expires | |
* Last-Modified | |
* | |
* Parsing of Urls if phobos gets an url type. Affected headers: | |
* Location | |
* Content-Location | |
* | |
* Unimplemented Headers: | |
* Cache-Control, Via, Warning, Proxy-Authenticate, Vary, Server, WWW-Authenticate | |
* | |
* | |
*/ | |
module jpf.http.parser; | |
import std.conv; | |
import std.string; | |
import std.base64; | |
import std.format; | |
import std.range; //Appender | |
public import jpf.http.types; | |
//the actual parsers
//parser.rl.d is compiled in as a string mixin; the .rl suffix
//suggests it is generated by Ragel — confirm. Building requires
//-J pointing at the directory containing the generated file.
mixin(import("parser.rl.d"));
/**
 * Base class for all errors thrown by the HTTP parser.
 *
 * The exception message contains the offending input (CR/LF shown
 * escaped so the message stays on one line) and the position at
 * which parsing failed.
 *
 * Fixes: 'occured' typo in the message; explicit cast of
 * input.length (size_t) to the uint Position parameter in
 * InsufficientInputException, which otherwise narrows implicitly
 * (an error on 64 bit targets).
 */
public class ParserException : Exception
{
    public:
    string Input;  //the input that could not be parsed
    uint Position; //offset into Input where parsing failed
    this(string input, uint pos, string msg = "")
    {
        Input = input;
        Position = pos;
        auto writer = appender!string();
        formattedWrite(writer, "An error occurred in the HTTP parser:%s\n"
            "-Input: '%s'\n-Position: %s", msg, replace(replace(input,
            "\r", "\\r"), "\n", "\\n"), pos);
        super(writer.data);
    }
}
/**
 * Thrown when the input ends before a complete element could be
 * parsed; Position is set to the input length.
 */
public class InsufficientInputException : ParserException
{
    public this(string input)
    {
        super(input, cast(uint)input.length, " Insufficient input");
    }
}
/**
 * Parses one entry of an Allow header. The well known method names
 * map to their HTTPMethod value; any other name is kept verbatim
 * as HTTPMethod.Other.
 */
AllowHeaderFragment parseAllowFragment(string fragment)
{
    AllowHeaderFragment frg;
    switch(fragment)
    {
        case "OPTIONS": frg.Method = HTTPMethod.OPTIONS; break;
        case "GET":     frg.Method = HTTPMethod.GET;     break;
        case "HEAD":    frg.Method = HTTPMethod.HEAD;    break;
        case "POST":    frg.Method = HTTPMethod.POST;    break;
        case "PUT":     frg.Method = HTTPMethod.PUT;     break;
        case "DELETE":  frg.Method = HTTPMethod.DELETE;  break;
        case "TRACE":   frg.Method = HTTPMethod.TRACE;   break;
        case "CONNECT": frg.Method = HTTPMethod.CONNECT; break;
        default:
            frg.Method = HTTPMethod.Other;
            frg.OtherMethod = fragment;
            break;
    }
    return frg;
}
/**
 * Parses an already comma-split Allow header.
 */
AllowHeader parseAllowHeader(string[] list)
{
    AllowHeader hdr;
    foreach(entry; list)
        hdr.Fragments ~= parseAllowFragment(entry);
    return hdr;
}
/**
 * Parses a raw Allow header value.
 */
AllowHeader parseAllowHeader(string value)
{
    return parseAllowHeader(parseCommaList(value));
}
/**
 * Parses one Connection header token; 'close' (case insensitive)
 * maps to ConnectionType.Close, everything else is kept verbatim.
 */
ConnectionHeaderFragment parseConnectionFragment(string fragment)
{
    ConnectionHeaderFragment frg;
    if(tolower(fragment) == "close")
    {
        frg.Type = ConnectionType.Close;
    }
    else
    {
        frg.Type = ConnectionType.Other;
        frg.Other = fragment;
    }
    return frg;
}
/**
 * Parses an already comma-split Connection header.
 */
ConnectionHeader parseConnectionHeader(string[] list)
{
    ConnectionHeader hdr;
    foreach(entry; list)
        hdr.Fragments ~= parseConnectionFragment(entry);
    return hdr;
}
/**
 * Parses a raw Connection header value.
 */
ConnectionHeader parseConnectionHeader(string value)
{
    return parseConnectionHeader(parseCommaList(value));
}
/**
 * Pragma header: parses the comma separated directive list; the
 * individual directives are handled by the generated
 * parsePragmaFragment.
 */
PragmaHeader parsePragmaHeader(string[] list)
{
    PragmaHeader hdr;
    foreach(entry; list)
        hdr.Fragments ~= parsePragmaFragment(entry);
    return hdr;
}
///ditto
PragmaHeader parsePragmaHeader(string value)
{
    return parsePragmaHeader(parseCommaList(value));
}
/**
 * Trailer header: a plain comma separated list of field names.
 */
string[] parseTrailerHeader(string value)
{
    return parseCommaList(value);
}
/**
 * Transfer-Encoding header: each coding (plus its parameters) is
 * parsed by the generated parseTransferCodingFragment.
 */
TransferEncodingHeader parseTransferCodingHeader(string[] list)
{
    TransferEncodingHeader head;
    foreach(entry; list)
        head.Fragments ~= parseTransferCodingFragment(entry);
    return head;
}
///ditto
TransferEncodingHeader parseTransferCodingHeader(string value)
{
    return parseTransferCodingHeader(parseCommaList(value));
}
/**
 * Upgrade header: a comma separated list of product tokens.
 */
Product[] parseUpgradeHeader(string[] list)
{
    Product[] prod;
    foreach(entry; list)
        prod ~= parseProduct(entry);
    return prod;
}
///ditto
Product[] parseUpgradeHeader(string value)
{
    return parseUpgradeHeader(parseCommaList(value));
}
/**
 * Accept-Ranges header: either the single token 'none' (no range
 * support) or a list of range units; 'bytes' is recognized,
 * anything else is stored as a custom unit.
 */
AcceptRangesHeader parseAcceptRangesHeader(string[] list)
{
    AcceptRangesHeader hdr;
    //a lone 'none' means ranges are not supported at all
    if(list == ["none"])
    {
        hdr.None = true;
        return hdr;
    }
    foreach(entry; list)
    {
        jpf.http.types.RangeType type;
        if(entry == "bytes")
        {
            type.Type = RangeTypeId.Bytes;
        }
        else
        {
            type.Type = RangeTypeId.Custom;
            type.Custom = entry;
        }
        hdr.Types ~= type;
    }
    return hdr;
}
/**
 * Parses a raw Accept-Ranges header value.
 */
AcceptRangesHeader parseAcceptRangesHeader(string value)
{
    return parseAcceptRangesHeader(parseCommaList(value));
}
/+Challenge[] parseWWWAuthenticateHeader(string[] list) | |
{ | |
Product[] prod; | |
foreach(entry; list) | |
{ | |
prod ~= parseProduct(entry); | |
} | |
return prod; | |
} | |
Challenge[] parseWWWAuthenticateHeader(string value) | |
{ | |
return parseWWWAuthenticateHeader(parseCommaList(value)); | |
}+/ | |
/**
 * Age header: a plain non-negative integer (seconds).
 */
ulong parseAgeHeader(string value)
{
    return parse!ulong(value);
}
/**
 * Parses a single content coding token (case insensitive);
 * unknown codings are kept verbatim in Other.
 */
ContentCoding parseContentCodingFragment(string fragment)
{
    ContentCoding frg;
    switch(tolower(fragment))
    {
        case "compress": frg.Type = ContentCodingType.Compress; break;
        case "deflate":  frg.Type = ContentCodingType.Deflate;  break;
        case "gzip":     frg.Type = ContentCodingType.Gzip;     break;
        case "identity": frg.Type = ContentCodingType.Identity; break;
        default:
            frg.Type = ContentCodingType.Other;
            frg.Other = fragment;
            break;
    }
    return frg;
}
/**
 * Content-Encoding header.
 */
ContentCoding[] parseContentEncodingHeader(string[] list)
{
    ContentCoding[] hdr;
    foreach(entry; list)
        hdr ~= parseContentCodingFragment(entry);
    return hdr;
}
///ditto
ContentCoding[] parseContentEncodingHeader(string value)
{
    return parseContentEncodingHeader(parseCommaList(value));
}
/**
 * Content-Language header: a comma separated list of language tags.
 */
string[] parseContentLanguageHeader(string value)
{
    return parseCommaList(value);
}
/**
 * Content-Length header: a plain non-negative integer.
 */
ulong parseContentLengthHeader(string value)
{
    return parse!ulong(value);
}
/**
 * Content-MD5 header
 *
 * Decodes the base64 encoded 16 byte MD5 digest. When the encoded
 * data would not fit into 16 bytes a zeroed digest is returned
 * (best effort, matching the previous behaviour).
 *
 * Fixes: 'value.length - 1' stored into an int narrowed size_t
 * implicitly and underflowed for empty input; the stripped length
 * (not the last index) is now passed to decodeLength; the static
 * buffer is sliced explicitly for decode().
 */
ubyte[16] parseContentMD5Header(string value)
{
    //strip trailing '=' padding for the length calculation
    size_t elength = value.length;
    while(elength > 0 && value[elength - 1] == '=')
        elength--;
    ubyte[16] buf;
    if(Base64.decodeLength(elength) > 16)
        return buf;
    Base64.decode(value, buf[]);
    return buf;
}
/**
 * Set-Cookie header: each cookie is parsed by the generated
 * parseCookie. parseCommaList2 is used instead of parseCommaList —
 * presumably because cookie values may themselves contain commas
 * (e.g. expiry dates); confirm against the generated parser.
 */
Cookie[] parseSetCookieHeader(string[] list)
{
    Cookie[] hdr;
    foreach(entry; list)
        hdr ~= parseCookie(entry);
    return hdr;
}
///ditto
Cookie[] parseSetCookieHeader(string value)
{
    return parseSetCookieHeader(parseCommaList2(value));
}
/**
 * Set-Cookie2 header (RFC 2965 style cookies).
 */
Cookie2[] parseSetCookie2Header(string[] list)
{
    Cookie2[] hdr;
    foreach(entry; list)
        hdr ~= parseCookie2(entry);
    return hdr;
}
///ditto
Cookie2[] parseSetCookie2Header(string value)
{
    return parseSetCookie2Header(parseCommaList2(value));
}
/**
 * Resolves quoted-pairs in a quoted-string body: '\x' becomes 'x'.
 *
 * Fix: the old code dropped every backslash unconditionally, which
 * also destroyed escaped backslashes ('\\' decoded to "" instead of
 * '\'). A backslash now escapes exactly the following character.
 * Also removes an unused local variable.
 */
string unescape(string value)
{
    string tmp;
    tmp.reserve(value.length);
    bool escaped = false;
    foreach(c; value)
    {
        if(!escaped && c == '\\')
        {
            //start of a quoted-pair; keep the next char literally
            escaped = true;
            continue;
        }
        escaped = false;
        tmp ~= c;
    }
    return tmp;
}
/**
 * Strips trailing whitespace and removes surrounding quotes.
 */
string prepare(string value)
{
    return unquote(stripr(value));
}
/**
 * Removes surrounding double quotes (if present) and resolves
 * quoted-pairs in the body; values without quotes are returned
 * unchanged.
 */
string unquote(string value)
{
    if(value.length > 1 && value[0] == '"' && value[$ - 1] == '"')
        return unescape(value[1 .. $ - 1]);
    return value;
}
/**
 * Returns true if the line, compared case insensitively, ends in a
 * week day name (abbreviated or written out) — presumably used to
 * decide whether a comma-split value is the first half of an HTTP
 * date; confirm with the callers.
 */
bool isFirstDatePart(string line)
{
    auto lower = tolower(line);
    auto days = ["mon", "tue", "wed", "thu", "fri", "sat", "sun",
        "monday", "tuesday", "wednesday", "thursday", "friday",
        "saturday", "sunday"];
    foreach(day; days)
    {
        if(endsWith(lower, day))
            return true;
    }
    return false;
}
//Status line parsing
unittest
{
    ResponseLine rl = parseResponseLine("HTTP/1.2 200 OK\r\n");
    assert(rl.Type == ResponseLineType.StatusLine);
    assert(rl.Line == "HTTP/1.2 200 OK\r\n");
    StatusLine sl = parseStatusLine(rl.Line);
    assert(sl.Major == 1);
    assert(sl.Minor == 2);
    assert(sl.StatusCode == 200);
    assert(sl.Reason == "OK");
}
//An empty line terminates the header block
unittest
{
    ResponseLine rl = parseResponseLine("\r\n");
    assert(rl.Type == ResponseLineType.Empty);
}
//Completely empty input must throw
unittest
{
    bool thrown = false;
    try
        parseResponseLine("");
    catch(Exception)
        thrown = true;
    assert(thrown);
}
//Header lines, including non-ASCII header values
unittest
{
    ResponseLine rl = parseResponseLine("Server: Apache/1.3.29 (Unix) PHP/4.3.4\r\n");
    assert(rl.Type == ResponseLineType.Header);
    assert(rl.Key == "Server");
    assert(rl.Value == "Apache/1.3.29 (Unix) PHP/4.3.4");
    rl = parseResponseLine("Content-Length: (Größe von infotext.html in Byte)\r\n");
    assert(rl.Type == ResponseLineType.Header);
    assert(rl.Key == "Content-Length");
    assert(rl.Value == "(Größe von infotext.html in Byte)");
}
//Comma list splitting: empty entries dropped, whitespace stripped,
//line continuations kept intact
unittest
{
    string[] list = parseCommaList(" , , Test , a,a , test,a, ,test,ab cd, ab dc ");
    assert(list == ["Test", "a", "a", "test", "a", "test", "ab cd", "ab dc"]);
    //Headers might be split on multiple lines by CLRF (SP | HT)+
    list = parseCommaList(" , test\r\n test , test");
    assert(list == ["test\r\n test", "test"]);
}
//Allow header round trip through the response line parser
unittest
{
    ResponseLine rl = parseResponseLine("Allow: GET, HEAD\r\n");
    assert(rl.Type == ResponseLineType.Header);
    assert(rl.Key == "Allow");
    AllowHeader head = parseAllowHeader(rl.Value);
    assert(head.Fragments.length == 2);
    assert(head.Fragments[0].Method == HTTPMethod.GET);
    assert(head.Fragments[1].Method == HTTPMethod.HEAD);
}
//Connection header
unittest
{
    ResponseLine rl = parseResponseLine("Connection: close\r\n");
    assert(rl.Type == ResponseLineType.Header);
    assert(rl.Key == "Connection");
    ConnectionHeader head = parseConnectionHeader(rl.Value);
    assert(head.Fragments.length == 1);
    assert(head.Fragments[0].Type == ConnectionType.Close);
}
//Pragma header: no-cache plus a key=value extension
unittest
{
    ResponseLine rl = parseResponseLine("Pragma: no-cache, test=test234\r\n");
    assert(rl.Type == ResponseLineType.Header);
    assert(rl.Key == "Pragma");
    PragmaHeader head = parsePragmaHeader(rl.Value);
    assert(head.Fragments.length == 2);
    assert(head.Fragments[0].Type == PragmaType.No_cache);
    assert(head.Fragments[1].Type == PragmaType.Extension);
    assert(head.Fragments[1].ExtensionKey == "test");
    assert(head.Fragments[1].ExtensionValue == "test234");
}
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Transfer-Encoding: chunked;test=abcd\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Transfer-Encoding"); | |
TransferEncodingHeader head = parseTransferCodingHeader(rl.Value); | |
assert(head.Fragments.length == 1); | |
assert(head.Fragments[0].Type == TransferCodingType.Chunked); | |
head = parseTransferCodingHeader("chunked; test = abcd , other ; test = value ; abcd = def"); | |
assert(head.Fragments.length == 2); | |
assert(head.Fragments[0].Type == TransferCodingType.Chunked); | |
assert(head.Fragments[1].Type == TransferCodingType.Custom); | |
assert(head.Fragments[1].Other == "other"); | |
assert(head.Fragments[0].Parameters == ["test": "abcd"]); | |
assert(head.Fragments[1].Parameters == ["test": "value", "abcd": "def"]); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Transfer-Encoding: chunked\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Transfer-Encoding"); | |
TransferEncodingHeader head = parseTransferCodingHeader(rl.Value); | |
assert(head.Fragments.length == 1); | |
assert(head.Fragments[0].Type == TransferCodingType.Chunked); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Upgrade: HTTP/2.0, SHTTP/1.3, IRC/6.9, RTA/x11\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Upgrade"); | |
Product[] prod = parseUpgradeHeader(rl.Value); | |
assert(prod.length == 4); | |
assert(prod[0].Name == "HTTP"); | |
assert(prod[0].Version == "2.0"); | |
assert(prod[1].Name == "SHTTP"); | |
assert(prod[1].Version == "1.3"); | |
assert(prod[2].Name == "IRC"); | |
assert(prod[2].Version == "6.9"); | |
assert(prod[3].Name == "RTA"); | |
assert(prod[3].Version == "x11"); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Age: 123456789\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Age"); | |
ulong age = parseAgeHeader(rl.Value); | |
assert(age == 123456789); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("ETag: W/\"abcdefg\"\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "ETag"); | |
ETag tag = parseETag(rl.Value); | |
assert(tag.Weak == true); | |
assert(tag.Value == "abcdefg"); | |
tag = parseETag("\"abcdef\""); | |
assert(tag.Weak == false); | |
assert(tag.Value == "abcdef"); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Retry-After: Fri, 31 Dec 1999 23:59:59 GMT\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Retry-After"); | |
RetryAfterHeader ret = parseRetryAfter(rl.Value); | |
assert(ret.Type == RetryAfterType.Date); | |
assert(ret.Date == "Fri, 31 Dec 1999 23:59:59 GMT"); | |
ret = parseRetryAfter("120"); | |
assert(ret.Type == RetryAfterType.Seconds); | |
assert(ret.Seconds == 120); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Content-Encoding: gzip, deflate, custom\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Content-Encoding"); | |
ContentCoding[] cc = parseContentEncodingHeader(rl.Value); | |
assert(cc.length == 3); | |
assert(cc[0].Type == ContentCodingType.Gzip); | |
assert(cc[1].Type == ContentCodingType.Deflate); | |
assert(cc[2].Type == ContentCodingType.Other); | |
assert(cc[2].Other == "custom"); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Content-Language: da, mi, en-US\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Content-Language"); | |
string[] lang = parseContentLanguageHeader(rl.Value); | |
assert (lang == ["da", "mi", "en-US"]); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Content-Length: 348\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Content-Length"); | |
ulong length = parseContentLengthHeader(rl.Value); | |
assert (length == 348); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Content-MD5: Q2hlY2sgSW50ZWdyaXR5IQ==\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Content-MD5"); | |
ubyte[16] md5 = parseContentMD5Header(rl.Value); | |
assert(md5 == [67, 104, 101, 99, 107, 32, 73, 110, 116, 101, 103, 114, 105, 116, 121, 33]); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Content-Range: bytes 21010-47021/47022\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Content-Range"); | |
ContentRange range = parseContentRangeHeader(rl.Value); | |
assert(range.From.Position == 21010); | |
assert(range.From.Unknown == false); | |
assert(range.To.Position == 47021); | |
assert(range.To.Unknown == false); | |
assert(range.Length.Position == 47022); | |
assert(range.Length.Unknown == false); | |
range = parseContentRangeHeader("bytes */*"); | |
assert(range.From.Unknown == true); | |
assert(range.To.Unknown == true); | |
assert(range.Length.Unknown == true); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Content-Type: text/html; charset=utf-8\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Content-Type"); | |
MediaType type = parseContentTypeHeader(rl.Value); | |
assert(type.Type == "text"); | |
assert(type.SubType == "html"); | |
assert(type.Parameters == ["charset": "utf-8"]); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Content-Disposition: attachment; filename=fname.ext\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Content-Disposition"); | |
Disposition dis = parseContentDispositionHeader(rl.Value); | |
assert(dis.Type == ""); | |
assert(dis.Attachment == true); | |
assert(dis.Parameters == ["filename": "fname.ext"]); | |
} | |
unittest | |
{ | |
string[string] parms = parseParameterList2(";Test=ABCD;Test2=CDEF;Test3; test4 ; test5 = def"); | |
assert(parms == ["Test2":"CDEF", "Test3":"", "Test":"ABCD", "test4":"", "test5":"def"]); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Set-Cookie: GEO=efe766ae52cd215ccad570d1bc2b6c3ecwsAAAAzREVUng7LTH/Ayg; " | |
"path=/; domain=\".youtube.com\"\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Set-Cookie"); | |
Cookie[] cookies = parseSetCookieHeader(rl.Value); | |
assert(cookies.length == 1); | |
assert(cookies[0].Name == "GEO"); | |
assert(cookies[0].Value == "efe766ae52cd215ccad570d1bc2b6c3ecwsAAAAzREVUng7LTH/Ayg"); | |
assert(cookies[0].Path == "/"); | |
assert(cookies[0].Domain == ".youtube.com"); | |
assert(cookies[0].Parameters.length == 0); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Set-Cookie: use_hitbox=72c46ff6cbcdb7c5585c36411b6b334edAEAAAAw; path=/; domain=.youtube.com\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Set-Cookie"); | |
Cookie[] cookies = parseSetCookieHeader(rl.Value); | |
assert(cookies.length == 1); | |
assert(cookies[0].Name == "use_hitbox"); | |
assert(cookies[0].Value == "72c46ff6cbcdb7c5585c36411b6b334edAEAAAAw"); | |
assert(cookies[0].Path == "/"); | |
assert(cookies[0].Domain == ".youtube.com"); | |
assert(cookies[0].Parameters.length == 0); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Set-Cookie: VISITOR_INFO1_LIVE=" | |
"GVUEqUlnVIs; path=/; domain=.youtube.com; expires=Sat, 30-Apr" | |
"-2011 15:59:28 GMT\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Set-Cookie"); | |
Cookie[] cookies = parseSetCookieHeader(rl.Value); | |
assert(cookies.length == 1); | |
assert(cookies[0].Name == "VISITOR_INFO1_LIVE"); | |
assert(cookies[0].Value == "GVUEqUlnVIs"); | |
assert(cookies[0].Path == "/"); | |
assert(cookies[0].Domain == ".youtube.com"); | |
assert(cookies[0].Expires == "Sat, 30-Apr-2011 15:59:28 GMT"); | |
assert(cookies[0].Parameters.length == 0); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Set-Cookie: PREF=f1=50000000;" | |
" path=/; domain=.youtube.com; expires=Mon, 31-Aug-2020 " | |
"11:39:48 GMT\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Set-Cookie"); | |
Cookie[] cookies = parseSetCookieHeader(rl.Value); | |
assert(cookies.length == 1); | |
assert(cookies[0].Name == "PREF"); | |
assert(cookies[0].Value == "f1=50000000"); | |
assert(cookies[0].Path == "/"); | |
assert(cookies[0].Domain == ".youtube.com"); | |
assert(cookies[0].Expires == "Mon, 31-Aug-2020 11:39:48 GMT"); | |
assert(cookies[0].Parameters.length == 0); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Set-Cookie: PREF=f1=50000000;" | |
" path=/; domain=.youtube.com; expires=Mon, 31-Aug-2020 " | |
"11:39:48 GMT, VISITOR_INFO1_LIVE=" | |
"GVUEqUlnVIs; path=/; domain=.youtube.com; expires=Sat, 30-Apr" | |
"-2011 15:59:28 GMT\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Set-Cookie"); | |
Cookie[] cookies = parseSetCookieHeader(rl.Value); | |
assert(cookies.length == 2); | |
assert(cookies[0].Name == "PREF"); | |
assert(cookies[0].Value == "f1=50000000"); | |
assert(cookies[0].Path == "/"); | |
assert(cookies[0].Domain == ".youtube.com"); | |
assert(cookies[0].Expires == "Mon, 31-Aug-2020 11:39:48 GMT"); | |
assert(cookies[0].Parameters.length == 0); | |
assert(cookies[1].Name == "VISITOR_INFO1_LIVE"); | |
assert(cookies[1].Value == "GVUEqUlnVIs"); | |
assert(cookies[1].Path == "/"); | |
assert(cookies[1].Domain == ".youtube.com"); | |
assert(cookies[1].Expires == "Sat, 30-Apr-2011 15:59:28 GMT"); | |
assert(cookies[1].Parameters.length == 0); | |
} | |
unittest | |
{ | |
assert(isDatePart(", 30-Apr-2011 15:59:28 GMTSat, 30-Apr-2011 15:59:28 GMT")); | |
assert(!isDatePart("blah")); | |
assert(parseCommaList2("Sat, 30-Apr-2011 15:59:28 GMT,Sat, 30-Apr-2011" | |
" 15:59:28 GMTSat, 30-Apr-2011 15:59:28 GMT") == ["Sat, 30-Apr-2011 15:59:28 GMT", | |
"Sat, 30-Apr-2011 15:59:28 GMTSat, 30-Apr-2011 15:59:28 GMT"]); | |
assert(parseCommaList2("\"quoted,stillquoted\"notquoted, second") | |
== ["\"quoted,stillquoted\"notquoted", "second"]); | |
} | |
unittest | |
{ | |
string[string] parms = parseParameterList(";test=\"ab cd\""); | |
assert(parms == ["test": "ab cd"]); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Set-Cookie2: test=abcd;Comment=abcd;" | |
"CommentURL=\"def\";Discard;Domain=.test.com;Max-Age=999;Path=/;Port=\"123,456\"" | |
";Secure;Version=2,test2=5\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Set-Cookie2"); | |
Cookie2[] cookies = parseSetCookie2Header(rl.Value); | |
assert(cookies.length == 2); | |
assert(cookies[0].Name == "test"); | |
assert(cookies[0].Value == "abcd"); | |
assert(cookies[0].Comment == "abcd"); | |
assert(cookies[0].CommentURL == "def"); | |
assert(cookies[0].Discard == true); | |
assert(cookies[0].Domain == ".test.com"); | |
assert(cookies[0].MaxAge == "999"); | |
assert(cookies[0].Path == "/"); | |
assert(cookies[0].Secure == true); | |
assert(cookies[0].Version == 2); | |
assert(cookies[0].Port == [123, 456]); | |
assert(cookies[1].Name == "test2"); | |
assert(cookies[1].Value == "5"); | |
assert(cookies[1].Comment == ""); | |
assert(cookies[1].CommentURL == ""); | |
assert(cookies[1].Discard == false); | |
assert(cookies[1].Domain == ""); | |
assert(cookies[1].MaxAge == ""); | |
assert(cookies[1].Path == ""); | |
assert(cookies[1].Secure == false); | |
assert(cookies[1].Version == 0); | |
assert(cookies[1].Port == []); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("Set-cookie: idrxvr=E142EC17-" | |
"BADE-4AB6-8D28-D863B2C3BA49; expires=Tue, 01-Sep-2020 16:08:42 GMT;" | |
" domain=xiti.com; path=/;\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "Set-cookie"); | |
Cookie[] cookies = parseSetCookieHeader(rl.Value); | |
} | |
unittest | |
{ | |
ResponseLine rl = parseResponseLine("set-cookie: NGUserID=12836178" | |
"718147; path=/; domain=.focus.de; expires=fri, 10-aug-2012 16:48:59 gmt\r\n"); | |
assert(rl.Type == ResponseLineType.Header); | |
assert(rl.Key == "set-cookie"); | |
Cookie[] cookies = parseSetCookieHeader(rl.Value); | |
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/**
 * Mixin boilerplate shared by every Ragel-driven parse function below:
 * declares the machine state variable (cs) and the input pointers
 * p / pe / eof over the enclosing function's local `line`.
 * Must be mixed in before the Ragel `write init; write exec;` block.
 */
template initParser()
{
    const string initParser = "int cs;\n" ~
        "const (char)* p = line.ptr;\n" ~
        "const char* pe = line.ptr + line.length;\n" ~
        "const char* eof = pe;\n";
}
/**
 * Mixin boilerplate run after `write exec`: converts the machine's
 * final state into exceptions — ParserException when the machine hit
 * its error state (with the failing offset), InsufficientInputException
 * when the input ended before a final state was reached.
 * parserName must match the Ragel machine name so the generated
 * <name>_error / <name>_first_final constants resolve.
 */
template finishParser(string parserName)
{
    const string finishParser = "if(cs == " ~ parserName ~ "_error)\n" ~
        "throw new ParserException(line, p - line.ptr);\n" ~
        "if(cs < " ~ parserName ~ "_first_final)\n" ~
        "throw new InsufficientInputException(line);\n";
}
/**
 * Classifies one CRLF-terminated response line as a status line, a raw
 * header (Key/Value) or the empty line; see machine parseResponseLine.
 * Throws ParserException / InsufficientInputException on bad input.
 */
ResponseLine parseResponseLine(string line)
{
    ResponseLine rline;
    const (char)* hstart; // start of the current header key/value slice
    mixin(initParser!());
    %%{
        machine parseResponseLine;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseResponseLine");
    return rline;
}
/**
 * Splits a full status line ("HTTP/x.y CODE Reason\r\n") into its
 * version numbers, status code and reason phrase.
 */
StatusLine parseStatusLine(string line)
{
    StatusLine sline;
    const(char)* numstart; // start of the number/reason currently scanned
    mixin(initParser!());
    %%{
        machine parseStatusLine;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseStatusLine");
    return sline;
}
/**
 * Splits a comma-separated header value into its entries; empty entries
 * are dropped and trailing whitespace is stripped (machine parseCommaList).
 */
string[] parseCommaList(string line)
{
    string[] list;
    const(char)* start; // start of the current entry
    mixin(initParser!());
    %%{
        machine parseCommaList;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseCommaList");
    return list;
}
/**
 * Returns true when line begins with the remainder of an HTTP date
 * (", 02-Jun-1982 00:00:00 GMT" style) — used to keep date-internal
 * commas from splitting list entries in parseCommaList2.
 */
bool isDatePart(string line)
{
    bool isPart = false;
    mixin(initParser!());
    %%{
        machine isDatePart;
        write init;
        write exec;
    }%%
    mixin(finishParser!"isDatePart");
    return isPart;
}
/**
 * Comma-list splitter that additionally respects quoted strings and
 * HTTP dates (whose internal comma must not split an entry).
 */
string[] parseCommaList2(string line)
{
    int escape;           // offset used by the Quote action's escape check
    string[] list;
    bool inQuotes = false; // inside a quoted-string section
    bool added = true;     // last entry was emitted (start may advance)
    const(char)* start;
    mixin(initParser!());
    %%{
        machine parseCommaList2;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseCommaList2");
    return list;
}
/**
 * Parses a strict ";attr=value" parameter list (token or quoted-string
 * values, value required) into an associative array.
 */
string[string] parseParameterList(string line)
{
    string[string] list;
    string key; // attribute name of the pair currently scanned
    const(char)* start;
    mixin(initParser!());
    %%{
        machine parseParameterList;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseParameterList");
    return list;
}
/**
 * Lenient parameter-list parser: values are optional (mapped to ""),
 * trailing ';' is tolerated — used for cookie attribute lists.
 */
string[string] parseParameterList2(string line)
{
    string[string] list;
    string key;
    const(char)* start, start2; // start: attribute, start2: value (null if none)
    mixin(initParser!());
    %%{
        machine parseParameterList2;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseParameterList2");
    return list;
}
/**
 * Parses one Pragma directive: "no-cache" or an extension key[=value].
 */
PragmaHeaderFragment parsePragmaFragment(string line)
{
    PragmaHeaderFragment frg;
    const(char)* start;
    mixin(initParser!());
    %%{
        machine parsePragmaFragment;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parsePragmaFragment");
    return frg;
}
/**
 * Parses one transfer-coding ("chunked" or custom) with its optional
 * ";key=value" parameters.
 */
TransferCoding parseTransferCodingFragment(string line)
{
    TransferCoding frg;
    const(char)* start;
    mixin(initParser!());
    %%{
        machine parseTransferCodingFragment;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseTransferCodingFragment");
    return frg;
}
/**
 * Parses a product token ("Name" or "Name/Version") as used by the
 * Upgrade and Server headers.
 */
Product parseProduct(string line)
{
    Product prd;
    const(char)* start;
    mixin(initParser!());
    %%{
        machine parseProduct;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseProduct");
    return prd;
}
/**
 * Parses an entity tag: optional weak prefix "W/" plus a (usually
 * quoted) opaque tag.
 */
ETag parseETag(string line)
{
    ETag tag;
    const(char)* start;
    mixin(initParser!());
    %%{
        machine parseETag;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseETag");
    return tag;
}
/**
 * Parses a Retry-After value: all-digits means delta seconds,
 * anything else is kept verbatim as an HTTP date string.
 */
RetryAfterHeader parseRetryAfter(string line)
{
    RetryAfterHeader ret;
    const(char)* start;
    mixin(initParser!());
    %%{
        machine parseRetryAfter;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseRetryAfter");
    return ret;
}
/**
 * Parses a Content-Range value ("bytes from-to/length"); '*' positions
 * are reported via the Unknown flags.
 */
ContentRange parseContentRangeHeader(string line)
{
    ContentRange ret;
    const(char)* start;
    mixin(initParser!());
    %%{
        machine parseContentRangeHeader;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseContentRangeHeader");
    return ret;
}
/**
 * Parses a Content-Type value: "type/subtype" plus optional
 * ";key=value" parameters (e.g. charset).
 */
MediaType parseContentTypeHeader(string line)
{
    MediaType ret;
    const(char)* start;
    mixin(initParser!());
    %%{
        machine parseContentTypeHeader;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseContentTypeHeader");
    return ret;
}
/**
 * Parses a Content-Disposition value; "attachment" sets the Attachment
 * flag, any other disposition type is stored verbatim.
 * NOTE(review): the unittests use this through a `Disposition` type —
 * presumably an alias of ContentDispositionHeader in jpf.http.types;
 * confirm the naming is intentional.
 */
ContentDispositionHeader parseContentDispositionHeader(string line)
{
    ContentDispositionHeader ret;
    const(char)* start;
    mixin(initParser!());
    %%{
        machine parseContentDispositionHeader;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseContentDispositionHeader");
    return ret;
}
/**
 * Parses a single Netscape/RFC 2109 style cookie (name=value plus
 * attribute list); well-known attributes are lifted into Cookie fields.
 */
Cookie parseCookie(string line)
{
    Cookie ret;
    const(char)* start;
    mixin(initParser!());
    %%{
        machine parseCookie;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseCookie");
    return ret;
}
/**
 * Parses a single RFC 2965 (Set-Cookie2) cookie, including the quoted
 * comma-separated Port list.
 */
Cookie2 parseCookie2(string line)
{
    Cookie2 ret;
    const(char)* start;
    mixin(initParser!());
    %%{
        machine parseCookie2;
        write init;
        write exec;
    }%%
    mixin(finishParser!"parseCookie2");
    return ret;
}
/**
 * Validates that line is a single HTTP token.
 * NOTE(review): on a non-token this throws (via finishParser) instead
 * of returning false — callers must catch, or treat this as validation.
 */
bool isToken(string line)
{
    const(char)* start;
    mixin(initParser!());
    %%{
        machine isToken;
        write init;
        write exec;
    }%%
    mixin(finishParser!"isToken");
    return true;
}
/* Ragel FSM definitions */
%%{
    #RFC 2616
    #Shared character classes and grammar rules included by every
    #machine below (basic rules from RFC 2616 section 2.2 and the
    #HTTP-date grammar from section 3.3).
    machine general;
    OCTET = any;
    CHAR = ascii;
    UPALPHA = [A-Z];
    LOALPHA = [a-z];
    ALPHA = UPALPHA | LOALPHA;
    DIGIT = digit;
    CTL = cntrl | 127;
    CR = "\r";
    LF = "\n";
    SP = " ";
    HT = "\t";
    DQUOTE = '"';
    CRLF = CR LF;
    #Linear whitespace: header continuation lines fold into LWS.
    LWS = CRLF? (SP | HT)+;
    TEXT = (OCTET - CTL) | LWS;
    HEX = xdigit;
    separators = "(" | ")" | "<" | ">" | "@"
        | "," | ";" | ":" | "\\" | '"'
        | "/" | "[" | "]" | "?" | "="
        | "{" | "}" | SP | HT;
    token = (CHAR - CTL - separators)+;
    ctext = TEXT - [()];
    quoted_pair = "\\" CHAR;
    #not working
    #comment = "(" (ctext | quoted_pair | comment)* ")";
    qdtext = TEXT - DQUOTE;
    quoted_string = DQUOTE (qdtext | quoted_pair)* DQUOTE;
    field_name = token;
    wkday = "Mon"i | "Tue"i | "Wed"i
        | "Thu"i | "Fri"i | "Sat"i | "Sun"i;
    weekday = "Monday"i | "Tuesday"i | "Wednesday"i
        | "Thursday"i | "Friday"i | "Saturday"i | "Sunday"i;
    month = "Jan"i | "Feb"i | "Mar"i | "Apr"i
        | "May"i | "Jun"i | "Jul"i | "Aug"i
        | "Sep"i | "Oct"i | "Nov"i | "Dec"i;
    date1 = DIGIT{2} SP month SP DIGIT{4}; #day month year (e.g., 02 Jun 1982)
    #Also accept 4 digit Years
    date2 = DIGIT{2} "-" month "-" (DIGIT{2} | DIGIT{4}); #day-month-year (e.g., 02-Jun-82)
    date3 = month SP ( DIGIT{2} | ( SP DIGIT )); #month day (e.g., Jun 2)
    time = DIGIT{2} ":" DIGIT{2} ":" DIGIT{2}; #00:00:00 - 23:59:59
    #always accept wkday and weekday: This is against the spec,
    #but it's needed to work around some broken formatters.
    #Also accepting GMT case independent
    asctime_date = (wkday | weekday) SP date3 SP time SP DIGIT{4};
    rfc850_date = (weekday | wkday) "," SP date2 SP time SP "GMT"i;
    rfc1123_date = (wkday | weekday) "," SP date1 SP time SP "GMT"i;
    HTTP_date = rfc1123_date | rfc850_date | asctime_date;
}%%
%%{
    #Classifies one response line: status line, raw header or empty line.
    machine parseResponseLine;
    include general;
    action setStatusLineEnd {
        rline.Type = ResponseLineType.StatusLine;
        // Keep the full line including CRLF so parseStatusLine can rerun it.
        rline.Line = line[0 .. (p - line.ptr + 1)];
    }
    action setEmptyLine {
        rline.Type = ResponseLineType.Empty;
    }
    action headerStart {
        rline.Type = ResponseLineType.Header;
        hstart = p;
    }
    action headerKeyEnd {
        rline.Key = line[(hstart - line.ptr) .. (p - line.ptr)];
    }
    action headerValueEnd {
        rline.Value = line[(hstart - line.ptr) .. (p - line.ptr)];
    }
    HTTP_Version = "HTTP" "/" DIGIT+ "." DIGIT+;
    Status_Code = DIGIT{3};
    Reason_Phrase = (TEXT - CR - LF)*;
    Status_Line = HTTP_Version SP Status_Code SP Reason_Phrase CRLF;
    raw_header = (token >headerStart %headerKeyEnd) LWS* ":" LWS* (TEXT* >headerStart %headerValueEnd);
    Response_line = (Status_Line @setStatusLineEnd) | (raw_header CRLF)
        | (CRLF @setEmptyLine);
    main := Response_line;
    write data;
}%%
%%{
    #Simple comma-separated list: "\," escapes a comma, empty entries
    #are skipped, trailing whitespace is stripped per entry.
    machine parseCommaList;
    include general;
    action listEntry {
        if(start != p)
        {
            list ~= stripr(line[(start - line.ptr)
                .. (p - line.ptr)]);
        }
    }
    action Start {
        start = p;
    }
    Comma_List_Entry = (((TEXT - ",") | "\\,")*) >Start %listEntry;
    Comma_List = LWS* Comma_List_Entry? ("," LWS* Comma_List_Entry? )*;
    main := Comma_List;
    write data;
}%%
%%{
    #Detects the continuation of an HTTP date directly after a comma,
    #e.g. ", 30-Apr-2011 15:59:28 GMT...".
    machine isDatePart;
    include general;
    action True {
        isPart = true;
    }
    #always accept wkday and weekday: This is against the spec,
    #but it's needed to work around some broken formatters.
    #Also accept lower case GMT
    rfc850_date_part = "," SP date2 SP time SP "GMT"i;
    rfc1123_date_part = "," SP date1 SP time SP "GMT"i;
    HTTP_date_part = (rfc850_date_part | rfc1123_date_part) %True;
    main := (HTTP_date_part)? any*;
    write data;
}%%
%%{
    #Comma splitter that keeps quoted sections and HTTP dates together:
    #an entry is only emitted when the comma is neither inside quotes
    #nor the comma of a date (checked via isDatePart/isFirstDatePart).
    machine parseCommaList2;
    include general;
    action listEntry {
        if((isDatePart(line[(p - line.ptr) .. $]) &&
            isFirstDatePart(line[0 .. (p - line.ptr)])) ||
            inQuotes)
        {
            // Comma belongs to a date or quoted text: keep accumulating.
            added = false;
        }
        else
        {
            if(start != p)
            {
                list ~= stripr(line[(start - line.ptr)
                    .. (p - line.ptr)]);
            }
            added = true;
        }
    }
    action Start {
        if(added)
            start = p;
    }
    action Quote {
        // NOTE(review): this inspects the character at the entry start,
        // not the character immediately preceding the quote — confirm
        // the intended backslash-escape check.
        escape = start - line.ptr;
        if(escape >= 0 && line[escape] != '\\')
            inQuotes = !inQuotes;
    }
    Comma_List_Entry = (((TEXT - "," - '"') | "\\," | ('"' >Quote))*) >Start %listEntry;
    Comma_List = LWS* Comma_List_Entry? ("," LWS* Comma_List_Entry? )*;
    main := Comma_List;
    write data;
}%%
%%{
    #Strict ";attr=value" list: value must be a token or quoted-string;
    #quoted values are unescaped via prepare().
    machine parseParameterList;
    include general;
    action Value {
        if(start != p)
        {
            list[key] = prepare(line[(start - line.ptr) .. (p - line.ptr)]);
        }
    }
    action Attribute {
        if(start != p)
        {
            key = line[(start - line.ptr) .. (p - line.ptr)];
        }
    }
    action Start {
        start = p;
    }
    n_value = (token | quoted_string) >Start %Value;
    n_attribute = token >Start %Attribute;
    n_parameter = LWS* n_attribute LWS* "=" LWS* n_value;
    Comma_List = (LWS* ";" (n_parameter))*;
    main := Comma_List;
    write data;
}%%
%%{
    #Lenient parameter list for cookie attributes: the value is optional
    #(missing value maps to "") and a trailing ';' is tolerated.
    machine parseParameterList2;
    include general;
    action Value {
        if(start2 != p)
        {
            string line_tmp = stripr(line[(start2 - line.ptr) .. (p - line.ptr)]);
            list[key] = unquote(line_tmp);
        }
    }
    action ValueEnd {
        // start2 stays null when no "=value" part followed the attribute.
        if(start2 is null)
        {
            list[key] = "";
        }
    }
    action Attribute {
        if(start != p)
        {
            key = line[(start - line.ptr) .. (p - line.ptr)];
        }
    }
    action Start {
        start2 = null;
        start = p;
    }
    action Start2 {
        start2 = p;
    }
    n_value = (any - ";")* >Start2 %Value;
    n_attribute = token >Start %Attribute;
    n_parameter = LWS* n_attribute LWS* (("=" LWS* n_value)? %ValueEnd);
    main := (LWS* ";" (n_parameter))* ";"?;
    write data;
}%%
%%{
    #Full status line: extracts major/minor version, status code and
    #reason phrase into sline.
    machine parseStatusLine;
    include general;
    action httpMajor {
        sline.Major = parse!(uint)(line[(numstart - line.ptr)
            .. (p - line.ptr)]);
    }
    action httpMinor {
        sline.Minor = parse!(uint)(line[(numstart - line.ptr)
            .. (p - line.ptr)]);
    }
    action numStart {
        numstart = p;
    }
    action statusCode {
        // Status code is always exactly three digits, so slice backwards.
        sline.StatusCode = parse!(uint)(line[(p - line.ptr -3)
            .. (p - line.ptr)]);
    }
    action reason {
        sline.Reason = line[(numstart - line.ptr)
            .. (p - line.ptr)];
    }
    HTTP_Version = "HTTP" "/" (DIGIT+ >numStart %httpMajor) "." (DIGIT+ >numStart %httpMinor);
    Status_Code = DIGIT{3};
    Reason_Phrase = (TEXT - CR - LF)*;
    Status_Line = HTTP_Version SP (Status_Code %statusCode) SP (Reason_Phrase >numStart %reason) CRLF;
    main := Status_Line;
    write data;
}%%
%%{
    #Pragma directive: "no-cache" or an extension "key[=value]".
    machine parsePragmaFragment;
    include general;
    action Start {
        start = p;
    }
    action KeyEnd {
        string key = line[(start - line.ptr)
            .. (p - line.ptr)];
        if(key == "no-cache")
        {
            frg.Type = PragmaType.No_cache;
        }
        else
        {
            frg.Type = PragmaType.Extension;
            frg.ExtensionKey = key;
        }
    }
    action ValueEnd {
        frg.ExtensionValue = prepare(line[(start - line.ptr)
            .. (p - line.ptr)]);
    }
    main := (token >Start %KeyEnd) ("=" (( token | quoted_string ) >Start %ValueEnd))?;
    write data;
}%%
%%{
    #Transfer-coding: "chunked" or a custom coding, with an optional
    #";key=value" tail handed to parseParameterList.
    machine parseTransferCodingFragment;
    include general;
    action Start {
        start = p;
    }
    action KeyEnd {
        string key = line[(start - line.ptr)
            .. (p - line.ptr)];
        if(key == "chunked")
        {
            frg.Type = TransferCodingType.Chunked;
        }
        else
        {
            frg.Type = TransferCodingType.Custom;
            frg.Other = key;
        }
    }
    action ValueEnd {
        frg.Parameters = parseParameterList(line[(start - line.ptr)
            .. (p - line.ptr)]);
    }
    main := (token >Start %KeyEnd) LWS* ((";" any*) >Start %ValueEnd)?;
    write data;
}%%
%%{
    #Product token: "Name" with an optional "/Version" part.
    machine parseProduct;
    include general;
    action Start {
        start = p;
    }
    action KeyEnd {
        prd.Name = line[(start - line.ptr) .. (p - line.ptr)];
    }
    action ValueEnd {
        prd.Version = line[(start - line.ptr) .. (p - line.ptr)];
    }
    main := (token >Start %KeyEnd) LWS* ("/" LWS* (token >Start %ValueEnd))?;
    write data;
}%%
%%{
    #Entity tag: optional weak "W/" prefix; the opaque tag is unquoted
    #by prepare().
    machine parseETag;
    include general;
    action Weak {
        tag.Weak = true;
    }
    action Start {
        start = p;
    }
    action End {
        tag.Value = prepare(line[(start - line.ptr) .. (p - line.ptr)]);
    }
    weak = "W/";
    #Spec says quoted_string only, but some servers do not quote ETags
    opaque_tag = quoted_string | token;
    main := (weak %Weak)? (opaque_tag >Start %End);
    write data;
}%%
%%{
    #Retry-After: all-digits means delta seconds, anything else is
    #treated as an HTTP date string kept verbatim.
    machine parseRetryAfter;
    include general;
    action Seconds {
        ret.Type = RetryAfterType.Seconds;
        ret.Seconds = parse!ulong(line);
    }
    action Text {
        ret.Type = RetryAfterType.Date;
        ret.Date = line;
    }
    main := (DIGIT+ %Seconds) | ((TEXT - DIGIT) TEXT* %Text);
    write data;
}%%
%%{
    #Content-Range: "bytes from-to/length" with '*' marking unknown
    #range or length.
    machine parseContentRangeHeader;
    include general;
    action Start {
        start = p;
    }
    action From {
        ret.From.Position = parse!ulong(line[(start - line.ptr)
            .. (p - line.ptr)]);
    }
    action To {
        ret.To.Position = parse!ulong(line[(start - line.ptr)
            .. (p - line.ptr)]);
    }
    action Length {
        ret.Length.Position = parse!ulong(line[(start - line.ptr)
            .. (p - line.ptr)]);
    }
    action UnknownLength {
        ret.Length.Unknown = true;
    }
    action UnknownPos {
        ret.To.Unknown = true;
        ret.From.Unknown = true;
    }
    byte_range_resp_spec = ((DIGIT+ >Start %From) "-" (DIGIT+ >Start %To)) | ("*" %UnknownPos);
    main := "bytes"i SP byte_range_resp_spec "/" ( (DIGIT+ >Start %Length) | ("*" %UnknownLength));
    write data;
}%%
%%{
    #Content-Type: "type/subtype" with an optional parameter tail
    #handed to parseParameterList.
    machine parseContentTypeHeader;
    include general;
    action Start {
        start = p;
    }
    action Type {
        ret.Type = line[(start - line.ptr) .. (p - line.ptr)];
    }
    action SubType {
        ret.SubType = line[(start - line.ptr) .. (p - line.ptr)];
    }
    action ValueEnd {
        ret.Parameters = parseParameterList(line[(start - line.ptr)
            .. (p - line.ptr)]);
    }
    main := (token >Start %Type) "/" (token >Start %SubType) LWS* ((";" any*) >Start %ValueEnd)?;
    write data;
}%%
%%{
    #Content-Disposition: "attachment" sets the flag, other types are
    #stored verbatim; parameters handed to parseParameterList.
    machine parseContentDispositionHeader;
    include general;
    action Start {
        start = p;
    }
    action Type {
        string type = line[(start - line.ptr) .. (p - line.ptr)];
        if(type == "attachment")
            ret.Attachment = true;
        else
            ret.Type = type;
    }
    action ValueEnd {
        ret.Parameters = parseParameterList(line[(start - line.ptr)
            .. (p - line.ptr)]);
    }
    main := (token >Start %Type) LWS* ((";" any*) >Start %ValueEnd);
    write data;
}%%
%%{
    #Single Set-Cookie entry: "name=value" plus an attribute tail parsed
    #with parseParameterList2; well-known attributes (case-insensitive)
    #are lifted into dedicated Cookie fields, everything else lands in
    #ret.Parameters.
    machine parseCookie;
    include general;
    action Start {
        start = p;
    }
    action NameEnd {
        ret.Name = line[(start - line.ptr) .. (p - line.ptr)];
    }
    action ValueEnd {
        ret.Value = stripr(line[(start - line.ptr) .. (p - line.ptr)]);
    }
    action ParamEnd {
        string[string] params = parseParameterList2(line[(start - line.ptr)
            .. (p - line.ptr)]);
        foreach(key, line; params)
        {
            switch(tolower(key))
            {
                case "comment":
                    ret.Comment = prepare(line);
                    break;
                case "domain":
                    ret.Domain = prepare(line);
                    break;
                case "max-age":
                    ret.MaxAge = prepare(line);
                    break;
                case "path":
                    ret.Path = prepare(line);
                    break;
                case "secure":
                    ret.Secure = true;
                    break;
                case "version":
                    ret.Version = parse!uint(line);
                    break;
                case "expires":
                    ret.Expires = prepare(line);
                    break;
                default:
                    ret.Parameters[key] = prepare(line);
            }
        }
    }
    word = token | quoted_string;
    main := (token >Start %NameEnd) "=" ((any -";")* >Start %ValueEnd) LWS* ((";" any*) >Start %ParamEnd)?;
    write data;
}%%
%%{
    #Single Set-Cookie2 (RFC 2965) entry; same shape as parseCookie but
    #with the additional CommentURL/Discard/Port attributes. The quoted
    #Port list is split with parseCommaList.
    machine parseCookie2;
    include general;
    action Start {
        start = p;
    }
    action NameEnd {
        ret.Name = line[(start - line.ptr) .. (p - line.ptr)];
    }
    action ValueEnd {
        ret.Value = stripr(line[(start - line.ptr) .. (p - line.ptr)]);
    }
    action ParamEnd {
        string[string] params = parseParameterList2(line[(start - line.ptr)
            .. (p - line.ptr)]);
        foreach(key, line; params)
        {
            switch(tolower(key))
            {
                case "comment":
                    ret.Comment = prepare(line);
                    break;
                case "commenturl":
                    ret.CommentURL = prepare(line);
                    break;
                case "discard":
                    ret.Discard = true;
                    break;
                case "domain":
                    ret.Domain = prepare(line);
                    break;
                case "max-age":
                    ret.MaxAge = prepare(line);
                    break;
                case "path":
                    ret.Path = prepare(line);
                    break;
                case "port":
                    string[] ports = parseCommaList(prepare(line));
                    foreach(entry; ports)
                    {
                        ret.Port ~= parse!ulong(entry);
                    }
                    break;
                case "secure":
                    ret.Secure = true;
                    break;
                case "version":
                    ret.Version = parse!uint(line);
                    break;
                default:
                    ret.Parameters[key] = prepare(line);
            }
        }
    }
    word = token | quoted_string;
    main := (token >Start %NameEnd) "=" ((any -";")* >Start %ValueEnd) LWS* ((";" any*) >Start %ParamEnd)?;
    write data;
}%%
%%{
    #Accepts exactly one HTTP token; non-tokens leave the machine in an
    #error state, which isToken() turns into an exception.
    machine isToken;
    include general;
    main := token;
    write data;
}%%
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
module jpf.http.types; | |
/**
 * Classifies one line of an HTTP response during parsing: the status
 * line, a header line, the empty line terminating the header section,
 * or anything unrecognized.
 */
enum ResponseLineType : ushort
{
    StatusLine, /// e.g. "HTTP/1.1 200 OK"
    Header,     /// a "Key: Value" header line
    Empty,      /// the blank separator line
    Other       /// unrecognized content
}
/**
 * The request methods defined by RFC 2616 §5.1.1; `Other` stands for
 * any extension method not in this list (the raw token is then kept
 * elsewhere, e.g. AllowHeaderFragment.OtherMethod).
 */
enum HTTPMethod : ushort
{
    OPTIONS,
    GET,
    HEAD,
    POST,
    PUT,
    DELETE,
    TRACE,
    CONNECT,
    Other
}
/**
 * One parsed response line. Which union member is meaningful depends
 * on Type: presumably `Line` holds the raw text for StatusLine/Other
 * lines, while `Key`/`Value` hold the split pair for Header lines —
 * TODO confirm against the parser.
 */
struct ResponseLine
{
    ResponseLineType Type;
    union
    {
        string Line;
        string Key;
    }
    string Value;
}
/**
 * The HTTP status line (RFC 2616 §6.1): protocol version, numeric
 * status code, and reason phrase.
 */
struct StatusLine
{
    uint Major, Minor;  /// HTTP-Version, e.g. 1 and 1 for "HTTP/1.1"
    uint StatusCode;    /// e.g. 200, 404
    string Reason;      /// reason phrase, e.g. "OK"
}
/**Allow Header (RFC 2616 §14.7): one entry of the method list.**/
struct AllowHeaderFragment
{
    HTTPMethod Method;
    /// Raw method token; presumably only set when Method == HTTPMethod.Other — TODO confirm.
    string OtherMethod;
}
/// Allow header: the full comma-separated list of allowed methods.
struct AllowHeader
{
    AllowHeaderFragment[] Fragments;
}
/**Connection Header (RFC 2616 §14.10): kind of a connection-token.**/
enum ConnectionType
{
    Close, /// the standard "close" option
    Other  /// any other connection-token
}
/// One connection-token of the Connection header.
struct ConnectionHeaderFragment
{
    ConnectionType Type;
    /// Raw token; presumably meaningful only when Type == ConnectionType.Other.
    string Other;
}
/// Connection header: the full list of connection-tokens.
struct ConnectionHeader
{
    ConnectionHeaderFragment[] Fragments;
}
/**Pragma Header (RFC 2616 §14.32): kind of a pragma-directive.**/
enum PragmaType
{
    No_cache,  /// the standard "no-cache" directive
    Extension  /// an extension-pragma ("key" or "key=value")
}
/// One pragma-directive of the Pragma header.
struct PragmaHeaderFragment
{
    PragmaType Type;
    /// Extension directive name/value; presumably only set for PragmaType.Extension.
    string ExtensionKey;
    string ExtensionValue;
}
/// Pragma header: the full list of pragma-directives.
struct PragmaHeader
{
    PragmaHeaderFragment[] Fragments;
}
/**Transfer-Encoding Header (RFC 2616 §14.41): kind of transfer-coding.**/
enum TransferCodingType : ushort
{
    Chunked, /// the standard "chunked" coding
    Custom   /// any other transfer-extension
}
/// One transfer-coding with its optional ";key=value" parameters.
struct TransferCoding
{
    TransferCodingType Type;
    /// Raw coding token; presumably only set when Type == TransferCodingType.Custom.
    string Other;
    string[string] Parameters;
}
/// Transfer-Encoding header: the full list of applied codings.
struct TransferEncodingHeader
{
    TransferCoding[] Fragments;
}
/// A product token ("name/version") as used by e.g. User-Agent and
/// Server headers (RFC 2616 §3.8).
struct Product
{
    string Name;
    string Version; /// may be empty when the token has no "/version" part — TODO confirm
}
/**Accept-Ranges Header (RFC 2616 §14.5): kind of a range unit.**/
enum RangeTypeId
{
    Bytes, /// the standard "bytes" unit
    Custom /// any other range unit
}
/// One acceptable range unit.
struct RangeType
{
    RangeTypeId Type;
    /// Raw unit token; presumably only set when Type == RangeTypeId.Custom.
    string Custom;
}
/// Accept-Ranges header; None == true represents "Accept-Ranges: none".
struct AcceptRangesHeader
{
    bool None = false;
    RangeType[] Types;
}
/**ETag Header (RFC 2616 §14.19): an entity tag.**/
struct ETag
{
    bool Weak = false; /// true for weak validators ("W/" prefix)
    string Value;      /// the tag itself (quoting handled by parser/formatter)
}
/// ETag header: a single entity tag.
struct ETagHeader
{
    ETag Tag;
}
/**Retry-After Header (RFC 2616 §14.37): delta-seconds or an HTTP-date.**/
enum RetryAfterType
{
    Seconds,
    Date
}
/// Retry-After header. Type selects the valid union member; the date is
/// kept as a raw string (see module note about the Phobos datetime
/// rewrite). Reading the member that doesn't match Type is undefined —
/// the union overlays a ulong with a string slice.
struct RetryAfterHeader
{
    RetryAfterType Type;
    union
    {
        ulong Seconds;
        string Date;
    }
}
/+struct Challenge | |
{ | |
string AuthScheme; | |
string[string] Parameters; | |
}+/ | |
/// The content-coding values of RFC 2616 §3.5; Other covers any
/// extension token.
enum ContentCodingType
{
    Compress,
    Gzip,
    Deflate,
    Identity,
    Other
}
/// One content-coding, used by Content-Encoding / Accept-Encoding / TE.
struct ContentCoding
{
    ContentCodingType Type;
    /// Raw coding token; presumably only set when Type == ContentCodingType.Other.
    string Other;
}
/// A byte position in a range spec; Unknown == true presumably encodes
/// the "*" placeholder of RFC 2616 §14.16 — TODO confirm against parser.
struct BytePos
{
    bool Unknown = false;
    ulong Position;
}
/// Content-Range header value: "bytes From-To/Length".
struct ContentRange
{
    BytePos From;
    BytePos To;
    BytePos Length;
}
/// A media type "type/subtype;key=value;..." (RFC 2616 §3.7), as used
/// by the Content-Type header.
struct MediaType
{
    string Type;
    string SubType;
    string[string] Parameters;
}
/**Content-Disposition Header (RFC 2616 §19.5.1).**/
struct ContentDispositionHeader
{
    bool Attachment = false; /// true for the standard "attachment" type
    /// Raw disposition type; presumably only set when Attachment is false.
    string Type;
    string[string] Parameters; /// e.g. filename="..."
}
/// A Set-Cookie cookie (Netscape/RFC 2109 style). MaxAge and Expires
/// are kept as raw strings (see module note about the Phobos datetime
/// rewrite); unrecognized attributes end up in Parameters.
struct Cookie
{
    string Name, Value, Comment, Domain, MaxAge, Path, Expires;
    bool Secure;
    uint Version;
    string[string] Parameters;
}
/// A Set-Cookie2 cookie (RFC 2965). MaxAge is kept as a raw string;
/// Port holds the parsed port list; unrecognized attributes end up in
/// Parameters.
struct Cookie2
{
    string Name, Value, Comment, CommentURL, Domain, MaxAge, Path;
    ulong[] Port;
    bool Secure, Discard;
    uint Version;
    string[string] Parameters;
}
/// One media-range of the Accept header (RFC 2616 §14.1).
struct MediaRange
{
    MediaType Type;
    /// The q quality value; -1 marks "not specified" (q is otherwise 0..1).
    float AcceptParam = -1f;
    string[string] AcceptExtension;
}
/// One entry of the Accept-Charset header (RFC 2616 §14.2).
struct Charset
{
    string Name;
    float Q = -1f; /// quality value; -1 marks "not specified"
}
/**Accept-Encoding Header (RFC 2616 §14.3): one coding with its quality.**/
struct AcceptEncodingHeader
{
    ContentCoding Coding;
    float Q = -1f; /// quality value; -1 marks "not specified"
}
/**Accept-Language Header (RFC 2616 §14.4): a language tag such as
 * "en-US" split into its primary and sub tag.**/
struct LanguageTag
{
    string Main; /// primary tag, e.g. "en"
    string Sub;  /// subtag, e.g. "US"; may be empty — TODO confirm
}
/// One language-range of the Accept-Language header with its quality.
struct AcceptLanguage
{
    LanguageTag Tag;
    float Q = -1f; /// quality value; -1 marks "not specified"
}
/// Kind of an Expect header expectation (RFC 2616 §14.20).
enum ExpectationType
{
    Other,
    Continue /// the standard "100-continue" expectation
}
/**Expect Header (note: original comment mislabeled this as
 * "Transfer-Encoding Header"). One expectation with its optional
 * extension key/value and parameters.**/
struct Expectation
{
    ExpectationType Type;
    /// Extension data; presumably only set for ExpectationType.Other.
    string ExtensionKey;
    string ExtensionValue;
    string[string] ExtensionParameters;
}
/**If-Match Header (RFC 2616 §14.24); also fits If-None-Match.
 * All == true represents the "*" form; otherwise Specific lists the
 * entity tags to match.**/
struct IfMatchHeader
{
    bool All;
    ETag[] Specific;
}
/**Range Header (RFC 2616 §14.35): one byte-range "From-To"; an open
 * end is presumably encoded via BytePos.Unknown — TODO confirm.**/
struct RangeHeader
{
    BytePos From;
    BytePos To;
}
/**TE Header (RFC 2616 §14.39): acceptable transfer-codings;
 * Trailers == true represents the "trailers" keyword.**/
struct TEHeader
{
    TEFragment[] Codings;
    bool Trailers = false;
}
/// One transfer-coding entry of the TE header, with its parameters,
/// accept-params and quality value (-1 marks "not specified").
struct TEFragment
{
    string[string] Parameters, AcceptParameters;
    ContentCoding Coding;
    float Q = -1f;
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment