
first commit

namedkitten 2020-07-09 01:16:31 +01:00
commit dab150abf0
26 changed files with 2586 additions and 0 deletions

.gitignore vendored Normal file
@@ -0,0 +1 @@
zig-cache

.gitmodules vendored Normal file
@@ -0,0 +1,9 @@
[submodule "deps/interfaces"]
path = deps/interfaces
url = https://github.com/alexnask/interface.zig
[submodule "deps/time"]
path = deps/time
url = https://github.com/gernest/time
[submodule "deps/hzzp"]
path = deps/hzzp
url = https://github.com/truemedian/hzzp

README.md Normal file
@@ -0,0 +1 @@
# zar

build.zig Normal file
@@ -0,0 +1,25 @@
const std = @import("std");
const Builder = @import("std").build.Builder;
pub fn build(b: *Builder) void {
const mode = b.standardReleaseOptions();
const exe = b.addExecutable("zar", "src/main.zig");
exe.addPackage(.{
.name = "interfaces",
.path = "deps/interfaces/interface.zig",
});
exe.addPackage(.{
.name = "hzzp",
.path = "deps/hzzp/src/main.zig",
});
exe.setBuildMode(mode);
const run_cmd = exe.run();
const run_step = b.step("run", "Run the app");
run_step.dependOn(&run_cmd.step);
b.default_step.dependOn(&exe.step);
b.installArtifact(exe);
}

deps/hzzp/.gitignore vendored Normal file
@@ -0,0 +1 @@
zig-cache/

deps/hzzp/LICENSE vendored Normal file
@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 truemedian
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

deps/hzzp/README.md vendored Normal file
@@ -0,0 +1,18 @@
# HZZP
Hzzp is an HTTP/1.1 library for Zig.
## BaseClient and BaseServer
These are designed with performance in mind: the parser makes no allocations. However, you must guarantee that
the buffer provided to `create` is long enough for the largest chunk that will be parsed. In BaseClient this will
be a `Header: value` pair (including CRLF); in BaseServer it will be the requested path. If your buffer is too
short, `readEvent` will return a `BufferOverflow` error.
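For a quick feel of the API, here is a minimal sketch adapted from the tests in `src/base/client.zig`. It assumes the library is wired up as an `hzzp` package (as this repository's build.zig does); the buffer sizes and the canned response are illustrative only.
```zig
const std = @import("std");
const hzzp = @import("hzzp");

test "minimal BaseClient round-trip" {
    // The parser only ever uses this buffer, so it must be able to hold the
    // longest `Header: value` line of the response (including the CRLF).
    var read_buffer: [256]u8 = undefined;
    var request_sink: [1024]u8 = undefined;

    var response = "HTTP/1.1 200 OK\r\nContent-Length: 4\r\n\r\ngood";
    var reader = std.io.fixedBufferStream(response).reader();
    var writer = std.io.fixedBufferStream(&request_sink).writer();

    var client = hzzp.BaseClient.create(&read_buffer, &reader, &writer);
    try client.writeHead("GET", "/");
    try client.writeHeader("Host", "localhost");
    try client.writeChunk(null); // finishes the head; no body is sent

    // Drain the events; readEvent returns error.BufferOverflow if a line does
    // not fit in read_buffer, and null once the response has been consumed.
    while (try client.readEvent()) |event| {
        switch (event) {
            .status => |s| std.testing.expect(s.code == 200),
            .chunk => |c| std.testing.expect(std.mem.eql(u8, c.data, "good")),
            else => {},
        }
    }
}
```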
## Todo
- [x] low-level allocation-free client and server parser
- [ ] higher-level allocating, but easier to use client and server parser
- [ ] very simple request wrapper (probably around the high-level allocating client)
- [x] "prettyify" error sets

deps/hzzp/build.zig vendored Normal file
@@ -0,0 +1,15 @@
const std = @import("std");
const Builder = std.build.Builder;
const Pkg = std.build.Pkg;
pub fn build(b: *Builder) void {
const mode = b.standardReleaseOptions();
var tests = b.addTest("src/main.zig");
tests.setBuildMode(mode);
const test_step = b.step("test", "Run library tests");
test_step.dependOn(&tests.step);
b.default_step.dependOn(test_step);
}

deps/hzzp/src/base/client.zig vendored Normal file
@@ -0,0 +1,441 @@
const std = @import("std");
const ascii = std.ascii;
const fmt = std.fmt;
const mem = std.mem;
const assert = std.debug.assert;
pub usingnamespace @import("events.zig");
fn stripCarriageReturn(buffer: []u8) []u8 {
if (buffer[buffer.len - 1] == '\r') {
return buffer[0 .. buffer.len - 1];
} else {
return buffer;
}
}
pub fn create(buffer: []u8, reader: var, writer: var) BaseClient(@TypeOf(reader), @TypeOf(writer)) {
assert(@typeInfo(@TypeOf(reader)) == .Pointer);
assert(@typeInfo(@TypeOf(writer)) == .Pointer);
assert(buffer.len >= 32);
return BaseClient(@TypeOf(reader), @TypeOf(writer)).init(buffer, reader, writer);
}
pub fn BaseClient(comptime Reader: type, comptime Writer: type) type {
const ReaderError = @typeInfo(Reader).Pointer.child.Error;
const WriterError = @typeInfo(Writer).Pointer.child.Error;
return struct {
const Self = @This();
read_buffer: []u8,
send_encoding: TransferEncoding = .unknown,
recv_encoding: TransferEncoding = .unknown,
enc_need: usize = 0,
enc_read: usize = 0,
reader: Reader,
writer: Writer,
done: bool = false,
head_sent: bool = false,
state: ParserState = .initial,
pub fn init(buffer: []u8, reader: Reader, writer: Writer) Self {
return Self{
.read_buffer = buffer,
.reader = reader,
.writer = writer,
};
}
pub fn reset(self: *Self) void {
self.send_encoding = .unknown;
self.recv_encoding = .unknown;
self.enc_need = 0;
self.enc_read = 0;
self.done = false;
self.head_sent = false;
self.state = .initial;
}
pub fn writeHead(self: *Self, method: []const u8, path: []const u8) WriterError!void {
try self.writer.writeAll(method);
try self.writer.writeAll(" ");
try self.writer.writeAll(path);
try self.writer.writeAll(" HTTP/1.1\r\n");
}
pub fn writeHeader(self: *Self, key: []const u8, value: []const u8) WriterError!void {
if (ascii.eqlIgnoreCase(key, "transfer-encoding")) {
self.send_encoding = .chunked;
} else if (ascii.eqlIgnoreCase(key, "content-length")) {
self.send_encoding = .length;
}
try self.writer.writeAll(key);
try self.writer.writeAll(": ");
try self.writer.writeAll(value);
try self.writer.writeAll("\r\n");
}
pub fn writeHeadComplete(self: *Self) WriterError!void {
if (!self.head_sent) {
try self.writer.writeAll("\r\n");
self.head_sent = true;
}
}
pub fn writeChunk(self: *Self, data: ?[]const u8) WriterError!void {
try self.writeHeadComplete();
switch (self.send_encoding) {
.chunked => {
if (data) |payload| {
try fmt.formatInt(payload.len, 16, true, fmt.FormatOptions{}, self.writer);
try self.writer.writeAll("\r\n");
try self.writer.writeAll(payload);
try self.writer.writeAll("\r\n");
} else {
try self.writer.writeAll("0\r\n");
}
},
.length, .unknown => {
if (data) |payload| {
try self.writer.writeAll(payload);
}
},
}
}
var read_byte: [1]u8 = undefined;
const ReadUntilError = ReaderError || error{BufferOverflow};
fn readUntilDelimiterOrEof(self: *Self, buf: []u8, delimiter: u8) ReadUntilError!?[]u8 {
var index: usize = 0;
while (true) {
const read_len = try self.reader.read(&read_byte);
if (read_len < 1) {
if (index == 0) return null;
return buf[0..index];
}
if (read_byte[0] == delimiter) return buf[0..index];
if (index >= buf.len) return error.BufferOverflow;
buf[index] = read_byte[0];
index += 1;
}
}
fn skipUntilDelimiterOrEof(self: *Self, delimiter: u8) ReaderError!void {
while (true) {
const read_len = try self.reader.read(&read_byte);
if (read_len < 1) return;
if (read_byte[0] == delimiter) return;
}
}
pub const ReadError = ReadUntilError || fmt.ParseIntError;
pub fn readEvent(self: *Self) ReadError!?ClientEvent {
if (self.done) return null;
switch (self.state) {
.initial => {
if (try self.readUntilDelimiterOrEof(self.read_buffer, ' ')) |buffer| {
if (!mem.eql(u8, buffer, "HTTP/1.1")) {
return ClientEvent{
.invalid = .{
.buffer = buffer,
.message = "expected HTTP/1.1",
.state = self.state,
},
};
}
} else {
return ClientEvent.closed;
}
var code: u16 = 0;
if (try self.readUntilDelimiterOrEof(self.read_buffer, ' ')) |buffer| {
if (buffer.len != 3)
return ClientEvent{
.invalid = Invalid{
.buffer = buffer,
.message = "expected response code to be 3 digits",
.state = self.state,
},
};
code = try fmt.parseUnsigned(u16, buffer, 10);
if (code < 100 or code >= 600)
return ClientEvent{
.invalid = Invalid{
.buffer = buffer,
.message = "expected response code to be in range 100 -> 599",
.state = self.state,
},
};
} else {
return ClientEvent.closed;
}
if (try self.readUntilDelimiterOrEof(self.read_buffer, '\n')) |buffer| {
self.state = .headers;
return ClientEvent{
.status = .{
.code = code,
.reason = stripCarriageReturn(buffer),
},
};
} else {
return ClientEvent.closed;
}
},
.headers => {
if (try self.readUntilDelimiterOrEof(self.read_buffer, '\n')) |buffer| {
if (buffer.len == 1 and buffer[0] == '\r') {
self.state = .payload;
return ClientEvent.head_complete;
}
const separator = blk: {
if (mem.indexOfScalar(u8, buffer, ':')) |pos| {
break :blk pos;
} else {
return ClientEvent{
.invalid = .{
.buffer = buffer,
.message = "expected header to be separated with a ':' (colon)",
.state = self.state,
},
};
}
};
var index = separator + 1;
while (true) : (index += 1) {
// Skip the whitespace after the ':'; if we run off the end of the line,
// the header has no value.
if (index >= buffer.len) {
return ClientEvent{
.invalid = .{
.buffer = buffer,
.message = "no header value provided",
.state = self.state,
},
};
}
if (buffer[index] != ' ') break;
}
const name = buffer[0..separator];
const value = stripCarriageReturn(buffer[index..]);
if (ascii.eqlIgnoreCase(name, "content-length")) {
self.recv_encoding = .length;
self.enc_need = try fmt.parseUnsigned(usize, value, 10);
} else if (ascii.eqlIgnoreCase(name, "transfer-encoding")) {
if (ascii.eqlIgnoreCase(value, "chunked")) {
self.recv_encoding = .chunked;
}
}
return ClientEvent{
.header = .{
.name = name,
.value = value,
},
};
} else {
return ClientEvent.closed;
}
},
.payload => {
switch (self.recv_encoding) {
.unknown => {
self.done = true;
return ClientEvent.end;
},
.length => {
const left = self.enc_need - self.enc_read;
if (left <= self.read_buffer.len) {
const read_len = try self.reader.readAll(self.read_buffer[0..left]);
if (read_len != left) return ClientEvent.closed;
self.done = true;
return ClientEvent{
.chunk = .{
.data = self.read_buffer[0..read_len],
.final = true,
},
};
} else {
const read_len = try self.reader.read(self.read_buffer);
if (read_len == 0) return ClientEvent.closed;
self.enc_read += read_len;
return ClientEvent{
.chunk = .{
.data = self.read_buffer[0..read_len],
},
};
}
},
.chunked => {
if (self.enc_need == 0) {
if (try self.readUntilDelimiterOrEof(self.read_buffer, '\n')) |buffer| {
const chunk_len = try fmt.parseInt(usize, stripCarriageReturn(buffer), 16);
if (chunk_len == 0) {
try self.skipUntilDelimiterOrEof('\n');
self.done = true;
return ClientEvent.end;
} else if (chunk_len <= self.read_buffer.len) {
const read_len = try self.reader.readAll(self.read_buffer[0..chunk_len]);
if (read_len != chunk_len) return ClientEvent.closed;
try self.skipUntilDelimiterOrEof('\n');
return ClientEvent{
.chunk = .{
.data = self.read_buffer[0..read_len],
.final = true,
},
};
} else {
self.enc_need = chunk_len;
self.enc_read = 0;
const read_len = try self.reader.read(self.read_buffer);
if (read_len == 0) return ClientEvent.closed;
self.enc_read += read_len;
return ClientEvent{
.chunk = .{
.data = self.read_buffer[0..read_len],
},
};
}
} else {
return ClientEvent.closed;
}
} else {
const left = self.enc_need - self.enc_read;
if (left <= self.read_buffer.len) {
const read_len = try self.reader.readAll(self.read_buffer[0..left]);
if (read_len != left) return ClientEvent.closed;
try self.skipUntilDelimiterOrEof('\n');
self.enc_need = 0;
self.enc_read = 0;
return ClientEvent{
.chunk = .{
.data = self.read_buffer[0..read_len],
.final = true,
},
};
} else {
const read_len = try self.reader.read(self.read_buffer);
if (read_len == 0) return ClientEvent.closed;
self.enc_read += read_len;
return ClientEvent{
.chunk = .{
.data = self.read_buffer[0..read_len],
},
};
}
}
},
}
},
}
}
};
}
const testing = std.testing;
const io = std.io;
test "decodes a simple response" {
var read_buffer: [32]u8 = undefined;
var the_void: [1024]u8 = undefined;
var response = "HTTP/1.1 200 OK\r\nContent-Length: 4\r\n\r\ngood";
var reader = io.fixedBufferStream(response).reader();
var writer = io.fixedBufferStream(&the_void).writer();
var client = create(&read_buffer, &reader, &writer);
try client.writeHead("GET", "/");
try client.writeHeader("Host", "localhost");
try client.writeChunk("aaabbbccc");
var status = try client.readEvent();
testing.expect(status == .status and status.status.code == 200);
var header = try client.readEvent();
testing.expect(header == .header and mem.eql(u8, header.header.name, "Content-Length") and mem.eql(u8, header.header.value, "4"));
var complete = try client.readEvent();
testing.expect(complete == .head_complete);
var body = try client.readEvent();
testing.expect(body == .chunk and mem.eql(u8, body.chunk.data, "good") and body.chunk.final);
var end = try client.readEvent();
testing.expect(end == .end);
}
test "decodes a chunked response" {
var read_buffer: [32]u8 = undefined;
var the_void: [1024]u8 = undefined;
var response = "HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n4\r\ngood\r\n0\r\n";
var reader = io.fixedBufferStream(response).reader();
var writer = io.fixedBufferStream(&the_void).writer();
var client = create(&read_buffer, &reader, &writer);
try client.writeHead("GET", "/");
try client.writeHeader("Host", "localhost");
try client.writeChunk("aaabbbccc");
var status = try client.readEvent();
testing.expect(status == .status and status.status.code == 200);
var header = try client.readEvent();
testing.expect(header == .header and mem.eql(u8, header.header.name, "Transfer-Encoding") and mem.eql(u8, header.header.value, "chunked"));
var complete = try client.readEvent();
testing.expect(complete == .head_complete);
var body = try client.readEvent();
testing.expect(body == .chunk and mem.eql(u8, body.chunk.data, "good") and body.chunk.final);
var end = try client.readEvent();
testing.expect(end == .end);
}
test "refAllDecls" {
std.meta.refAllDecls(@This());
}

deps/hzzp/src/base/events.zig vendored Normal file
@@ -0,0 +1,57 @@
pub const TransferEncoding = enum {
length,
chunked,
unknown,
};
pub const ParserState = enum {
initial,
headers,
payload,
};
pub const ResponseStatus = struct {
code: u16,
reason: []const u8,
};
pub const RequestStatus = struct {
method: []const u8,
path: []const u8,
};
pub const Header = struct {
name: []const u8,
value: []const u8,
};
pub const Chunk = struct {
data: []const u8,
final: bool = false,
};
pub const Invalid = struct {
buffer: []const u8,
message: []const u8,
state: ParserState,
};
pub const ClientEvent = union(enum) {
status: ResponseStatus,
header: Header,
head_complete: void,
chunk: Chunk,
end: void,
invalid: Invalid,
closed: void,
};
pub const ServerEvent = union(enum) {
status: RequestStatus,
header: Header,
head_complete: void,
chunk: Chunk,
end: void,
invalid: Invalid,
closed: void,
};

deps/hzzp/src/base/server.zig vendored Normal file
@@ -0,0 +1,441 @@
const std = @import("std");
const ascii = std.ascii;
const fmt = std.fmt;
const mem = std.mem;
const assert = std.debug.assert;
pub usingnamespace @import("events.zig");
fn stripCarriageReturn(buffer: []u8) []u8 {
if (buffer[buffer.len - 1] == '\r') {
return buffer[0 .. buffer.len - 1];
} else {
return buffer;
}
}
pub fn create(buffer: []u8, reader: var, writer: var) BaseServer(@TypeOf(reader), @TypeOf(writer)) {
assert(@typeInfo(@TypeOf(reader)) == .Pointer);
assert(@typeInfo(@TypeOf(writer)) == .Pointer);
assert(buffer.len >= 32);
return BaseServer(@TypeOf(reader), @TypeOf(writer)).init(buffer, reader, writer);
}
pub fn BaseServer(comptime Reader: type, comptime Writer: type) type {
const ReaderError = @typeInfo(Reader).Pointer.child.Error;
const WriterError = @typeInfo(Writer).Pointer.child.Error;
return struct {
const Self = @This();
read_buffer: []u8,
send_encoding: TransferEncoding = .unknown,
recv_encoding: TransferEncoding = .unknown,
enc_need: usize = 0,
enc_read: usize = 0,
reader: Reader,
writer: Writer,
done: bool = false,
head_sent: bool = false,
state: ParserState = .initial,
pub fn init(buffer: []u8, reader: Reader, writer: Writer) Self {
return Self{
.read_buffer = buffer,
.reader = reader,
.writer = writer,
};
}
pub fn reset(self: *Self) void {
self.send_encoding = .unknown;
self.recv_encoding = .unknown;
self.enc_need = 0;
self.enc_read = 0;
self.done = false;
self.head_sent = false;
self.state = .initial;
}
pub fn writeHead(self: *Self, code: u16, reason: []const u8) WriterError!void {
try self.writer.writeAll("HTTP/1.1 ");
try fmt.formatInt(code, 10, true, .{}, self.writer);
try self.writer.writeAll(" ");
try self.writer.writeAll(reason);
try self.writer.writeAll("\r\n");
}
pub fn writeHeader(self: *Self, key: []const u8, value: []const u8) WriterError!void {
if (ascii.eqlIgnoreCase(key, "transfer-encoding")) {
self.send_encoding = .chunked;
} else if (ascii.eqlIgnoreCase(key, "content-length")) {
self.send_encoding = .length;
}
try self.writer.writeAll(key);
try self.writer.writeAll(": ");
try self.writer.writeAll(value);
try self.writer.writeAll("\r\n");
}
pub fn writeHeadComplete(self: *Self) WriterError!void {
if (!self.head_sent) {
try self.writer.writeAll("\r\n");
self.head_sent = true;
}
}
pub fn writeChunk(self: *Self, data: ?[]const u8) WriterError!void {
try self.writeHeadComplete();
switch (self.send_encoding) {
.chunked => {
if (data) |payload| {
try fmt.formatInt(payload.len, 16, true, .{}, self.writer);
try self.writer.writeAll("\r\n");
try self.writer.writeAll(payload);
try self.writer.writeAll("\r\n");
} else {
try self.writer.writeAll("0\r\n");
}
},
.length, .unknown => {
if (data) |payload| {
try self.writer.writeAll(payload);
}
},
}
}
var read_byte: [1]u8 = undefined;
const ReadUntilError = ReaderError || error{BufferOverflow};
fn readUntilDelimiterOrEof(self: *Self, buf: []u8, delimiter: u8) ReadUntilError!?[]u8 {
var index: usize = 0;
while (true) {
const read_len = try self.reader.read(&read_byte);
if (read_len < 1) {
if (index == 0) return null;
return buf[0..index];
}
if (read_byte[0] == delimiter) return buf[0..index];
if (index >= buf.len) return error.BufferOverflow;
buf[index] = read_byte[0];
index += 1;
}
}
fn skipUntilDelimiterOrEof(self: *Self, delimiter: u8) ReaderError!void {
while (true) {
const read_len = try self.reader.read(&read_byte);
if (read_len < 1) return;
if (read_byte[0] == delimiter) return;
}
}
pub const ReadError = ReadUntilError || fmt.ParseIntError;
pub fn readEvent(self: *Self) ReadError!?ServerEvent {
if (self.done) return null;
switch (self.state) {
.initial => {
if (try self.readUntilDelimiterOrEof(self.read_buffer, ' ')) |method| {
for (method) |c| {
if (!ascii.isAlpha(c) or !ascii.isUpper(c)) {
return ServerEvent{
.invalid = .{
.buffer = method,
.message = "invalid HTTP method",
.state = self.state,
},
};
}
}
if (try self.readUntilDelimiterOrEof(self.read_buffer[method.len..], ' ')) |path| {
if (try self.readUntilDelimiterOrEof(self.read_buffer[method.len + path.len ..], '\n')) |buffer| {
if (!mem.eql(u8, stripCarriageReturn(buffer), "HTTP/1.1")) {
return ServerEvent{
.invalid = .{
.buffer = buffer,
.message = "expected HTTP/1.1",
.state = self.state,
},
};
}
self.state = .headers;
return ServerEvent{
.status = .{
.method = method,
.path = path,
},
};
} else {
return ServerEvent.closed;
}
} else {
return ServerEvent.closed;
}
} else {
return ServerEvent.closed;
}
},
.headers => {
if (try self.readUntilDelimiterOrEof(self.read_buffer, '\n')) |buffer| {
if (buffer.len == 1 and buffer[0] == '\r') {
self.state = .payload;
return ServerEvent.head_complete;
}
const separator = blk: {
if (mem.indexOfScalar(u8, buffer, ':')) |pos| {
break :blk pos;
} else {
return ServerEvent{
.invalid = .{
.buffer = buffer,
.message = "expected header to be separated with a ':' (colon)",
.state = self.state,
},
};
}
};
var index = separator + 1;
while (true) : (index += 1) {
// Skip the whitespace after the ':'; if we run off the end of the line,
// the header has no value.
if (index >= buffer.len) {
return ServerEvent{
.invalid = .{
.buffer = buffer,
.message = "no header value provided",
.state = self.state,
},
};
}
if (buffer[index] != ' ') break;
}
const name = buffer[0..separator];
const value = stripCarriageReturn(buffer[index..]);
if (ascii.eqlIgnoreCase(name, "content-length")) {
self.recv_encoding = .length;
self.enc_need = try fmt.parseUnsigned(usize, value, 10);
} else if (ascii.eqlIgnoreCase(name, "transfer-encoding")) {
if (ascii.eqlIgnoreCase(value, "chunked")) {
self.recv_encoding = .chunked;
}
}
return ServerEvent{
.header = .{
.name = name,
.value = value,
},
};
} else {
return ServerEvent.closed;
}
},
.payload => {
switch (self.recv_encoding) {
.unknown => {
self.done = true;
return ServerEvent.end;
},
.length => {
const left = self.enc_need - self.enc_read;
if (left <= self.read_buffer.len) {
const read_len = try self.reader.readAll(self.read_buffer[0..left]);
if (read_len != left) return ServerEvent.closed;
self.done = true;
return ServerEvent{
.chunk = .{
.data = self.read_buffer[0..read_len],
.final = true,
},
};
} else {
const read_len = try self.reader.read(self.read_buffer);
if (read_len == 0) return ServerEvent.closed;
self.enc_read += read_len;
return ServerEvent{
.chunk = .{
.data = self.read_buffer[0..read_len],
},
};
}
},
.chunked => {
if (self.enc_need == 0) {
if (try self.readUntilDelimiterOrEof(self.read_buffer, '\n')) |buffer| {
const chunk_len = try fmt.parseInt(usize, stripCarriageReturn(buffer), 16);
if (chunk_len == 0) {
try self.skipUntilDelimiterOrEof('\n');
self.done = true;
return ServerEvent.end;
} else if (chunk_len <= self.read_buffer.len) {
const read_len = try self.reader.readAll(self.read_buffer[0..chunk_len]);
if (read_len != chunk_len) return ServerEvent.closed;
try self.skipUntilDelimiterOrEof('\n');
return ServerEvent{
.chunk = .{
.data = self.read_buffer[0..read_len],
.final = true,
},
};
} else {
self.enc_need = chunk_len;
self.enc_read = 0;
const read_len = try self.reader.read(self.read_buffer);
if (read_len == 0) return ServerEvent.closed;
self.enc_read += read_len;
return ServerEvent{
.chunk = .{
.data = self.read_buffer[0..read_len],
},
};
}
} else {
return ServerEvent.closed;
}
} else {
const left = self.enc_need - self.enc_read;
if (left <= self.read_buffer.len) {
const read_len = try self.reader.readAll(self.read_buffer[0..left]);
if (read_len != left) return ServerEvent.closed;
try self.skipUntilDelimiterOrEof('\n');
self.enc_need = 0;
self.enc_read = 0;
return ServerEvent{
.chunk = .{
.data = self.read_buffer[0..read_len],
.final = true,
},
};
} else {
const read_len = try self.reader.read(self.read_buffer);
if (read_len == 0) return ServerEvent.closed;
self.enc_read += read_len;
return ServerEvent{
.chunk = .{
.data = self.read_buffer[0..read_len],
},
};
}
}
},
}
},
}
}
};
}
const testing = std.testing;
const io = std.io;
test "decodes a simple response" {
var read_buffer: [32]u8 = undefined;
var the_void: [1024]u8 = undefined;
var response = "GET / HTTP/1.1\r\nHost: localhost\r\nContent-Length: 4\r\n\r\ngood";
var reader = io.fixedBufferStream(response).reader();
var writer = io.fixedBufferStream(&the_void).writer();
var client = create(&read_buffer, &reader, &writer);
try client.writeHead(200, "OK");
try client.writeHeader("Content-Length", "9");
try client.writeChunk("aaabbbccc");
var status = try client.readEvent();
testing.expect(status == .status and mem.eql(u8, status.status.method, "GET"));
testing.expect(status == .status and mem.eql(u8, status.status.path, "/"));
var header1 = try client.readEvent();
testing.expect(header1 == .header and mem.eql(u8, header1.header.name, "Host") and mem.eql(u8, header1.header.value, "localhost"));
var header2 = try client.readEvent();
testing.expect(header2 == .header and mem.eql(u8, header2.header.name, "Content-Length") and mem.eql(u8, header2.header.value, "4"));
var complete = try client.readEvent();
testing.expect(complete == .head_complete);
var body = try client.readEvent();
testing.expect(body == .chunk and mem.eql(u8, body.chunk.data, "good") and body.chunk.final);
var end = try client.readEvent();
testing.expect(end == .end);
}
test "decodes a chunked response" {
var read_buffer: [32]u8 = undefined;
var the_void: [1024]u8 = undefined;
var response = "GET / HTTP/1.1\r\nHost: localhost\r\nTransfer-Encoding: chunked\r\n\r\n4\r\ngood\r\n0\r\n";
var reader = io.fixedBufferStream(response).reader();
var writer = io.fixedBufferStream(&the_void).writer();
var client = create(&read_buffer, &reader, &writer);
try client.writeHead(200, "OK");
try client.writeHeader("Content-Length", "9");
try client.writeChunk("aaabbbccc");
var status = try client.readEvent();
testing.expect(status == .status and mem.eql(u8, status.status.method, "GET"));
testing.expect(status == .status and mem.eql(u8, status.status.path, "/"));
var header1 = try client.readEvent();
testing.expect(header1 == .header and mem.eql(u8, header1.header.name, "Host") and mem.eql(u8, header1.header.value, "localhost"));
var header2 = try client.readEvent();
testing.expect(header2 == .header and mem.eql(u8, header2.header.name, "Transfer-Encoding") and mem.eql(u8, header2.header.value, "chunked"));
var complete = try client.readEvent();
testing.expect(complete == .head_complete);
var body = try client.readEvent();
testing.expect(body == .chunk and mem.eql(u8, body.chunk.data, "good") and body.chunk.final);
var end = try client.readEvent();
testing.expect(end == .end);
}
test "refAllDecls" {
std.meta.refAllDecls(@This());
}

deps/hzzp/src/main.zig vendored Normal file
@@ -0,0 +1,8 @@
const std = @import("std");
pub const BaseClient = @import("base/client.zig");
pub const BaseServer = @import("base/server.zig");
test "refAllDecls" {
std.meta.refAllDecls(@This());
}

deps/interfaces/README.md vendored Normal file
@@ -0,0 +1,82 @@
# Zig Interfaces
Easy solution for all your zig dynamic dispatch needs!
## Features
- Fully decoupled interfaces and implementations
- Control over the storage/ownership of interface objects
- Comptime support (including comptime-only interfaces)
- Async function partial support (blocking on [#4621](https://github.com/ziglang/zig/issues/4621))
- Optional function support
- Support for manually written vtables
## Example
```zig
const interface = @import("interface.zig");
const Interface = interface.Interface;
const SelfType = interface.SelfType;
// Let us create a Reader interface.
// We wrap it in our own struct to make function calls more natural.
const Reader = struct {
pub const ReadError = error { CouldNotRead };
const IFace = Interface(struct {
// Our interface requires a single non optional, non-const read function.
read: fn (*SelfType, buf: []u8) ReadError!usize,
}, interface.Storage.NonOwning); // This is a non owning interface, similar to Rust dyn traits.
iface: IFace,
// Wrap the interface's init, since the interface is non owning it requires no allocator argument.
pub fn init(impl_ptr: var) Reader {
return .{ .iface = try IFace.init(.{impl_ptr}) };
}
// Wrap the read function call
pub fn read(self: *Reader, buf: []u8) ReadError!usize {
return self.iface.call("read", .{buf});
}
// Define additional, non-dynamic functions!
pub fn readAll(self: *Reader, buf: []u8) ReadError!usize {
var index: usize = 0;
while (index != buf.len) {
const partial_amt = try self.read(buf[index..]);
if (partial_amt == 0) return index;
index += partial_amt;
}
return index;
}
};
// Let's create an example reader
const ExampleReader = struct {
state: u8,
// Note that this reader cannot return an error, the return type
// of our implementation functions only needs to coerce to the
// interface's function return type.
pub fn read(self: ExampleReader, buf: []u8) usize {
for (buf) |*c| {
c.* = self.state;
}
return buf.len;
}
};
test "Use our reader interface!" {
var example_reader = ExampleReader{ .state=42 };
var reader = Reader.init(&example_reader);
var buf: [100]u8 = undefined;
_ = reader.read(&buf) catch unreachable;
}
```
See examples.zig for more examples.
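As a rough sketch of the "control over storage/ownership" feature, here is an owning interface that clones its implementation onto an allocator and frees it again in `deinit`. It mirrors the "Owning interface" test in examples.zig; the `Animal`/`Cat` names are purely illustrative.
```zig
const std = @import("std");
const interface = @import("interface.zig");
const Interface = interface.Interface;
const SelfType = interface.SelfType;

const Animal = struct {
    const IFace = Interface(struct {
        legs: fn (*const SelfType) usize,
    }, interface.Storage.Owning); // the interface owns a heap copy of the impl

    iface: IFace,

    // Owning storage expects a 2-tuple: the implementation value and an allocator.
    pub fn init(impl: var, allocator: *std.mem.Allocator) !Animal {
        return Animal{ .iface = try IFace.init(.{ impl, allocator }) };
    }

    pub fn deinit(self: Animal) void {
        self.iface.deinit(); // frees the owned copy
    }

    pub fn legs(self: *Animal) usize {
        return self.iface.call("legs", .{});
    }
};

const Cat = struct {
    extra_legs: usize = 0,

    pub fn legs(self: Cat) usize {
        return 4 + self.extra_legs;
    }
};

test "Owning storage example" {
    var cat = try Animal.init(Cat{}, std.testing.allocator);
    defer cat.deinit();
    std.testing.expectEqual(@as(usize, 4), cat.legs());
}
```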

deps/interfaces/examples.zig vendored Normal file
@@ -0,0 +1,151 @@
const interface = @import("interface.zig");
const Interface = interface.Interface;
const SelfType = interface.SelfType;
const std = @import("std");
const mem = std.mem;
const expectEqual = std.testing.expectEqual;
const assert = std.debug.assert;
test "Simple NonOwning interface" {
const NonOwningTest = struct {
fn run() !void {
const Fooer = Interface(struct {
foo: fn (*SelfType) usize,
}, interface.Storage.NonOwning);
const TestFooer = struct {
const Self = @This();
state: usize,
pub fn foo(self: *Self) usize {
const tmp = self.state;
self.state += 1;
return tmp;
}
};
var f = TestFooer{ .state = 42 };
var fooer = try Fooer.init(.{&f});
defer fooer.deinit();
expectEqual(@as(usize, 42), fooer.call("foo", .{}));
expectEqual(@as(usize, 43), fooer.call("foo", .{}));
}
};
try NonOwningTest.run();
comptime try NonOwningTest.run();
}
test "Comptime only interface" {
const TestIFace = Interface(struct {
foo: fn (*SelfType, u8) u8,
}, interface.Storage.Comptime);
const TestType = struct {
const Self = @This();
state: u8,
pub fn foo(self: Self, a: u8) u8 {
return self.state + a;
}
};
comptime var iface = try TestIFace.init(.{TestType{ .state = 0 }});
expectEqual(@as(u8, 42), iface.call("foo", .{42}));
}
test "Owning interface with optional function" {
const OwningOptionalFuncTest = struct {
fn run() !void {
const TestOwningIface = Interface(struct {
someFn: ?fn (*const SelfType, usize, usize) usize,
otherFn: fn (*SelfType, usize) anyerror!void,
}, interface.Storage.Owning);
const TestStruct = struct {
const Self = @This();
state: usize,
pub fn someFn(self: Self, a: usize, b: usize) usize {
return self.state * a + b;
}
// Note that our return type need only coerce to the virtual function's
// return type.
pub fn otherFn(self: *Self, new_state: usize) void {
self.state = new_state;
}
};
var iface_instance = try TestOwningIface.init(.{ comptime TestStruct{ .state = 0 }, std.testing.allocator });
defer iface_instance.deinit();
try iface_instance.call("otherFn", .{100});
expectEqual(@as(usize, 42), iface_instance.call("someFn", .{ 0, 42 }).?);
}
};
try OwningOptionalFuncTest.run();
}
test "Interface with virtual async function implemented by an async function" {
const AsyncIFace = Interface(struct {
const async_call_stack_size = 1024;
foo: fn (*SelfType) callconv(.Async) void,
}, interface.Storage.NonOwning);
const Impl = struct {
const Self = @This();
state: usize,
frame: anyframe = undefined,
pub fn foo(self: *Self) void {
suspend {
self.frame = @frame();
}
self.state += 1;
suspend;
self.state += 1;
}
};
var i = Impl{ .state = 0 };
var instance = try AsyncIFace.init(.{&i});
_ = async instance.call("foo", .{});
expectEqual(@as(usize, 0), i.state);
resume i.frame;
expectEqual(@as(usize, 1), i.state);
resume i.frame;
expectEqual(@as(usize, 2), i.state);
}
test "Interface with virtual async function implemented by a blocking function" {
const AsyncIFace = Interface(struct {
readBytes: fn (*SelfType, []u8) callconv(.Async) anyerror!void,
}, interface.Storage.Inline(8));
const Impl = struct {
const Self = @This();
pub fn readBytes(self: Self, outBuf: []u8) void {
for (outBuf) |*c| {
c.* = 3;
}
}
};
var instance = try AsyncIFace.init(.{Impl{}});
var buf: [256]u8 = undefined;
try await async instance.call("readBytes", .{buf[0..]});
expectEqual([_]u8{3} ** 256, buf);
}

deps/interfaces/interface.zig vendored Normal file
@@ -0,0 +1,472 @@
const std = @import("std");
const mem = std.mem;
const trait = std.meta.trait;
const assert = std.debug.assert;
const expect = std.testing.expect;
const expectEqual = std.testing.expectEqual;
pub const SelfType = @OpaqueType();
fn makeSelfPtr(ptr: var) *SelfType {
if (comptime !trait.isSingleItemPtr(@TypeOf(ptr))) {
@compileError("SelfType pointer initialization expects pointer parameter.");
}
const T = std.meta.Child(@TypeOf(ptr));
if (@sizeOf(T) > 0) {
return @ptrCast(*SelfType, ptr);
} else {
return undefined;
}
}
fn selfPtrAs(self: *SelfType, comptime T: type) *T {
if (@sizeOf(T) > 0) {
return @alignCast(@alignOf(T), @ptrCast(*align(1) T, self));
} else {
return undefined;
}
}
fn constSelfPtrAs(self: *const SelfType, comptime T: type) *const T {
if (@sizeOf(T) > 0) {
return @alignCast(@alignOf(T), @ptrCast(*align(1) const T, self));
} else {
return undefined;
}
}
pub const Storage = struct {
pub const Comptime = struct {
erased_ptr: *SelfType,
ImplType: type,
pub fn init(args: var) !Comptime {
if (args.len != 1) {
@compileError("Comptime storage expected a 1-tuple in initialization.");
}
var obj = args[0];
return Comptime{
.erased_ptr = makeSelfPtr(&obj),
.ImplType = @TypeOf(args[0]),
};
}
pub fn getSelfPtr(comptime self: *Comptime) *SelfType {
return self.erased_ptr;
}
pub fn deinit(comptime self: Comptime) void {}
};
pub const NonOwning = struct {
erased_ptr: *SelfType,
pub fn init(args: var) !NonOwning {
if (args.len != 1) {
@compileError("NonOwning storage expected a 1-tuple in initialization.");
}
return NonOwning{
.erased_ptr = makeSelfPtr(args[0]),
};
}
pub fn getSelfPtr(self: NonOwning) *SelfType {
return self.erased_ptr;
}
pub fn deinit(self: NonOwning) void {}
};
pub const Owning = struct {
allocator: *mem.Allocator,
mem: []u8,
pub fn init(args: var) !Owning {
if (args.len != 2) {
@compileError("Owning storage expected a 2-tuple in initialization.");
}
const AllocT = @TypeOf(args[0]);
var obj = try args[1].create(AllocT);
obj.* = args[0];
return Owning{
.allocator = args[1],
.mem = std.mem.asBytes(obj)[0..],
};
}
pub fn getSelfPtr(self: Owning) *SelfType {
return makeSelfPtr(&self.mem[0]);
}
pub fn deinit(self: Owning) void {
const result = self.allocator.shrinkBytes(self.mem, 0, 0);
assert(result == 0);
}
};
pub fn Inline(comptime size: usize) type {
return struct {
const Self = @This();
mem: [size]u8,
pub fn init(args: var) !Self {
if (args.len != 1) {
@compileError("Inline storage expected a 1-tuple in initialization.");
}
const ImplSize = @sizeOf(@TypeOf(args[0]));
if (ImplSize > size) {
@compileError("Type does not fit in inline storage.");
}
var self: Self = undefined;
if (ImplSize > 0) {
std.mem.copy(u8, self.mem[0..], @ptrCast([*]const u8, &args[0])[0..ImplSize]);
}
return self;
}
pub fn getSelfPtr(self: *Self) *SelfType {
return makeSelfPtr(&self.mem[0]);
}
pub fn deinit(self: Self) void {}
};
}
pub fn InlineOrOwning(comptime size: usize) type {
return struct {
const Self = @This();
data: union(enum) {
Inline: Inline(size),
Owning: Owning,
},
pub fn init(args: var) !Self {
if (args.len != 2) {
@compileError("InlineOrOwning storage expected a 2-tuple in initialization.");
}
const ImplSize = @sizeOf(@TypeOf(args[0]));
if (ImplSize > size) {
return Self{
.data = .{
.Owning = try Owning.init(args),
},
};
} else {
return Self{
.data = .{
.Inline = try Inline(size).init(.{args[0]}),
},
};
}
}
pub fn getSelfPtr(self: *Self) *SelfType {
return switch (self.data) {
.Inline => |*i| i.getSelfPtr(),
.Owning => |*o| o.getSelfPtr(),
};
}
pub fn deinit(self: Self) void {
switch (self.data) {
.Inline => |i| i.deinit(),
.Owning => |o| o.deinit(),
}
}
};
}
};
fn PtrChildOrSelf(comptime T: type) type {
if (comptime trait.isSingleItemPtr(T)) {
return std.meta.Child(T);
}
return T;
}
const GenCallType = enum {
BothAsync,
BothBlocking,
AsyncCallsBlocking,
BlockingCallsAsync,
};
fn makeCall(
comptime name: []const u8,
comptime CurrSelfType: type,
comptime Return: type,
comptime ImplT: type,
comptime call_type: GenCallType,
self_ptr: CurrSelfType,
args: var,
) Return {
const is_const = CurrSelfType == *const SelfType;
const self = if (is_const) constSelfPtrAs(self_ptr, ImplT) else selfPtrAs(self_ptr, ImplT);
const fptr = @field(ImplT, name);
const first_arg_ptr = comptime std.meta.trait.is(.Pointer)(@typeInfo(@TypeOf(fptr)).Fn.args[0].arg_type.?);
const self_arg = if (first_arg_ptr) .{self} else .{self.*};
return switch (call_type) {
.BothBlocking => @call(.{ .modifier = .always_inline }, fptr, self_arg ++ args),
.AsyncCallsBlocking, .BothAsync => await @call(.{ .modifier = .async_kw }, fptr, self_arg ++ args),
.BlockingCallsAsync => @compileError("Trying to implement blocking virtual function " ++ name ++ " with async implementation."),
};
}
fn getFunctionFromImpl(comptime name: []const u8, comptime FnT: type, comptime ImplT: type) ?FnT {
const our_cc = @typeInfo(FnT).Fn.calling_convention;
// Find the candidate in the implementation type.
for (std.meta.declarations(ImplT)) |decl| {
if (std.mem.eql(u8, name, decl.name)) {
switch (decl.data) {
.Fn => |fn_decl| {
const args = @typeInfo(fn_decl.fn_type).Fn.args;
if (args.len == 0) {
return null;
}
const arg0_type = args[0].arg_type.?;
if (arg0_type != ImplT and arg0_type != *ImplT and arg0_type != *const ImplT) {
return null;
}
const candidate_cc = @typeInfo(fn_decl.fn_type).Fn.calling_convention;
switch (candidate_cc) {
.Async, .Unspecified => {},
else => return null,
}
const Return = @typeInfo(FnT).Fn.return_type orelse noreturn;
const CurrSelfType = @typeInfo(FnT).Fn.args[0].arg_type.?;
const call_type: GenCallType = switch (our_cc) {
.Async => if (candidate_cc == .Async) .BothAsync else .AsyncCallsBlocking,
.Unspecified => if (candidate_cc == .Unspecified) .BothBlocking else .BlockingCallsAsync,
else => unreachable,
};
// TODO: Make this less hacky somehow?
// We need some new feature to do so unfortunately.
return switch (args.len) {
1 => struct {
fn impl(self_ptr: CurrSelfType) callconv(our_cc) Return {
return @call(.{ .modifier = .always_inline }, makeCall, .{ name, CurrSelfType, Return, ImplT, call_type, self_ptr, .{} });
}
}.impl,
2 => struct {
fn impl(self_ptr: CurrSelfType, arg: args[1].arg_type.?) callconv(our_cc) Return {
return @call(.{ .modifier = .always_inline }, makeCall, .{ name, CurrSelfType, Return, ImplT, call_type, self_ptr, .{arg} });
}
}.impl,
3 => struct {
fn impl(self_ptr: CurrSelfType, arg1: args[1].arg_type.?, arg2: args[2].arg_type.?) callconv(our_cc) Return {
return @call(.{ .modifier = .always_inline }, makeCall, .{ name, CurrSelfType, Return, ImplT, call_type, self_ptr, .{ arg1, arg2 } });
}
}.impl,
4 => struct {
fn impl(self_ptr: CurrSelfType, arg1: args[1].arg_type.?, arg2: args[2].arg_type.?, arg3: args[3].arg_type.?) callconv(our_cc) Return {
return @call(.{ .modifier = .always_inline }, makeCall, .{ name, CurrSelfType, Return, ImplT, call_type, self_ptr, .{ arg1, arg2, arg3 } });
}
}.impl,
5 => struct {
fn impl(self_ptr: CurrSelfType, arg1: args[1].arg_type.?, arg2: args[2].arg_type.?, arg3: args[3].arg_type.?, arg4: args[4].arg_type.?) callconv(our_cc) Return {
return @call(.{ .modifier = .always_inline }, makeCall, .{ name, CurrSelfType, Return, ImplT, call_type, self_ptr, .{ arg1, arg2, arg3, arg4 } });
}
}.impl,
6 => struct {
fn impl(self_ptr: CurrSelfType, arg1: args[1].arg_type.?, arg2: args[2].arg_type.?, arg3: args[3].arg_type.?, arg4: args[4].arg_type.?, arg5: args[5].arg_type.?) callconv(our_cc) Return {
return @call(.{ .modifier = .always_inline }, makeCall, .{ name, CurrSelfType, Return, ImplT, call_type, self_ptr, .{ arg1, arg2, arg3, arg4, arg5 } });
}
}.impl,
else => @compileError("Unsupported number of arguments, please provide a manually written vtable."),
};
},
else => return null,
}
}
}
return null;
}
fn makeVTable(comptime VTableT: type, comptime ImplT: type) VTableT {
if (comptime !trait.isContainer(ImplT)) {
@compileError("Type '" ++ @typeName(ImplT) ++ "' must be a container to implement interface.");
}
var vtable: VTableT = undefined;
for (std.meta.fields(VTableT)) |field| {
var fn_type = field.field_type;
const is_optional = trait.is(.Optional)(fn_type);
if (is_optional) {
fn_type = std.meta.Child(fn_type);
}
const candidate = comptime getFunctionFromImpl(field.name, fn_type, ImplT);
if (candidate == null and !is_optional) {
@compileError("Type '" ++ @typeName(ImplT) ++ "' does not implement non optional function '" ++ field.name ++ "'.");
} else if (!is_optional) {
@field(vtable, field.name) = candidate.?;
} else {
@field(vtable, field.name) = candidate;
}
}
return vtable;
}
fn checkVtableType(comptime VTableT: type) void {
if (comptime !trait.is(.Struct)(VTableT)) {
@compileError("VTable type " ++ @typeName(VTableT) ++ " must be a struct.");
}
for (std.meta.declarations(VTableT)) |decl| {
switch (decl.data) {
.Fn => @compileError("VTable type defines method '" ++ decl.name ++ "'."),
.Type, .Var => {},
}
}
for (std.meta.fields(VTableT)) |field| {
var field_type = field.field_type;
if (trait.is(.Optional)(field_type)) {
field_type = std.meta.Child(field_type);
}
if (!trait.is(.Fn)(field_type)) {
@compileError("VTable type defines non function field '" ++ field.name ++ "'.");
}
const type_info = @typeInfo(field_type);
if (type_info.Fn.is_generic) {
@compileError("Virtual function '" ++ field.name ++ "' cannot be generic.");
}
switch (type_info.Fn.calling_convention) {
.Unspecified, .Async => {},
else => @compileError("Virtual function's '" ++ field.name ++ "' calling convention is not default or async."),
}
if (type_info.Fn.args.len == 0) {
@compileError("Virtual function '" ++ field.name ++ "' must have at least one argument.");
}
const arg_type = type_info.Fn.args[0].arg_type.?;
if (arg_type != *SelfType and arg_type != *const SelfType) {
@compileError("Virtual function's '" ++ field.name ++ "' first argument must be *SelfType or *const SelfType");
}
}
}
fn vtableHasMethod(comptime VTableT: type, comptime name: []const u8, is_optional: *bool, is_async: *bool) bool {
for (std.meta.fields(VTableT)) |field| {
if (std.mem.eql(u8, name, field.name)) {
is_optional.* = trait.is(.Optional)(field.field_type);
is_async.* = @typeInfo(if (is_optional.*) std.meta.Child(field.field_type) else field.field_type).Fn.calling_convention == .Async;
return true;
}
}
return false;
}
fn VTableReturnType(comptime VTableT: type, comptime name: []const u8) type {
for (std.meta.fields(VTableT)) |field| {
if (std.mem.eql(u8, name, field.name)) {
const is_optional = trait.is(.Optional)(field.field_type);
var fn_ret_type = (if (is_optional)
@typeInfo(std.meta.Child(field.field_type)).Fn.return_type
else
@typeInfo(field.field_type).Fn.return_type) orelse noreturn;
if (is_optional) {
return ?fn_ret_type;
}
return fn_ret_type;
}
}
@compileError("VTable type '" ++ @typeName(VTableT) ++ "' has no virtual function '" ++ name ++ "'.");
}
pub fn Interface(comptime VTableT: type, comptime StorageT: type) type {
comptime checkVtableType(VTableT);
const stack_size: usize = if (@hasDecl(VTableT, "async_call_stack_size"))
VTableT.async_call_stack_size
else
1 * 1024 * 1024;
return struct {
vtable_ptr: *const VTableT,
storage: StorageT,
const Self = @This();
pub fn init(args: var) !Self {
const ImplType = PtrChildOrSelf(@TypeOf(args.@"0"));
return Self{
.vtable_ptr = &comptime makeVTable(VTableT, ImplType),
.storage = try StorageT.init(args),
};
}
pub fn initWithVTable(vtable_ptr: *const VTableT, args: var) !Self {
return .{
.vtable_ptr = vtable_ptr,
.storage = try StorageT.init(args),
};
}
pub fn call(self: var, comptime name: []const u8, args: var) VTableReturnType(VTableT, name) {
comptime var is_optional = true;
comptime var is_async = true;
comptime assert(vtableHasMethod(VTableT, name, &is_optional, &is_async));
const fn_ptr = if (is_optional) blk: {
const val = @field(self.vtable_ptr, name);
if (val) |v| break :blk v;
return null;
} else @field(self.vtable_ptr, name);
const self_ptr = self.storage.getSelfPtr();
const new_args = .{self_ptr};
if (!is_async) {
return @call(.{}, fn_ptr, new_args ++ args);
} else {
var stack_frame: [stack_size]u8 align(std.Target.stack_align) = undefined;
return await @asyncCall(&stack_frame, {}, fn_ptr, new_args ++ args);
}
}
pub fn deinit(self: Self) void {
self.storage.deinit();
}
};
}

src/bar/bar.zig Normal file
@@ -0,0 +1,48 @@
const std = @import("std");
const Widget = @import("../types/widget.zig").Widget;
const Info = @import("../types/info.zig").Info;
pub const Bar = struct {
allocator: *std.mem.Allocator,
widgets: []const *Widget,
running: bool,
pub fn start(self: *Bar) !void {
self.running = true;
for (self.widgets) |w| {
std.debug.warn("Starting widget: {}\n", .{w.name()});
var thread = try std.Thread.spawn(w, Widget.start);
}
var thread = try std.Thread.spawn(self, Bar.process);
std.time.sleep(100000 * std.time.ns_per_ms);
self.running = false;
std.time.sleep(1000 * std.time.ns_per_ms);
return;
}
fn process(self: *Bar) !void {
const out_file = std.io.getStdOut();
try out_file.writer().writeAll("{\"version\": 1,\"click_events\": true}\n[\n");
while (self.running) {
//std.debug.warn("I am a Square!\n", .{});
std.time.sleep(250 * std.time.ns_per_ms);
try out_file.writer().writeAll("[");
for (self.widgets) |w, i| {
try std.json.stringify(w.info(), .{}, out_file.writer());
if (i < self.widgets.len - 1) {
try out_file.writer().writeAll(",");
}
}
try out_file.writer().writeAll("],\n");
}
}
pub fn add(self: Bar, i: *Info) void {
std.debug.warn("Add {}!\n", .{i.name});
}
};
pub fn InitBar(allocator: *std.mem.Allocator) Bar {
return Bar{
.allocator = allocator,
.widgets = undefined,
.running = false,
};
}

src/debug_allocator.zig Normal file
@@ -0,0 +1,190 @@
//! This allocator collects information about allocation sizes
const std = @import("std");
const DebugAllocator = @This();
const Stats = struct {
mean: f64 = 0,
mean_of_squares: f64 = 0,
total: usize = 0,
count: usize = 0,
fn addSample(self: *Stats, value: usize) void {
const count_f64 = @intToFloat(f64, self.count);
self.mean = (self.mean * count_f64 + @intToFloat(f64, value)) / (count_f64 + 1);
self.mean_of_squares = (self.mean_of_squares * count_f64 + @intToFloat(f64, value * value)) / (count_f64 + 1);
self.total += value;
self.count += 1;
}
fn stdDev(self: Stats) f64 {
return std.math.sqrt(self.mean_of_squares - self.mean * self.mean);
}
};
pub const AllocationInfo = struct {
allocation_stats: Stats = Stats{},
deallocation_count: usize = 0,
deallocation_total: usize = 0,
peak_allocated: usize = 0,
reallocation_stats: Stats = Stats{},
shrink_stats: Stats = Stats{},
fn currentlyAllocated(self: AllocationInfo) usize {
return self.allocation_stats.total + self.reallocation_stats.total - self.deallocation_total - self.shrink_stats.total;
}
pub fn format(
self: AllocationInfo,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
out_stream: var,
) !void {
@setEvalBranchQuota(2000);
return std.fmt.format(
out_stream,
\\------------------------------------------ Allocation info ------------------------------------------
\\{} total allocations (total: {Bi:.2}, mean: {Bi:.2}, std. dev: {Bi:.2} MB), {} deallocations
\\{} current allocations ({Bi:.2}), peak mem usage: {Bi:.2}
\\{} reallocations (total: {Bi:.2}, mean: {Bi:.2}, std. dev: {Bi:.2})
\\{} shrinks (total: {Bi:.2}, mean: {Bi:.2}, std. dev: {Bi:.2})
\\-----------------------------------------------------------------------------------------------------
,
.{
self.allocation_stats.count,
self.allocation_stats.total,
self.allocation_stats.mean,
self.allocation_stats.stdDev(),
self.deallocation_count,
self.allocation_stats.count - self.deallocation_count,
self.currentlyAllocated(),
self.peak_allocated,
self.reallocation_stats.count,
self.reallocation_stats.total,
self.reallocation_stats.mean,
self.reallocation_stats.stdDev(),
self.shrink_stats.count,
self.shrink_stats.total,
self.shrink_stats.mean,
self.shrink_stats.stdDev(),
},
);
}
};
const stack_addresses_size = 15;
base_allocator: *std.mem.Allocator,
info: AllocationInfo,
max_bytes: usize,
allocation_strack_addresses: std.AutoHashMap(usize, [stack_addresses_size]usize),
// Interface implementation
allocator: std.mem.Allocator,
pub fn init(base_allocator: *std.mem.Allocator, max_bytes: usize) DebugAllocator {
return .{
.base_allocator = base_allocator,
.info = .{},
.max_bytes = max_bytes,
.allocation_strack_addresses = std.AutoHashMap(usize, [stack_addresses_size]usize).init(base_allocator),
.allocator = .{
.allocFn = alloc,
.resizeFn = resize,
},
};
}
pub fn deinit(self: *DebugAllocator) void {
self.allocation_strack_addresses.deinit();
}
fn alloc(allocator: *std.mem.Allocator, len: usize, ptr_align: u29, len_align: u29) error{OutOfMemory}![]u8 {
const self = @fieldParentPtr(DebugAllocator, "allocator", allocator);
const ptr = try self.base_allocator.callAllocFn(len, ptr_align, len_align);
self.info.allocation_stats.addSample(ptr.len);
var stack_addresses = std.mem.zeroes([stack_addresses_size + 2]usize);
var stack_trace = std.builtin.StackTrace{
.instruction_addresses = &stack_addresses,
.index = 0,
};
std.debug.captureStackTrace(@returnAddress(), &stack_trace);
try self.allocation_strack_addresses.putNoClobber(@ptrToInt(ptr.ptr), stack_addresses[2..].*);
const curr_allocs = self.info.currentlyAllocated();
if (self.max_bytes != 0 and curr_allocs >= self.max_bytes) {
std.debug.print("Exceeded maximum bytes {}, exiting.\n", .{self.max_bytes});
std.process.exit(1);
}
if (curr_allocs > self.info.peak_allocated) {
self.info.peak_allocated = curr_allocs;
}
return ptr;
}
fn resize(allocator: *std.mem.Allocator, old_mem: []u8, new_size: usize, len_align: u29) error{OutOfMemory}!usize {
const self = @fieldParentPtr(DebugAllocator, "allocator", allocator);
if (old_mem.len == 0) {
std.log.debug(.debug_alloc, "Trying to resize empty slice\n", .{});
std.process.exit(1);
}
if (self.allocation_strack_addresses.get(@ptrToInt(old_mem.ptr)) == null) {
@panic("error - resize call on block not allocated by debug allocator");
}
if (new_size == 0) {
if (self.info.allocation_stats.count == self.info.deallocation_count) {
@panic("error - too many calls to free, most likely double free");
}
self.info.deallocation_total += old_mem.len;
self.info.deallocation_count += 1;
self.allocation_strack_addresses.removeAssertDiscard(@ptrToInt(old_mem.ptr));
} else if (new_size > old_mem.len) {
self.info.reallocation_stats.addSample(new_size - old_mem.len);
} else if (new_size < old_mem.len) {
self.info.shrink_stats.addSample(old_mem.len - new_size);
}
const curr_allocs = self.info.currentlyAllocated();
if (self.max_bytes != 0 and curr_allocs >= self.max_bytes) {
std.log.debug(.debug_alloc, "Exceeded maximum bytes {}, exiting.\n", .{self.max_bytes});
std.process.exit(1);
}
if (curr_allocs > self.info.peak_allocated) {
self.info.peak_allocated = curr_allocs;
}
return self.base_allocator.callResizeFn(old_mem, new_size, len_align) catch |e| {
return e;
};
}
pub fn printRemainingStackTraces(self: DebugAllocator) void {
std.debug.print(
\\{} allocations - stack traces follow
\\------------------------------------
, .{self.allocation_strack_addresses.count()});
var it = self.allocation_strack_addresses.iterator();
var idx: usize = 1;
while (it.next()) |entry| : (idx += 1) {
std.debug.print("\nAllocation {}\n-------------\n", .{idx});
var len: usize = 0;
while (len < stack_addresses_size and entry.value[len] != 0) : (len += 1) {}
const stack_trace = std.builtin.StackTrace{
.instruction_addresses = &entry.value,
.index = len,
};
std.debug.dumpStackTrace(stack_trace);
}
}

src/formatting/colour.zig Normal file
@@ -0,0 +1,48 @@
const std = @import("std");
const eql = std.mem.eql;
const TextColour = "#D8DEE9";
const DarkerTextColour = "#E5E9F0";
const DarkestTextColour = "#ECEFF4";
const AccentLightColour = "#88C0D0";
const AccentMediumColour = "#81A1C1";
const AccentDarkColour = "#5E81AC";
const RedColour = "#BF616A";
const OrangeColour = "#D08770";
const YellowColour = "#EBCB8B";
const GreenColour = "#A3BE8C";
const PurpleColour = "#B48EAD";
pub fn colour(alloc: *std.mem.Allocator, clr: []const u8, str: []const u8) ![]const u8 {
if (clr[0] == '#') {
return try std.fmt.allocPrint(alloc, "<span color=\"{}\">{}</span>", .{ clr, str });
} else {
if (eql(u8, clr, "text")) {
return colour(alloc, TextColour, str);
} else if (eql(u8, clr, "dark")) {
return colour(alloc, DarkerTextColour, str);
} else if (eql(u8, clr, "darkest")) {
return colour(alloc, DarkestTextColour, str);
} else if (eql(u8, clr, "accentlight")) {
return colour(alloc, AccentLightColour, str);
} else if (eql(u8, clr, "accentmedium")) {
return colour(alloc, AccentMediumColour, str);
} else if (eql(u8, clr, "accentdark")) {
return colour(alloc, AccentDarkColour, str);
} else if (eql(u8, clr, "red")) {
return colour(alloc, RedColour, str);
} else if (eql(u8, clr, "orange")) {
return colour(alloc, OrangeColour, str);
} else if (eql(u8, clr, "yellow")) {
return colour(alloc, YellowColour, str);
} else if (eql(u8, clr, "green")) {
return colour(alloc, GreenColour, str);
} else if (eql(u8, clr, "purple")) {
return colour(alloc, PurpleColour, str);
} else {
return "what";
}
}
}

src/main.zig Normal file
@@ -0,0 +1,27 @@
const std = @import("std");
const Bar = @import("types/bar.zig").Bar;
const Widget = @import("types/widget.zig").Widget;
const barImpl = @import("bar/bar.zig");
const textWidget = @import("widgets/text/text.zig");
const weatherWidget = @import("widgets/weather/weather.zig");
const DebugAllocator = @import("debug_allocator.zig");
const colour = @import("formatting/colour.zig").colour;
pub fn main() !void {
const dbgAlloc = &DebugAllocator.init(std.heap.page_allocator, 8192 * 512);
defer {
std.debug.print("Finished cleanup, last allocation info.\n", .{});
std.debug.print("\n{}\n", .{dbgAlloc.info});
dbgAlloc.printRemainingStackTraces();
dbgAlloc.deinit();
}
var allocator = &dbgAlloc.allocator;
var bar = barImpl.InitBar(allocator);
var br = Bar.init(&bar);
const widgets = [_]*Widget{
&Widget.init(&textWidget.New("owo", "potato")),
&Widget.init(&weatherWidget.New(&br, "London")),
};
bar.widgets = widgets[0..];
try br.start();
}

src/types/bar.zig Normal file
@@ -0,0 +1,21 @@
const interface = @import("interfaces");
const Interface = interface.Interface;
const SelfType = interface.SelfType;
const Info = @import("info.zig").Info;
pub const Bar = struct {
const IFace = Interface(struct {
add: fn (*SelfType, *Info) anyerror!void,
start: fn (*SelfType) anyerror!void,
}, interface.Storage.NonOwning);
iface: IFace,
pub fn init(impl_ptr: var) Bar {
return .{ .iface = try IFace.init(.{impl_ptr}) };
}
pub fn add(self: *Bar, i: *Info) anyerror!void {
return try self.iface.call("add", .{i});
}
pub fn start(self: *Bar) anyerror!void {
return try self.iface.call("start", .{});
}
};

src/types/info.zig Normal file
@@ -0,0 +1,6 @@
pub const Info = struct {
name: []const u8,
markup: []const u8,
full_text: []const u8,
color: []const u8,
};

src/types/widget.zig Normal file
@@ -0,0 +1,29 @@
const interface = @import("interfaces");
const Interface = interface.Interface;
const SelfType = interface.SelfType;
const Info = @import("info.zig").Info;
pub const Widget = struct {
const IFace = Interface(struct {
name: fn (*SelfType) []const u8,
initial_info: fn (*SelfType) Info,
info: fn (*SelfType) Info,
start: fn (*SelfType) anyerror!void,
}, interface.Storage.NonOwning);
iface: IFace,
pub fn init(impl_ptr: var) Widget {
return .{ .iface = try IFace.init(.{impl_ptr}) };
}
pub fn name(self: *Widget) []const u8 {
return self.iface.call("name", .{});
}
pub fn info(self: *Widget) Info {
return self.iface.call("info", .{});
}
pub fn initial_info(self: *Widget) Info {
return self.iface.call("initial_info", .{});
}
pub fn start(self: *Widget) anyerror!void {
return self.iface.call("start", .{});
}
};

src/widgets/text/text.zig Normal file
@@ -0,0 +1,31 @@
const std = @import("std");
const Info = @import("../../types/info.zig").Info;
pub const TextWidget = struct {
name: []const u8,
text: []const u8,
pub fn name(self: *TextWidget) []const u8 {
return self.name;
}
pub fn initial_info(self: *TextWidget) Info {
return Info{
.name = self.name,
.full_text = self.text,
.markup = "pango",
.color = "#ffaaff",
};
}
pub fn info(self: *TextWidget) Info {
return self.initial_info();
}
pub fn start(self: *TextWidget) anyerror!void {}
};
pub inline fn New(name: []const u8, text: []const u8) TextWidget {
return TextWidget{
.name = name,
.text = text,
};
}

src/widgets/weather/weather.zig Normal file
@@ -0,0 +1,137 @@
const std = @import("std");
const net = std.net;
const io = std.io;
const hzzp = @import("hzzp");
const Info = @import("../../types/info.zig").Info;
const Bar = @import("../../types/bar.zig").Bar;
const colour = @import("../../formatting/colour.zig").colour;
const DebugAllocator = @import("../../debug_allocator.zig");
pub const WeatherWidget = struct {
bar: *Bar,
weather_api_url: []const u8,
info: ?Info,
pub fn name(self: *WeatherWidget) []const u8 {
return "weather";
}
pub fn initial_info(self: *WeatherWidget) Info {
return Info{
.name = "weather",
.full_text = "weather",
.markup = "pango",
.color = "#ffffff",
};
}
pub fn info(self: *WeatherWidget) Info {
if (self.info == null) {
return self.initial_info();
} else {
return self.info.?;
}
}
pub fn start(self: *WeatherWidget) anyerror!void {
var buffer: [@sizeOf(u8) * 1024 * 8]u8 = undefined;
var fba = std.heap.FixedBufferAllocator.init(&buffer);
var allocator = &fba.allocator;
while (true) {
std.time.sleep(2000 * std.time.ns_per_ms);
std.debug.print("Starting Weather Widget.\n", .{});
var file = try net.tcpConnectToHost(allocator, "api.openweathermap.org", 80);
std.debug.print("Connected to OpenWeatherMap.\n", .{});
var read_buffer: [512]u8 = undefined;
var client = hzzp.BaseClient.create(&read_buffer, &file.reader(), &file.writer());
try client.writeHead("GET", self.weather_api_url);
try client.writeHeader("Host", "api.openweathermap.org");
try client.writeHeader("User-Agent", "uwu/1.2");
try client.writeHeader("Connection", "close");
try client.writeHeader("Accept", "*/*");
try client.writeHeadComplete();
std.debug.print("Wrote Data, reading response.\n", .{});
var isNextTemp: bool = false;
var isNextMain: bool = false;
var foundMain: bool = false;
var temp: u16 = undefined;
var main: []const u8 = undefined;
while (try client.readEvent()) |event| {
switch (event) {
.chunk => |chunk| {
var tokens = std.json.TokenStream.init(chunk.data);
while (try tokens.next()) |token| {
switch (token) {
.String => |string| {
var str = string.slice(tokens.slice, tokens.i - 1);
if (std.mem.eql(u8, str, "temp")) {
isNextTemp = true;
continue;
}
if (!foundMain and std.mem.eql(u8, str, "main")) {
isNextMain = true;
continue;
}
if (isNextMain) {
main = str;
isNextMain = false;
foundMain = true;
}
},
.Number => |num| {
if (isNextTemp) {
isNextTemp = false;
temp = @floatToInt(u16, std.math.round(try std.fmt.parseFloat(f32, num.slice(tokens.slice, tokens.i - 1))));
}
},
else => {},
}
}
},
.status, .header, .head_complete, .closed, .end, .invalid => continue,
}
}
var tempColour: []const u8 = "green";
if (temp >= 20) {
tempColour = "red";
} else if (temp == 19) {
tempColour = "orange";
} else if (temp == 18) {
tempColour = "yellow";
}
var arena = std.heap.ArenaAllocator.init(allocator);
var arenacator = &arena.allocator;
if (self.info != null) {
allocator.free(self.info.?.full_text);
}
self.info = Info{
.name = "weather",
.full_text = try std.fmt.allocPrint(allocator, "{} {}{}{} {}", .{
colour(arenacator, "accentlight", "weather"),
colour(arenacator, tempColour, try std.fmt.allocPrint(arenacator, "{}", .{temp})),
colour(arenacator, "accentlight", "°"),
colour(arenacator, "accentdark", "C"),
colour(arenacator, "green", main),
}),
.markup = "pango",
.color = "#ffffff",
};
arena.deinit();
}
}
};
pub inline fn New(bar: *Bar, comptime location: []const u8) WeatherWidget {
return WeatherWidget{
.bar = bar,
.weather_api_url = "/data/2.5/weather?q=" ++ location ++ "&appid=dcea3595afe693d1c17846141f58ea10&units=metric",
.info = null,
};
}

untitled.sublime-project Normal file
@@ -0,0 +1,24 @@
{
"folders":
[
{
"path": "."
},
{
"path": "/home/kitteh/bar"
},
{
"path": "/home/kitteh/zig-linux-x86_64-0.6.0+485231dea/lib/zig/std"
}
],
"settings":
{
"LSP":
{
"zig":
{
"enabled": true
}
}
}
}

untitled.sublime-workspace Normal file
@@ -0,0 +1,282 @@
{
"auto_complete":
{
"selected_items":
[
[
"allo",
"allocPrint"
],
[
"Accent",
"AccentMediumColour"
],
[
"we",
"weather_api_url"
],
[
"is",
"isNextMain"
],
[
"wea",
"weatherWidget"
],
[
"all",
"allocation"
],
[
"text",
"TextWidget"
],
[
"deb",
"debug_allocator"
],
[
"tex",
"textWidget"
]
]
},
"build_system": "",
"build_system_choices":
[
],
"build_varint": "",
"command_palette":
{
"height": 0.0,
"last_filter": "",
"selected_items":
[
[
"lsp",
"LSP: Toggle Log Panel"
],
[
"enable",
"LSP: Enable Language Server in Project"
],
[
"disable",
"LSP: Disable Language Server Globally"
],
[
"language",
"LSP: Enable Language Server in Project"
],
[
"resource",
"PackageResourceViewer: Open Resource"
],
[
"install",
"Package Control: Install Package"
],
[
"toggle panel",
"LSP: Toggle Log Panel"
],
[
"Package Control: remove",
"Package Control: Remove Package"
],
[
"",
"Build With: Zig - Run File"
],
[
"Package Control: inst",
"Package Control: Install Package"
],
[
"restart",
"LSP: Restart Servers"
],
[
"package resource",
"PackageResourceViewer: Open Resource"
],
[
"Package Control: insta",
"Package Control: Install Package"
],
[
"Package Control: ins",
"Package Control: Install Package"
]
],
"width": 0.0
},
"console":
{
"height": 207.0,
"history":
[
"view.settings().get(\"syntax\")",
"ls"
]
},
"distraction_free":
{
"menu_visible": true,
"show_minimap": false,
"show_open_files": false,
"show_tabs": false,
"side_bar_visible": false,
"status_bar_visible": false
},
"file_history":
[
"/home/kitteh/zar/src/types/widget.zig",
"/home/kitteh/zar/src/widgets/text/text.zig",
"/home/kitteh/zar/src/bar/bar.zig",
"/home/kitteh/zar/src/types/bar.zig",
"/home/kitteh/zar/src/formatting/colour.zig",
"/home/kitteh/zar/src/types/info.zig",
"/home/kitteh/zar/src/types/types.zig",
"/home/kitteh/zar/build.zig",
"/home/kitteh/.config/sublime-text-3/Packages/ayu/widgets/Widget - ayu-dark.sublime-settings",
"/home/kitteh/.config/sublime-text-3/Packages/Monokai++/themes/Monokai++.tmTheme",
"/home/kitteh/zar/.gitignore",
"/home/kitteh/.config/sublime-text-3/Packages/ayu/ayu-dark.sublime-theme"
],
"find":
{
"height": 41.0
},
"find_in_files":
{
"height": 0.0,
"where_history":
[
]
},
"find_state":
{
"case_sensitive": false,
"find_history":
[
"return colour(",
".Fn",
"spawn",
"stringhashmap",
".put",
"HashMap",
"[]u8",
"sidebar",
"font",
"sidebar",
"sidebar_label"
],
"highlight": true,
"in_selection": false,
"preserve_case": false,
"regex": false,
"replace_history":
[
"return colour(allocator, ",
"[]const u8"
],
"reverse": false,
"show_context": true,
"use_buffer2": true,
"whole_word": false,
"wrap": true
},
"incremental_find":
{
"height": 39.0
},
"input":
{
"height": 58.0
},
"layout":
{
"cells":
[
[
0,
0,
1,
1
]
],
"cols":
[
0.0,
1.0
],
"rows":
[
0.0,
1.0
]
},
"menu_visible": true,
"output.diagnostics":
{
"height": 286.0
},
"output.exec":
{
"height": 169.0
},
"output.find_results":
{
"height": 0.0
},
"output.language servers":
{
"height": 276.0
},
"pinned_build_system": "",
"replace":
{
"height": 69.0
},
"save_all_on_build": false,
"select_file":
{
"height": 0.0,
"last_filter": "",
"selected_items":
[
],
"width": 0.0
},
"select_project":
{
"height": 0.0,
"last_filter": "",
"selected_items":
[
],
"width": 0.0
},
"select_symbol":
{
"height": 345.0,
"last_filter": "",
"selected_items":
[
],
"width": 352.0
},
"settings":
{
},
"show_minimap": true,
"show_open_files": false,
"show_tabs": true,
"side_bar_visible": true,
"side_bar_width": 261.0,
"status_bar_visible": true,
"template_settings":
{
}
}