Unnamed: 0
int64
0
0
repo_id
stringlengths
5
186
file_path
stringlengths
15
223
content
stringlengths
1
32.8M
0
repos/georgios/libs
repos/georgios/libs/tinyishlisp/build.zig
const std = @import("std"); pub fn build(b: *std.build.Builder) void { const exe = b.addExecutable("tinyish-lisp", "main.zig"); exe.setBuildMode(b.standardReleaseOptions()); exe.addPackage(.{ .name = "utils", .path = .{.path = "../utils/utils.zig"}, }); b.default_step.dependOn(&exe.step); b.installArtifact(exe); }
0
repos/georgios/libs
repos/georgios/libs/utils/Bdf.zig
// Parser for Bitmap Distribution Format (BDF) monospace fonts. It skips a good // chunk of BDF spec details because they don't matter for monospace fonts or // at least didn't seem to matter for the monospace fonts I've seen so far. // // For reference: // https://en.wikipedia.org/wiki/Glyph_Bitmap_Distribution_Format // https://adobe-type-tools.github.io/font-tech-notes/pdfs/5005.BDF_Spec.pdf const Self = @This(); const std = @import("std"); const utils = @import("utils.zig"); const WordIterator = utils.WordIterator; const Rect = utils.Rect; const streq = utils.memory_compare; const Bounds = Rect(i16, u16); pub const Error = error { BdfBadKeyword, BdfMissingValue, BdfBadValue, BdfBadPropCount, BdfBadGlyphCount, BdfBadBitmap, BdfMissingBuffer, BdfBufferTooSmall, BdfMissingDefaultCodepoint, } || utils.Error; fn get_row_size(width: usize) usize { return utils.align_up(width, 8) / 8; } pub fn get_glyph_size(size: Bounds.Size) usize { return get_row_size(size.x) * size.y; } fn get_byte_shift(from_left: usize) u3 { return @truncate(u3, 7 - from_left % 8); } fn set_bit(byte: *u8, from_left: usize, value: bool) void { const bit = @as(u8, 1) << get_byte_shift(from_left); if (value) { byte.* = byte.* | bit; } else { byte.* = byte.* & ~bit; } } fn get_bit(byte: u8, from_left: usize) bool { return (byte >> get_byte_shift(from_left)) & 1 == 1; } name_buffer: [128]u8 = undefined, name: ?[]const u8 = null, // FONT_NAME Property bounds: Bounds = .{}, // FONTBOUNDINGBOX glyph_count: u32 = 0, // CHARS default_codepoint: u32 = '?', // DEFAULT_CHAR found_default_codepoint: bool = false, pub fn glyph_size(self: *const Self) usize { return get_glyph_size(self.bounds.size); } pub fn required_buffer_size(self: *const Self) usize { return @as(usize, self.glyph_count) * self.glyph_size(); } pub fn glyph_pixel_count(self: *const Self) usize { return self.bounds.size.x * self.bounds.size.y; } pub fn total_pixel_count(self: *const Self) usize { return self.glyph_pixel_count() * self.glyph_count; } pub const Glyph = struct { index: usize, size: Bounds.Size, bitmap_offset: usize, bitmap_size: usize, name: ?[]const u8 = null, // STARTFONT codepoint: ?u32 = null, // ENCODING bounds: Bounds = .{}, // BBX pub fn new(font: *const Self, index: usize) Glyph { return .{ .index = index, .bitmap_offset = index * font.glyph_size(), .bitmap_size = font.glyph_size(), .size = font.bounds.size, }; } fn get_bitmap(self: *const Glyph, buffer: []u8) []u8 { return buffer[self.bitmap_offset..self.bitmap_offset + self.bitmap_size]; } fn get_const_bitmap(self: *const Glyph, buffer: []const u8) []const u8 { return buffer[self.bitmap_offset..self.bitmap_offset + self.bitmap_size]; } pub fn get_byte_offset(self: *const Glyph, row: usize, col: usize) usize { const row_size = get_row_size(self.size.x); return row * row_size + col / 8; } pub const Iterator = struct { glyph: *const Glyph, buffer: []const u8, row: usize = 0, col: usize = 0, new_row: bool = false, pub fn next_pixel(self: *Iterator) ?bool { if (self.row >= self.glyph.size.y) { return null; } const bitmap = self.glyph.get_const_bitmap(self.buffer); const byte = bitmap[self.glyph.get_byte_offset(self.row, self.col)]; const filled = get_bit(byte, self.col); self.col += 1; self.new_row = self.col >= self.glyph.size.x; if (self.new_row) { self.col = 0; self.row += 1; } return filled; } }; pub fn iter_pixels(self: *const Glyph, buffer: []const u8) Iterator { return .{.glyph = self, .buffer = buffer}; } pub fn preview(self: *const Glyph, bitmap_buffer: []const u8, output_buffer: []u8) Error![]u8 { const size = (@as(usize, self.size.x) + 1) * @as(usize, self.size.y); if (output_buffer.len < size) { return Error.NotEnoughDestination; } const output = output_buffer[0..size]; var pixit = self.iter_pixels(bitmap_buffer); var i: usize = 0; while (pixit.next_pixel()) |filled| { output[i] = if (filled) '#' else '.'; i += 1; if (pixit.new_row) { output[i] = '\n'; i += 1; } } return output; } }; // Thinking out how to convert the compact bitmap to a full bitmap. // // An example font is 6x9 pixels and can extend two pixels below the baseline. // In BDF, this means the "FONTBOUNDINGBOX" is "6 9 0 -2". All glyphs should be // able to fit in this box: // // |..... "+" is origin and the horizontal line is the baseline. // |..... // |..... All the glyphs should be able to be converted into a bitmap image // |..... from the font bitmap data and then copied into video memory. // |..... // |..... BDF bitmap data doesn't have to fill out the entire font bounding // +----- box. We will have to position the glyph correctly. // |..... // |..... // // Glyph bounding box (BBw(3), BBh(7), BBx(2), BBy(-1)) relative to font // bounding box (FBBw(6), FBBh(9), FBBx(0), FBBy(-2)) (shown widened x2): // // |FBBx // ←BBx→←BBw→ // ←---FBBw---→ // | . . . . . ↑FBBh Empty rows before bitmap = FBBh + FBBy - BBh - BBy // | . # # # . |↑ Empty rows after bitmap = BBy - FBBy // | . # # # . || Empty columns before bitmap = BBx - FBBx // | . # # # . || Empty columns after bitmap = FBBw + FBBx - BBw - BBx // | . # # # . ||BBh // | . # # # . || // + - # # # - || // | . # # # .FBBy↑|↓⬍BBy // | . . . . . ↓↓ // // Example Glyph: Empty rows before bitmap = 9 + -2 - 6 - 0 = 1 // BBX 5 6 0 0 Empty rows after bitmap = 0 - -2 = 2 // Empty columns before bitmap = 0 - 0 = 0 // BITMAP Empty columns after bitmap = 6 + 0 - 5 - 0 = 1 // |..... <= [ 0,0,0,0,0, 0] <= 00 (These are the complete rows as hex) // |.#... 20 <= [],0,0,1,0,0,[0] <= 08 (Any padding bits at the end must be ignored) // |#.#.. 50 <= [],0,1,0,1,0,[0] <= 14 // #...#. 88 <= [],1,0,0,0,1,[0] <= 22 // #####. f8 <= [],1,1,1,1,1,[0] <= 3e // #...#. 88 <= [],1,0,0,0,1,[0] <= 22 // #---#- 88 <= [],1,0,0,0,1,[0] <= 22 // |..... <= [ 0,0,0,0,0, 0] <= 00 // |..... <= [ 0,0,0,0,0, 0] <= 00 const Compiler = struct { font: *Self, glyph: *Glyph, bitmap: []u8, row: usize = 0, // From the top of the bitmap col: usize = 0, rows_before: u32 = undefined, row_after_glyph: u32 = undefined, cols_before: u32 = undefined, cols_after: u32 = undefined, pub fn new(font: *Self, glyph: *Glyph, buffer: []u8) Compiler { const row_after_glyph = @intCast(u32, @as(i32, font.bounds.size.y) + font.bounds.pos.y - glyph.bounds.pos.y); const r = .{ .font = font, .glyph = glyph, .bitmap = glyph.get_bitmap(buffer), .rows_before = row_after_glyph - glyph.bounds.size.y, .row_after_glyph = row_after_glyph, .cols_before = @intCast(u32, @as(i32, glyph.bounds.pos.x) - font.bounds.pos.x), .cols_after = @intCast(u32, @as(i32, font.bounds.size.x) + font.bounds.pos.x - glyph.bounds.size.x - glyph.bounds.pos.x), }; return r; } fn compile_pixel(self: *Compiler, value: bool) void { const offset = self.glyph.get_byte_offset(self.row, self.col); const byte = &self.bitmap[offset]; set_bit(byte, self.col, value); // std.debug.print("{}: {b:0>8}\n", .{offset, byte.*}); self.col += 1; } pub fn compile_row(self: *Compiler, line: []const u8) Error!void { var col: u16 = undefined; // std.debug.print("Above\n", .{}); // Empty rows above glyph while (self.row < self.rows_before) { col = 0; self.col = 0; while (col < self.font.bounds.size.x) { self.compile_pixel(false); col += 1; } self.row += 1; } // std.debug.print("Before\n", .{}); // Empty columns before glyph row col = 0; self.col = 0; while (col < self.cols_before) { self.compile_pixel(false); col += 1; } // std.debug.print("Glyph\n", .{}); col = 0; var left = line; while (col < self.glyph.bounds.size.x) { const byte = std.fmt.parseUnsigned(u8, left[0..2], 16) catch return Error.BdfBadBitmap; // std.debug.print("{x}", .{byte}); var n: u4 = 0; while (n < 8 and col < self.glyph.bounds.size.x) { self.compile_pixel(get_bit(byte, n)); n += 1; col += 1; } left = left[2..]; } // std.debug.print("After\n", .{}); // Empty columns after glyph row col = 0; while (col < self.cols_after) { self.compile_pixel(false); col += 1; } self.row += 1; // std.debug.print("Below\n", .{}); // Empty rows under glyph if (self.row >= self.row_after_glyph) { while (self.row < self.font.bounds.size.y) { col = 0; self.col = 0; while (col < self.font.bounds.size.x) { self.compile_pixel(false); col += 1; } self.row += 1; } } } }; pub const Result = struct { need_more_input: bool = false, need_buffer: ?usize = null, glyph: ?Glyph = null, done: bool = false, pub fn verify(self: *const Result) void { if (!(self.need_more_input or self.need_buffer != null or self.glyph != null or self.done)) { @panic("Bdf.Result is invalid"); } } }; pub const Parser = struct { const StateKind = enum { BeforeStartFont, AfterStartFont, Properties, Glyphs, Glyph, GlyphBitmap, EndFont, }; const State = union(StateKind) { BeforeStartFont: void, AfterStartFont: void, Properties: struct { expected: u16, got: u16 = 0, }, Glyphs: void, Glyph: void, GlyphBitmap: struct { expected_lines: u16, expected_line_len: u16, compiler: Compiler, }, EndFont: void, }; const Keyword = enum { StartFont, Comment, FontBoundingBox, StartProperties, Chars, StartChar, Encoding, Bbx, Bitmap, EndFont, Unknown, pub fn from_string(string: []const u8) Keyword { if (streq(string, "STARTFONT")) { return .StartFont; } else if (streq(string, "COMMENT")) { return .Comment; } else if (streq(string, "FONTBOUNDINGBOX")) { return .FontBoundingBox; } else if (streq(string, "STARTPROPERTIES")) { return .StartProperties; } else if (streq(string, "CHARS")) { return .Chars; } else if (streq(string, "STARTCHAR")) { return .StartChar; } else if (streq(string, "ENCODING")) { return .Encoding; } else if (streq(string, "BBX")) { return .Bbx; } else if (streq(string, "BITMAP")) { return .Bitmap; } else if (streq(string, "ENDFONT")) { return .EndFont; } else { return .Unknown; } } }; const max_line_len: usize = 256; const more_input = Result{.need_more_input = true}; line_buffer: [max_line_len]u8 = [_]u8{0} ** max_line_len, word_it_buffer: [max_line_len]u8 = undefined, line_pos: usize = 0, line_no: usize = 0, last_result: Result = more_input, glyphs_got: u32 = 0, font: Self = .{}, current_glyph: Glyph = undefined, state: State = State{.BeforeStartFont = void{}}, buffer: ?[]u8 = null, fn parse_int_value(it: *WordIterator, comptime Int: type, base: comptime_int) Error!Int { const str = (try it.next()) orelse return Error.BdfMissingValue; return std.fmt.parseInt(Int, str, base) catch return Error.BdfBadValue; } fn parse_point_i(it: *WordIterator, comptime PointType: type, point: *PointType) Error!void { for ([_]*PointType.Num{&point.x, &point.y}) |num| { num.* = try parse_int_value(it, PointType.Num, 10); } } fn parse_box_i(it: *WordIterator, comptime BoxType: type, box: *BoxType) Error!void { try parse_point_i(it, BoxType.Size, &box.size); try parse_point_i(it, BoxType.Pos, &box.pos); } fn parse_bounds(it: *WordIterator, bounds: *Bounds) Error!void { try parse_box_i(it, Bounds, bounds); } fn process_line(self: *Parser, line: []const u8) Error!Result { self.line_no += 1; // std.debug.print("LINE: {} {s}\n", .{self.line_no, line}); var it = WordIterator{ .quote = '\"', .input = line, .buffer = self.word_it_buffer[0..], }; switch (self.state) { StateKind.BeforeStartFont => { const kw = (try it.next()) orelse return more_input; // std.debug.print("BeforeStartFont keyword: {s}\n", .{kw}); if (Keyword.from_string(kw) == .StartFont) { self.state = State{.AfterStartFont = void{}}; } else { return Error.BdfBadKeyword; } }, StateKind.AfterStartFont => { const kw = (try it.next()) orelse return more_input; // std.debug.print("AfterStartFont keyword: {s}\n", .{kw}); switch (Keyword.from_string(kw)) { .Comment, .Unknown => {}, .FontBoundingBox => { try parse_bounds(&it, &self.font.bounds); }, .StartProperties => self.state = State{.Properties = .{.expected = try parse_int_value(&it, u16, 10)}}, .Chars => { self.font.glyph_count = try parse_int_value(&it, u32, 10); self.state = State{.Glyphs = .{}}; return Result{.need_buffer = self.font.required_buffer_size()}; }, else => return Error.BdfBadKeyword, } }, StateKind.Properties => |*state_info| { const kw = (try it.next()) orelse return more_input; if (streq(kw, "COMMENT")) { return more_input; } else if (streq(kw, "ENDPROPERTIES")) { if (state_info.expected < state_info.got) { return Error.BdfBadPropCount; } self.state = State{.AfterStartFont = void{}}; } else { if (state_info.expected == state_info.got) { return Error.BdfBadPropCount; } state_info.got += 1; if (streq(kw, "FONT_NAME")) { const name = (try it.next()) orelse return Error.BdfMissingValue; const l = utils.memory_copy_truncate(&self.font.name_buffer, name); self.font.name = self.font.name_buffer[0..l]; } else if (streq(kw, "DEFAULT_CHAR")) { self.font.default_codepoint = try parse_int_value(&it, u32, 10); } } }, StateKind.Glyphs => { const kw = (try it.next()) orelse return more_input; // std.debug.print("Glyphs keyword: {s}\n", .{kw}); switch (Keyword.from_string(kw)) { .Comment => {}, .StartChar => { if (self.glyphs_got == self.font.glyph_count) { return Error.BdfBadGlyphCount; } self.current_glyph = Glyph.new(&self.font, self.glyphs_got); self.glyphs_got += 1; // TODO Glyph name from arg self.state = State{.Glyph = void{}}; }, .EndFont => { if (self.glyphs_got < self.font.glyph_count) { return Error.BdfBadGlyphCount; } if (!self.font.found_default_codepoint) { return Error.BdfMissingDefaultCodepoint; } self.state = State{.EndFont = void{}}; return Result{.done = true}; }, else => return Error.BdfBadKeyword, } }, StateKind.Glyph => { const kw = (try it.next()) orelse return more_input; // std.debug.print("Glyph keyword: {s}\n", .{kw}); switch (Keyword.from_string(kw)) { .Comment, .Unknown => {}, // TODO Test the following are being set .Encoding => { self.current_glyph.codepoint = try parse_int_value(&it, u32, 10); if (self.current_glyph.codepoint == self.font.default_codepoint) { self.font.found_default_codepoint = true; } }, .Bbx => { try parse_bounds(&it, &self.current_glyph.bounds); }, .Bitmap => { // TODO: Check for BBX and put results into state self.state = State{.GlyphBitmap = .{ .expected_lines = 0, .expected_line_len = 0, .compiler = Compiler.new(&self.font, &self.current_glyph, self.buffer.?), }}; }, else => return Error.BdfBadKeyword, } }, StateKind.GlyphBitmap => |*state_info| { _ = state_info; // TODO if (line.len == 0) return more_input; // std.debug.print("Bitmap Line: \"{s}\"\n", .{line}); if (streq(line, "ENDCHAR")) { // TODO: Check we got enough lines self.state = State{.Glyphs = .{}}; return Result{.glyph = self.current_glyph, .need_more_input = true}; } else { // TODO: Check this isn't too many lines // TODO: Check line length matches expected try state_info.compiler.compile_row(line); } }, StateKind.EndFont => return Result{.done = true}, } return more_input; } pub fn feed_input(self: *Parser, chunk: []const u8, chunk_pos: *usize) Error!Result { if (self.last_result.glyph != null) { self.last_result.glyph = null; } if (self.last_result.need_buffer != null) { if (self.buffer == null) { return Error.BdfMissingBuffer; } if (self.buffer.?.len < self.font.required_buffer_size()) { return Error.BdfBufferTooSmall; } for (self.buffer.?) |*byte| { byte.* = 0; } self.last_result.need_buffer = null; } if (chunk_pos.* >= chunk.len) { return more_input; } var process_chunk = true; var chunk_done: bool = undefined; while (process_chunk) { const c = chunk[chunk_pos.*]; var newline = c == '\n'; if (newline) { self.last_result = try self.process_line(self.line_buffer[0..self.line_pos]); self.line_pos = 0; } else { self.line_buffer[self.line_pos] = c; self.line_pos += 1; } chunk_pos.* += 1; chunk_done = chunk_pos.* >= chunk.len; // Done, nothing else to do, exit loop if (self.last_result.done) { process_chunk = false; // Not done, chunk done, need more from user, may also ask for // buffer or have glyph, so exit loop. } else if (chunk_done) { self.last_result.need_more_input = true; process_chunk = false; // Not done, chunk not empty, but needs buffer or has glyph, so // exit loop } else if (self.last_result.need_buffer != null or self.last_result.glyph != null) { // Still working on current chunk self.last_result.need_more_input = false; process_chunk = false; } // Else it isn't done with the chunk yet or doesn't need the buffer } self.last_result.verify(); return self.last_result; } }; fn test_parse_font(allocator: *const std.mem.Allocator, parser: *Parser, bdf_text: []const u8, max_chunk_len: usize) !void { const start_chunk_size = @minimum(bdf_text.len, max_chunk_len); var chunk: []const u8 = bdf_text[0..start_chunk_size]; var chunk_pos: usize = 0; var left: []const u8 = bdf_text[start_chunk_size..]; while (true) { const result = try parser.feed_input(chunk, &chunk_pos); if (result.done) break; if (result.glyph) |glyph| { _ = glyph; } if (result.need_more_input) { const chunk_size = @minimum(left.len, max_chunk_len); chunk = left[0..chunk_size]; left = left[chunk_size..]; chunk_pos = 0; } if (result.need_buffer) |buffer_size| { parser.buffer = try allocator.alloc(u8, buffer_size); } } } test "Bdf" { const bdf_text = \\STARTFONT \\COMMENT This is a test comment \\FONTBOUNDINGBOX 6 9 0 -2 \\STARTPROPERTIES 2 \\FONT_NAME "The Font Name" \\DEFAULT_CHAR 65 \\ENDPROPERTIES \\CHARS 1 \\STARTCHAR A \\ENCODING 65 \\BBX 5 6 0 0 \\BITMAP \\20 \\50 \\88 \\f8 \\88 \\88 \\ENDCHAR \\ENDFONT \\ ; var parser = Parser{}; var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator); defer arena.deinit(); const allocator = arena.allocator(); // 3 because we need to be sure the input can be constrained try test_parse_font(&allocator, &parser, bdf_text, 3); try std.testing.expectEqualStrings(parser.font.name.?, "The Font Name"); try std.testing.expectEqual(parser.font.glyph_count, 1); try std.testing.expectEqual(parser.font.bounds.size.x, 6); try std.testing.expectEqual(parser.font.bounds.size.y, 9); try std.testing.expectEqual(parser.font.bounds.pos.x, 0); try std.testing.expectEqual(parser.font.bounds.pos.y, -2); try std.testing.expectEqual(parser.font.default_codepoint, 'A'); try std.testing.expectEqual(parser.current_glyph.codepoint, 65); try std.testing.expectEqual(parser.current_glyph.bounds.size.x, 5); try std.testing.expectEqual(parser.current_glyph.bounds.size.y, 6); try std.testing.expectEqual(parser.current_glyph.bounds.pos.x, 0); try std.testing.expectEqual(parser.current_glyph.bounds.pos.y, 0); // Test that the Compiler is putting the right bytes in the buffer { const expected = [_]u8{ 0x00, 0x20, 0x50, 0x88, 0xf8, 0x88, 0x88, 0x00, 0x00, }; try std.testing.expectEqualSlices(u8, expected[0..], parser.buffer.?); } // Test Glyph.Iterator indirectly { const expected = "......\n" ++ "..#...\n" ++ ".#.#..\n" ++ "#...#.\n" ++ "#####.\n" ++ "#...#.\n" ++ "#...#.\n" ++ "......\n" ++ "......\n"; var buffer = [_]u8{0} ** 63; try std.testing.expectEqualStrings(expected[0..], try parser.current_glyph.preview(parser.buffer.?, buffer[0..])); } } test "Bdf parse builtin_font.bdf" { const bdf_text = @embedFile("../../kernel/builtin_font.bdf"); var parser = Parser{}; var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator); defer arena.deinit(); const allocator = arena.allocator(); try test_parse_font(&allocator, &parser, bdf_text, 128); }
0
repos/georgios/libs
repos/georgios/libs/utils/str.zig
const std = @import("std"); const utils = @import("utils.zig"); pub fn isspace(c: u8) bool { return c == ' ' or c == '\n' or c == '\t' or c == '\r'; } pub fn stripped_string_size(str: []const u8) callconv(.Inline) usize { var stripped_size: usize = 0; for (str) |c, i| { if (!isspace(c)) stripped_size = i + 1; } return stripped_size; } pub fn string_length(bytes: []const u8) callconv(.Inline) usize { for (bytes[0..]) |*ptr, i| { if (ptr.* == 0) { return i; } } return bytes.len; } pub fn cstring_length(cstr: [*:0]const u8) usize { var i: usize = 0; while (cstr[i] != 0) { i += 1; } return i; } pub fn cstring_to_slice(cstr: [*:0]const u8) []const u8 { return @ptrCast([*]const u8, cstr)[0..cstring_length(cstr) + 1]; } pub fn cstring_to_string(cstr: [*:0]const u8) []const u8 { return @ptrCast([*]const u8, cstr)[0..cstring_length(cstr)]; } pub fn hex_char_len(comptime Type: type, value: Type) Type { if (value == 0) { return 1; } return utils.int_log2(Type, value) / 4 + 1; } fn test_hex_char_len(value: usize, expected: usize) !void { try std.testing.expectEqual(expected, hex_char_len(usize, value)); } test "hex_char_len" { try test_hex_char_len(0x0, 1); try test_hex_char_len(0x1, 1); try test_hex_char_len(0xf, 1); try test_hex_char_len(0x10, 2); try test_hex_char_len(0x11, 2); try test_hex_char_len(0xff, 2); try test_hex_char_len(0x100, 3); try test_hex_char_len(0x101, 3); } pub fn nibble_char(value: u4) u8 { return if (value < 10) '0' + @intCast(u8, value) else 'a' + @intCast(u8, value - 10); } /// Insert a hex byte to into a buffer. pub fn byte_buffer(buffer: []u8, value: u8) void { buffer[0] = nibble_char(@intCast(u4, value >> 4)); buffer[1] = nibble_char(@intCast(u4, value % 0x10)); } pub fn starts_with(what: []const u8, prefix: []const u8) bool { if (what.len < prefix.len) return false; for (what[0..prefix.len]) |value, i| { if (value != prefix[i]) return false; } return true; } pub fn ends_with(what: []const u8, postfix: []const u8) bool { if (what.len < postfix.len) return false; for (what[what.len - postfix.len..]) |value, i| { if (value != postfix[i]) return false; } return true; } pub const StringWriter = struct { const Writer = std.io.Writer(*StringWriter, std.mem.Allocator.Error, write); const String = std.ArrayList(u8); string: String, pub fn init(alloc: std.mem.Allocator) StringWriter { return .{.string = String.init(alloc)}; } pub fn deinit(self: *StringWriter) void { self.string.deinit(); } fn write(self: *StringWriter, bytes: []const u8) std.mem.Allocator.Error!usize { try self.string.appendSlice(bytes); return bytes.len; } pub fn writer(self: *StringWriter) Writer { return .{.context = self}; } pub fn get(self: *StringWriter) []const u8 { return self.string.toOwnedSlice(); } }; test "StringWriter" { var ta = utils.TestAlloc{}; defer ta.deinit(.Panic); errdefer ta.deinit(.NoPanic); const alloc = ta.alloc(); var sw = StringWriter.init(alloc); const s1 = sw.get(); try std.testing.expectEqualStrings("", s1); alloc.free(s1); try sw.writer().print("{} Hello {s}\n", .{1, "World"}); try sw.writer().print("{} Hello {s}\n", .{2, "again"}); const s2 = sw.get(); try std.testing.expectEqualStrings( \\1 Hello World \\2 Hello again \\ , s2); alloc.free(s2); const s3 = sw.get(); try std.testing.expectEqualStrings("", s3); alloc.free(s3); ta.deinit(.Panic); } pub const StringReader = struct { const Error = error{}; const Reader = std.io.Reader(*StringReader, Error, read); string: []const u8, pos: usize = 0, fn read(self: *StringReader, bytes: []u8) Error!usize { const len = utils.memory_copy_truncate(bytes, self.string[self.pos..]); self.pos += len; return len; } pub fn reader(self: *StringReader) Reader { return .{.context = self}; } }; test "StringReader" { var sr = StringReader{.string = "Hello World!"}; var reader = sr.reader(); var buffer: [6]u8 = undefined; try std.testing.expectEqualStrings("Hello ", buffer[0..try reader.read(buffer[0..])]); try std.testing.expectEqualStrings("World!", buffer[0..try reader.read(buffer[0..])]); try std.testing.expectEqualStrings("", buffer[0..try reader.read(buffer[0..])]); } pub const DumpHexOptions = struct { // Print hex data like this: // VV group_sep // 00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f // ^^ Byte ^ byte_sep Group^^^^^^^^^^^^^^^^^^^^^^^ byte_sep: []const u8 = " ", group_byte_count: usize = 8, group_count: usize = 2, group_sep: []const u8 = " ", line_end: []const u8 = "\n", compare_to: ?[]const u8 = null, }; fn dump_hex_byte_count(bytes: []const u8, opts: DumpHexOptions, n: usize) usize { return @minimum(opts.group_byte_count * n, bytes.len); } fn dump_hex_group(bytes: []const u8, writer: anytype, opts: DumpHexOptions) !usize { var wrote: usize = 0; if (bytes.len > 0) { const last = bytes.len - 1; for (bytes) |byte, i| { var buffer: [2]u8 = undefined; byte_buffer(buffer[0..], byte); wrote += try writer.write(buffer[0..]); if (i != last) { wrote += try writer.write(opts.byte_sep); } } } return wrote; } fn dump_hex_line(bytes: []const u8, writer: anytype, opts: DumpHexOptions) !usize { var left = bytes; var wrote: usize = 0; if (bytes.len > 0) { const last = opts.group_count - 1; var i: usize = 0; while (i < opts.group_count and left.len > 0) { const byte_count = dump_hex_byte_count(left, opts, 1); const group = left[0..byte_count]; wrote += try dump_hex_group(group, writer, opts); left = left[byte_count..]; if (i < last and left.len > 0) { wrote += try writer.write(opts.group_sep); } i += 1; } } return wrote; } pub fn dump_hex(bytes: []const u8, writer: anytype, opts: DumpHexOptions) !void { var left = bytes; var compare_to_left = opts.compare_to orelse utils.empty_slice(u8, bytes.ptr); // Should be same length const same_sep = " == "; const not_same_sep = " != "; const group_size = opts.group_byte_count * 2 + // Bytes ((opts.group_byte_count * opts.byte_sep.len) - 1); // byte_sep Between Bytes const line_size = group_size * opts.group_count + // Groups (opts.group_count - 1) * opts.group_sep.len; // group_sep Between Groups while (left.len > 0 or compare_to_left.len > 0) { const byte_count = dump_hex_byte_count(left, opts, opts.group_count); const line = left[0..byte_count]; left = left[byte_count..]; const wrote = try dump_hex_line(line, writer, opts); if (opts.compare_to != null) { try writer.writeByteNTimes(' ', line_size - wrote); const ct_byte_count = dump_hex_byte_count(compare_to_left, opts, opts.group_count); const ct_line = compare_to_left[0..ct_byte_count]; _ = try writer.write( if (utils.memory_compare(line, ct_line)) same_sep else not_same_sep); if (compare_to_left.len > 0) { _ = try dump_hex_line(ct_line, writer, opts); compare_to_left = compare_to_left[ct_byte_count..]; } } _ = try writer.write(opts.line_end); } } test "dump_hex" { var ta = utils.TestAlloc{}; defer ta.deinit(.Panic); errdefer ta.deinit(.NoPanic); const alloc = ta.alloc(); var sw = StringWriter.init(alloc); var w = sw.writer(); const bytes = [_]u8 { 0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0xff, }; { try dump_hex(bytes[0..0], w, .{}); const s = sw.get(); defer alloc.free(s); try std.testing.expectEqualStrings("", s); } { try dump_hex(bytes[0..1], w, .{}); const s = sw.get(); defer alloc.free(s); try std.testing.expectEqualStrings("00\n", s); } { try dump_hex(bytes[0..8], w, .{}); const s = sw.get(); defer alloc.free(s); try std.testing.expectEqualStrings("00 01 02 03 04 05 06 07\n", s); } { try dump_hex(bytes[0..9], w, .{}); const s = sw.get(); defer alloc.free(s); try std.testing.expectEqualStrings("00 01 02 03 04 05 06 07 08\n", s); } { try dump_hex(bytes[0..0x10], w, .{}); const s = sw.get(); defer alloc.free(s); try std.testing.expectEqualStrings( "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f\n", s); } { try dump_hex(bytes[0..0x11], w, .{}); const s = sw.get(); defer alloc.free(s); try std.testing.expectEqualStrings( "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f\nff\n", s); } { try dump_hex(bytes[0..0x10], w, .{.compare_to = bytes[0..8]}); const s = sw.get(); defer alloc.free(s); try std.testing.expectEqualStrings( "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f != 00 01 02 03 04 05 06 07\n", s); } { try dump_hex(bytes[0..0x10], w, .{.compare_to = bytes[0..0x10]}); const s = sw.get(); defer alloc.free(s); try std.testing.expectEqualStrings( "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f == " ++ "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f\n", s); } { try dump_hex(bytes[0..0x11], w, .{.compare_to = bytes[0..0x10]}); const s = sw.get(); defer alloc.free(s); try std.testing.expectEqualStrings( "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f == " ++ "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f\n" ++ "ff != \n", s); } { try dump_hex(bytes[0..0x10], w, .{.compare_to = bytes[0..0x11]}); const s = sw.get(); defer alloc.free(s); try std.testing.expectEqualStrings( "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f == " ++ "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f\n" ++ " != ff\n", s); } { try dump_hex(bytes[0..0x11], w, .{.compare_to = bytes[0..0x11]}); const s = sw.get(); defer alloc.free(s); try std.testing.expectEqualStrings( "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f == " ++ "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f\n" ++ "ff == ff\n", s); } { const a = [_]u8 { 0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0xf0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, // <-- 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, }; const b = [_]u8 { 0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, }; try dump_hex(a[0..], w, .{.compare_to = b[0..]}); const s = sw.get(); defer alloc.free(s); try std.testing.expectEqualStrings( "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f == " ++ "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f\n" ++ "f0 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f != " ++ "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f\n" ++ "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f == " ++ "00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f\n", s); } ta.deinit(.Panic); } fn fmt_dump_hex_impl(bytes: []const u8, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { _ = fmt; _ = options; try dump_hex(bytes, writer, .{}); } pub fn fmt_dump_hex(bytes: []const u8) std.fmt.Formatter(fmt_dump_hex_impl) { return .{.data = bytes}; } const FmtCompareBytesData = struct { expected: []const u8, actual: []const u8, }; fn fmt_compare_bytes_impl(data: FmtCompareBytesData, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { _ = fmt; _ = options; try dump_hex(data.expected, writer, .{.compare_to = data.actual, .group_byte_count = 4}); } pub fn fmt_compare_bytes( expected: []const u8, actual: []const u8) std.fmt.Formatter(fmt_compare_bytes_impl) { return .{.data = .{.expected = expected, .actual = actual}}; } pub fn expect_equal_bytes(expected: []const u8, actual: []const u8) !void { if (!utils.memory_compare(expected, actual)) { std.debug.print("Expected the left side, but got the right:\n{}", .{fmt_compare_bytes(expected, actual)}); return error.TestExpectedEqual; } }
0
repos/georgios/libs
repos/georgios/libs/utils/ToString.zig
// TODO: Remove this? Replace with std writers/formatters? const std = @import("std"); const utils = @import("utils.zig"); pub const Error = utils.Error; const ToString = @This(); buffer: ?[]u8 = null, got: usize = 0, ext_func: ?fn(self: *ToString, str: []const u8) void = null, truncate: ?[]const u8 = null, fn left(self: *const ToString) usize { if (self.buffer) |buf| { return buf.len - self.got; } return std.math.maxInt(usize); } pub fn need(self: *const ToString, space: usize) Error!void { if (self.truncate == null and space > self.left()) { return Error.NotEnoughDestination; } } pub fn get(self: *ToString) []u8 { if (self.buffer) |buf| { return buf[0..@minimum(buf.len, self.got)]; } @panic("This ToString doesn't have a buffer set"); } pub fn upgrade_buffer(self: *ToString, new: []u8) Error!?[]u8 { if (self.buffer) |buf| { _ = try utils.memory_copy_error(new, buf); self.buffer = new; return buf; } self.buffer = new; self.got = 0; return null; } fn _str(self: *ToString, s: []const u8) void { if (self.buffer) |buf| { if (self.left() == 0) { return; } var from = s; var i: usize = 0; var in_truncated = false; while (i < from.len) { if (self.got >= buf.len) { break; } else if (self.truncate) |truncate_end| { if (!in_truncated and self.left() == truncate_end.len) { in_truncated = true; from = truncate_end[0..]; continue; } } buf[self.got] = from[i]; self.got += 1; i += 1; } } if (self.ext_func) |ef| { ef(self, s); } } fn _char(self: *ToString, c: u8) void { self._str(([_]u8{c})[0..]); } pub fn char(self: *ToString, c: u8) Error!void { try self.need(1); self._char(c); } pub fn string(self: *ToString, src: []const u8) Error!void { try self.need(src.len); self._str(src); } pub fn string_truncate(self: *ToString, src: []const u8) void { self._str(src[0..@minimum(src.len, self.left())]); } pub fn cstring(self: *ToString, cstr: [*:0]const u8) Error!void { try self.string(utils.cstring_to_string(cstr)); } fn hex_recurse(self: *ToString, value: usize) void { const next = value / 0x10; if (next > 0) { self.hex_recurse(next); } self._char(utils.nibble_char(@intCast(u4, value % 0x10))); } pub fn hex(self: *ToString, value: usize) Error!void { try self.need(2 + utils.hex_char_len(usize, value)); self._char('0'); self._char('x'); if (value == 0) { self._char('0'); return; } self.hex_recurse(value); } fn _int_recurse(self: *ToString, sign: ?u8, needs: usize, value: anytype) Error!void { const next = value / 10; if (next > 0) { try self._int_recurse(sign, needs + 1, next); } else { const add: usize = if (sign != null) 1 else 0; try self.need(needs + add); if (sign) |s| self._char(s); } self._char('0' + @intCast(u8, value % 10)); } pub fn _int(self: *ToString, value: anytype) Error!void { if (value == 0) { try self.char('0'); return; } const signed = std.meta.trait.isSignedInt(@TypeOf(value)); try self._int_recurse(if (signed and value < 0) '-' else null, 1, std.math.absCast(value)); } pub fn int(self: *ToString, value: anytype) Error!void { const Type = @TypeOf(value); comptime std.debug.assert(@typeInfo(Type) == .Int); try self._int(value); } pub fn uint(self: *ToString, value: usize) Error!void { try self._int(value); } pub fn std_write(self: *ToString, bytes: []const u8) Error!usize { try self.string(bytes); return bytes.len; } pub const StdWriter = std.io.Writer(*ToString, Error, std_write); pub fn std_writer(self: *ToString) StdWriter { return StdWriter{.context = self}; } test "ToString" { { var buffer: [128]u8 = undefined; var ts = ToString{.buffer = buffer[0..]}; try ts.string("Hello "); try ts.string(""); try ts.cstring("goodbye!"); try ts.cstring(""); try std.testing.expectEqualStrings("Hello goodbye!", ts.get()); } { var buffer: [1]u8 = undefined; var ts = ToString{.buffer = buffer[0..]}; try std.testing.expectError(Error.NotEnoughDestination, ts.string("Hello")); try std.testing.expectEqualStrings("", ts.get()); } { var buffer: [128]u8 = undefined; var ts = ToString{.buffer = buffer[0..]}; try ts.hex(0x0); try ts.hex(0x1); try ts.hex(0xf); try ts.hex(0x10); try ts.hex(0xff); try ts.hex(0x100); try std.testing.expectEqualStrings("0x00x10xf0x100xff0x100", ts.get()); } { var buffer: [1]u8 = undefined; var ts = ToString{.buffer = buffer[0..]}; try std.testing.expectError(Error.NotEnoughDestination, ts.hex(0xff)); try std.testing.expectEqualStrings("", ts.get()); } { var buffer: [10]u8 = undefined; var ts = ToString{.buffer = buffer[0..]}; try ts.uint(10); // 2 try ts.uint(1234); // 6 try std.testing.expectError(Error.NotEnoughDestination, ts.uint(56789)); // 11 try ts.uint(5678); // 10 try std.testing.expectEqualStrings("1012345678", ts.get()); } { var buffer: [10]u8 = undefined; var ts = ToString{.buffer = buffer[0..], .truncate = ".."}; try ts.uint(10); try ts.uint(1234); try std.testing.expectEqualStrings("101234", ts.get()); try ts.uint(56789); try std.testing.expectEqualStrings("10123456..", ts.get()); try ts.string("another thing"); try std.testing.expectEqualStrings("10123456..", ts.get()); } { var buffer: [10]u8 = undefined; var ts = ToString{.buffer = buffer[0..]}; try ts.std_writer().print("0o{o}", .{0o123}); try std.testing.expectEqualStrings("0o123", ts.get()); } }
0
repos/georgios/libs
repos/georgios/libs/utils/utils.zig
const std = @import("std"); const builtin = @import("builtin"); pub const AnsiEscProcessor = @import("AnsiEscProcessor.zig"); pub const Guid = @import("Guid.zig"); pub const ToString = @import("ToString.zig"); pub const Cksum = @import("Cksum.zig"); pub const WordIterator = @import("WordIterator.zig"); pub const Bdf = @import("Bdf.zig"); pub const List = @import("list.zig").List; pub const CircularBuffer = @import("circular_buffer.zig").CircularBuffer; pub const PackedArray = @import("packed_array.zig").PackedArray; pub const Bmp = @import("bmp.zig").Bmp; const unicode = @import("unicode.zig"); pub const Utf8ToUtf32 = unicode.Utf8ToUtf32; pub const UnicodeError = unicode.Error; const mem = @import("mem.zig"); pub const memory_compare = mem.memory_compare; pub const memory_copy_truncate = mem.memory_copy_truncate; pub const memory_copy_error = mem.memory_copy_error; pub const memory_copy_anyptr = mem.memory_copy_anyptr; pub const memory_set = mem.memory_set; pub const to_bytes = mem.to_bytes; pub const to_const_bytes = mem.to_const_bytes; pub const empty_slice = mem.empty_slice; pub const TestAlloc = mem.TestAlloc; const str = @import("str.zig"); pub const isspace = str.isspace; pub const stripped_string_size = str.stripped_string_size; pub const string_length = str.string_length; pub const cstring_length = str.cstring_length; pub const cstring_to_slice = str.cstring_to_slice; pub const cstring_to_string = str.cstring_to_string; pub const hex_char_len = str.hex_char_len; pub const nibble_char = str.nibble_char; pub const byte_buffer = str.byte_buffer; pub const starts_with = str.starts_with; pub const ends_with = str.ends_with; pub const StringWriter = str.StringWriter; pub const StringReader = str.StringReader; pub const fmt_dump_hex = str.fmt_dump_hex; pub const expect_equal_bytes = str.expect_equal_bytes; pub const Error = error { Unknown, OutOfBounds, NotEnoughSource, NotEnoughDestination, }; // NOTE: DO NOT TRY TO REMOVE INLINE ON THESE, WILL BREAK LOW KERNEL pub fn Ki(x: usize) callconv(.Inline) usize { return x << 10; } pub fn Mi(x: usize) callconv(.Inline) usize { return x << 20; } pub fn Gi(x: usize) callconv(.Inline) usize { return x << 30; } pub fn Ti(x: usize) callconv(.Inline) usize { return x << 40; } pub fn align_down(value: usize, align_by: usize) usize { return value & (~align_by +% 1); } pub fn align_up(value: usize, align_by: usize) usize { return align_down(value +% align_by -% 1, align_by); } pub fn padding(value: usize, align_by: usize) callconv(.Inline) usize { return -%value & (align_by - 1); } pub fn div_round_up(comptime Type: type, n: Type, d: Type) callconv(.Inline) Type { return n / d + (if (n % d != 0) @as(Type, 1) else @as(Type, 0)); } test "div_round_up" { try std.testing.expectEqual(@as(u8, 0), div_round_up(u8, 0, 2)); try std.testing.expectEqual(@as(u8, 1), div_round_up(u8, 1, 2)); try std.testing.expectEqual(@as(u8, 1), div_round_up(u8, 2, 2)); try std.testing.expectEqual(@as(u8, 2), div_round_up(u8, 3, 2)); try std.testing.expectEqual(@as(u8, 2), div_round_up(u8, 4, 2)); } pub fn packed_bit_size(comptime Type: type) comptime_int { const Traits = @typeInfo(Type); switch (Traits) { std.builtin.TypeId.Int => |int_type| { return int_type.bits; }, std.builtin.TypeId.Bool => { return 1; }, std.builtin.TypeId.Array => |array_type| { return packed_bit_size(array_type.child) * array_type.len; }, std.builtin.TypeId.Struct => |struct_type| { if (struct_type.layout != std.builtin.TypeInfo.ContainerLayout.Packed) { @compileError("Struct must be packed!"); } comptime var total_size: comptime_int = 0; inline for (struct_type.fields) |field| { total_size += packed_bit_size(field.field_type); } return total_size; }, else => { @compileLog("Unsupported Type is ", @typeName(Type)); @compileError("Unsupported Type"); } } } /// @intToEnum can't be used to test if a value is a valid Enum, so this wraps /// it and gives that functionality. pub fn int_to_enum(comptime EnumType: type, value: std.meta.Tag(EnumType)) ?EnumType { const type_info = @typeInfo(EnumType).Enum; inline for (type_info.fields) |*field| { if (@intCast(type_info.tag_type, field.value) == value) { return @intToEnum(EnumType, value); } } return null; } pub fn valid_enum(comptime EnumType: type, value: EnumType) bool { return int_to_enum(EnumType, @bitCast(std.meta.Tag(EnumType), value)) != null; } test "int_to_enum" { const assert = std.debug.assert; const Abc = enum(u8) { A = 0, B = 1, C = 12, }; // Check with Literals assert(int_to_enum(Abc, @intCast(std.meta.Tag(Abc), 0)).? == Abc.A); assert(int_to_enum(Abc, @intCast(std.meta.Tag(Abc), 1)).? == Abc.B); assert(int_to_enum(Abc, @intCast(std.meta.Tag(Abc), 2)) == null); assert(int_to_enum(Abc, @intCast(std.meta.Tag(Abc), 11)) == null); assert(int_to_enum(Abc, @intCast(std.meta.Tag(Abc), 12)).? == Abc.C); assert(int_to_enum(Abc, @intCast(std.meta.Tag(Abc), 13)) == null); assert(int_to_enum(Abc, @intCast(std.meta.Tag(Abc), 0xFF)) == null); // Check with Variable var x: std.meta.Tag(Abc) = 0; assert(int_to_enum(Abc, x).? == Abc.A); x = 0xFF; assert(int_to_enum(Abc, x) == null); // valid_enum assert(valid_enum(Abc, @intToEnum(Abc, @as(u8, 0)))); // TODO: This is a workaround bitcast of a const Enum causing a compiler assert // Looks like it's related to https://github.com/ziglang/zig/issues/1036 var invalid_enum_value: u8 = 4; assert(!valid_enum(Abc, @ptrCast(*const Abc, &invalid_enum_value).*)); // assert(valid_enum(Abc, @bitCast(Abc, @as(u8, 4)))); } pub fn enum_name(comptime EnumType: type, value: EnumType) ?[]const u8 { const type_info = @typeInfo(EnumType).Enum; inline for (type_info.fields) |*field| { var enum_value = @ptrCast(*const type_info.tag_type, &value).*; if (@intCast(type_info.tag_type, field.value) == enum_value) { return field.name; } } return null; } pub fn add_signed_to_unsigned( comptime uT: type, a: uT, comptime iT: type, b: iT) ?uT { var result: uT = undefined; if (@addWithOverflow(uT, a, @bitCast(uT, b), &result)) { if (b > 0 and result < a) { return null; } } else if (b < 0 and result > a) { return null; } return result; } pub fn add_isize_to_usize(a: usize, b: isize) callconv(.Inline) ?usize { return add_signed_to_unsigned(usize, a, isize, b); } test "add_signed_to_unsigned" { try std.testing.expect(add_isize_to_usize(0, 0).? == 0); try std.testing.expect(add_isize_to_usize(0, 10).? == 10); try std.testing.expect(add_isize_to_usize(0, -10) == null); const max_usize = std.math.maxInt(usize); try std.testing.expect(add_isize_to_usize(max_usize, 0).? == max_usize); try std.testing.expect(add_isize_to_usize(max_usize, -10).? == max_usize - 10); try std.testing.expect(add_isize_to_usize(max_usize, 10) == null); } pub fn int_log2(comptime Type: type, value: Type) Type { return @sizeOf(Type) * 8 - 1 - @clz(Type, value); } fn test_int_log2(value: usize, expected: usize) !void { try std.testing.expectEqual(expected, int_log2(usize, value)); } test "int_log2" { try test_int_log2(1, 0); try test_int_log2(2, 1); try test_int_log2(4, 2); try test_int_log2(8, 3); try test_int_log2(16, 4); try test_int_log2(32, 5); try test_int_log2(64, 6); try test_int_log2(128, 7); } pub fn int_bit_size(comptime Type: type) usize { return @typeInfo(Type).Int.bits; } pub fn IntLog2Type(comptime Type: type) type { return @Type(std.builtin.TypeInfo{.Int = std.builtin.TypeInfo.Int{ .signedness = .unsigned, .bits = int_log2(usize, int_bit_size(Type)), }}); } fn test_IntLog2Type(comptime Type: type, expected: usize) !void { try std.testing.expectEqual(expected, int_bit_size(IntLog2Type(Type))); } test "Log2Int" { try test_IntLog2Type(u2, 1); try test_IntLog2Type(u32, 5); try test_IntLog2Type(u64, 6); } pub const UsizeLog2Type = IntLog2Type(usize); pub fn select_nibble(comptime Type: type, value: Type, which: usize) u4 { return @intCast(u4, (value >> (@intCast(IntLog2Type(Type), which) * 4)) & 0xf); } fn test_select_nibble(comptime Type: type, value: Type, which: usize, expected: u4) !void { try std.testing.expectEqual(expected, select_nibble(Type, value, which)); } test "select_nibble" { try test_select_nibble(u8, 0xaf, 0, 0xf); try test_select_nibble(u8, 0xaf, 1, 0xa); try test_select_nibble(u16, 0x1234, 0, 0x4); try test_select_nibble(u16, 0x1234, 1, 0x3); try test_select_nibble(u16, 0x1234, 2, 0x2); try test_select_nibble(u16, 0x1234, 3, 0x1); } pub fn pow2_round_up(comptime Type: type, value: Type) Type { if (value < 3) { return value; } else { return @intCast(Type, 1) << @intCast(IntLog2Type(Type), int_log2(Type, value - 1) + 1); } } test "pow2_round_up" { try std.testing.expectEqual(@as(u8, 0), pow2_round_up(u8, 0)); try std.testing.expectEqual(@as(u8, 1), pow2_round_up(u8, 1)); try std.testing.expectEqual(@as(u8, 2), pow2_round_up(u8, 2)); try std.testing.expectEqual(@as(u8, 4), pow2_round_up(u8, 3)); try std.testing.expectEqual(@as(u8, 4), pow2_round_up(u8, 4)); try std.testing.expectEqual(@as(u8, 8), pow2_round_up(u8, 5)); try std.testing.expectEqual(@as(u8, 8), pow2_round_up(u8, 6)); try std.testing.expectEqual(@as(u8, 8), pow2_round_up(u8, 7)); try std.testing.expectEqual(@as(u8, 8), pow2_round_up(u8, 8)); try std.testing.expectEqual(@as(u8, 16), pow2_round_up(u8, 9)); try std.testing.expectEqual(@as(u8, 16), pow2_round_up(u8, 16)); try std.testing.expectEqual(@as(u8, 32), pow2_round_up(u8, 17)); } pub fn pow2_round_down(comptime Type: type, value: Type) Type { if (value < 3 or (value & (value - 1)) == 0) return value; return @intCast(Type, 1) << @intCast(IntLog2Type(Type), int_log2(Type, value - 1)); } test "pow2_round_down" { try std.testing.expectEqual(@as(u8, 0), pow2_round_down(u8, 0)); try std.testing.expectEqual(@as(u8, 1), pow2_round_down(u8, 1)); try std.testing.expectEqual(@as(u8, 2), pow2_round_down(u8, 2)); try std.testing.expectEqual(@as(u8, 2), pow2_round_down(u8, 3)); try std.testing.expectEqual(@as(u8, 4), pow2_round_down(u8, 4)); try std.testing.expectEqual(@as(u8, 4), pow2_round_down(u8, 5)); try std.testing.expectEqual(@as(u8, 4), pow2_round_down(u8, 6)); try std.testing.expectEqual(@as(u8, 4), pow2_round_down(u8, 7)); try std.testing.expectEqual(@as(u8, 8), pow2_round_down(u8, 8)); try std.testing.expectEqual(@as(u8, 8), pow2_round_down(u8, 9)); try std.testing.expectEqual(@as(u8, 16), pow2_round_down(u8, 16)); try std.testing.expectEqual(@as(u8, 16), pow2_round_down(u8, 17)); } /// Simple Pseudo-random number generator /// See https://en.wikipedia.org/wiki/Linear_congruential_generator pub fn Rand(comptime Type: type) type { return struct { const Self = @This(); const a: u64 = 6364136223846793005; const c: u64 = 1442695040888963407; seed: u64, pub fn get(self: *Self) Type { self.seed = a *% self.seed +% c; return @truncate(Type, self.seed); } }; } test "Rand" { var r = Rand(u64){.seed = 0}; try std.testing.expectEqual(@as(u64, 1442695040888963407), r.get()); try std.testing.expectEqual(@as(u64, 1876011003808476466), r.get()); try std.testing.expectEqual(@as(u64, 11166244414315200793), r.get()); } pub fn absolute(x: anytype) @TypeOf(x) { return if (x >= 0) x else -x; } pub fn Point(comptime TheNum: type) type { return struct { const Self = @This(); pub const Num = TheNum; x: Num = 0, y: Num = 0, pub fn as(self: *const Self, comptime NumType: type) Point(NumType) { return .{.x = @as(NumType, self.x), .y = @as(NumType, self.y)}; } pub fn intCast(self: *const Self, comptime NumType: type) Point(NumType) { return .{.x = @intCast(NumType, self.x), .y = @intCast(NumType, self.y)}; } pub fn plus_int(self: *const Self, comptime value: anytype) Self { return .{.x = self.x + value, .y = self.y + value}; } pub fn minus_int(self: *const Self, comptime value: anytype) Self { return .{.x = self.x - value, .y = self.y - value}; } pub fn multiply(self: *const Self, value: anytype) Self { return .{.x = self.x * value, .y = self.y * value}; } pub fn divide(self: *const Self, value: anytype) Self { return .{.x = self.x / value, .y = self.y / value}; } pub fn plus_point(self: *const Self, other: Self) Self { return .{.x = self.x + other.x, .y = self.y + other.y}; } pub fn minus_point(self: *const Self, other: Self) Self { return .{.x = self.x - other.x, .y = self.y - other.y}; } pub fn abs(self: *const Self) Self { return .{.x = absolute(self.x), .y = absolute(self.y)}; } pub fn eq(self: *const Self, other: Self) bool { return self.x == other.x and self.y == other.y; } }; } pub const U32Point = Point(u32); pub const I32Point = Point(i32); pub fn Point3d(comptime TheNum: type) type { return struct { const Self = @This(); pub const Num = TheNum; x: Num = 0, y: Num = 0, z: Num = 0, pub fn as(self: *const Self, comptime NumType: type) Point3d(NumType) { return .{ .x = @as(NumType, self.x), .y = @as(NumType, self.y), .z = @as(NumType, self.z) }; } pub fn plus_int(self: *const Self, value: anytype) Self { return .{.x = self.x + value, .y = self.y + value, .z = self.z + value}; } pub fn minus_int(self: *const Self, comptime value: anytype) Self { return .{.x = self.x - value, .y = self.y - value, .z = self.z - value}; } pub fn plus_point(self: *const Self, other: Self) Self { return .{.x = self.x + other.x, .y = self.y + other.y, .z = self.z + other.z}; } pub fn minus_point(self: *const Self, other: Self) Self { return .{.x = self.x - other.x, .y = self.y - other.y, .z = self.z - other.z}; } }; } pub const I32Point3d = Point3d(i32); pub fn Rect(comptime PosNum: type, comptime SizeNum: type) type { return struct { pub const Pos = Point(PosNum); pub const Size = Point(SizeNum); pos: Pos = .{}, size: Size = .{}, }; } pub const U32Rect = Rect(u32, u32); pub fn any_equal(a: anytype, b: @TypeOf(a)) bool { const Type = @TypeOf(a); const ti = @typeInfo(Type); return switch (ti) { .Union => |union_ti| { const Tag = union_ti.tag_type.?; const tag_ti = @typeInfo(Tag).Enum; const kind = @as(Tag, a); if (kind != @as(Tag, b)) { return false; } inline for (tag_ti.fields) |field| { if (kind == @intToEnum(Tag, field.value)) { return any_equal(@field(a, field.name), @field(b, field.name)); } } return false; }, .Pointer => |ptr_ti| { if (ptr_ti.size == .Slice and ptr_ti.child == u8) { return memory_compare(a, b); } else { return a == b; } }, .Struct => |struct_ti| { inline for (struct_ti.fields) |field| { if (!any_equal(@field(a, field.name), @field(b, field.name))) { return false; } } return true; }, else => { return a == b; }, }; } const AnyEqualTestValue = union(enum) { Int: u32, String1: []const u8, String2: []const u8, Pair: struct { x: u16, y: u16, }, Nil: void, fn eq(self: AnyEqualTestValue, other: AnyEqualTestValue) bool { return any_equal(self, other); } }; test "any_equal" { const Value = AnyEqualTestValue; const int1 = Value{.Int = 1}; const int2 = Value{.Int = 2}; const str1 = Value{.String1 = "hello"}; const str2 = Value{.String2 = "hello"}; const pair = Value{.Pair = .{.x = 4, .y = 8}}; const nil = Value{.Nil = .{}}; try std.testing.expect(int1.eq(int1)); try std.testing.expect(!int1.eq(int2)); try std.testing.expect(!int1.eq(str1)); try std.testing.expect(str1.eq(str1)); try std.testing.expect(!str1.eq(str2)); try std.testing.expect(nil.eq(nil)); try std.testing.expect(!nil.eq(int1)); try std.testing.expect(!pair.eq(int1)); try std.testing.expect(!pair.eq(.{.Pair = .{.x = 4, .y = 9}})); try std.testing.expect(pair.eq(.{.Pair = .{.x = 4, .y = 8}})); } pub const GenericWriter = struct { pub const GenericWriterError = error { GenericWriterError, }; impl: *anyopaque, write_fn_impl: fn(impl: *anyopaque, bytes: []const u8) GenericWriterError!usize, fn write_fn(self: *GenericWriter, bytes: []const u8) GenericWriterError!usize { return self.write_fn_impl(self.impl, bytes); } pub const Writer = std.io.Writer(*GenericWriter, GenericWriterError, write_fn); pub fn writer(self: *GenericWriter) Writer { return .{.context = self}; } }; pub fn GenericWriterImpl(comptime RealWriter: type) type { return struct { const Self = @This(); const GenericWriterError = GenericWriter.GenericWriterError; real_writer: *RealWriter = undefined, generic_writer: GenericWriter = undefined, fn write_fn_impl(impl: *anyopaque, bytes: []const u8) GenericWriterError!usize { const self = @ptrCast(*Self, @alignCast(@alignOf(Self), impl)); return self.real_writer.write(bytes) catch GenericWriterError.GenericWriterError; } pub fn init(self: *Self, real_writer: *RealWriter) void { self.* = .{ .real_writer = real_writer, .generic_writer = .{ .impl = @ptrCast(*anyopaque, self), .write_fn_impl = write_fn_impl, }, }; } pub fn writer(self: *Self) GenericWriter.Writer { return self.generic_writer.writer(); } }; } test "GenericWriter" { var ta = TestAlloc{}; defer ta.deinit(.Panic); errdefer ta.deinit(.NoPanic); const alloc = ta.alloc(); var sw = StringWriter.init(alloc); var real_writer = sw.writer(); var generic_writer_impl = GenericWriterImpl(@TypeOf(real_writer)){}; generic_writer_impl.init(&real_writer); const generic_writer = generic_writer_impl.writer(); try generic_writer.print("{} Hello {s}\n", .{1, "World"}); try generic_writer.print("{} Hello {s}\n", .{2, "again"}); const string = sw.get(); try std.testing.expectEqualStrings( \\1 Hello World \\2 Hello again \\ , string); alloc.free(string); } pub const GenericReader = struct { pub const GenericReaderError = error { GenericReaderError, }; impl: *anyopaque, read_fn_impl: fn(impl: *anyopaque, bytes: []u8) GenericReaderError!usize, fn read_fn(self: *GenericReader, bytes: []u8) GenericReaderError!usize { return self.read_fn_impl(self.impl, bytes); } pub const Reader = std.io.Reader(*GenericReader, GenericReaderError, read_fn); pub fn reader(self: *GenericReader) Reader { return .{.context = self}; } }; pub fn GenericReaderImpl(comptime RealReader: type) type { return struct { const Self = @This(); const GenericReaderError = GenericReader.GenericReaderError; real_reader: *RealReader = undefined, generic_reader: GenericReader = undefined, fn read_fn_impl(impl: *anyopaque, bytes: []u8) GenericReaderError!usize { const self = @ptrCast(*Self, @alignCast(@alignOf(RealReader), impl)); return self.real_reader.read(bytes) catch GenericReaderError.GenericReaderError; } pub fn init(self: *Self, real_reader: *RealReader) void { self.* = .{ .real_reader = real_reader, .generic_reader = .{ .impl = @ptrCast(*anyopaque, self), .read_fn_impl = read_fn_impl, }, }; } pub fn reader(self: *Self) GenericReader.Reader { return self.generic_reader.reader(); } }; } test "GenericReader" { var ta = TestAlloc{}; defer ta.deinit(.Panic); errdefer ta.deinit(.NoPanic); const alloc = ta.alloc(); var sr = StringReader{.string = "Hello world!"}; var real_reader = sr.reader(); var generic_reader_impl = GenericReaderImpl(@TypeOf(real_reader)){}; generic_reader_impl.init(&real_reader); const generic_reader = generic_reader_impl.reader(); var store = std.ArrayList(u8).init(alloc); defer store.deinit(); try generic_reader.readAllArrayList(&store, 16); const result = store.toOwnedSlice(); defer alloc.free(result); try std.testing.expectEqualStrings("Hello world!", result); }
0
repos/georgios/libs
repos/georgios/libs/utils/packed_array.zig
const std = @import("std"); const utils = @import("utils.zig"); const Error = utils.Error; pub fn PackedArray(comptime T: type, count: usize) type { const Traits = @typeInfo(T); const T2 = switch (Traits) { std.builtin.TypeId.Int => T, std.builtin.TypeId.Bool => u1, std.builtin.TypeId.Enum => |enum_type| enum_type.tag_type, else => @compileError("Invalid Type"), }; const is_enum = switch (Traits) { std.builtin.TypeId.Enum => true, else => false, }; return struct { const Self = @This(); const len = count; const Type = T; const InnerType = T2; const type_bit_size = utils.int_bit_size(InnerType); const Word = usize; const word_bit_size = utils.int_bit_size(Word); const WordShiftType = utils.IntLog2Type(Word); const values_per_word = word_bit_size / type_bit_size; const word_count = utils.align_up(count * type_bit_size, word_bit_size) / word_bit_size; const mask: Word = (1 << type_bit_size) - 1; contents: [word_count]Word = undefined, pub fn get(self: *const Self, index: usize) Error!Type { if (index >= len) { return Error.OutOfBounds; } const array_index = index / values_per_word; const shift = @intCast(WordShiftType, (index % values_per_word) * type_bit_size); const inner_value = @intCast(InnerType, (self.contents[array_index] >> shift) & mask); if (is_enum) { return @intToEnum(Type, inner_value); } else { return @bitCast(Type, inner_value); } } pub fn set(self: *Self, index: usize, value: Type) Error!void { if (index >= len) { return Error.OutOfBounds; } const array_index = index / values_per_word; const shift = @intCast(WordShiftType, (index % values_per_word) * type_bit_size); self.contents[array_index] = (self.contents[array_index] & ~(mask << shift)) | (@intCast(Word, @bitCast(InnerType, value)) << shift); } pub fn reset(self: *Self) void { for (self.contents[0..]) |*ptr| { ptr.* = 0; } } }; } fn test_PackedBoolArray(comptime size: usize) !void { var pa: PackedArray(bool, size) = undefined; pa.reset(); // Make sure get works try std.testing.expectEqual(false, try pa.get(0)); try std.testing.expectEqual(false, try pa.get(1)); try std.testing.expectEqual(false, try pa.get(size - 3)); try std.testing.expectEqual(false, try pa.get(size - 2)); try std.testing.expectEqual(false, try pa.get(size - 1)); // Set and unset the first bit and check it try pa.set(0, true); try std.testing.expectEqual(true, try pa.get(0)); try pa.set(0, false); try std.testing.expectEqual(false, try pa.get(0)); // Set a spot near the end try pa.set(size - 2, true); try std.testing.expectEqual(false, try pa.get(0)); try std.testing.expectEqual(false, try pa.get(1)); try std.testing.expectEqual(false, try pa.get(size - 3)); try std.testing.expectEqual(true, try pa.get(size - 2)); try std.testing.expectEqual(false, try pa.get(size - 1)); // Invalid Operations try std.testing.expectError(Error.OutOfBounds, pa.get(size)); try std.testing.expectError(Error.OutOfBounds, pa.get(size + 100)); try std.testing.expectError(Error.OutOfBounds, pa.set(size, true)); try std.testing.expectError(Error.OutOfBounds, pa.set(size + 100, true)); } test "PackedArray" { try test_PackedBoolArray(5); try test_PackedBoolArray(8); try test_PackedBoolArray(13); try test_PackedBoolArray(400); // Int Type { var pa: PackedArray(u7, 9) = undefined; pa.reset(); try pa.set(0, 13); try std.testing.expectEqual(@as(u7, 13), try pa.get(0)); try pa.set(1, 12); try std.testing.expectEqual(@as(u7, 12), try pa.get(1)); try std.testing.expectEqual(@as(u7, 13), try pa.get(0)); try pa.set(8, 47); try std.testing.expectEqual(@as(u7, 47), try pa.get(8)); } // Enum Type { const Type = enum (u2) { a, b, c, d, }; var pa: PackedArray(Type, 9) = undefined; pa.reset(); try pa.set(0, .a); try std.testing.expectEqual(Type.a, try pa.get(0)); try pa.set(1, .b); try std.testing.expectEqual(Type.b, try pa.get(1)); try std.testing.expectEqual(Type.a, try pa.get(0)); try pa.set(8, .d); try std.testing.expectEqual(Type.d, try pa.get(8)); } }
0
repos/georgios/libs
repos/georgios/libs/utils/mem.zig
const std = @import("std"); const utils = @import("utils.zig"); const Error = utils.Error; /// Returns true if the contents of the slices `a` and `b` are the same. pub fn memory_compare(a: []const u8, b: []const u8) callconv(.Inline) bool { if (a.len != b.len) return false; for (a[0..]) |value, i| { if (value != b[i]) return false; } return true; } /// Copy contents from `source` to `destination`. /// /// If `source.len > destination.len` then the copy is truncated. pub fn memory_copy_truncate(destination: []u8, source: []const u8) callconv(.Inline) usize { const size = @minimum(destination.len, source.len); for (destination[0..size]) |*ptr, i| { ptr.* = source[i]; } return size; } pub fn memory_copy_error(destination: []u8, source: []const u8) callconv(.Inline) Error!usize { if (destination.len < source.len) { return Error.NotEnoughDestination; } const size = source.len; for (destination[0..size]) |*ptr, i| { ptr.* = source[i]; } return size; } pub fn memory_copy_anyptr(destination: []u8, source: anytype) callconv(.Inline) void { const s = @ptrCast([*]const u8, source); for (destination[0..]) |*ptr, i| { ptr.* = s[i]; } } /// Set all the elements of `destination` to `value`. pub fn memory_set(destination: []u8, value: u8) callconv(.Inline) void { for (destination[0..]) |*ptr| { ptr.* = value; } } pub fn to_bytes(value: anytype) callconv(.Inline) []u8 { const Type = @TypeOf(value); const Traits = @typeInfo(Type); switch (Traits) { std.builtin.TypeId.Pointer => |pointer_type| { const count = switch (pointer_type.size) { .One => 1, .Slice => value.len, else => { @compileLog("Unsupported Type is ", @typeName(Type)); @compileError("Unsupported Type"); } }; return @ptrCast([*]u8, value)[0.. @sizeOf(pointer_type.child) * count]; }, else => { @compileLog("Unsupported Type is ", @typeName(Type)); @compileError("Unsupported Type"); } } } pub fn to_const_bytes(value: anytype) callconv(.Inline) []const u8 { const Type = @TypeOf(value); const Traits = @typeInfo(Type); switch (Traits) { std.builtin.TypeId.Pointer => |pointer_type| { const count = switch (pointer_type.size) { .One => 1, .Slice => value.len, else => { @compileLog("Unsupported Type is ", @typeName(Type)); @compileError("Unsupported Type"); } }; return @ptrCast([*]const u8, value)[0.. @sizeOf(pointer_type.child) * count]; }, else => { @compileLog("Unsupported Type is ", @typeName(Type)); @compileError("Unsupported Type"); } } } pub fn empty_slice(comptime Type: type, ptr: anytype) callconv(.Inline) []const Type { const PtrType = @TypeOf(ptr); var rv: []const Type = undefined; rv.len = 0; rv.ptr = switch (@typeInfo(PtrType)) { std.builtin.TypeId.Pointer => @ptrCast([*]const Type, ptr), std.builtin.TypeId.Int => @intToPtr([*]const Type, ptr), std.builtin.TypeId.ComptimeInt => @intToPtr([*]const Type, @as(usize, ptr)), else => { @compileLog("Unsupported Type is ", @typeName(PtrType)); @compileError("Unsupported Type"); } }; return rv; } pub const TestAlloc = struct { impl: std.heap.GeneralPurposeAllocator(.{}) = .{}, has_deinit: bool = false, pub fn alloc(self: *TestAlloc) std.mem.Allocator { return self.impl.allocator(); } pub const ShouldPanic = enum { Panic, NoPanic, }; pub fn deinit(self: *TestAlloc, should_panic: ShouldPanic) void { if (!self.has_deinit) { const leaks = self.impl.deinit(); if (should_panic == .Panic) { std.testing.expect(!leaks) catch @panic("leak(s) detected"); } self.has_deinit = true; } } }; test "TestAlloc example usage" { var ta = utils.TestAlloc{}; defer ta.deinit(.Panic); errdefer ta.deinit(.NoPanic); const alloc = ta.alloc(); const int = try alloc.create(u32); int.* = 13; alloc.destroy(int); }
0
repos/georgios/libs
repos/georgios/libs/utils/AnsiEscProcessor.zig
// ANSI Escape Code Sequence Processor ======================================== // // Takes a stream of bytes and interprets it if it responds to recognized ANSI // escape codes by invoking callbacks. // // More Info: // https://en.wikipedia.org/wiki/ANSI_escape_code // https://vt100.net/docs/vt100-ug/chapter3.html // https://gist.github.com/fnky/458719343aabd01cfb17a3a4f7296797 // ============================================================================ const std = @import("std"); const Self = @This(); pub const HexColor = enum(u4) { White = 15, // Spec name is bright white LightGray = 7, // Spec name is white DarkGray = 8, // Spec name is bright black Black = 0, Red = 1, Green = 2, Yellow = 3, Blue = 4, Magenta = 5, Cyan = 6, LightRed = 9, LightGreen = 10, LightYellow = 11, LightBlue = 12, LightMagenta = 13, LightCyan = 14, }; pub const Layer = enum { Foreground, Background, }; const State = enum { Unescaped, Escaped, Csi, }; print_char: ?fn(self: *Self, char: u8) void = null, hex_color: ?fn(self: *Self, color: HexColor, layer: Layer) void = null, invert_colors: ?fn(self: *Self) void = null, backspace: ?fn(self: *Self) void = null, newline: ?fn(self: *Self) void = null, use_default_color: ?fn(self: *Self, layer: Layer) void = null, reset_attributes: ?fn(self: *Self) void = null, reset_terminal: ?fn(self: *Self) void = null, move_cursor: ?fn(self: *Self, r: usize, c: usize) void = null, show_cursor: ?fn(self: *Self, show: bool) void = null, state: State = .Unescaped, saved: [64]u8 = undefined, parameter_start: ?usize = null, parameters: [16]usize = undefined, parameter_count: usize = 0, saved_so_far: usize = 0, malformed_sequences: usize = 0, fn process_parameter(self: *Self) bool { var parameter: ?u16 = null; if (self.parameter_start) |start| { const parameter_str = self.saved[start..self.saved_so_far]; parameter = std.fmt.parseUnsigned(u16, parameter_str, 10) catch null; self.parameter_start = null; } else { // empty parameter parameter = 0; } if (parameter) |p| { // std.debug.print("Parameter: {}\n", .{p}); if (self.parameter_count < self.parameters.len) { self.parameters[self.parameter_count] = p; self.parameter_count += 1; } else { return true; } } return parameter == null; } fn select_graphic_rendition(self: *Self) void { var i: usize = 0; while (i < self.parameter_count) { const p = self.parameters[i]; // std.debug.print("SGR: {}\n", .{p}); switch (p) { 0 => if (self.reset_attributes) |reset_attributes| reset_attributes(self), 7 => if (self.invert_colors) |invert_colors| invert_colors(self), 30...37 => if (self.hex_color) |hex_color| hex_color(self, @intToEnum(HexColor, p - 30), .Foreground), 39 => if (self.use_default_color) |use_default_color| use_default_color(self, .Foreground), 40...47 => if (self.hex_color) |hex_color| hex_color(self, @intToEnum(HexColor, p - 40), .Background), 49 => if (self.use_default_color) |use_default_color| use_default_color(self, .Background), 90...97 => if (self.hex_color) |hex_color| hex_color(self, @intToEnum(HexColor, p - 82), .Foreground), 100...107 => if (self.hex_color) |hex_color| hex_color(self, @intToEnum(HexColor, p - 92), .Background), else => {}, } i += 1; } } fn process_move_cursor(self: *Self) void { var column: usize = 0; if (self.parameter_count > 1) { column = self.parameters[1]; } var row: usize = 0; if (self.parameter_count > 0) { row = self.parameters[0]; } if (self.move_cursor) |move_cursor| { move_cursor(self, row, column); } } pub fn feed_char(self: *Self, char: u8) void { self.saved[self.saved_so_far] = char; // std.debug.print("feed_char {c}\n", .{char}); var abort = false; var reset = false; switch (self.state) { .Unescaped => { reset = true; switch (char) { 0x08 => if (self.backspace) |backspace| backspace(self), '\n' => { if (self.newline) |newline| { newline(self); } else if (self.print_char) |print_char| { print_char(self, char); } }, 0x1b => { self.state = .Escaped; reset = false; }, else => { if (self.print_char) |print_char| { print_char(self, char); } }, } }, .Escaped => { switch (char) { '[' => self.state = .Csi, 'c' => { if (self.reset_terminal) |reset_terminal| { reset_terminal(self); } reset = true; }, else => abort = true, } }, .Csi => { switch (char) { '0'...'9' => { if (self.parameter_start == null) { self.parameter_start = self.saved_so_far; } }, '?' => { // TODO // private = true; }, ';' => { abort = self.process_parameter(); }, 'm' => { abort = self.process_parameter(); if (!abort) { self.select_graphic_rendition(); reset = true; } }, 'H' => { abort = self.process_parameter(); if (!abort) { self.process_move_cursor(); reset = true; } }, 'l' => { // if (self.parameter_count == 1 and self.parameters[0] == 25) { if (self.show_cursor) |show_cursor| { show_cursor(self, false); } reset = true; // } else { // abort = true; // } }, else => abort = true, } }, } self.saved_so_far += 1; reset = reset or abort; // If we're not gonna reset, abort if we're gonna be outa room on the // next character. if (!reset and self.saved_so_far == self.saved.len) { abort = true; } if (abort) { // std.debug.print("Abort\n", .{}); if (self.print_char) |print_char| { // Dump the malformed sequence. Seems to be what Gnome's terminal does. for (self.saved[0..self.saved_so_far]) |c| { print_char(self, c); } } self.malformed_sequences += 1; } if (reset) { self.parameter_count = 0; self.state = .Unescaped; self.saved_so_far = 0; self.parameter_start = null; } // std.debug.print("state {s}\n", .{@tagName(self.state)}); } pub fn feed_str(self: *Self, str: []const u8) void { for (str) |char| { self.feed_char(char); } } // Testing ==================================================================== var test_print_char_buffer: [128]u8 = undefined; var test_print_char_got: usize = 0; fn test_print_char(self: *Self, char: u8) void { _ = self; test_print_char_buffer[test_print_char_got] = char; test_print_char_got += 1; } fn test_print_str(self: *Self, str: []const u8) void { for (str) |c| test_print_char(self, c); } fn test_reset(self: *Self) void { test_print_str(self, "[RESET]"); } fn test_invert_colors(self: *Self) void { test_print_str(self, "[INVERT]"); } fn test_hex_color(self: *Self, color: HexColor, layer: Layer) void { test_print_char(self, '['); test_print_char(self, if (layer == .Background) 'B' else 'F'); test_print_str(self, "G_COLOR("); test_print_char(self, switch (color) { .LightRed => 'R', .Green => 'g', else => '?', }); test_print_str(self, ")]"); } fn test_use_default_color(self: *Self, layer: Layer) void { test_print_str(self, "[DEFAULT_"); test_print_char(self, if (layer == .Background) 'B' else 'F'); test_print_str(self, "G]"); } test "AnsiEscProcessor" { var esc = Self{ .print_char = test_print_char, .use_default_color = test_use_default_color, .reset_attributes = test_reset, .invert_colors = test_invert_colors, .hex_color = test_hex_color, }; esc.feed_str("Hello \x1b[7mBob\x1b[0m \x1b[91;42mGoodbye"); try std.testing.expectEqualStrings( "Hello [INVERT]Bob[RESET] [FG_COLOR(R)][BG_COLOR(g)]Goodbye", test_print_char_buffer[0..test_print_char_got]); try std.testing.expectEqual(@as(usize, 0), esc.malformed_sequences); test_print_char_got = 0; esc.feed_str("\x1b[91m<<<\x1b[39;49m\x1b[101;32m1\x1b[39;49m"); try std.testing.expectEqualStrings( "[FG_COLOR(R)]<<<[DEFAULT_FG][DEFAULT_BG]" ++ "[BG_COLOR(R)][FG_COLOR(g)]1[DEFAULT_FG][DEFAULT_BG]", test_print_char_buffer[0..test_print_char_got]); try std.testing.expectEqual(@as(usize, 0), esc.malformed_sequences); // TODO: More Tests }
0
repos/georgios/libs
repos/georgios/libs/utils/test.zig
test "utils test root" { _ = @import("utils.zig"); _ = @import("unicode.zig"); _ = @import("Guid.zig"); _ = @import("AnsiEscProcessor.zig"); _ = @import("ToString.zig"); _ = @import("Cksum.zig"); _ = @import("WordIterator.zig"); _ = @import("Bdf.zig"); _ = @import("list.zig"); _ = @import("circular_buffer.zig"); _ = @import("str.zig"); _ = @import("packed_array.zig"); _ = @import("mem.zig"); _ = @import("bmp.zig"); _ = @import("Regex.zig"); }
0
repos/georgios/libs
repos/georgios/libs/utils/Guid.zig
const std = @import("std"); const utils = @import("utils.zig"); pub const Error = utils.Error; const Guid = @This(); pub const size = 16; pub const string_size = 36; data: [size]u8 = undefined, pub fn is_null(self: *const Guid) bool { for (self.data) |b| { if (b != 0) { return false; } } return true; } pub fn equals(a: *const Guid, b: *const Guid) bool { return utils.memory_compare(&a.data, &b.data); } pub fn from_be(self: *Guid, source: []const u8) Error!void { if (source.len < size) { return Error.NotEnoughSource; } // 00112233-4455-6677-8899-AABBCCDDEEFF for (source[0..size]) |*ptr, i| { self.data[i] = ptr.*; } } pub fn new_from_be(source: []const u8) Error!Guid { var guid = Guid{}; try guid.from_be(source); return guid; } pub fn to_be(self: *const Guid, destination: []u8) Error!void { if (destination.len < size) { return Error.NotEnoughDestination; } for (destination[0..size]) |*ptr, i| { ptr.* = self.data[i]; } } pub fn from_ms(self: *Guid, source: []const u8) Error!void { if (source.len < size) { return Error.NotEnoughSource; } // 33221100-5544-7766-8899-AABBCCDDEEFF self.data[0x0] = source[0x3]; self.data[0x1] = source[0x2]; self.data[0x2] = source[0x1]; self.data[0x3] = source[0x0]; self.data[0x4] = source[0x5]; self.data[0x5] = source[0x4]; self.data[0x6] = source[0x7]; self.data[0x7] = source[0x6]; self.data[0x8] = source[0x8]; self.data[0x9] = source[0x9]; self.data[0xa] = source[0xa]; self.data[0xb] = source[0xb]; self.data[0xc] = source[0xc]; self.data[0xd] = source[0xd]; self.data[0xe] = source[0xe]; self.data[0xf] = source[0xf]; } pub fn new_from_ms(source: []const u8) Error!Guid { var guid = Guid{}; try guid.from_ms(source); return guid; } pub fn to_ms(self: *const Guid, destination: []u8) Error!void { if (destination.len < size) { return Error.NotEnoughDestination; } destination[0x3] = self.data[0x0]; destination[0x2] = self.data[0x1]; destination[0x1] = self.data[0x2]; destination[0x0] = self.data[0x3]; destination[0x5] = self.data[0x4]; destination[0x4] = self.data[0x5]; destination[0x7] = self.data[0x6]; destination[0x6] = self.data[0x7]; destination[0x8] = self.data[0x8]; destination[0x9] = self.data[0x9]; destination[0xa] = self.data[0xa]; destination[0xb] = self.data[0xb]; destination[0xc] = self.data[0xc]; destination[0xd] = self.data[0xd]; destination[0xe] = self.data[0xe]; destination[0xf] = self.data[0xf]; } pub fn to_string(self: *const Guid, buffer: []u8) Error!void { if (buffer.len < string_size) { return Error.NotEnoughDestination; } utils.byte_buffer(buffer[0..], self.data[0x0]); utils.byte_buffer(buffer[2..], self.data[0x1]); utils.byte_buffer(buffer[4..], self.data[0x2]); utils.byte_buffer(buffer[6..], self.data[0x3]); buffer[8] = '-'; utils.byte_buffer(buffer[9..], self.data[0x4]); utils.byte_buffer(buffer[11..], self.data[0x5]); buffer[13] = '-'; utils.byte_buffer(buffer[14..], self.data[0x6]); utils.byte_buffer(buffer[16..], self.data[0x7]); buffer[18] = '-'; utils.byte_buffer(buffer[19..], self.data[0x8]); utils.byte_buffer(buffer[21..], self.data[0x9]); buffer[23] = '-'; utils.byte_buffer(buffer[24..], self.data[0xa]); utils.byte_buffer(buffer[26..], self.data[0xb]); utils.byte_buffer(buffer[28..], self.data[0xc]); utils.byte_buffer(buffer[30..], self.data[0xd]); utils.byte_buffer(buffer[32..], self.data[0xe]); utils.byte_buffer(buffer[34..], self.data[0xf]); } const test_guid_source = "\x28\x73\x2a\xc1\x1f\xf8\xd2\x11\xba\x4b\x00\xa0\xc9\x3e\xc9\x3b"; test "MS Guid" { const guid = try new_from_ms(test_guid_source); var guid_string: [string_size]u8 = undefined; try guid.to_string(guid_string[0..]); try std.testing.expectEqualStrings("c12a7328-f81f-11d2-ba4b-00a0c93ec93b", &guid_string); var guid_dst: [size]u8 = undefined; try guid.to_ms(&guid_dst); try std.testing.expectEqualSlices(u8, test_guid_source, &guid_dst); } test "BE Guid" { const guid = try new_from_be(test_guid_source); var guid_string: [string_size]u8 = undefined; try guid.to_string(guid_string[0..]); try std.testing.expectEqualStrings("28732ac1-1ff8-d211-ba4b-00a0c93ec93b", &guid_string); var guid_dst: [size]u8 = undefined; try guid.to_be(&guid_dst); try std.testing.expectEqualSlices(u8, test_guid_source, &guid_dst); }
0
repos/georgios/libs
repos/georgios/libs/utils/circular_buffer.zig
const std = @import("std"); /// What to discard if there is no more room. const CircularBufferDiscard = enum { DiscardNewest, DiscardOldest, }; pub fn CircularBuffer( comptime Type: type, len_arg: usize, discard: CircularBufferDiscard) type { return struct { const Self = @This(); const max_len = len_arg; contents: [max_len]Type = undefined, start: usize = 0, len: usize = 0, pub fn reset(self: *Self) void { self.start = 0; self.len = 0; } fn wrapped_offset(pos: usize, offset: usize) callconv(.Inline) usize { return (pos + offset) % max_len; } fn increment(pos: *usize) callconv(.Inline) void { pos.* = wrapped_offset(pos.*, 1); } pub fn push(self: *Self, value: Type) void { if (self.len == max_len) { if (discard == .DiscardNewest) { return; } else { // DiscardOldest increment(&self.start); } } else { self.len += 1; } self.contents[wrapped_offset(self.start, self.len - 1)] = value; } pub fn pop(self: *Self) ?Type { if (self.len == 0) return null; self.len -= 1; defer increment(&self.start); return self.contents[self.start]; } pub fn get(self: *const Self, offset: usize) ?Type { if (offset >= self.len) return null; return self.contents[wrapped_offset(self.start, offset)]; } pub fn peek_start(self: *const Self) ?Type { return self.get(0); } pub fn peek_end(self: *const Self) ?Type { if (self.len == 0) return null; return self.get(self.len - 1); } }; } fn test_circular_buffer(comptime discard: CircularBufferDiscard) !void { var buffer = CircularBuffer(usize, 4, discard){}; const nil: ?usize = null; // Empty try std.testing.expectEqual(@as(usize, 0), buffer.len); try std.testing.expectEqual(nil, buffer.pop()); try std.testing.expectEqual(nil, buffer.peek_start()); try std.testing.expectEqual(nil, buffer.get(0)); try std.testing.expectEqual(nil, buffer.peek_end()); // Push Some Values buffer.push(1); try std.testing.expectEqual(@as(usize, 1), buffer.len); try std.testing.expectEqual(@as(usize, 1), buffer.peek_start().?); try std.testing.expectEqual(@as(usize, 1), buffer.peek_end().?); buffer.push(2); try std.testing.expectEqual(@as(usize, 2), buffer.peek_end().?); buffer.push(3); try std.testing.expectEqual(@as(usize, 3), buffer.peek_end().?); try std.testing.expectEqual(@as(usize, 3), buffer.len); // Test get try std.testing.expectEqual(@as(usize, 1), buffer.get(0).?); try std.testing.expectEqual(@as(usize, 2), buffer.get(1).?); try std.testing.expectEqual(@as(usize, 3), buffer.get(2).?); try std.testing.expectEqual(nil, buffer.get(3)); // Pop The Values try std.testing.expectEqual(@as(usize, 1), buffer.peek_start().?); try std.testing.expectEqual(@as(usize, 1), buffer.pop().?); try std.testing.expectEqual(@as(usize, 2), buffer.peek_start().?); try std.testing.expectEqual(@as(usize, 2), buffer.pop().?); try std.testing.expectEqual(@as(usize, 3), buffer.peek_start().?); try std.testing.expectEqual(@as(usize, 3), buffer.pop().?); // It's empty again try std.testing.expectEqual(@as(usize, 0), buffer.len); try std.testing.expectEqual(nil, buffer.pop()); try std.testing.expectEqual(nil, buffer.peek_start()); try std.testing.expectEqual(nil, buffer.get(0)); try std.testing.expectEqual(nil, buffer.peek_end()); // Fill it past capacity buffer.push(5); try std.testing.expectEqual(@as(usize, 5), buffer.peek_end().?); buffer.push(4); try std.testing.expectEqual(@as(usize, 4), buffer.peek_end().?); buffer.push(3); try std.testing.expectEqual(@as(usize, 3), buffer.peek_end().?); buffer.push(2); try std.testing.expectEqual(@as(usize, 2), buffer.peek_end().?); buffer.push(1); if (discard == .DiscardOldest) { try std.testing.expectEqual(@as(usize, 1), buffer.peek_end().?); } try std.testing.expectEqual(@as(usize, 4), buffer.len); // Test get var index: usize = 0; if (discard == .DiscardNewest) { try std.testing.expectEqual(@as(usize, 5), buffer.get(index).?); index += 1; } try std.testing.expectEqual(@as(usize, 4), buffer.get(index).?); index += 1; try std.testing.expectEqual(@as(usize, 3), buffer.get(index).?); index += 1; try std.testing.expectEqual(@as(usize, 2), buffer.get(index).?); index += 1; if (discard == .DiscardOldest) { try std.testing.expectEqual(@as(usize, 1), buffer.get(index).?); index += 1; } try std.testing.expectEqual(nil, buffer.get(index)); // Pop The Values if (discard == .DiscardNewest) { try std.testing.expectEqual(@as(usize, 5), buffer.peek_start().?); try std.testing.expectEqual(@as(usize, 5), buffer.pop().?); } try std.testing.expectEqual(@as(usize, 4), buffer.peek_start().?); try std.testing.expectEqual(@as(usize, 4), buffer.pop().?); try std.testing.expectEqual(@as(usize, 3), buffer.pop().?); try std.testing.expectEqual(@as(usize, 2), buffer.peek_start().?); try std.testing.expectEqual(@as(usize, 2), buffer.pop().?); if (discard == .DiscardOldest) { try std.testing.expectEqual(@as(usize, 1), buffer.peek_start().?); try std.testing.expectEqual(@as(usize, 1), buffer.pop().?); } // It's empty yet again try std.testing.expectEqual(@as(usize, 0), buffer.len); try std.testing.expectEqual(nil, buffer.pop()); try std.testing.expectEqual(nil, buffer.peek_start()); try std.testing.expectEqual(nil, buffer.get(0)); try std.testing.expectEqual(nil, buffer.peek_end()); } test "CircularBuffer(.DiscardNewest)" { try test_circular_buffer(.DiscardNewest); } test "CircularBuffer(.DiscardOldest)" { try test_circular_buffer(.DiscardOldest); }
0
repos/georgios/libs
repos/georgios/libs/utils/WordIterator.zig
const Self = @This(); const std = @import("std"); const utils = @import("utils.zig"); const Error = utils.Error; input: []const u8, buffer: ?[]u8 = null, start: usize = 0, sep: u8 = ' ', quote: ?u8 = null, escape: u8 = '\\', escaped: bool = undefined, quoted: bool = undefined, fn process_char(self: *Self, c: u8) bool { if (self.quote) |quote| { if (self.escaped) { self.escaped = false; } else if (c == self.escape) { self.escaped = true; return false; // Don't keep escape char } else if (c == quote) { self.quoted = !self.quoted; return false; // Don't keep quote char } } return true; } /// Allow caller to process words as they want, then they can call /// postprocess to clean up remaining quotes and escapes. pub fn next_unprocessed(self: *Self) ?[]const u8 { var this_start = self.start; var end = self.input.len; if (this_start >= end) return null; // Skip past any leading seperators for (self.input[this_start..]) |c| { if (c != self.sep) { break; } this_start += 1; } self.escaped = false; self.quoted = false; for (self.input[this_start..]) |c, i| { _ = self.process_char(c); if (!(self.escaped or self.quoted) and c == self.sep) { end = this_start + i; break; } } self.start = end; if (this_start >= end) return null; return self.input[this_start..end]; } pub fn postprocess(self: *Self, word: []const u8) Error![]const u8 { if (self.quote == null or self.buffer == null) return word; const buffer = self.buffer.?; self.escaped = false; self.quoted = false; var i: usize = 0; for (word) |c| { if (self.process_char(c)) { if (i >= buffer.len) return Error.NotEnoughDestination; buffer[i] = c; i += 1; } } return buffer[0..i]; } pub fn next(self: *Self) Error!?[]const u8 { if (self.next_unprocessed()) |word| { return try self.postprocess(word); } return null; } fn test_word_iterator(word_it: *Self, expected: ?[]const u8) !void { const word_maybe = try word_it.next(); try std.testing.expect((expected == null) == (word_maybe == null)); if (word_maybe) |word| { try std.testing.expectEqualStrings(expected.?, word); } } var test_buffer: [32]u8 = undefined; test "WordIterator simple" { { var word_it = Self{.input = "", .buffer = test_buffer[0..]}; try test_word_iterator(&word_it, null); } { var word_it = Self{.input = "ABC"}; try test_word_iterator(&word_it, "ABC"); try test_word_iterator(&word_it, null); } { var word_it = Self{.input = "A BCD E", .buffer = test_buffer[0..]}; try test_word_iterator(&word_it, "A"); try test_word_iterator(&word_it, "BCD"); try test_word_iterator(&word_it, "E"); try test_word_iterator(&word_it, null); } { var word_it = Self{.input = " A BCD E "}; try test_word_iterator(&word_it, "A"); try test_word_iterator(&word_it, "BCD"); try test_word_iterator(&word_it, "E"); try test_word_iterator(&word_it, null); } } test "WordIterator complex with no postprocess" { var word_it = Self{ .quote = '\'', .input = " '' 'A' ' ' 'B'' ' \\C \\' ' EF\\' ' " }; try test_word_iterator(&word_it, "''"); try test_word_iterator(&word_it, "'A'"); try test_word_iterator(&word_it, "' '"); try test_word_iterator(&word_it, "'B'' '"); try test_word_iterator(&word_it, "\\C"); try test_word_iterator(&word_it, "\\'"); try test_word_iterator(&word_it, "' EF\\' '"); try test_word_iterator(&word_it, null); } test "WordIterator complex with postprocess" { var word_it = Self{ .quote = '\'', .input = " '' 'A' ' ' 'B'' ' \\C \\' ' EF\\' ' ", .buffer = test_buffer[0..], }; try test_word_iterator(&word_it, ""); try test_word_iterator(&word_it, "A"); try test_word_iterator(&word_it, " "); try test_word_iterator(&word_it, "B "); try test_word_iterator(&word_it, "C"); try test_word_iterator(&word_it, "'"); try test_word_iterator(&word_it, " EF' "); try test_word_iterator(&word_it, null); }
0
repos/georgios/libs
repos/georgios/libs/utils/Regex.zig
// =========================================================================== // A VM-based Regular Expression Engine // =========================================================================== // // VM concept based on https://swtch.com/~rsc/regexp/regexp2.html // // Supports: // . ^ $ ? // Groups // // Reference: // https://en.wikipedia.org/wiki/Regular_expression const std = @import("std"); const utils = @import("utils"); pub const Error = error { RegexIsInvalid, RegexRecursionLimit, OutOfMemory, }; fn View(comptime Type: type) type { return struct { const Self = @This(); items: []const Type = undefined, pos: usize = 0, fn init(self: *Self, items: []const Type) void { self.* = .{.items = items}; } fn limit(self: *Self, by: ?usize) void { if (by) |limit_by| { self.items = self.items[0..self.pos + limit_by + 1]; } } fn done(self: *const Self) bool { return self.pos >= self.items.len; } fn first(self: *const Self) bool { return self.items.len > 0 and self.pos == 0; } fn last(self: *const Self) bool { return self.items.len > 0 and self.pos == self.items.len - 1; } fn get(self: *const Self) ?Type { return if (self.done()) null else self.items[self.pos]; } fn inc(self: *Self) void { if (!self.done()) self.pos += 1; } fn move(self: *Self, by: isize) void { const new_pos = @intCast(usize, @intCast(isize, self.pos) + by); if (new_pos > self.items.len) { @panic("Moved to invalid position"); } self.pos = new_pos; } fn seen(self: *Self, from: usize) []const Type { return self.items[from..self.pos]; } fn consume_exact(self: *Self, expected: []const u8) bool { const len = @minimum(self.items.len - self.pos, expected.len); if (std.mem.eql(u8, self.items[self.pos..self.pos + len], expected)) { self.pos += len; return true; } return false; } }; } const StrView = View(u8); const Inst = union (enum) { matched: enum { InputCanContinue, InputMustBeDone, }, literal: u8, any, jump: isize, split: [2]isize, repeat: struct { len: u16, min: u32 = 0, max: u32 = std.math.maxInt(u32), }, }; pub const CompiledRegex = []const Inst; const Vm = struct { insts: CompiledRegex, level_limit: ?u16 = null, const Context = struct { layer: u16 = 0, insts: View(Inst) = .{}, match_at_end: bool = false, input: StrView = .{}, fn init(self: *Context, insts: CompiledRegex, input: []const u8) void { self.insts.init(insts); self.input.init(input); } fn new_nested(self: *const Context, inst_offset: isize, match_after: ?usize) Context { var copy = self.*; copy.layer += 1; copy.insts.limit(match_after); copy.match_at_end = match_after != null; copy.jump(inst_offset); return copy; } fn inst(self: *const Context) ?Inst { return self.insts.get(); } fn jump(self: *Context, by: isize) void { if (by == 0) { @panic("Trying to jump by 0"); } self.insts.move(by); } fn char(self: *const Context) ?u8 { return self.input.get(); } fn matched(self: *const Context, yes: bool) ?usize { return if (yes) self.input.pos else null; } fn status(self: *const Context) void { std.debug.print("L{} inst: {s}@{} char ", .{self.layer, @tagName(self.inst().?), self.insts.pos}); if (self.char()) |c| { std.debug.print("'{c}' @ {}\n", .{c, self.input.pos}); } else { std.debug.print("end @ {}\n", .{self.input.pos}); } } }; fn matches_i(self: *const Vm, ctx: *Context) Error!?usize { if (self.level_limit) |level_limit| { if (ctx.layer > level_limit) { return Error.RegexRecursionLimit; } } while (true) { const inst = ctx.inst() orelse if (ctx.match_at_end) return ctx.matched(true) else @panic("Missing matched instruction?"); // ctx.status(); switch (inst) { .matched => |cond| return ctx.matched((cond == .InputCanContinue) or ctx.input.done()), .literal => |expected_char| { if (ctx.char()) |char| { if (char != expected_char) return ctx.matched(false); ctx.insts.inc(); ctx.input.inc(); } else return ctx.matched(false); }, .any => { if (ctx.input.done()) return ctx.matched(false); ctx.insts.inc(); ctx.input.inc(); }, .jump => |by| ctx.jump(by), .split => |branches| { var nested = ctx.new_nested(branches[1], null); if (try self.matches_i(&nested)) |input_pos| { return input_pos; } ctx.jump(branches[0]); }, .repeat => |loop| { // std.debug.print(" L{} repeat: {} len {} -> {}\n", .{ctx.layer, loop.len, loop.min, loop.max}); var count: u32 = 0; while (count < loop.max) { // std.debug.print(" L{} repeat: #{} @ {}\n", .{ctx.layer, count, ctx.input.pos}); var nested = ctx.new_nested(1, loop.len); if (try self.matches_i(&nested)) |input_pos| { ctx.input.pos = input_pos; count += 1; } else break; } // std.debug.print(" L{} repeat done after {}\n", .{ctx.layer, count}); if (count < loop.min) return ctx.matched(false); ctx.jump(loop.len + 1); }, } } } fn matches(self: *const Vm, input: []const u8) Error!bool { var ctx = Context{}; ctx.init(self.insts, input); return (try self.matches_i(&ctx)) != null; } fn expect_matches(self: *const Vm, expect_match: bool, input: []const u8) !void { try std.testing.expect(expect_match == (try self.matches(input))); } }; test "Vm" { // s// { const insts = [_]Inst{ .{.matched = .InputCanContinue}, }; const vm = Vm{.insts = insts[0..]}; try vm.expect_matches(true, ""); try vm.expect_matches(true, "a"); try vm.expect_matches(true, "anything"); } // s/^a/ { const insts = [_]Inst{ .{.literal = 'a'}, .{.matched = .InputCanContinue}, }; const vm = Vm{.insts = insts[0..]}; try vm.expect_matches(false, ""); try vm.expect_matches(false, "b"); try vm.expect_matches(true, "a"); try vm.expect_matches(true, "abc"); } // s/^abc/ { const insts = [_]Inst{ .{.literal = 'a'}, .{.literal = 'b'}, .{.literal = 'c'}, .{.matched = .InputCanContinue}, }; const vm = Vm{.insts = insts[0..]}; try vm.expect_matches(false, ""); try vm.expect_matches(false, "a"); try vm.expect_matches(false, "ab"); try vm.expect_matches(true, "abc"); try vm.expect_matches(true, "abcdef"); try vm.expect_matches(false, "aabc"); } // s/^abc$/ { const insts = [_]Inst{ .{.literal = 'a'}, .{.literal = 'b'}, .{.literal = 'c'}, .{.matched = .InputMustBeDone}, }; const vm = Vm{.insts = insts[0..]}; try vm.expect_matches(false, ""); try vm.expect_matches(false, "a"); try vm.expect_matches(false, "ab"); try vm.expect_matches(true, "abc"); try vm.expect_matches(false, "abcdef"); try vm.expect_matches(false, "aabc"); } // s/^a.c/ { const insts = [_]Inst{ .{.literal = 'a'}, .any, .{.literal = 'c'}, .{.matched = .InputCanContinue}, }; const vm = Vm{.insts = insts[0..]}; try vm.expect_matches(false, ""); try vm.expect_matches(false, "a"); try vm.expect_matches(false, "ab"); try vm.expect_matches(true, "abc"); try vm.expect_matches(true, "a c"); try vm.expect_matches(false, "aabc"); } // s/^ab?c/ { const insts = [_]Inst{ .{.literal = 'a'}, .{.split = .{1, 2}}, .{.literal = 'b'}, .{.literal = 'c'}, .{.matched = .InputCanContinue}, }; const vm = Vm{.insts = insts[0..]}; try vm.expect_matches(false, "ab"); try vm.expect_matches(true, "ac"); try vm.expect_matches(true, "abc"); } // s/^a(bbb)?c/ { const insts = [_]Inst{ .{.literal = 'a'}, .{.split = .{1, 4}}, .{.literal = 'b'}, .{.literal = 'b'}, .{.literal = 'b'}, .{.literal = 'c'}, .{.matched = .InputCanContinue}, }; const vm = Vm{.insts = insts[0..]}; try vm.expect_matches(false, "ab"); try vm.expect_matches(true, "ac"); try vm.expect_matches(false, "abc"); try vm.expect_matches(false, "abbc"); try vm.expect_matches(true, "abbbc"); } // s/^ab|cd/ { const insts = [_]Inst{ .{.literal = 'a'}, .{.split = .{1, 3}}, .{.literal = 'b'}, .{.jump = 2}, .{.literal = 'c'}, .{.literal = 'd'}, .{.matched = .InputCanContinue}, }; const vm = Vm{.insts = insts[0..]}; try vm.expect_matches(false, "ad"); try vm.expect_matches(false, "ab"); try vm.expect_matches(false, "ac"); try vm.expect_matches(true, "abd"); try vm.expect_matches(true, "acd"); try vm.expect_matches(false, "abcd"); try vm.expect_matches(true, "abdc"); try vm.expect_matches(true, "acdb"); } // s/^ab*c/ { const insts = [_]Inst{ .{.literal = 'a'}, .{.repeat = .{.len = 1}}, .{.literal = 'b'}, .{.literal = 'c'}, .{.matched = .InputCanContinue}, }; const vm = Vm{.insts = insts[0..]}; try vm.expect_matches(false, "a"); try vm.expect_matches(false, "ab"); try vm.expect_matches(false, "abb"); try vm.expect_matches(false, "abbb"); try vm.expect_matches(false, "abbbb"); try vm.expect_matches(true, "ac"); try vm.expect_matches(true, "abc"); try vm.expect_matches(true, "abbc"); try vm.expect_matches(true, "abbbc"); try vm.expect_matches(true, "abbbbc"); } // s/^ab+c/ { const insts = [_]Inst{ .{.literal = 'a'}, .{.repeat = .{.len = 1, .min = 1}}, .{.literal = 'b'}, .{.literal = 'c'}, .{.matched = .InputCanContinue}, }; const vm = Vm{.insts = insts[0..]}; try vm.expect_matches(false, "a"); try vm.expect_matches(false, "ab"); try vm.expect_matches(false, "abb"); try vm.expect_matches(false, "abbbbbb"); try vm.expect_matches(false, "ac"); try vm.expect_matches(true, "abc"); try vm.expect_matches(true, "abbc"); try vm.expect_matches(true, "abbbbbbc"); } // s/^ab{2,3}c/ { const insts = [_]Inst{ .{.literal = 'a'}, .{.repeat = .{.len = 1, .min = 2, .max = 3}}, .{.literal = 'b'}, .{.literal = 'c'}, .{.matched = .InputCanContinue}, }; const vm = Vm{.insts = insts[0..]}; try vm.expect_matches(false, "a"); try vm.expect_matches(false, "ab"); try vm.expect_matches(false, "abb"); try vm.expect_matches(false, "abbb"); try vm.expect_matches(false, "abbbb"); try vm.expect_matches(false, "ac"); try vm.expect_matches(false, "abc"); try vm.expect_matches(true, "abbc"); try vm.expect_matches(true, "abbbc"); try vm.expect_matches(false, "abbbbc"); } } const Compiler = struct { const Insts = utils.List(Inst); input: StrView = .{}, insts: Insts = undefined, group_level: usize = 0, fn init(self: *Compiler, alloc: std.mem.Allocator, input: []const u8) void { self.input.init(input); self.insts = Insts{.alloc = alloc}; } fn m_int(self: *Compiler) Error!usize { const start = self.input.pos; while (self.input.get()) |char| { if (char >= '0' and char <= '9') { self.input.inc(); } else break; } if (std.fmt.parseUnsigned(usize, self.input.seen(start), 10) catch null) |int| { return int; } else { self.input.pos = start; return Error.RegexIsInvalid; } } fn m_int_maybe(self: *Compiler) ?usize { return self.m_int() catch null; } fn m_exact_maybe(self: *Compiler, expected: []const u8) bool { return self.input.consume_exact(expected); } fn m_exact(self: *Compiler, expected: []const u8) Error!void { if (!self.m_exact_maybe(expected)) return Error.RegexIsInvalid; } fn m_group(self: *Compiler) Error!void { try self.m_exact("("); self.group_level += 1; // std.debug.print("entering m_group {}\n", .{self.group_level}); defer self.group_level -= 1; try self.m_expr(); try self.m_exact(")"); // std.debug.print("leaving m_group {}\n", .{self.group_level}); } fn m_expr(self: *Compiler) Error!void { // std.debug.print("m_expr\n", .{}); var last = self.insts.tail; var last_index = self.insts.len; while (self.input.get()) |char| { // std.debug.print("{} {c}: ", .{self.insts.len, char}); switch (char) { '.' => { // std.debug.print("any\n", .{}); last = self.insts.tail; last_index = self.insts.len; try self.insts.push_back(.any); self.input.inc(); }, '?' => { const jump = @intCast(isize, self.insts.len - last_index + 1); // std.debug.print("? {}\n", .{jump}); try self.insts.insert_after(last, .{.split = .{1, jump}}); self.input.inc(); }, '(' => { last = self.insts.tail; last_index = self.insts.len; try self.m_group(); }, ')' => { if (self.group_level == 0) return Error.RegexIsInvalid; return; }, '^' => { return Error.RegexIsInvalid; }, '$' => { if (!self.input.last()) { return Error.RegexIsInvalid; } return; }, else => { // std.debug.print("literal\n", .{}); last = self.insts.tail; last_index = self.insts.len; try self.insts.push_back(.{.literal = char}); self.input.inc(); }, } } } fn compile_regex(self: *Compiler) Error!CompiledRegex { defer self.insts.clear(); // If no ^, then insert the equivalent of a non-greedy .* at the start if (!self.m_exact_maybe("^")) { try self.insts.push_back(.{.split = .{3, 1}}); try self.insts.push_back(.any); try self.insts.push_back(.{.jump = -2}); } try self.m_expr(); if (self.group_level != 0) return Error.RegexIsInvalid; try self.insts.push_back(.{.matched = if (self.m_exact_maybe("$")) .InputMustBeDone else .InputCanContinue}); return self.insts.to_slice(); } }; pub fn compile(alloc: std.mem.Allocator, regex: []const u8) Error!CompiledRegex { var c = Compiler{}; c.init(alloc, regex); return c.compile_regex(); } test "Compiler" { const Invalid = Error.RegexIsInvalid; const eq = std.testing.expectEqual; const eq_slices = std.testing.expectEqualSlices; const er = std.testing.expectError; var ta = utils.TestAlloc{}; defer ta.deinit(.Panic); errdefer ta.deinit(.NoPanic); const alloc = ta.alloc(); // Terminal Elements { var c = Compiler{}; c.input.items = "abc xyz 123 456"; // Try Incorrectly at start try eq(@as(usize, 0), c.input.pos); try er(Invalid, c.m_exact("xyz")); try eq(@as(usize, 0), c.input.pos); try eq(false, c.m_exact_maybe("1")); try eq(@as(usize, 0), c.input.pos); try er(Invalid, c.m_int()); try eq(@as(usize, 0), c.input.pos); try eq(@as(?usize, null), c.m_int_maybe()); // abc try eq(@as(usize, 0), c.input.pos); try c.m_exact("abc"); try eq(@as(usize, 3), c.input.pos); try c.m_exact(" "); // xyz try eq(@as(usize, 4), c.input.pos); try eq(true, c.m_exact_maybe("xyz")); try eq(@as(usize, 7), c.input.pos); try c.m_exact(" "); // 123 try eq(@as(usize, 8), c.input.pos); try eq(@as(usize, 123), try c.m_int()); try eq(@as(usize, 11), c.input.pos); try c.m_exact(" "); try eq(@as(usize, 12), c.input.pos); try eq(@as(?usize, 456), c.m_int_maybe()); try eq(@as(usize, c.input.items.len), c.input.pos); } // Expr with '.', '?', and simple groups { const compiled = try compile(alloc, "ab.d?(ef)?g"); defer alloc.free(compiled); try eq_slices(Inst, &[_]Inst{ .{.split = .{3, 1}}, .any, .{.jump = -2}, .{.literal = 'a'}, .{.literal = 'b'}, .any, .{.split = .{1, 2}}, .{.literal = 'd'}, .{.split = .{1, 3}}, .{.literal = 'e'}, .{.literal = 'f'}, .{.literal = 'g'}, .{.matched = .InputCanContinue}, }, compiled); const vm = Vm{.insts = compiled}; try eq(false, try vm.matches("")); try eq(false, try vm.matches("a")); try eq(false, try vm.matches("ab")); try eq(true, try vm.matches("abcg")); try eq(true, try vm.matches("abxg")); try eq(true, try vm.matches("abcdg")); try eq(true, try vm.matches("abxdg")); try eq(false, try vm.matches("abcdeg")); try eq(true, try vm.matches("abcdefg")); try eq(false, try vm.matches("abcdeffg")); // Test that not having ^ means the match doesn't need to be at the // start. try eq(true, try vm.matches("xabcdefg")); // Test taht not having $ means there can be anything after the match. try eq(true, try vm.matches("abcdefghij")); try eq(true, try vm.matches("abcdefghijklmnop")); } // Expr with '^', '$', and nested groups { const compiled = try compile(alloc, "^a((b)?((c(de)?)?f(g)?)?)?hi$"); defer alloc.free(compiled); try eq_slices(Inst, &[_]Inst{ .{.literal = 'a'}, .{.split = .{1, 12}}, .{.split = .{1, 2}}, .{.literal = 'b'}, .{.split = .{1, 9}}, .{.split = .{1, 5}}, .{.literal = 'c'}, .{.split = .{1, 3}}, .{.literal = 'd'}, .{.literal = 'e'}, .{.literal = 'f'}, .{.split = .{1, 2}}, .{.literal = 'g'}, .{.literal = 'h'}, .{.literal = 'i'}, .{.matched = .InputMustBeDone}, }, compiled); const vm = Vm{.insts = compiled}; try eq(false, try vm.matches("")); try eq(true, try vm.matches("ahi")); try eq(false, try vm.matches("achi")); try eq(true, try vm.matches("afhi")); try eq(true, try vm.matches("abfhi")); try eq(true, try vm.matches("abcfhi")); try eq(false, try vm.matches("abcdfhi")); try eq(true, try vm.matches("abcdefhi")); try eq(false, try vm.matches("abcdefxhi")); try eq(true, try vm.matches("abcdefghi")); // Test that having ^ means the match need to be at the start. try eq(false, try vm.matches("xabcdefghi")); // Test that having $ means there can't be anything after the match. try eq(false, try vm.matches("abcdefghij")); try eq(false, try vm.matches("abcdefghijklmnop")); } } pub fn match(alloc: std.mem.Allocator, regex: []const u8, str: []const u8) Error!bool { const compiled = try compile(alloc, regex); defer alloc.free(compiled); const vm = Vm{.insts = compiled}; return vm.matches(str); }
0
repos/georgios/libs
repos/georgios/libs/utils/Cksum.zig
// Implementation of the POSIX cksum program, which uses a CRC-32 derived // algorithm. // // For more info see: // https://en.wikipedia.org/wiki/Cyclic_redundancy_check // https://rosettacode.org/wiki/CRC-32#C // https://pubs.opengroup.org/onlinepubs/9699919799/utilities/cksum.html // The source of lookup_table and the basis of the cksum-specific parts of // the algorithm. const Self = @This(); const std = @import("std"); // TODO: Generate this at compile-time or maybe runtime? const lookup_table = [256]u32{ 0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9, 0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005, 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61, 0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, 0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9, 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75, 0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, 0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd, 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039, 0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, 0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81, 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d, 0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, 0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95, 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1, 0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, 0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae, 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072, 0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, 0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca, 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde, 0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, 0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066, 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba, 0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, 0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692, 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6, 0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, 0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e, 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2, 0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, 0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a, 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637, 0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, 0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f, 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53, 0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, 0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b, 0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff, 0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, 0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7, 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b, 0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, 0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3, 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7, 0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, 0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f, 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3, 0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, 0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c, 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8, 0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, 0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30, 0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec, 0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, 0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654, 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0, 0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, 0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18, 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4, 0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, 0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c, 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668, 0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4 }; crc: u32 = 0, total_size: usize = 0, fn sum_byte(self: *Self, byte: u8) void { self.crc = (self.crc << 8) ^ lookup_table[(self.crc >> 24) ^ byte]; } pub fn sum_bytes(self: *Self, bytes: []const u8) void { for (bytes) |byte| { self.sum_byte(byte); } self.total_size += bytes.len; } pub fn get_result(self: *Self) u32 { var n = self.total_size; while (n != 0) { self.sum_byte(@truncate(u8, n)); n >>= 8; } return ~self.crc; } pub fn check(data: []const u8) u32 { var cksum = Self{}; cksum.sum_bytes(data); return cksum.get_result(); } const test_string = "The quick brown fox jumps over the lazy dog\n"; const test_sum: u32 = 2382472371; // From running GNU cksum on Linux test "Cksum.sum_data" { var cksum = Self{}; cksum.sum_bytes(test_string[0..14]); cksum.sum_bytes(test_string[14..]); try std.testing.expectEqual(cksum.get_result(), test_sum); } test "Cksum.check" { try std.testing.expectEqual(check(test_string), test_sum); }
0
repos/georgios/libs
repos/georgios/libs/utils/unicode.zig
// Unicode Utilities, Like Converting Character Encodings // // More Information: // https://en.wikipedia.org/wiki/UTF-8 // The Unicode Standard. // For version 13.0, see volume 1 section 3.9 "Unicode Encoding Forms". // See Page 123 for the part on UTF-8 const std = @import("std"); const utils = @import("utils.zig"); pub const Error = error { InvalidUtf8, IncompleteUtf8, } || utils.Error; /// Possibly incomplete decode state to save if we don't have the entire /// sequence at the moment. pub const State = struct { byte_pos: u8 = 0, seqlen: u8 = 0, code_point: u32 = 0, }; /// Low-level UTF-8 to UTF-32 converter. Returns individual code points. pub const Utf8Iterator = struct { input: []const u8, pos: usize = 0, state: State = .{}, fn next_byte(self: *Utf8Iterator, first_byte: bool) callconv(.Inline) Error!u8 { if (self.pos >= self.input.len) { return if (first_byte) Error.OutOfBounds else Error.IncompleteUtf8; } const byte = self.input[self.pos]; self.pos += 1; return byte; } pub fn next(self: *Utf8Iterator) Error!u32 { // Valid UTF-8 code point sequences take these binary forms: // // 00000000 00000000 0aaaaaaa = 0aaaaaaa // 00000000 00000aaa aabbbbbb = 110aaaaa 10bbbbbb // 00000000 aaaabbbb bbcccccc = 1110aaaa 10bbbbbb 10cccccc // 000aaabb bbbbcccc ccdddddd = 11110aaa 10bbbbbb 10cccccc 10dddddd if (self.state.byte_pos == 0) { const first_byte = try self.next_byte(true); if (first_byte & 0b10000000 == 0) { return first_byte; } self.state.seqlen = @clz(u8, ~first_byte); if (self.state.seqlen < 2 or self.state.seqlen > 4) { return Error.InvalidUtf8; } self.state.code_point = first_byte & ((@as(u8, 1) << @intCast(u3, 7 - self.state.seqlen)) - 1); self.state.byte_pos = 1; } while (self.state.byte_pos < self.state.seqlen) { const byte = try self.next_byte(false); if (byte >> 6 != 0b10) { self.state.byte_pos = 0; return Error.InvalidUtf8; } self.state.code_point <<= 6; self.state.code_point |= (byte & 0b00111111); self.state.byte_pos += 1; } self.state.byte_pos = 0; return self.state.code_point; } }; /// High-level UTF-8 to UTF-32 converter. Returns strings as large as the /// buffer allows and there is input for. pub const Utf8ToUtf32 = struct { input: []const u8, buffer: []u32, // Character to insert if there are errors. If null, then errors aren't // allowed. allow_errors: ?u32 = '?', state: State = .{}, pub fn reset(self: *Utf8ToUtf32) void { self.state = .{}; } pub fn next(self: *Utf8ToUtf32) Error![]u32 { var it = Utf8Iterator{.input = self.input, .state = self.state}; var i: usize = 0; var leftovers: ?usize = null; var save_state = false; var replace_char_leftover = false; while (true) { const last_pos = it.pos; if (it.next()) |c| { if (i >= self.buffer.len) { leftovers = last_pos; break; } self.buffer[i] = c; i += 1; } else |e| switch (e) { Error.OutOfBounds => { break; }, Error.IncompleteUtf8 => { // Can't complete sequence. Save state so we can try to // resume when we get more input. save_state = true; break; }, Error.InvalidUtf8 => { if (self.allow_errors) |replace_char| { if (i >= self.buffer.len) { replace_char_leftover = true; break; } self.buffer[i] = replace_char; i += 1; } else { return e; } }, else => { return e; }, } } if (replace_char_leftover) { self.input = @ptrCast([*]const u8, &self.allow_errors.?)[0..1]; } else { self.input = self.input[(if (leftovers == null) it.pos else leftovers.?)..]; } self.state = if (save_state) it.state else .{}; return self.buffer[0..i]; } }; test "utf8_to_utf32" { var buffer: [128]u32 = undefined; // Use this Python function to generate expected u32 arrays: // def utf32_array(s): // indent = ' ' // l = ['0x{:08x},'.format(ord(i)) for i in s] // print('\n'.join([indent + ' '.join(l[i:i+4]) for i in range(0, len(l), 4)])) // One Byte UTF-8 Code Units { const input: []const u8 = "Hello"; const expected = [_]u32 { 0x00000048, 0x00000065, 0x0000006c, 0x0000006c, 0x0000006f, }; var utf8_to_utf32 = Utf8ToUtf32{.input = input, .buffer = buffer[0..]}; try std.testing.expectEqualSlices(u32, expected[0..], try utf8_to_utf32.next()); try std.testing.expectEqualSlices(u8, "", utf8_to_utf32.input); } // One-Two Byte UTF-8 Code Units { const input: []const u8 = "Æðelstan"; const expected = [_]u32 { 0x000000c6, 0x000000f0, 0x00000065, 0x0000006c, 0x00000073, 0x00000074, 0x00000061, 0x0000006e, }; var utf8_to_utf32 = Utf8ToUtf32{.input = input, .buffer = buffer[0..]}; try std.testing.expectEqualSlices(u32, expected[0..], try utf8_to_utf32.next()); try std.testing.expectEqualSlices(u8, "", utf8_to_utf32.input); } // One-Four Byte UTF-8 Code Units { const input: []const u8 = "🍱 頂きます"; const expected = [_]u32 { 0x0001f371, 0x00000020, 0x00009802, 0x0000304d, 0x0000307e, 0x00003059, }; var utf8_to_utf32 = Utf8ToUtf32{.input = input, .buffer = buffer[0..]}; try std.testing.expectEqualSlices(u32, expected[0..], try utf8_to_utf32.next()); try std.testing.expectEqualSlices(u8, "", utf8_to_utf32.input); } // Output is Too Small, so There Are Leftovers { var too_small_buffer: [3]u32 = undefined; const input: []const u8 = "Hello"; var utf8_to_utf32 = Utf8ToUtf32{.input = input, .buffer = too_small_buffer[0..]}; const expected_output1 = [_]u32 {0x00000048, 0x00000065, 0x0000006c}; try std.testing.expectEqualSlices(u32, expected_output1[0..], try utf8_to_utf32.next()); try std.testing.expectEqualSlices(u8, input[too_small_buffer.len..], utf8_to_utf32.input); const expected_output2 = [_]u32 {0x0000006c, 0x0000006f}; try std.testing.expectEqualSlices(u32, expected_output2[0..], try utf8_to_utf32.next()); try std.testing.expectEqualSlices(u8, "", utf8_to_utf32.input); } // Code point is broken up over multiple inputs. { const expected = [_]u32 { 0x0001f371, }; var utf8_to_utf32 = Utf8ToUtf32{.input = "\xf0\x9f", .buffer = buffer[0..]}; try std.testing.expectEqualSlices(u32, expected[0..0], try utf8_to_utf32.next()); try std.testing.expectEqualSlices(u8, "", utf8_to_utf32.input); utf8_to_utf32.input = "\x8d\xb1"; try std.testing.expectEqualSlices(u32, expected[0..], try utf8_to_utf32.next()); try std.testing.expectEqualSlices(u8, "", utf8_to_utf32.input); } // Code point is incomplete AND Buffer is too small. { var too_small_buffer: [2]u32 = undefined; const expected1 = [_]u32 {0x00000031, 0x00000032}; var utf8_to_utf32 = Utf8ToUtf32{ .input = "12\xf0\x9f\x8d", .buffer = too_small_buffer[0..]}; try std.testing.expectEqualSlices(u32, expected1[0..], try utf8_to_utf32.next()); try std.testing.expectEqualSlices(u8, "", utf8_to_utf32.input); const expected2 = [_]u32 {0x0001f371, 0x00000033}; utf8_to_utf32.input = "\xb13"; try std.testing.expectEqualSlices(u32, expected2[0..], try utf8_to_utf32.next()); try std.testing.expectEqualSlices(u8, "", utf8_to_utf32.input); } // Errors can be overcome by default. { const expected = [_]u32 { 0x00000048, 0x00000069, 0x0000003f, 0x0000003f, 0x00000042, 0x00000079, 0x00000065, }; // 0xf8 has an large number of leading ones, implies there are more // bytes in the sequence than are possible. It should be replaced by // '?'. // 0xc0 is a leading byte of a sequence like 0xf8, but the next byte // doesn't begin with 0b10 like it should. Both bytes should be // replaced by a single '?'. // NOTE: If the '!' byte began with 0b10, then we would accept it as // '!', though this would technically be invalid UTF-8 and is called an // overlong encoding. const input: []const u8 = "Hi\xf8\xc0!Bye"; var utf8_to_utf32 = Utf8ToUtf32{.input = input, .buffer = buffer[0..]}; try std.testing.expectEqualSlices(u32, expected[0..], try utf8_to_utf32.next()); try std.testing.expectEqualSlices(u8, "", utf8_to_utf32.input); } // Errors can be overcome if there is no more room in the buffer { var too_small_buffer: [2]u32 = undefined; const input: []const u8 = "Hi\xf8"; const expected1 = [_]u32 {0x00000048, 0x00000069}; var utf8_to_utf32 = Utf8ToUtf32{.input = input, .buffer = too_small_buffer[0..]}; try std.testing.expectEqualSlices(u32, expected1[0..], try utf8_to_utf32.next()); try std.testing.expectEqualSlices(u8, "?", utf8_to_utf32.input); const expected2 = [_]u32 {0x0000003f}; try std.testing.expectEqualSlices(u32, expected2[0..], try utf8_to_utf32.next()); try std.testing.expectEqualSlices(u8, "", utf8_to_utf32.input); } // Decode can be strict { var utf8_to_utf32 = Utf8ToUtf32{ .input = "Hi\xf8Bye", .buffer = buffer[0..], .allow_errors = null}; try std.testing.expectError(Error.InvalidUtf8, utf8_to_utf32.next()); utf8_to_utf32 = Utf8ToUtf32{ .input = "Hi\xc0!Bye", .buffer = buffer[0..], .allow_errors = null}; try std.testing.expectError(Error.InvalidUtf8, utf8_to_utf32.next()); } }
0
repos/georgios/libs
repos/georgios/libs/utils/bmp.zig
// Read the BMP/Windows bitmap file format and convert the bitmap data to a // form that can be put in a display buffer. // // For reference: // https://en.wikipedia.org/wiki/BMP_file_format const std = @import("std"); const utils = @import("utils.zig"); const BaseError = error { BmpInvalidFile, BmpUnsupportedEncoding, BmpUnreadHeader, BmpAlreadyRead, } || utils.Error; const magic = "BM"; const BmpHeader = packed struct { magic0: u8 = magic[0], magic1: u8 = magic[1], file_size: u32, reserved: u32 = 0, data_offset: u32, fn check(self: *const BmpHeader) BaseError!void { const sz = @sizeOf(BmpHeader); if (self.magic0 != magic[0] or self.magic1 != magic[1] or self.file_size <= sz or self.data_offset <= sz or self.data_offset >= self.file_size) { return BaseError.BmpInvalidFile; } } }; const Encoding = enum(u32) { Rgb = 0, RunLenEnc8 = 1, RunLenEnc16 = 2, Rgba = 3, _, }; // The "Device-independent bitmap" header apparently is a separate struct in // the Windows C API. const DibHeader = packed struct { dib_header_size: u32 = @sizeOf(DibHeader), width: u32, height: u32, planes: u16, bits_per_pixel: u16, encoding: Encoding, // "compression" image_size: u32, x_pixels_per_meter: u32, y_pixels_per_meter: u32, color_table_count: u32, important_color_count: u32, fn check(self: *const DibHeader) BaseError!void { if (self.dib_header_size < @sizeOf(DibHeader) or self.width == 0 or self.height == 0 or self.planes == 0 or self.bits_per_pixel == 0) { return BaseError.BmpInvalidFile; } if (self.encoding != .Rgba) { // TODO: That's what Gimp and ImageMagick seem to produce return BaseError.BmpUnsupportedEncoding; } if (@intCast(usize, self.width) * self.height * self.bits_per_pixel / 8 != self.image_size) { // TODO: image_size might be 0? return BaseError.BmpInvalidFile; } } }; pub fn Bmp(comptime File: type) type { return struct { const Self = @This(); const Reader = File.Reader; const SeekableStream = File.SeekableStream; pub const Error = BaseError || Reader.Error || SeekableStream.SeekError; reader: Reader, seekable_stream: SeekableStream, headers_read: bool = false, bmp_header: BmpHeader = undefined, dib_header: DibHeader = undefined, pub fn init(file: *File) Self { return .{ .reader = file.reader(), .seekable_stream = file.seekableStream(), }; } fn read_header_i(self: *Self, comptime Type: type, value: *Type) Error!*Type { const count = try self.reader.read(std.mem.asBytes(value)); if (count < @sizeOf(Type)) { return Error.BmpInvalidFile; } return value; } pub fn read_header(self: *Self) Error!void { try self.seekable_stream.seekTo(0); try (try self.read_header_i(BmpHeader, &self.bmp_header)).check(); try (try self.read_header_i(DibHeader, &self.dib_header)).check(); self.headers_read = true; } pub fn image_size_pixels(self: *Self) Error!utils.U32Point { if (!self.headers_read) try self.read_header(); return utils.U32Point{.x = self.dib_header.width, .y = self.dib_header.height}; } pub fn image_size_bytes(self: *Self) Error!utils.U32Point { return (try self.image_size_pixels()).multiply(self.dib_header.bits_per_pixel).divide(8); } pub fn image_size_bytes_total(self: *Self) Error!usize { if (!self.headers_read) try self.read_header(); return self.dib_header.image_size; } pub fn read_bitmap(self: *Self, pos: *usize, buffer: []u8) Error!?usize { if (buffer.len == 0) { return Error.NotEnoughDestination; } // NOTE: BMP data is "bottom-up": // https://devblogs.microsoft.com/oldnewthing/20210525-00/?p=105250 // Each row is in the expected order from left lsb to right msb, // but the most bottom row is first in the bitmap data of the file. // We need to supply the expected order to the buffer. // TODO: Each row is aligned to 4 bytes. The padding for this would // need to be omitted from the output. Right now though we only // support 32 bpp RGBA, which won't have the padding. const total_expected = try self.image_size_bytes_total(); const width: usize = self.dib_header.width * self.dib_header.bits_per_pixel / 8; const data_end: usize = self.bmp_header.data_offset + total_expected; var got: usize = 0; while (got < buffer.len and pos.* < total_expected) { const row = pos.* / width; const col = @mod(pos.*, width); const seek_to = data_end - width * (row + 1) + col; const count = @minimum(width - col, buffer.len - got); try self.seekable_stream.seekTo(seek_to); if ((try self.reader.read(buffer[got..got + count])) != count) { return Error.BmpInvalidFile; } got += count; pos.* += count; } return if (got == 0) null else got; } }; } test "read test.bmp" { var file = try std.fs.cwd().openFile("misc/test.bmp", .{.read = true}); defer file.close(); var bmp = Bmp(@TypeOf(file)).init(&file); try bmp.read_header(); // std.debug.print("BMP STRUCT: {}\n", .{bmp}); var bitmap = [_]u8{0} ** 1024; var buffer: [129]u8 = undefined; var pos: usize = 0; while (try bmp.read_bitmap(&pos, buffer[0..])) |got| { for (buffer[0..got]) |byte, i| { bitmap[pos - got + i] = byte; } } // std.debug.print("BITMAP: {}\n", .{utils.fmt_dump_hex(bitmap[0..])}); var expected_ascii = " " ++ " @@@@@@ " ++ " @@@@@@@@ " ++ " @@@@@@@@@@ " ++ " @@@@@@@@@@@@ " ++ " @@@@-@@@@-@@@@ " ++ " @@@---@@---@@@ " ++ " @@@-@@@@-@@@@@ " ++ " @@@@@@@@@@@@@@ " ++ " @@@@@@@@@@@@@@ " ++ " @@@@@@@@@@@@@@ " ++ " @@@------@@@ " ++ " @@@@@@@@@@ " ++ " @@@@@@@@ " ++ " @@@@@@ " ++ " "; var expected: [1024]u8 = undefined; for (expected_ascii) |ascii_pixel, i| { var e = expected[i * 4..(i + 1) * 4]; switch (ascii_pixel) { ' ' => { // Transparent e[0] = 0; e[1] = 0; e[2] = 0; e[3] = 0; }, '@' => { // Yellow e[0] = 0; e[1] = 0xff; e[2] = 0xff; e[3] = 0xff; }, '-' => { // Black e[0] = 0; e[1] = 0; e[2] = 0; e[3] = 0xff; }, else => @panic("Unexpected \"expected\" ASCII"), } } if (false) { for (std.mem.bytesAsSlice(u32, bitmap[0..])) |p, i| { const c: u8 = switch (p) { 0 => ' ', 0xff000000 => '-', 0xffffff00 => '@', else => { std.debug.print("GOT pixel value {x}\n", .{p}); @panic("??"); }, }; std.debug.print("{c}", .{c}); if (@mod(i, 16) == 0) { std.debug.print("\n", .{}); } } } try utils.expect_equal_bytes(expected[0..], bitmap[0..]); }
0
repos/georgios/libs
repos/georgios/libs/utils/list.zig
const std = @import("std"); const Allocator = std.mem.Allocator; pub const Error = Allocator.Error; const utils = @import("utils.zig"); pub fn List(comptime Type: type) type { return struct { const Self = @This(); pub const Node = struct { next: ?*Node = null, prev: ?*Node = null, value: Type, }; alloc: Allocator, head: ?*Node = null, tail: ?*Node = null, len: usize = 0, pub fn front(self: *Self) ?Type { if (self.head) |node| { return node.value; } return null; } pub fn back(self: *Self) ?Type { if (self.tail) |node| { return node.value; } return null; } pub fn unlink_node(self: *Self, node_maybe: ?*Node) void { if (node_maybe) |node| { if (node.next) |next| { next.prev = node.prev; } if (node.prev) |prev| { prev.next = node.next; } if (node == self.head) { self.head = node.next; } if (node == self.tail) { self.tail = node.prev; } self.len -= 1; } } pub fn destroy_node(self: *Self, node: *Node) void { self.alloc.destroy(node); } pub fn remove_node(self: *Self, node: *Node) void { self.unlink_node(node); self.destroy_node(node); } pub fn push_front_node(self: *Self, node: *Node) void { node.next = self.head; node.prev = null; if (self.head) |head| { head.prev = node; } self.head = node; if (self.len == 0) { self.tail = node; } self.len += 1; } pub fn create_node(self: *Self, value: Type) Error!*Node{ const node = try self.alloc.create(Node); node.* = .{.value = value}; return node; } pub fn push_front(self: *Self, value: Type) Error!void { self.push_front_node(try self.create_node(value)); } pub fn pop_front_node(self: *Self) ?*Node { const node = self.head; self.unlink_node(node); return node; } pub fn pop_front(self: *Self) ?Type { if (self.pop_front_node()) |node| { const value = node.value; self.destroy_node(node); return value; } return null; } pub fn bump_node_to_front(self: *Self, node: *Node) void { if (self.head == node) { return; } self.unlink_node(node); self.push_front_node(node); } pub fn push_back_node(self: *Self, node: *Node) void { node.next = null; node.prev = self.tail; if (self.tail) |tail| { tail.next = node; } self.tail = node; if (self.len == 0) { self.head = node; } self.len += 1; } pub fn push_back(self: *Self, value: Type) Error!void { self.push_back_node(try self.create_node(value)); } pub fn pop_back_node(self: *Self) ?*Node { const node = self.tail; self.unlink_node(node); return node; } pub fn pop_back(self: *Self) ?Type { if (self.pop_back_node()) |node| { const value = node.value; self.destroy_node(node); return value; } return null; } pub fn bump_node_to_back(self: *Self, node: *Node) void { if (self.tail == node) { return; } self.unlink_node(node); self.push_back_node(node); } pub fn insert_node_before(self: *Self, before_maybe: ?*Node, insert: *Node) void { if (before_maybe == null) { self.push_back_node(insert); return; } const before = before_maybe.?; if (before.prev == insert) { return; } insert.prev = before.prev; insert.next = before; before.prev = insert; if (insert.prev) |after| { after.next = insert; } else { self.head = insert; } self.len += 1; } pub fn insert_before(self: *Self, before: ?*Node, value: Type) Error!void { self.insert_node_before(before, try self.create_node(value)); } pub fn insert_node_after(self: *Self, after_maybe: ?*Node, insert: *Node) void { if (after_maybe == null) { self.push_front_node(insert); return; } const after = after_maybe.?; if (after.next == insert) { return; } insert.prev = after; insert.next = after.next; after.next = insert; if (insert.next) |before| { before.prev = insert; } else { self.tail = insert; } self.len += 1; } pub fn insert_after(self: *Self, after: ?*Node, value: Type) Error!void { self.insert_node_after(after, try self.create_node(value)); } pub fn push_back_list(self: *Self, other: *Self) void { if (other.head) |other_head| { other_head.prev = self.tail; if (self.tail) |tail| { tail.next = other_head; } self.tail = other.tail; if (self.len == 0) { self.head = other_head; } self.len += other.len; other.head = null; other.tail = null; other.len = 0; } } pub fn to_slice(self: *Self) Error![]Type { const slice = try self.alloc.alloc(Type, self.len); var it = self.iterator(); var i: usize = 0; while (it.next()) |value| { slice[i] = value; i += 1; } return slice; } pub fn clear(self: *Self) void { while (self.pop_back_node()) |node| { self.destroy_node(node); } } pub const Iterator = struct { node: ?*Node, pub fn next(self: *Iterator) ?Type { if (self.node) |n| { self.node = n.next; return n.value; } return null; } pub fn done(self: *const Iterator) bool { return self.node == null; } }; pub fn iterator(self: *Self) Iterator { return Iterator{.node = self.head}; } // TODO: Make generic with Iterator? pub const ConstIterator = struct { node: ?*const Node, pub fn next(self: *ConstIterator) ?Type { if (self.node) |n| { self.node = n.next; return n.value; } return null; } pub fn done(self: *const ConstIterator) bool { return self.node == null; } }; pub fn const_iterator(self: *const Self) ConstIterator { return ConstIterator{.node = self.head}; } }; } test "List" { const equal = std.testing.expectEqual; var ta = utils.TestAlloc{}; defer ta.deinit(.Panic); errdefer ta.deinit(.NoPanic); const alloc = ta.alloc(); const UsizeList = List(usize); var list = UsizeList{.alloc = alloc}; const nilv: ?usize = null; const niln: ?*UsizeList.Node = null; // Empty try equal(@as(usize, 0), list.len); try equal(nilv, list.pop_back()); try equal(nilv, list.pop_front()); try equal(niln, list.head); try equal(niln, list.tail); // Push Some Values try list.push_back(1); try equal(@as(usize, 1), list.len); try list.push_back(2); try equal(@as(usize, 2), list.len); try list.push_back(3); try equal(@as(usize, 3), list.len); // Test Iterator var i: usize = 0; const expected = [_]usize{1, 2, 3}; var it = list.iterator(); while (it.next()) |actual| { try equal(expected[i], actual); i += 1; } // pop_back The Values try equal(@as(usize, 3), list.pop_back().?); try equal(@as(usize, 2), list.len); try equal(@as(usize, 2), list.pop_back().?); try equal(@as(usize, 1), list.len); try equal(@as(usize, 1), list.pop_back().?); // It's empty again try equal(@as(usize, 0), list.len); try equal(nilv, list.pop_back()); try equal(nilv, list.pop_front()); try equal(niln, list.head); try equal(niln, list.tail); // Push and insert values try list.push_front(20); // Now: >20< const n20 = list.head; try equal(@as(usize, 1), list.len); try list.push_back(3); // Now: 20 >3< const n3 = list.tail; try list.push_front(10); // Now: >10< 20 3 try list.insert_after(null, 1); // Now: >1< 10 20 3 try list.insert_after(n3, 4); // Now: 1 10 20 3 >4< try list.insert_before(list.head, 0); // Now: >0< 1 10 20 3 4 try list.insert_after(n3, 30); // Now: 0 1 10 20 3 >30< 4 try list.insert_before(null, 999); // Now: 0 1 10 20 3 30 4 >999< try list.insert_before(n20, 11); // Now: 0 1 10 >11< 20 3 30 4 999 try list.insert_before(list.tail, 40); // Now: 0 1 10 20 3 30 4 >40< 999 // Test to_slice and using pop_front const expected2 = [_]usize{0, 1, 10, 11, 20, 3, 30, 4, 40, 999}; try equal(@as(usize, expected2.len), list.len); const slice = try list.to_slice(); defer alloc.free(slice); for (expected2) |val, n| { try equal(expected2[n], val); try equal(expected2[n], list.pop_front().?); } // It's empty yet again try equal(@as(usize, 0), list.len); try equal(nilv, list.pop_back()); try equal(nilv, list.pop_front()); try equal(niln, list.head); try equal(niln, list.tail); // Clear try list.push_back(12); try list.push_front(6); list.clear(); // It's empty ... again try equal(@as(usize, 0), list.len); try equal(nilv, list.pop_back()); try equal(nilv, list.pop_front()); try equal(niln, list.head); try equal(niln, list.tail); // Test push_back_list by adding empty list to empty list var other_list = UsizeList{.alloc = alloc}; list.push_back_list(&other_list); try equal(@as(usize, 0), list.len); try equal(nilv, list.pop_back()); try equal(nilv, list.pop_front()); try equal(niln, list.head); try equal(niln, list.tail); // Test push_back_list by adding non empty list to empty list try other_list.push_back(1); try other_list.push_back(3); list.push_back_list(&other_list); try equal(@as(usize, 0), other_list.len); try equal(nilv, other_list.pop_back()); try equal(nilv, other_list.pop_front()); try equal(niln, other_list.head); try equal(niln, other_list.tail); try equal(@as(usize, 2), list.len); // Test push_back_list by adding non empty list to non empty list try other_list.push_back(5); try other_list.push_back(7); list.push_back_list(&other_list); try equal(@as(usize, 0), other_list.len); try equal(nilv, other_list.pop_back()); try equal(nilv, other_list.pop_front()); try equal(niln, other_list.head); try equal(niln, other_list.tail); try equal(@as(usize, 4), list.len); try equal(@as(usize, 1), list.pop_front().?); try equal(@as(usize, 3), list.pop_front().?); try equal(@as(usize, 5), list.pop_front().?); try equal(@as(usize, 7), list.pop_front().?); try equal(@as(usize, 0), list.len); try equal(nilv, list.pop_back()); try equal(nilv, list.pop_front()); try equal(niln, list.head); try equal(niln, list.tail); }
0
repos/georgios/libs
repos/georgios/libs/georgios/Console.zig
const Self = @This(); const utils = @import("utils"); const Ansi = utils.AnsiEscProcessor; pub const HexColor = Ansi.HexColor; pub const Layer = Ansi.Layer; row: u32 = undefined, column: u32 = undefined, width: u32 = undefined, height: u32 = undefined, ansi: Ansi = undefined, utf32_buffer: [128]u32 = undefined, utf8_to_utf32: utils.Utf8ToUtf32 = undefined, place_impl: fn(console: *Self, utf32_value: u32, row: u32, col: u32) void, scroll_impl: fn(console: *Self) void, set_hex_color_impl: fn(console: *Self, color: HexColor, layer: Layer) void, get_hex_color_impl: fn(console: *Self, layer: Layer) HexColor, use_default_color_impl: fn(console: *Self, layer: Layer) void, reset_attributes_impl: fn(console: *Self) void, move_cursor_impl: fn(console: *Self, row: u32, col: u32) void, show_cursor_impl: fn(console: *Self, show: bool) void, clear_screen_impl: fn(console: *Self) void, pub fn init(self: *Self, width: u32, height: u32) void { self.width = width; self.height = height; self.ansi = .{ .print_char = ansi_print_char, .newline = ansi_newline, .backspace = ansi_backspace, .hex_color = ansi_hex_color, .invert_colors = ansi_invert_colors, .use_default_color = ansi_use_default_color, .reset_attributes = ansi_reset_attributes, .reset_terminal = ansi_reset_terminal, .move_cursor = ansi_move_cursor, .show_cursor = ansi_show_cursor, }; self.utf8_to_utf32 = .{.input = undefined, .buffer = self.utf32_buffer[0..]}; self.reset_terminal(); } /// Takes a UTF8/ANSI escape code byte pub fn print(self: *Self, byte: u8) void { self.ansi.feed_char(byte); } pub fn print_utf8(self: *Self, utf8_value: u8) void { self.utf8_to_utf32.input = @ptrCast([*]const u8, &utf8_value)[0..1]; // TODO: Shouldn't crash the kernel just because we got an invalid UTF8 byte. for (self.utf8_to_utf32.next() catch @panic("Console UTF-8 Failure")) |utf32_value| { self.print_utf32(utf32_value); } } pub fn print_utf32(self: *Self, utf32_value: u32) void { if ((self.column + 1) > self.width) { self.newline(); } self.place(utf32_value, self.row, self.column); self.move_cursor(self.row, self.column + 1); } pub fn place(self: *Self, utf32_value: u32, row: u32, col: u32) void { self.place_impl(self, utf32_value, row, col); } pub fn ansi_print_char(ansi: *Ansi, char: u8) void { const self = @fieldParentPtr(Self, "ansi", ansi); self.print_utf8(char); } pub fn newline(self: *Self) void { if (self.row == (self.height - 1)) { self.scroll_impl(self); } else { self.row += 1; } self.move_cursor(self.row, 0); } pub fn ansi_newline(ansi: *Ansi) void { const self = @fieldParentPtr(Self, "ansi", ansi); self.newline(); } pub fn backspace(self: *Self) void { var row = self.row; var col = self.column; if (col == 0 and row > 0) { col = self.width - 1; row -= 1; } else { col -= 1; } self.move_cursor(row, col); self.place(' ', self.row, self.column); } pub fn ansi_backspace(ansi: *Ansi) void { const self = @fieldParentPtr(Self, "ansi", ansi); self.backspace(); } pub fn set_hex_color(self: *Self, color: HexColor, layer: Layer) void { self.set_hex_color_impl(self, color, layer); } pub fn ansi_hex_color(ansi: *Ansi, color: HexColor, layer: Layer) void { const self = @fieldParentPtr(Self, "ansi", ansi); self.set_hex_color(color, layer); } pub fn set_hex_colors(self: *Self, fg: HexColor, bg: HexColor) void { self.set_hex_color(fg, .Foreground); self.set_hex_color(bg, .Background); } pub fn get_hex_color(self: *Self, layer: Layer) HexColor { return self.get_hex_color_impl(self, layer); } pub fn invert_colors(self: *Self) void { const fg = self.get_hex_color(.Foreground); const bg = self.get_hex_color(.Background); self.set_hex_colors(bg, fg); } pub fn ansi_invert_colors(ansi: *Ansi) void { const self = @fieldParentPtr(Self, "ansi", ansi); self.invert_colors(); } pub fn use_default_color(self: *Self, layer: Layer) void { self.use_default_color_impl(self, layer); } pub fn ansi_use_default_color(ansi: *Ansi, layer: Layer) void { const self = @fieldParentPtr(Self, "ansi", ansi); self.use_default_color(layer); } pub fn use_default_colors(self: *Self) void { self.use_default_color(.Foreground); self.use_default_color(.Background); } pub fn reset_attributes(self: *Self) void { self.use_default_colors(); self.reset_attributes_impl(self); } pub fn ansi_reset_attributes(ansi: *Ansi) void { const self = @fieldParentPtr(Self, "ansi", ansi); self.reset_attributes(); } pub fn move_cursor(self: *Self, row: u32, col: u32) void { self.row = row; self.column = col; self.move_cursor_impl(self, row, col); } pub fn ansi_move_cursor(ansi: *Ansi, row: u32, col: u32) void { const self = @fieldParentPtr(Self, "ansi", ansi); self.move_cursor(row, col); } pub fn show_cursor(self: *Self, show: bool) void { self.show_cursor_impl(self, show); } pub fn ansi_show_cursor(ansi: *Ansi, show: bool) void { const self = @fieldParentPtr(Self, "ansi", ansi); self.show_cursor(show); } pub fn reset_cursor(self: *Self) void { self.move_cursor(0, 0); self.show_cursor(true); } pub fn clear_screen(self: *Self) void { self.clear_screen_impl(self); } pub fn reset_terminal(self: *Self) void { self.reset_attributes(); self.clear_screen(); self.reset_cursor(); } pub fn ansi_reset_terminal(ansi: *Ansi) void { const self = @fieldParentPtr(Self, "ansi", ansi); self.reset_terminal(); }
0
repos/georgios/libs
repos/georgios/libs/georgios/keyboard.zig
pub const Key = @import("keys.zig").Key; pub const Kind = enum { Pressed, // Key is depressed. Should be followed by a release. Released, // Key was released. Hit, // Key doesn't support seperate pressed and released states. }; pub const Modifiers = struct { right_shift_is_pressed: bool = false, left_shift_is_pressed: bool = false, right_alt_is_pressed: bool = false, left_alt_is_pressed: bool = false, right_control_is_pressed: bool = false, left_control_is_pressed: bool = false, pub fn shift_is_pressed(self: *const Modifiers) bool { return self.right_shift_is_pressed or self.left_shift_is_pressed; } pub fn alt_is_pressed(self: *const Modifiers) bool { return self.right_alt_is_pressed or self.left_alt_is_pressed; } pub fn control_is_pressed(self: *const Modifiers) bool { return self.right_control_is_pressed or self.left_control_is_pressed; } pub fn update(self: *Modifiers, event: *const Event) void { switch (event.unshifted_key) { .Key_LeftShift => self.left_shift_is_pressed = event.kind == .Pressed, .Key_RightShift => self.right_shift_is_pressed = event.kind == .Pressed, .Key_LeftAlt => self.left_alt_is_pressed = event.kind == .Pressed, .Key_RightAlt => self.right_alt_is_pressed = event.kind == .Pressed, .Key_LeftControl => self.left_control_is_pressed = event.kind == .Pressed, .Key_RightControl => self.right_control_is_pressed = event.kind == .Pressed, else => {}, } } }; pub const Event = struct { unshifted_key: Key, kind: Kind, modifiers: Modifiers, key: Key, char: ?u8, pub fn new( unshifted_key: Key, shifted_key: ?Key, kind: Kind, modifiers: *const Modifiers) Event { return Event { .unshifted_key = unshifted_key, .kind = kind, .modifiers = modifiers.*, .key = if (shifted_key != null and modifiers.shift_is_pressed()) shifted_key.? else unshifted_key, .char = null, }; } };
0
repos/georgios/libs
repos/georgios/libs/georgios/fs.idl
@virtual_dispatch interface Directory { void create(in string path, in georgios::fs::NodeKind kind) raises (georgios::fs::Error); void unlink(in string path) raises (georgios::fs::Error); };
0
repos/georgios/libs
repos/georgios/libs/georgios/io.zig
const std = @import("std"); const utils = @import("utils"); const georgios = @import("georgios.zig"); const memory = @import("memory.zig"); pub const FileError = error { /// The operation is not supported. Unsupported, /// An Implementation-Related Error Occured. Internal, InvalidFileId, OutOfSpace, StreamTooLong, EndOfStream, } || georgios.BasicError; /// File IO Interface pub const File = struct { pub const Id = u32; /// Used for seek() pub const SeekType = enum { FromStart, FromHere, FromEnd, }; pub const nop = struct { pub fn read_impl(file: *File, to: []u8) FileError!usize { _ = file; _ = to; return 0; } pub fn write_impl(file: *File, from: []const u8) FileError!usize { _ = file; return from.len; } pub fn seek_impl(file: *File, offset: isize, seek_type: SeekType) FileError!usize { _ = file; _ = offset; _ = seek_type; return 0; } pub fn close_impl(file: *File) georgios.fs.Error!void { _ = file; } }; pub const unsupported = struct { pub fn read_impl(file: *File, to: []u8) FileError!usize { _ = file; _ = to; return FileError.Unsupported; } pub fn write_impl(file: *File, from: []const u8) FileError!usize { _ = file; _ = from; return FileError.Unsupported; } pub fn seek_impl(file: *File, offset: isize, seek_type: SeekType) FileError!usize { _ = file; _ = offset; _ = seek_type; return FileError.Unsupported; } pub fn close_impl(file: *File) georgios.fs.Error!void { _ = file; return FileError.Unsupported; } }; pub const system_call = struct { pub fn read_impl(file: *File, to: []u8) FileError!usize { return georgios.system_calls.file_read(file.id.?, to); } pub fn write_impl(file: *File, from: []const u8) FileError!usize { return georgios.system_calls.file_write(file.id.?, from); } pub fn seek_impl(file: *File, offset: isize, seek_type: SeekType) FileError!usize { return georgios.system_calls.file_seek(file.id.?, offset, seek_type); } pub fn close_impl(file: *File) georgios.fs.Error!void { return georgios.system_calls.file_close(file.id.?); } }; const default_impl = if (georgios.is_program) system_call else unsupported; id: ?Id = null, read_impl: fn(*File, []u8) FileError!usize = default_impl.read_impl, write_impl: fn(*File, []const u8) FileError!usize = default_impl.write_impl, seek_impl: fn(*File, isize, SeekType) FileError!usize = default_impl.seek_impl, close_impl: fn(*File) georgios.fs.Error!void = default_impl.close_impl, /// Set the file to do nothing when used. pub fn set_nop_impl(self: *File) void { self.read_impl = nop.read_impl; self.write_impl = nop.write_impl; self.seek_impl = nop.seek_impl; self.close_impl = nop.close_impl; } /// Set the file to return FileError.Unsupported when used. pub fn set_unsupported_impl(self: *File) void { self.read_impl = unsupported.read_impl; self.write_impl = unsupported.write_impl; self.seek_impl = unsupported.seek_impl; self.close_impl = unsupported.close_impl; } /// Tries to read as much as possible into the `to` slice and will return /// the amount read, which may be less than `to.len`. Can return 0 if the /// `to` slice is zero or the end of the file has been reached already. It /// should never return `FileError.EndOfStream` or /// `FileError.NotEnoughDestination`, but `read_or_error` will. The exact /// return values are defined by the file implementation. pub fn read(file: *File, to: []u8) FileError!usize { return file.read_impl(file, to); } /// Same as `read`, but returns `FileError.NotEnoughDestination` if an /// empty `to` was passed or `FileError.EndOfStream` if trying to read from /// a file that's already reached the end. pub fn read_or_error(file: *File, to: []u8) FileError!usize { if (to.len == 0) { return FileError.NotEnoughDestination; } const result = try file.read_impl(file, to); if (result == 0) { return FileError.EndOfStream; } return result; } /// Tries the write the entire `from` slice and will return the amount /// written, which may be less than `from.len`. As with `read` this can be /// 0 if the file has a limit of what can be written and that limit was /// already reached. Also like `read` this should never return /// `FileError.EndOfStream`, but `write_or_error` can. The exact return /// values are defined by the file implementation. pub fn write(file: *File, from: []const u8) FileError!usize { return file.write_impl(file, from); } /// Same as `write`, but return `FileError.EndOfStream` if an empty `from` /// was passed or `FileError.EndOfStream` if trying to write to a file /// that's already reached the end. pub fn write_or_error(file: *File, from: []const u8) FileError!usize { const result = try file.write_impl(file, from); if (result == 0 and from.len > 0) { return FileError.EndOfStream; } return result; } /// Shift where the file is operating from. Returns the new location if /// that's applicable, but if it's not it always returns 0. pub fn seek(file: *File, offset: isize, seek_type: File.SeekType) FileError!usize { return file.seek_impl(file, offset, seek_type); } /// Free resources used by the file. pub fn close(file: *File) georgios.fs.Error!void { defer file.id = null; file.close_impl(file) catch |e| return e; } /// A generic seek calculation for File Implementations to call. /// This assumes the following: /// - The start of the stream is always 0 and this is something that can /// be seeked. /// - The `position` can never over or under flow, or otherwise go past /// start by being negative. /// - If `limit` is non-null, then the stream position can't go past it. /// The result is returned unless it's invalid, then /// `FileError.EndOfStream` is returned. pub fn generic_seek(position: usize, end: usize, limit: ?usize, offset: isize, seek_type: SeekType) FileError!usize { const from: usize = switch (seek_type) { .FromStart => 0, .FromHere => position, .FromEnd => end, }; if (utils.add_isize_to_usize(from, offset)) |result| { if (result != position and limit != null and result >= limit.?) { return FileError.EndOfStream; } return result; } return FileError.EndOfStream; } // Std I/O Interfaces // TODO: Convert seek to match std i64/u64? fn std_seek_to_impl(file: *File, pos: u64) FileError!void { _ = try file.seek(@intCast(isize, pos), .FromStart); } fn std_seek_by_impl(file: *File, pos: i64) FileError!void { _ = try file.seek(@intCast(isize, pos), .FromHere); } fn std_get_pos_impl(file: *File) FileError!u64 { return @intCast(u64, try file.seek(0, .FromHere)); } fn std_get_end_pos_impl(file: *File) FileError!u64 { _ = file; @panic("std_get_end_pos_impl not implemented"); } pub const SeekableStream = std.io.SeekableStream( *File, FileError, FileError, std_seek_to_impl, std_seek_by_impl, std_get_pos_impl, std_get_end_pos_impl, ); pub fn seekableStream(file: *File) SeekableStream { return .{.context = file}; } pub const Writer = std.io.Writer(*File, FileError, write); pub fn writer(file: *File) Writer { return .{.context = file}; } pub const Reader = std.io.Reader(*File, FileError, read); pub fn reader(file: *File) Reader { return .{.context = file}; } }; /// Test for normal situation. fn generic_seek_subtest(seek_type: File.SeekType, expected_from: usize) !void { try std.testing.expectEqual(expected_from, try File.generic_seek(1, 4, null, 0, seek_type)); try std.testing.expectEqual(expected_from + 5, try File.generic_seek(1, 4, null, 5, seek_type)); try std.testing.expectError(FileError.EndOfStream, File.generic_seek(1, 4, 4, 5, seek_type)); try std.testing.expectError(FileError.EndOfStream, File.generic_seek(1, 4, 4, -5, seek_type)); try std.testing.expectError(FileError.EndOfStream, File.generic_seek(1, 4, 4, -5, seek_type)); } test "File.generic_seek" { // Some normal situations try generic_seek_subtest(.FromStart, 0); try generic_seek_subtest(.FromHere, 1); try generic_seek_subtest(.FromEnd, 4); // We should be able to go to max_usize. const max_usize: usize = std.math.maxInt(usize); const max_isize: isize = std.math.maxInt(isize); const max_isize_as_usize = @bitCast(usize, max_isize); try std.testing.expectEqual(max_usize, max_isize_as_usize + max_isize_as_usize + 1); // Just a sanity check try std.testing.expectEqual(max_usize, try File.generic_seek(max_isize_as_usize + 1, 4, null, max_isize, .FromHere)); // However we shouldn't be able to go to past max_usize. try std.testing.expectError(FileError.EndOfStream, File.generic_seek(max_usize, 4, null, 5, .FromHere)); try std.testing.expectError(FileError.EndOfStream, File.generic_seek(max_usize, 4, 4, 5, .FromHere)); } /// File that reads from and writes to a provided fixed buffer. pub const BufferFile = struct { const Self = @This(); file: File = undefined, buffer: []u8 = undefined, position: usize = 0, written_up_until: usize = 0, pub fn init(self: *Self, buffer: []u8) void { self.file.read_impl = Self.read; self.file.write_impl = Self.write; self.file.seek_impl = Self.seek; self.file.close_impl = File.nop.close_impl; self.buffer = buffer; self.reset(); } pub fn reset(self: *Self) void { self.position = 0; self.written_up_until = 0; } pub fn read(file: *File, to: []u8) FileError!usize { const self = @fieldParentPtr(Self, "file", file); if (self.written_up_until > self.position) { const read_size = self.written_up_until - self.position; _ = utils.memory_copy_truncate(to[0..read_size], self.buffer[self.position..self.written_up_until]); self.position = self.written_up_until; return read_size; } return 0; } fn fill_unwritten(self: *Self, pos: usize) void { if (pos > self.written_up_until) { utils.memory_set(self.buffer[self.written_up_until..pos], 0); } } pub fn write(file: *File, from: []const u8) FileError!usize { const self = @fieldParentPtr(Self, "file", file); const write_size = @minimum(from.len, self.buffer.len - self.position); if (write_size > 0) { self.fill_unwritten(self.position); const new_position = self.position + write_size; _ = utils.memory_copy_truncate(self.buffer[self.position..new_position], from[0..write_size]); self.position = new_position; self.written_up_until = new_position; } return write_size; } pub fn seek(file: *File, offset: isize, seek_type: File.SeekType) FileError!usize { const self = @fieldParentPtr(Self, "file", file); const new_postion = try File.generic_seek( self.position, self.written_up_until, self.buffer.len, offset, seek_type); self.position = new_postion; return new_postion; } pub fn set_contents( self: *Self, offset: usize, new_contents: []const u8) utils.Error!void { self.fill_unwritten(offset); self.written_up_until = offset + try utils.memory_copy_error(self.buffer[offset..], new_contents); } pub fn get_contents(self: *Self) []u8 { return self.buffer[0..self.written_up_until]; } pub fn expect(self: *Self, expected_contents: []const u8) !void { try std.testing.expectEqualSlices(u8, expected_contents, self.get_contents()); } }; test "BufferFile" { var file_buffer: [128]u8 = undefined; var buffer_file = BufferFile{}; buffer_file.init(file_buffer[0..]); const file = &buffer_file.file; // Put "adc123" into `file_buffer`, read it into `result_buffer`, then // compare them. const string = "abc123"; const len = string.len; var result_buffer: [128]u8 = undefined; try buffer_file.set_contents(0, string); try std.testing.expectEqual(len, try file.read(result_buffer[0..])); // TODO: Show strings if fail? try std.testing.expectEqualSlices(u8, string[0..], result_buffer[0..len]); try buffer_file.expect(string[0..]); // Seek position 3 and then read three 3 to start of result buffer try std.testing.expectEqual(@as(usize, 3), try file.seek(3, .FromStart)); try std.testing.expectEqual(@as(usize, 3), try file.read(result_buffer[0..])); try std.testing.expectEqualSlices(u8, "123123", result_buffer[0..len]); // Try to read again at the end of the file try std.testing.expectEqual(@as(usize, 0), try file.read(result_buffer[0..])); try std.testing.expectEqual(len, buffer_file.position); // Try Writing Another String Over It const string2 = "cdef"; try std.testing.expectEqual(@as(usize, 2), try file.seek(2, .FromStart)); try std.testing.expectEqual(@as(usize, string2.len), try file.write(string2)); try std.testing.expectEqual(@as(usize, 0), try file.seek(0, .FromStart)); try std.testing.expectEqual(len, try file.read(result_buffer[0..])); try std.testing.expectEqualSlices(u8, "abcdef", result_buffer[0..len]); // Unwritten With Set Contents { buffer_file.reset(); const blank = "\x00\x00\x00\x00\x00\x00\x00\x00"; const str = "Georgios"; try buffer_file.set_contents(blank.len, str); try buffer_file.expect(blank ++ str); } // Unwritten With Seek { buffer_file.reset(); const str1 = "123"; try std.testing.expectEqual(str1.len, try file.write(str1)); try std.testing.expectEqual(str1.len, buffer_file.written_up_until); const blank = "\x00\x00\x00\x00\x00\x00\x00\x00"; const expected1 = str1 ++ blank; try std.testing.expectEqual(expected1.len, try file.seek(expected1.len, .FromStart)); try std.testing.expectEqual(str1.len, buffer_file.written_up_until); try buffer_file.expect(str1); const str2 = "4567"; try std.testing.expectEqual(str2.len, try file.write(str2)); const expected2 = expected1 ++ str2; try buffer_file.expect(expected2); } // Try to Write and Read End Of Buffer { buffer_file.reset(); const str = "xyz"; const pos = file_buffer.len - str.len; try buffer_file.set_contents(pos, str); try std.testing.expectEqual(pos, try file.seek(-@as(isize, str.len), .FromEnd)); try std.testing.expectEqual(str.len, try file.read(result_buffer[0..])); try std.testing.expectEqualSlices(u8, str[0..], result_buffer[0..str.len]); try std.testing.expectEqual(@as(usize, 0), try file.write("ijk")); try std.testing.expectEqual(@as(usize, 0), try file.read(result_buffer[0..])); } }
0
repos/georgios/libs
repos/georgios/libs/georgios/system_calls.zig
// Generated by scripts/codegen/generate_system_calls.py from // kernel/platform/system_calls.zig. See system_calls.zig for more info. const utils = @import("utils"); const georgios = @import("georgios.zig"); const ErrorCode = enum(u32) { Unknown = 1, OutOfBounds = 2, NotEnoughSource = 3, NotEnoughDestination = 4, OutOfMemory = 5, ZeroSizedAlloc = 6, InvalidFree = 7, FileNotFound = 8, NotADirectory = 9, NotAFile = 10, InvalidFilesystem = 11, Unsupported = 12, Internal = 13, InvalidFileId = 14, InvalidElfFile = 15, InvalidElfObjectType = 16, InvalidElfPlatform = 17, NoCurrentProcess = 18, NoSuchProcess = 19, DirectoryNotEmpty = 20, OutOfSpace = 21, FilesystemAlreadyMountedHere = 22, InvalidOpenOpts = 23, StreamTooLong = 24, EndOfStream = 25, DispatchInvalidPort = 26, DispatchInvalidMessage = 27, DispatchBrokenCall = 28, DispatchOpUnsupported = 29, AlreadyExists = 30, _, }; pub fn ValueOrError(comptime ValueType: type, comptime ErrorType: type) type { return union (enum) { const Self = @This(); value: ValueType, error_code: ErrorCode, pub fn set_value(self: *Self, value: ValueType) void { self.* = Self{.value = value}; } pub fn set_error(self: *Self, err: ErrorType) void { self.* = Self{.error_code = switch (ErrorType) { georgios.DispatchError => switch (err) { georgios.DispatchError.DispatchInvalidPort => ErrorCode.DispatchInvalidPort, georgios.DispatchError.DispatchInvalidMessage => ErrorCode.DispatchInvalidMessage, georgios.DispatchError.DispatchBrokenCall => ErrorCode.DispatchBrokenCall, georgios.DispatchError.DispatchOpUnsupported => ErrorCode.DispatchOpUnsupported, georgios.DispatchError.Unknown => ErrorCode.Unknown, georgios.DispatchError.OutOfBounds => ErrorCode.OutOfBounds, georgios.DispatchError.NotEnoughSource => ErrorCode.NotEnoughSource, georgios.DispatchError.NotEnoughDestination => ErrorCode.NotEnoughDestination, georgios.DispatchError.OutOfMemory => ErrorCode.OutOfMemory, georgios.DispatchError.ZeroSizedAlloc => ErrorCode.ZeroSizedAlloc, georgios.DispatchError.InvalidFree => ErrorCode.InvalidFree, }, georgios.BasicError => switch (err) { georgios.BasicError.Unknown => ErrorCode.Unknown, georgios.BasicError.OutOfBounds => ErrorCode.OutOfBounds, georgios.BasicError.NotEnoughSource => ErrorCode.NotEnoughSource, georgios.BasicError.NotEnoughDestination => ErrorCode.NotEnoughDestination, georgios.BasicError.OutOfMemory => ErrorCode.OutOfMemory, georgios.BasicError.ZeroSizedAlloc => ErrorCode.ZeroSizedAlloc, georgios.BasicError.InvalidFree => ErrorCode.InvalidFree, }, georgios.ExecError => switch (err) { georgios.ExecError.FileNotFound => ErrorCode.FileNotFound, georgios.ExecError.NotADirectory => ErrorCode.NotADirectory, georgios.ExecError.NotAFile => ErrorCode.NotAFile, georgios.ExecError.DirectoryNotEmpty => ErrorCode.DirectoryNotEmpty, georgios.ExecError.InvalidFilesystem => ErrorCode.InvalidFilesystem, georgios.ExecError.FilesystemAlreadyMountedHere => ErrorCode.FilesystemAlreadyMountedHere, georgios.ExecError.InvalidOpenOpts => ErrorCode.InvalidOpenOpts, georgios.ExecError.AlreadyExists => ErrorCode.AlreadyExists, georgios.ExecError.Unsupported => ErrorCode.Unsupported, georgios.ExecError.Internal => ErrorCode.Internal, georgios.ExecError.InvalidFileId => ErrorCode.InvalidFileId, georgios.ExecError.OutOfSpace => ErrorCode.OutOfSpace, georgios.ExecError.StreamTooLong => ErrorCode.StreamTooLong, georgios.ExecError.EndOfStream => ErrorCode.EndOfStream, georgios.ExecError.Unknown => ErrorCode.Unknown, georgios.ExecError.OutOfBounds => ErrorCode.OutOfBounds, georgios.ExecError.NotEnoughSource => ErrorCode.NotEnoughSource, georgios.ExecError.NotEnoughDestination => ErrorCode.NotEnoughDestination, georgios.ExecError.OutOfMemory => ErrorCode.OutOfMemory, georgios.ExecError.ZeroSizedAlloc => ErrorCode.ZeroSizedAlloc, georgios.ExecError.InvalidFree => ErrorCode.InvalidFree, georgios.ExecError.NoCurrentProcess => ErrorCode.NoCurrentProcess, georgios.ExecError.NoSuchProcess => ErrorCode.NoSuchProcess, georgios.ExecError.InvalidElfFile => ErrorCode.InvalidElfFile, georgios.ExecError.InvalidElfObjectType => ErrorCode.InvalidElfObjectType, georgios.ExecError.InvalidElfPlatform => ErrorCode.InvalidElfPlatform, }, georgios.fs.Error => switch (err) { georgios.fs.Error.FileNotFound => ErrorCode.FileNotFound, georgios.fs.Error.NotADirectory => ErrorCode.NotADirectory, georgios.fs.Error.NotAFile => ErrorCode.NotAFile, georgios.fs.Error.DirectoryNotEmpty => ErrorCode.DirectoryNotEmpty, georgios.fs.Error.InvalidFilesystem => ErrorCode.InvalidFilesystem, georgios.fs.Error.FilesystemAlreadyMountedHere => ErrorCode.FilesystemAlreadyMountedHere, georgios.fs.Error.InvalidOpenOpts => ErrorCode.InvalidOpenOpts, georgios.fs.Error.AlreadyExists => ErrorCode.AlreadyExists, georgios.fs.Error.Unsupported => ErrorCode.Unsupported, georgios.fs.Error.Internal => ErrorCode.Internal, georgios.fs.Error.InvalidFileId => ErrorCode.InvalidFileId, georgios.fs.Error.OutOfSpace => ErrorCode.OutOfSpace, georgios.fs.Error.StreamTooLong => ErrorCode.StreamTooLong, georgios.fs.Error.EndOfStream => ErrorCode.EndOfStream, georgios.fs.Error.Unknown => ErrorCode.Unknown, georgios.fs.Error.OutOfBounds => ErrorCode.OutOfBounds, georgios.fs.Error.NotEnoughSource => ErrorCode.NotEnoughSource, georgios.fs.Error.NotEnoughDestination => ErrorCode.NotEnoughDestination, georgios.fs.Error.OutOfMemory => ErrorCode.OutOfMemory, georgios.fs.Error.ZeroSizedAlloc => ErrorCode.ZeroSizedAlloc, georgios.fs.Error.InvalidFree => ErrorCode.InvalidFree, }, georgios.io.FileError => switch (err) { georgios.io.FileError.Unsupported => ErrorCode.Unsupported, georgios.io.FileError.Internal => ErrorCode.Internal, georgios.io.FileError.InvalidFileId => ErrorCode.InvalidFileId, georgios.io.FileError.OutOfSpace => ErrorCode.OutOfSpace, georgios.io.FileError.StreamTooLong => ErrorCode.StreamTooLong, georgios.io.FileError.EndOfStream => ErrorCode.EndOfStream, georgios.io.FileError.Unknown => ErrorCode.Unknown, georgios.io.FileError.OutOfBounds => ErrorCode.OutOfBounds, georgios.io.FileError.NotEnoughSource => ErrorCode.NotEnoughSource, georgios.io.FileError.NotEnoughDestination => ErrorCode.NotEnoughDestination, georgios.io.FileError.OutOfMemory => ErrorCode.OutOfMemory, georgios.io.FileError.ZeroSizedAlloc => ErrorCode.ZeroSizedAlloc, georgios.io.FileError.InvalidFree => ErrorCode.InvalidFree, }, georgios.threading.Error => switch (err) { georgios.threading.Error.NoCurrentProcess => ErrorCode.NoCurrentProcess, georgios.threading.Error.NoSuchProcess => ErrorCode.NoSuchProcess, georgios.threading.Error.Unknown => ErrorCode.Unknown, georgios.threading.Error.OutOfBounds => ErrorCode.OutOfBounds, georgios.threading.Error.NotEnoughSource => ErrorCode.NotEnoughSource, georgios.threading.Error.NotEnoughDestination => ErrorCode.NotEnoughDestination, georgios.threading.Error.OutOfMemory => ErrorCode.OutOfMemory, georgios.threading.Error.ZeroSizedAlloc => ErrorCode.ZeroSizedAlloc, georgios.threading.Error.InvalidFree => ErrorCode.InvalidFree, }, georgios.ThreadingOrFsError => switch (err) { georgios.ThreadingOrFsError.FileNotFound => ErrorCode.FileNotFound, georgios.ThreadingOrFsError.NotADirectory => ErrorCode.NotADirectory, georgios.ThreadingOrFsError.NotAFile => ErrorCode.NotAFile, georgios.ThreadingOrFsError.DirectoryNotEmpty => ErrorCode.DirectoryNotEmpty, georgios.ThreadingOrFsError.InvalidFilesystem => ErrorCode.InvalidFilesystem, georgios.ThreadingOrFsError.FilesystemAlreadyMountedHere => ErrorCode.FilesystemAlreadyMountedHere, georgios.ThreadingOrFsError.InvalidOpenOpts => ErrorCode.InvalidOpenOpts, georgios.ThreadingOrFsError.AlreadyExists => ErrorCode.AlreadyExists, georgios.ThreadingOrFsError.Unsupported => ErrorCode.Unsupported, georgios.ThreadingOrFsError.Internal => ErrorCode.Internal, georgios.ThreadingOrFsError.InvalidFileId => ErrorCode.InvalidFileId, georgios.ThreadingOrFsError.OutOfSpace => ErrorCode.OutOfSpace, georgios.ThreadingOrFsError.StreamTooLong => ErrorCode.StreamTooLong, georgios.ThreadingOrFsError.EndOfStream => ErrorCode.EndOfStream, georgios.ThreadingOrFsError.Unknown => ErrorCode.Unknown, georgios.ThreadingOrFsError.OutOfBounds => ErrorCode.OutOfBounds, georgios.ThreadingOrFsError.NotEnoughSource => ErrorCode.NotEnoughSource, georgios.ThreadingOrFsError.NotEnoughDestination => ErrorCode.NotEnoughDestination, georgios.ThreadingOrFsError.OutOfMemory => ErrorCode.OutOfMemory, georgios.ThreadingOrFsError.ZeroSizedAlloc => ErrorCode.ZeroSizedAlloc, georgios.ThreadingOrFsError.InvalidFree => ErrorCode.InvalidFree, georgios.ThreadingOrFsError.NoCurrentProcess => ErrorCode.NoCurrentProcess, georgios.ThreadingOrFsError.NoSuchProcess => ErrorCode.NoSuchProcess, }, else => @compileError( "Invalid ErrorType for " ++ @typeName(Self) ++ ".set_error: " ++ @typeName(ErrorType)), }}; } pub fn get(self: *const Self) ErrorType!ValueType { return switch (self.*) { Self.value => |value| return value, Self.error_code => |error_code| switch (ErrorType) { georgios.DispatchError => switch (error_code) { .DispatchInvalidPort => georgios.DispatchError.DispatchInvalidPort, .DispatchInvalidMessage => georgios.DispatchError.DispatchInvalidMessage, .DispatchBrokenCall => georgios.DispatchError.DispatchBrokenCall, .DispatchOpUnsupported => georgios.DispatchError.DispatchOpUnsupported, .Unknown => georgios.DispatchError.Unknown, .OutOfBounds => georgios.DispatchError.OutOfBounds, .NotEnoughSource => georgios.DispatchError.NotEnoughSource, .NotEnoughDestination => georgios.DispatchError.NotEnoughDestination, .OutOfMemory => georgios.DispatchError.OutOfMemory, .ZeroSizedAlloc => georgios.DispatchError.ZeroSizedAlloc, .InvalidFree => georgios.DispatchError.InvalidFree, else => georgios.BasicError.Unknown, }, georgios.BasicError => switch (error_code) { .Unknown => georgios.BasicError.Unknown, .OutOfBounds => georgios.BasicError.OutOfBounds, .NotEnoughSource => georgios.BasicError.NotEnoughSource, .NotEnoughDestination => georgios.BasicError.NotEnoughDestination, .OutOfMemory => georgios.BasicError.OutOfMemory, .ZeroSizedAlloc => georgios.BasicError.ZeroSizedAlloc, .InvalidFree => georgios.BasicError.InvalidFree, else => georgios.BasicError.Unknown, }, georgios.ExecError => switch (error_code) { .FileNotFound => georgios.ExecError.FileNotFound, .NotADirectory => georgios.ExecError.NotADirectory, .NotAFile => georgios.ExecError.NotAFile, .DirectoryNotEmpty => georgios.ExecError.DirectoryNotEmpty, .InvalidFilesystem => georgios.ExecError.InvalidFilesystem, .FilesystemAlreadyMountedHere => georgios.ExecError.FilesystemAlreadyMountedHere, .InvalidOpenOpts => georgios.ExecError.InvalidOpenOpts, .AlreadyExists => georgios.ExecError.AlreadyExists, .Unsupported => georgios.ExecError.Unsupported, .Internal => georgios.ExecError.Internal, .InvalidFileId => georgios.ExecError.InvalidFileId, .OutOfSpace => georgios.ExecError.OutOfSpace, .StreamTooLong => georgios.ExecError.StreamTooLong, .EndOfStream => georgios.ExecError.EndOfStream, .Unknown => georgios.ExecError.Unknown, .OutOfBounds => georgios.ExecError.OutOfBounds, .NotEnoughSource => georgios.ExecError.NotEnoughSource, .NotEnoughDestination => georgios.ExecError.NotEnoughDestination, .OutOfMemory => georgios.ExecError.OutOfMemory, .ZeroSizedAlloc => georgios.ExecError.ZeroSizedAlloc, .InvalidFree => georgios.ExecError.InvalidFree, .NoCurrentProcess => georgios.ExecError.NoCurrentProcess, .NoSuchProcess => georgios.ExecError.NoSuchProcess, .InvalidElfFile => georgios.ExecError.InvalidElfFile, .InvalidElfObjectType => georgios.ExecError.InvalidElfObjectType, .InvalidElfPlatform => georgios.ExecError.InvalidElfPlatform, else => georgios.BasicError.Unknown, }, georgios.fs.Error => switch (error_code) { .FileNotFound => georgios.fs.Error.FileNotFound, .NotADirectory => georgios.fs.Error.NotADirectory, .NotAFile => georgios.fs.Error.NotAFile, .DirectoryNotEmpty => georgios.fs.Error.DirectoryNotEmpty, .InvalidFilesystem => georgios.fs.Error.InvalidFilesystem, .FilesystemAlreadyMountedHere => georgios.fs.Error.FilesystemAlreadyMountedHere, .InvalidOpenOpts => georgios.fs.Error.InvalidOpenOpts, .AlreadyExists => georgios.fs.Error.AlreadyExists, .Unsupported => georgios.fs.Error.Unsupported, .Internal => georgios.fs.Error.Internal, .InvalidFileId => georgios.fs.Error.InvalidFileId, .OutOfSpace => georgios.fs.Error.OutOfSpace, .StreamTooLong => georgios.fs.Error.StreamTooLong, .EndOfStream => georgios.fs.Error.EndOfStream, .Unknown => georgios.fs.Error.Unknown, .OutOfBounds => georgios.fs.Error.OutOfBounds, .NotEnoughSource => georgios.fs.Error.NotEnoughSource, .NotEnoughDestination => georgios.fs.Error.NotEnoughDestination, .OutOfMemory => georgios.fs.Error.OutOfMemory, .ZeroSizedAlloc => georgios.fs.Error.ZeroSizedAlloc, .InvalidFree => georgios.fs.Error.InvalidFree, else => georgios.BasicError.Unknown, }, georgios.io.FileError => switch (error_code) { .Unsupported => georgios.io.FileError.Unsupported, .Internal => georgios.io.FileError.Internal, .InvalidFileId => georgios.io.FileError.InvalidFileId, .OutOfSpace => georgios.io.FileError.OutOfSpace, .StreamTooLong => georgios.io.FileError.StreamTooLong, .EndOfStream => georgios.io.FileError.EndOfStream, .Unknown => georgios.io.FileError.Unknown, .OutOfBounds => georgios.io.FileError.OutOfBounds, .NotEnoughSource => georgios.io.FileError.NotEnoughSource, .NotEnoughDestination => georgios.io.FileError.NotEnoughDestination, .OutOfMemory => georgios.io.FileError.OutOfMemory, .ZeroSizedAlloc => georgios.io.FileError.ZeroSizedAlloc, .InvalidFree => georgios.io.FileError.InvalidFree, else => georgios.BasicError.Unknown, }, georgios.threading.Error => switch (error_code) { .NoCurrentProcess => georgios.threading.Error.NoCurrentProcess, .NoSuchProcess => georgios.threading.Error.NoSuchProcess, .Unknown => georgios.threading.Error.Unknown, .OutOfBounds => georgios.threading.Error.OutOfBounds, .NotEnoughSource => georgios.threading.Error.NotEnoughSource, .NotEnoughDestination => georgios.threading.Error.NotEnoughDestination, .OutOfMemory => georgios.threading.Error.OutOfMemory, .ZeroSizedAlloc => georgios.threading.Error.ZeroSizedAlloc, .InvalidFree => georgios.threading.Error.InvalidFree, else => georgios.BasicError.Unknown, }, georgios.ThreadingOrFsError => switch (error_code) { .FileNotFound => georgios.ThreadingOrFsError.FileNotFound, .NotADirectory => georgios.ThreadingOrFsError.NotADirectory, .NotAFile => georgios.ThreadingOrFsError.NotAFile, .DirectoryNotEmpty => georgios.ThreadingOrFsError.DirectoryNotEmpty, .InvalidFilesystem => georgios.ThreadingOrFsError.InvalidFilesystem, .FilesystemAlreadyMountedHere => georgios.ThreadingOrFsError.FilesystemAlreadyMountedHere, .InvalidOpenOpts => georgios.ThreadingOrFsError.InvalidOpenOpts, .AlreadyExists => georgios.ThreadingOrFsError.AlreadyExists, .Unsupported => georgios.ThreadingOrFsError.Unsupported, .Internal => georgios.ThreadingOrFsError.Internal, .InvalidFileId => georgios.ThreadingOrFsError.InvalidFileId, .OutOfSpace => georgios.ThreadingOrFsError.OutOfSpace, .StreamTooLong => georgios.ThreadingOrFsError.StreamTooLong, .EndOfStream => georgios.ThreadingOrFsError.EndOfStream, .Unknown => georgios.ThreadingOrFsError.Unknown, .OutOfBounds => georgios.ThreadingOrFsError.OutOfBounds, .NotEnoughSource => georgios.ThreadingOrFsError.NotEnoughSource, .NotEnoughDestination => georgios.ThreadingOrFsError.NotEnoughDestination, .OutOfMemory => georgios.ThreadingOrFsError.OutOfMemory, .ZeroSizedAlloc => georgios.ThreadingOrFsError.ZeroSizedAlloc, .InvalidFree => georgios.ThreadingOrFsError.InvalidFree, .NoCurrentProcess => georgios.ThreadingOrFsError.NoCurrentProcess, .NoSuchProcess => georgios.ThreadingOrFsError.NoSuchProcess, else => georgios.BasicError.Unknown, }, else => @compileError( "Invalid ErrorType for " ++ @typeName(Self) ++ ".get: " ++ @typeName(ErrorType)), }, }; } }; } pub fn send(dispatch: georgios.Dispatch, opts: georgios.SendOpts) callconv(.Inline) georgios.DispatchError!void { var rv: ValueOrError(void, georgios.DispatchError) = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 100)), [arg1] "{ebx}" (@ptrToInt(&dispatch)), [arg2] "{ecx}" (@ptrToInt(&opts)), [arg3] "{edx}" (@ptrToInt(&rv)), ); return rv.get(); } pub fn recv(dst: georgios.PortId, opts: georgios.RecvOpts) callconv(.Inline) georgios.DispatchError!?georgios.Dispatch { var rv: ValueOrError(?georgios.Dispatch, georgios.DispatchError) = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 101)), [arg1] "{ebx}" (dst), [arg2] "{ecx}" (@ptrToInt(&opts)), [arg3] "{edx}" (@ptrToInt(&rv)), ); return rv.get(); } pub fn call(dispatch: georgios.Dispatch, opts: georgios.CallOpts) callconv(.Inline) georgios.DispatchError!georgios.Dispatch { var rv: ValueOrError(georgios.Dispatch, georgios.DispatchError) = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 102)), [arg1] "{ebx}" (@ptrToInt(&dispatch)), [arg2] "{ecx}" (@ptrToInt(&opts)), [arg3] "{edx}" (@ptrToInt(&rv)), ); return rv.get(); } pub fn print_string(s: []const u8) callconv(.Inline) void { asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 0)), [arg1] "{ebx}" (@ptrToInt(&s)), ); } pub fn add_dynamic_memory(inc: usize) callconv(.Inline) georgios.BasicError![]u8 { var rv: ValueOrError([]u8, georgios.BasicError) = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 1)), [arg1] "{ebx}" (inc), [arg2] "{ecx}" (@ptrToInt(&rv)), ); return rv.get(); } pub fn yield() callconv(.Inline) void { asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 2)), ); } pub fn exit(info: georgios.ExitInfo) callconv(.Inline) noreturn { asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 3)), [arg1] "{ebx}" (@ptrToInt(&info)), ); unreachable; } pub fn exec(info: *const georgios.ProcessInfo) callconv(.Inline) georgios.ExecError!georgios.ExitInfo { var rv: ValueOrError(georgios.ExitInfo, georgios.ExecError) = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 4)), [arg1] "{ebx}" (info), [arg2] "{ecx}" (@ptrToInt(&rv)), ); return rv.get(); } pub fn get_key(blocking: georgios.Blocking) callconv(.Inline) ?georgios.keyboard.Event { var key: ?georgios.keyboard.Event = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 5)), [arg1] "{ebx}" (@ptrToInt(&blocking)), [arg2] "{ecx}" (@ptrToInt(&key)), ); return key; } pub fn get_mouse_event(blocking: georgios.Blocking) callconv(.Inline) ?georgios.MouseEvent { var key: ?georgios.MouseEvent = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 28)), [arg1] "{ebx}" (@ptrToInt(&blocking)), [arg2] "{ecx}" (@ptrToInt(&key)), ); return key; } pub fn print_uint(value: u32, base: u8) callconv(.Inline) void { asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 7)), [arg1] "{ebx}" (value), [arg2] "{ecx}" (base), ); } pub fn file_open(path: []const u8, opts: georgios.fs.OpenOpts) callconv(.Inline) georgios.fs.Error!georgios.io.File.Id { var rv: ValueOrError(georgios.io.File.Id, georgios.fs.Error) = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 8)), [arg1] "{ebx}" (@ptrToInt(&path)), [arg2] "{ecx}" (@ptrToInt(&opts)), [arg3] "{edx}" (@ptrToInt(&rv)), ); return rv.get(); } pub fn file_read(id: georgios.io.File.Id, to: []u8) callconv(.Inline) georgios.io.FileError!usize { var rv: ValueOrError(usize, georgios.io.FileError) = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 9)), [arg1] "{ebx}" (id), [arg2] "{ecx}" (@ptrToInt(&to)), [arg3] "{edx}" (@ptrToInt(&rv)), ); return rv.get(); } pub fn file_write(id: georgios.io.File.Id, from: []const u8) callconv(.Inline) georgios.io.FileError!usize { var rv: ValueOrError(usize, georgios.io.FileError) = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 10)), [arg1] "{ebx}" (id), [arg2] "{ecx}" (@ptrToInt(&from)), [arg3] "{edx}" (@ptrToInt(&rv)), ); return rv.get(); } pub fn file_seek(id: georgios.io.File.Id, offset: isize, seek_type: georgios.io.File.SeekType) callconv(.Inline) georgios.io.FileError!usize { var rv: ValueOrError(usize, georgios.io.FileError) = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 11)), [arg1] "{ebx}" (id), [arg2] "{ecx}" (offset), [arg3] "{edx}" (seek_type), [arg4] "{edi}" (@ptrToInt(&rv)), ); return rv.get(); } pub fn file_close(id: georgios.io.File.Id) callconv(.Inline) georgios.fs.Error!void { var rv: ValueOrError(void, georgios.fs.Error) = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 12)), [arg1] "{ebx}" (id), [arg2] "{ecx}" (@ptrToInt(&rv)), ); return rv.get(); } pub fn get_cwd(buffer: []u8) callconv(.Inline) georgios.threading.Error![]const u8 { var rv: ValueOrError([]const u8, georgios.threading.Error) = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 13)), [arg1] "{ebx}" (@ptrToInt(&buffer)), [arg2] "{ecx}" (@ptrToInt(&rv)), ); return rv.get(); } pub fn set_cwd(dir: []const u8) callconv(.Inline) georgios.ThreadingOrFsError!void { var rv: ValueOrError(void, georgios.ThreadingOrFsError) = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 14)), [arg1] "{ebx}" (@ptrToInt(&dir)), [arg2] "{ecx}" (@ptrToInt(&rv)), ); return rv.get(); } pub fn sleep_milliseconds(ms: u64) callconv(.Inline) void { asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 15)), [arg1] "{ebx}" (@ptrToInt(&ms)), ); } pub fn sleep_seconds(s: u64) callconv(.Inline) void { asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 16)), [arg1] "{ebx}" (@ptrToInt(&s)), ); } pub fn time() callconv(.Inline) u64 { var rv: u64 = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 17)), [arg1] "{ebx}" (@ptrToInt(&rv)), ); return rv; } pub fn get_process_id() callconv(.Inline) u32 { var rv: u32 = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 18)), [arg1] "{ebx}" (@ptrToInt(&rv)), ); return rv; } pub fn get_thread_id() callconv(.Inline) u32 { var rv: u32 = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 19)), [arg1] "{ebx}" (@ptrToInt(&rv)), ); return rv; } pub fn overflow_kernel_stack() callconv(.Inline) void { asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 20)), ); } pub fn console_width() callconv(.Inline) u32 { var rv: u32 = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 21)), [arg1] "{ebx}" (@ptrToInt(&rv)), ); return rv; } pub fn console_height() callconv(.Inline) u32 { var rv: u32 = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 22)), [arg1] "{ebx}" (@ptrToInt(&rv)), ); return rv; } pub fn vbe_res() callconv(.Inline) ?utils.U32Point { var rv: ?utils.U32Point = undefined; asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 23)), [arg1] "{ebx}" (@ptrToInt(&rv)), ); return rv; } pub fn vbe_draw_raw_image_chunk(data: []const u8, w: u32, pos: utils.U32Point, last: *utils.U32Point) callconv(.Inline) void { asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 24)), [arg1] "{ebx}" (@ptrToInt(&data)), [arg2] "{ecx}" (w), [arg3] "{edx}" (@ptrToInt(&pos)), [arg4] "{edi}" (last), ); } pub fn vbe_flush_buffer() callconv(.Inline) void { asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 25)), ); } pub fn get_vbe_console_info(last_scroll_count: *u32, size: *utils.U32Point, pos: *utils.U32Point, glyph_size: *utils.U32Point) callconv(.Inline) void { asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 26)), [arg1] "{ebx}" (last_scroll_count), [arg2] "{ecx}" (size), [arg3] "{edx}" (pos), [arg4] "{edi}" (glyph_size), ); } pub fn vbe_fill_rect(rect: *const utils.U32Rect, pixel: u32) callconv(.Inline) void { asm volatile ("int $100" :: [syscall_number] "{eax}" (@as(u32, 27)), [arg1] "{ebx}" (rect), [arg2] "{ecx}" (pixel), ); }
0
repos/georgios/libs
repos/georgios/libs/georgios/test.zig
test "georgios test root" { _ = @import("io.zig"); }
0
repos/georgios/libs
repos/georgios/libs/georgios/georgios.zig
const std = @import("std"); const builtin = @import("builtin"); pub const utils = @import("utils"); pub const system_calls = @import("system_calls.zig"); pub const send = system_calls.send; pub const recv = system_calls.recv; pub const call = system_calls.call; pub const start = @import("start.zig"); pub const keyboard = @import("keyboard.zig"); pub const io = @import("io.zig"); pub const memory = @import("memory.zig"); pub const Directory = @import("fs.zig").Directory; pub const Console = @import("Console.zig"); pub var page_allocator: std.mem.Allocator = undefined; pub const is_cross_compiled = builtin.os.tag == .freestanding; const root = @import("root"); pub const is_kernel = is_cross_compiled and @hasDecl(root, "kernel_main"); pub const is_program = is_cross_compiled and @hasDecl(root, "main"); comptime { if (is_program) { // Force include program stubs. Based on std.zig. _ = start; } } pub const ExitInfo = struct { status: u8 = 0, crashed: bool = false, pub fn failed(self: *const ExitInfo) bool { return self.status != 0 or self.crashed; } }; pub var proc_info: if (is_program) *const ProcessInfo else void = undefined; pub fn panic(msg: []const u8, trace: ?*std.builtin.StackTrace) noreturn { _ = trace; var buffer: [128]u8 = undefined; var ts = utils.ToString{.buffer = buffer[0..]}; ts.string(proc_info.name) catch unreachable; ts.string(" panicked: ") catch unreachable; ts.string(msg) catch unreachable; ts.string("\n") catch unreachable; system_calls.print_string(ts.get()); system_calls.exit(.{.status = 1}); } pub const ProcessInfo = struct { path: []const u8, name: []const u8 = utils.empty_slice(u8, 1024), args: []const []const u8 = utils.empty_slice([]const u8, 1024), kernel_mode: bool = false, }; pub const fs = struct { pub const Error = error { FileNotFound, NotADirectory, NotAFile, DirectoryNotEmpty, InvalidFilesystem, FilesystemAlreadyMountedHere, InvalidOpenOpts, AlreadyExists, } || io.FileError; pub const OpenOptsKind = enum { ReadOnly, Write, }; pub const Exist = enum { CreateIfNeeded, MustExist, MustCreate, }; pub const OpenOpts = union (OpenOptsKind) { ReadOnly: struct { dir: bool = false, }, Write: struct { read: bool = false, exist: Exist = .CreateIfNeeded, append: bool = false, // Conflicts with MustCreate and truncate truncate: bool = false, // Conflicts with append }, pub fn check(self: *const OpenOpts) Error!void { switch (self.*) { .ReadOnly => {}, .Write => |w| { if (w.append and w.truncate or w.exist == .MustCreate and w.append) { return Error.InvalidOpenOpts; } }, } } pub fn must_exist(self: *const OpenOpts) bool { return switch (self.*) { .ReadOnly => true, .Write => |w| w.exist == .MustExist, }; } pub fn dir(self: *const OpenOpts) bool { return switch (self.*) { .ReadOnly => |*ropts| ropts.dir, else => false, }; } pub fn from_fopen_mode(mode: []const u8) Error!OpenOpts { if (mode.len == 0) return Error.InvalidOpenOpts; if (utils.starts_with(mode, "r+")) { return OpenOpts{.Write = .{.read = true}}; } if (utils.starts_with(mode, "r")) { return OpenOpts.ReadOnly; } if (utils.starts_with(mode, "w+x")) { return OpenOpts{.Write = .{.truncate = true, .read = true, .exist = .MustCreate}}; } if (utils.starts_with(mode, "wx")) { return OpenOpts{.Write = .{.truncate = true, .exist = .MustCreate}}; } if (utils.starts_with(mode, "w+")) { return OpenOpts{.Write = .{.truncate = true, .read = true}}; } if (utils.starts_with(mode, "w")) { return OpenOpts{.Write = .{.truncate = true}}; } if (utils.starts_with(mode, "a+")) { return OpenOpts{.Write = .{.append = true, .read = true}}; } if (utils.starts_with(mode, "a")) { return OpenOpts{.Write = .{.append = true}}; } return Error.InvalidOpenOpts; } }; pub fn open(path: []const u8, opts: OpenOpts) Error!io.File { try opts.check(); return io.File{.id = try system_calls.file_open(path, opts)}; } pub fn fopen(path: []const u8, mode: []const u8) Error!io.File { return open(path, try OpenOpts.from_fopen_mode(mode)); } pub const NodeKind = struct { file: bool = false, directory: bool = false, }; }; pub const BasicError = utils.Error || memory.MemoryError; pub const threading = struct { pub const Error = error { NoCurrentProcess, NoSuchProcess, } || BasicError; }; pub const ThreadingOrFsError = fs.Error || threading.Error; pub const elf = struct { pub const Error = error { InvalidElfFile, InvalidElfObjectType, InvalidElfPlatform, }; }; pub const ExecError = ThreadingOrFsError || elf.Error; pub const Blocking = enum { Blocking, NonBlocking, }; pub const ConsoleWriter = struct { pub const Error = error{}; pub const Writer = std.io.Writer(ConsoleWriter, Error, write); pub fn writer(self: ConsoleWriter) Writer { return Writer{.context = self}; } pub fn write(self: ConsoleWriter, bytes: []const u8) Error!usize { _ = self; if (is_program) { system_calls.print_string(bytes); } else if (is_kernel) { root.kernel.print.string(bytes); } else { @compileError("ConsoleWriter doesn't know what to do here. " ++ "is_program and is_kernel are both false"); } return bytes.len; } }; pub fn get_console_writer() ConsoleWriter.Writer { const cw = ConsoleWriter{}; return cw.writer(); } pub const MouseEvent = struct { rmb_pressed: bool, mmb_pressed: bool, lmb_pressed: bool, delta: utils.Point(i32), }; pub const DispatchError = error { DispatchInvalidPort, DispatchInvalidMessage, DispatchBrokenCall, DispatchOpUnsupported, } || BasicError; pub const PortId = u32; pub const MetaPort: PortId = 1; pub const FirstDynamicPort = MetaPort; pub const Dispatch = struct { msg: []const u8, dst: PortId = 0, src: PortId = 0, }; pub const Blocks = union (enum) { NonBlocking, Blocking: ?u32, }; pub const SendOpts = struct { blocks: Blocks = .{.Blocking = null}, }; pub const RecvOpts = struct { blocks: Blocks = .{.Blocking = null}, }; pub const CallOpts = struct { blocks: Blocks = .{.Blocking = null}, }; pub fn msg_cast(comptime T: type, dispatch: Dispatch) DispatchError!*const T { if (dispatch.msg.len == @sizeOf(T)) { return @alignCast(@alignOf(T), &std.mem.bytesAsSlice(T, dispatch.msg)[0]); } return DispatchError.DispatchInvalidMessage; } pub fn send_value(value: anytype, dst: PortId, opts: SendOpts) DispatchError!void { try send(.{.msg = std.mem.asBytes(value), .dst = dst}, opts); }
0
repos/georgios/libs
repos/georgios/libs/georgios/memory.zig
const std = @import("std"); const utils = @import("utils"); const syscalls = @import("system_calls.zig"); pub const AllocError = error { OutOfMemory, ZeroSizedAlloc, }; pub const FreeError = error { InvalidFree, }; pub const MemoryError = AllocError || FreeError; pub const PageAllocator = struct { last_alloc_end: ?usize = null, fn alloc(self: *PageAllocator, n: usize, ptr_align: u29, len_align: u29, ra: usize) ![]u8 { _ = len_align; _ = ra; const page_size = 4096; const min_size = utils.align_up(n, page_size); const align_diff = ptr_align - @minimum(page_size, ptr_align); const area = syscalls.add_dynamic_memory( if (align_diff <= min_size - n) min_size else utils.align_up(min_size + align_diff, page_size)) catch return std.mem.Allocator.Error.OutOfMemory; defer self.last_alloc_end = area.len; if (self.last_alloc_end) |last| { return area[last..]; } return area[0..]; } fn resize(self: *PageAllocator, buf: []u8, buf_align: u29, new_len: usize, len_align: u29, ret_addr: usize) ?usize { _ = self; _ = buf_align; _ = len_align; _ = ret_addr; // TODO if (new_len <= buf.len) { return new_len; } else { return null; } } fn free(self: *PageAllocator, buf: []u8, buf_align: u29, ret_addr: usize) void { _ = self; _ = buf; _ = buf_align; _ = ret_addr; // TODO } pub fn allocator(self: *PageAllocator) std.mem.Allocator { return std.mem.Allocator.init(self, alloc, resize, free); } };
0
repos/georgios/libs
repos/georgios/libs/georgios/start.zig
const root = @import("root"); const system_calls = @import("system_calls.zig"); const georgios = @import("georgios.zig"); export fn _start() callconv(.Naked) noreturn { georgios.proc_info = asm volatile ("xor %%ebp, %%ebp" : [info] "={esp}" (-> *const georgios.ProcessInfo)); @call(.{ .modifier = .never_inline }, start, .{}); } fn start() noreturn { system_calls.exit(main_wrapper()); } fn main_wrapper() georgios.ExitInfo { var pa = georgios.memory.PageAllocator{}; georgios.page_allocator = pa.allocator(); var exit_info: georgios.ExitInfo = .{}; const ret = @typeInfo(@typeInfo(@TypeOf(root.main)).Fn.return_type.?); switch (ret) { .Void => root.main(), .Int => exit_info.status = root.main(), .ErrorUnion => |eu| switch (@typeInfo(eu.payload)) { .Void => { if (root.main()) |_| {} else |e| { @panic(@errorName(e)); } }, .Int => { if (root.main()) |status| { exit_info.status = status; } else |e| { @panic(@errorName(e)); } }, else => @compileError("main return type not supported"), }, else => @compileError("main return type not supported"), } return exit_info; }
0
repos/georgios/libs
repos/georgios/libs/georgios/keys.zig
// Generated by scripts/codegen/keys.py pub const Key = enum { Key_a, Key_b, Key_c, Key_d, Key_e, Key_f, Key_g, Key_h, Key_i, Key_j, Key_k, Key_l, Key_m, Key_n, Key_o, Key_p, Key_q, Key_r, Key_s, Key_t, Key_u, Key_v, Key_w, Key_x, Key_y, Key_z, Key_A, Key_B, Key_C, Key_D, Key_E, Key_F, Key_G, Key_H, Key_I, Key_J, Key_K, Key_L, Key_M, Key_N, Key_O, Key_P, Key_Q, Key_R, Key_S, Key_T, Key_U, Key_V, Key_W, Key_X, Key_Y, Key_Z, Key_0, Key_1, Key_2, Key_3, Key_4, Key_5, Key_6, Key_7, Key_8, Key_9, Key_Keypad0, Key_Keypad1, Key_Keypad2, Key_Keypad3, Key_Keypad4, Key_Keypad5, Key_Keypad6, Key_Keypad7, Key_Keypad8, Key_Keypad9, Key_Enter, Key_KeypadEnter, Key_Tab, Key_Backspace, Key_Space, Key_Slash, Key_KeypadSlash, Key_Backslash, Key_Period, Key_KeypadPeriod, Key_Question, Key_Exclamation, Key_Comma, Key_Colon, Key_SemiColon, Key_BackTick, Key_SingleQuote, Key_DoubleQuote, Key_Asterisk, Key_KeypadAsterisk, Key_At, Key_Ampersand, Key_Percent, Key_Caret, Key_Pipe, Key_Tilde, Key_Underscore, Key_Pound, Key_Dollar, Key_Plus, Key_KeypadPlus, Key_Minus, Key_KeypadMinus, Key_Equals, Key_GreaterThan, Key_LessThan, Key_LeftBrace, Key_RightBrace, Key_LeftSquareBracket, Key_RightSquareBracket, Key_LeftParentheses, Key_RightParentheses, Key_Escape, Key_LeftShift, Key_RightShift, Key_LeftAlt, Key_RightAlt, Key_LeftControl, Key_RightControl, Key_CapsLock, Key_NumberLock, Key_ScrollLock, Key_F1, Key_F2, Key_F3, Key_F4, Key_F5, Key_F6, Key_F7, Key_F8, Key_F9, Key_F10, Key_F11, Key_F12, Key_CursorLeft, Key_CursorRight, Key_CursorUp, Key_CursorDown, Key_PageUp, Key_PageDown, Key_AcpiPower, Key_AcpiSleep, Key_AcpiWake, Key_Home, Key_End, Key_Insert, Key_Delete, Key_PrintScreen, Key_Pause, };
0
repos/georgios/libs
repos/georgios/libs/georgios/fs.zig
// Generated from libs/georgios/fs.idl by ifgen.py const georgios = @import("georgios.zig"); pub const Directory = struct { pub fn create(self: *Directory, path: []const u8, kind: georgios.fs.NodeKind) georgios.DispatchError!void { return self._create_impl(self, path, kind); } pub fn unlink(self: *Directory, path: []const u8) georgios.DispatchError!void { return self._unlink_impl(self, path); } pub const _ArgVals = union (enum) { _create: struct { path: []const u8, kind: georgios.fs.NodeKind, }, _unlink: []const u8, }; pub const _RetVals = union (enum) { _create: void, _unlink: void, }; pub const _dispatch_impls = struct { pub fn _create_impl(self: *Directory, path: []const u8, kind: georgios.fs.NodeKind) georgios.DispatchError!void { return georgios.send_value(&_ArgVals{._create = .{.path = path, .kind = kind}}, self._port_id, .{}); } pub fn _unlink_impl(self: *Directory, path: []const u8) georgios.DispatchError!void { return georgios.send_value(&_ArgVals{._unlink = path}, self._port_id, .{}); } }; pub fn _recv_value(self: *Directory, dispatch: georgios.Dispatch) georgios.DispatchError!void { return switch ((try georgios.msg_cast(_ArgVals, dispatch)).*) { ._create => |val| self._create_impl(self, val.path, val.kind), ._unlink => |val| self._unlink_impl(self, val), }; } _port_id: georgios.PortId, _create_impl: fn(self: *Directory, path: []const u8, kind: georgios.fs.NodeKind) georgios.DispatchError!void = _dispatch_impls._create_impl, _unlink_impl: fn(self: *Directory, path: []const u8) georgios.DispatchError!void = _dispatch_impls._unlink_impl, };
0
repos/georgios
repos/georgios/docs/x86_32_memory.md
# x86\_32 Memory ## Map of Real Memory | Contents | Start | End | | ------------------- | ------------------------ | -------------------- | | Available | 0 | ? | | BIOS Area | ? | `_KERNEL_REAL_START` | | Kernel | `_KERNEL_REAL_START` | `_KERNEL_REAL_END` | | Multiboot Info | `multiboot_info_pointer` | `kernel_page_tables` | | Initial Page Tables | `kernel_page_tables` | ? | | Available | ? | ? | ## Notes - Kernel might have internal spaces that will be recycled as available frames after they are done being used. - Multiboot Info will be copied from its original location in `kernel_main_wrapper` so we don't accidentally overwrite it. Its memory space will be page aligned so it can be recycled into frames when we are done with it. - The initial page tables set up in `kernel_main_wrapper` will map the real memory from 0 to the end of the page table of the end of Multiboot Info. Until full memory management kicks in we shouldn't need to interact with any other range of memory. ## Resources - https://ethv.net/workshops/osdev/notes/notes-2.html - Used linked list described there for the physical memory allocator.
0
repos/georgios
repos/georgios/docs/context_swtiching.md
# Context Switching ## `iret` On a x86\_32 CPU, the `iret` instruction uses the stack to transition the CPU from one state to another. It does this by popping new values for the `eip`, `cs`, and `eflags` registers from the stack in that order. If the `cs` selector is for a different ring, then `iret` will also pop new values for the `esp` and `ss` registers from the stack in that order. ## Specific Resources Used - [Alex Dzoba OS Interrupts](https://alex.dzyoba.com/blog/os-interrupts/) - [Stackoverflow Question: Switching to User-mode using iret](https://stackoverflow.com/questions/6892421/switching-to-user-mode-using-iret) - [JamesM's kernel development tutorials part 10](https://web.archive.org/web/20160326062442/http://jamesmolloy.co.uk/tutorial\_html/10.-User%20Mode.html) - [Skelix OS tutorial part 4](https://web.archive.org/web/20170103060529/http://skelix.net/skelixos/tutorial04_en.html) - http://ethv.net/workshops/osdev/notes/notes-4.html - https://forum.osdev.org/viewtopic.php?f=1&t=36922
0
repos/georgios
repos/georgios/docs/boot.md
# Boot ## Boot Loader Georgios relies on GRUB for setting up the Multiboot structure, loading the kernel, and getting the computer to go to `_start`. *NOTE: `_start` is defined in boot.s and is declared as the entry point in linking.ld.* ## `_start` `_start` is the first phase of kernel initialization. It is completely written in assembly and does the following: - Move the pointer to the multiboot structure from `%eax` where GRUB left it to the place in memory where `platform_init()` will take it as its argument. - In one continuous motion: - Set the Page Table so that kernel data and code (currently starting at 0x00100000) can also be read and written starting at 0xC0100000, called a higher kernel. - Enabled Paging - Jump to the higher kernel - Disable the lower kernel memory range. - Set up the Stack - Call `platform_init()` and `kernel_main()` ## `platform_init()` `platform_init()` is the second phase of kernel initialization. It is mostly written in C and does the following: - Set up the IBM PC basic text graphics mode. - Set up low level structures like the GDT and IDT. - Initialize various hardware related tasks specific to PCs. - Converts Multiboot information so that the kernel can use when governing the computer. ## `kernel_main()` `kernel_main()` is the third phase of kernel initialization. It is meant to be platform independent. Currently it just sets up memory to be allocated as needed by programs. In the future it will initialize higher level kernel services, like the scheduler. It will then jump into the schedule loop and start processes.
0
repos/georgios
repos/georgios/docs/piasafs.md
# PiasaFS Georgios ultimate purpose is to be a host for a experimental file system based on tags called [Piasa](https://en.wikipedia.org/wiki/Piasa)FS (pronounced *PIE-A-SAW*). There are what could be thought of "wrappers" for traditional file systems to use tag based systems like TMSU and Tagsistant. The goal of this project will not be to be preferable over them for everyday use (certainly not possible to do at the current rate and with my abilities), but to see what a native file system build around tags could look like and how the rest of the OS could or couldn't leverage that. ## Design Notes - Might need some sort of nested namespaces for the system and applications to use privately. - Will need a compatibility layer with software expecting a traditional file system. For `fopen` at least, Clib could default to the traditional mode, unless the `fopen` was given a special token in the mode parameter. ### Notes on Existing Semantic Systems #### Unnamed Website The basis of PiasaFS usage, at least at first, will be a unnamed website, with an advanced tagging system for content. This is a summery of the system along with my comments. - `foo`: Content tagged with `foo`. - `a b`: Content tagged with both `a` and `b`. - `~a ~b`: Content tagged with at least `a` or `b`. - A infix operator is tempting here - `not_interesting`: Tags can't have spaces. - Simplifies syntax a lot, but it would be nice to have arbitrary tags. - `a -b`: Content tagged with `a`, but not `b` - Simple, Google-like, hard to say no to it. - `taxes_*`: Wildcard, would match `taxes_2021` or `taxes_1998`. - The Website restricts usage of this and for good reason. I would have to see. - `METATAG:EXPR` is where things get interesting. Content can be filtered by date ranges, user, and other metadata. - Expression examples: - `>100`, `<=100`: Integer Ranges - `2019-08-04`, `today`, `5_days_ago`, `today..5_years_ago`: dates and date ranges - Hashes - `order:ATTRIBUTE`: Order by an attribute - Will be implemented in some way into PiasaFS #### Other Things to Research - [RDF](https://en.wikipedia.org/wiki/Resource_Description_Framework) ## Also See - [Wikipedia: "Semantic file system"](https://en.wikipedia.org/wiki/Semantic_file_system)
0
repos
repos/koino/README.md
# [koino](https://github.com/kivikakk/koino) ![Build status](https://github.com/kivikakk/koino/workflows/Zig/badge.svg) ![Spec Status: 671/671](https://img.shields.io/badge/specs-671%2F671-brightgreen.svg) Zig port of [Comrak](https://github.com/kivikakk/comrak). Maintains 100% spec-compatibility with [GitHub Flavored Markdown](https://github.github.com/gfm/). ## Getting started ## Using koino as a library * Get Zig 0.12 https://ziglang.org/ * Using Zig 0.13? See [`zig-0.13.0`](https://github.com/kivikakk/koino/tree/zig-0.13.0) branch. * Start a new project with `zig init-exe` / `zig init-lib` * Add koino via the zig package manager: ```console $ zig fetch --save https://github.com/kivikakk/koino/archive/<commit hash>.tar.gz ``` * Add the following to your `build.zig`'s `build` function: ```zig const koino_pkg = b.dependency("koino", .{ .optimize = optimize, .target = target }); exe.root_module.addImport("koino", koino_pkg.module("koino")); ``` * Have a look at the bottom of [`parser.zig`](https://github.com/kivikakk/koino/blob/main/src/parser.zig) to see some test usage. ### Using it as a CLI executable * Clone this repository: ```console $ git clone https://github.com/kivikakk/koino ``` * Build ```console $ zig build ``` * Use `./zig-out/bin/koino` ### For development purposes * Clone this repository with submodules for the `cmark-gfm` dependency: ```console $ git clone --recurse-submodules https://github.com/kivikakk/koino $ cd koino ``` * Build and run the spec suite. ```console $ zig build test $ make spec ``` ## Usage Command line: ```console $ koino --help Usage: koino [-hu] [-e <EXTENSION>...] [--smart] Options: -h, --help Display this help and exit -u, --unsafe Render raw HTML and dangerous URLs -e, --extension <EXTENSION>... Enable an extension. (table,strikethrough,autolink,tagfilter) --smart Use smart punctuation. ``` Library: Documentation is TODO — see [LoLa](https://github.com/MasterQ32/LoLa/blob/d02b0e6774fedbe07276d8af51e1a305cc58fb34/src/tools/render-md-page.zig#L157) for an example of use. Note also the [`build.zig`](https://github.com/MasterQ32/LoLa/blob/d02b0e6774fedbe07276d8af51e1a305cc58fb34/build.zig#L41-L50) declaration.
0
repos
repos/koino/build.zig.zon
.{ .name = "koino", .version = "0.1.0", .minimum_zig_version = "0.12.0", .dependencies = .{ .@"libpcre.zig" = .{ .url = "https://github.com/kivikakk/libpcre.zig/archive/1dc32407368651bc0118b752288c3b87af12ff50.tar.gz", .hash = "1220039ac1b212be156069f9ed7e066571a8da5acc8f1adc33eb1a5c3bcbb4a8b47b", }, .@"htmlentities.zig" = .{ .url = "https://github.com/kivikakk/htmlentities.zig/archive/6c6ab63c9fce8317049d332377db760a2c8932e7.tar.gz", .hash = "1220740c4161d6e2c7601a86504dcb3dbd7814cac5083d0263ad0fd35b473a416325", }, .zunicode = .{ .url = "https://github.com/kivikakk/zunicode/archive/711afa05416d1d1512bccf798a332be494d08f5f.tar.gz", .hash = "1220d530a8c14f65f6184e695bf04d14c97b8e341cd391c579f9e85990c6262d2fec", }, .clap = .{ .url = "https://github.com/Hejsil/zig-clap/archive/1d413d9ffcbd394904fa683ca975b5adbc19e615.tar.gz", .hash = "1220949d4e88864579067b6d4cdad6476c6176f27e782782c2c39b7f2c4817a10efb", }, }, .paths = .{ "build.zig", "Makefile", "build.zig.zon", "src", "LICENSE", "README.md", }, }
0
repos
repos/koino/build.zig
const std = @import("std"); pub fn build(b: *std.Build) !void { const target = b.standardTargetOptions(.{}); const optimize = b.standardOptimizeOption(.{}); var deps = std.StringHashMap(*std.Build.Module).init(b.allocator); const pcre_pkg = b.dependency("libpcre.zig", .{ .optimize = optimize, .target = target }); const htmlentities_pkg = b.dependency("htmlentities.zig", .{ .optimize = optimize, .target = target }); const zunicode_pkg = b.dependency("zunicode", .{ .optimize = optimize, .target = target }); const clap_pkg = b.dependency("clap", .{ .optimize = optimize, .target = target }); try deps.put("clap", clap_pkg.module("clap")); try deps.put("libpcre", pcre_pkg.module("libpcre")); try deps.put("zunicode", zunicode_pkg.module("zunicode")); try deps.put("htmlentities", htmlentities_pkg.module("htmlentities")); const mod = b.addModule("koino", .{ .root_source_file = b.path("src/koino.zig"), .target = target, .optimize = optimize, }); try addCommonRequirements(mod, &deps); const exe = b.addExecutable(.{ .name = "koino", .root_source_file = b.path("src/main.zig"), .target = target, .optimize = optimize, }); try addCommonRequirements(&exe.root_module, &deps); b.installArtifact(exe); const run_cmd = b.addRunArtifact(exe); run_cmd.step.dependOn(b.getInstallStep()); const run_step = b.step("run", "Run the app"); run_step.dependOn(&run_cmd.step); if (b.args) |args| { run_cmd.addArgs(args); } const test_exe = b.addTest(.{ .name = "test", .root_source_file = b.path("src/main.zig"), .target = target, .optimize = optimize, }); try addCommonRequirements(&test_exe.root_module, &deps); const test_step = b.step("test", "Run all the tests"); test_step.dependOn(&test_exe.step); } fn addCommonRequirements(mod: *std.Build.Module, deps: *const std.StringHashMap(*std.Build.Module)) !void { var it = deps.iterator(); while (it.next()) |entry| { mod.addImport(entry.key_ptr.*, entry.value_ptr.*); } mod.linkSystemLibrary("c", .{}); }
0
repos/koino
repos/koino/src/inlines.zig
const std = @import("std"); const mem = std.mem; const ascii = std.ascii; const assert = std.debug.assert; const zunicode = @import("zunicode"); const nodes = @import("nodes.zig"); const strings = @import("strings.zig"); const Options = @import("options.zig").Options; const scanners = @import("scanners.zig"); const Reference = @import("parser.zig").Reference; const MAX_BACKTICKS = 80; const MAX_LINK_LABEL_LENGTH = 1000; pub const ParseError = error{ OutOfMemory, InvalidUtf8 }; pub const Subject = struct { allocator: mem.Allocator, refmap: *std.StringHashMap(Reference), options: *const Options, input: []const u8, pos: usize = 0, last_delimiter: ?*Delimiter = null, brackets: std.ArrayList(Bracket), backticks: [MAX_BACKTICKS + 1]usize = [_]usize{0} ** (MAX_BACKTICKS + 1), scanned_for_backticks: bool = false, special_chars: *const [256]bool, skip_chars: *const [256]bool, pub fn init(allocator: mem.Allocator, refmap: *std.StringHashMap(Reference), options: *const Options, special_chars: *const [256]bool, skip_chars: *const [256]bool, input: []const u8) Subject { const s = Subject{ .allocator = allocator, .refmap = refmap, .options = options, .input = input, .brackets = std.ArrayList(Bracket).init(allocator), .special_chars = special_chars, .skip_chars = skip_chars, }; return s; } pub fn setCharsForOptions(options: *const Options, special_chars: *[256]bool, skip_chars: *[256]bool) void { for ([_]u8{ '\n', '\r', '_', '*', '"', '`', '\'', '\\', '&', '<', '[', ']', '!' }) |c| { special_chars.*[c] = true; } if (options.extensions.strikethrough) { special_chars.*['~'] = true; skip_chars.*['~'] = true; } if (options.parse.smart) { for ([_]u8{ '"', '\'', '.', '-' }) |c| { special_chars.*[c] = true; } } } pub fn deinit(self: *Subject) void { self.brackets.deinit(); } pub fn parseInline(self: *Subject, node: *nodes.AstNode) ParseError!bool { const c = self.peekChar() orelse return false; var new_inl: ?*nodes.AstNode = null; switch (c) { 0 => return false, '\n', '\r' => new_inl = try self.handleNewLine(), '`' => new_inl = try self.handleBackticks(), '\\' => new_inl = try self.handleBackslash(), '&' => new_inl = try self.handleEntity(), '<' => new_inl = try self.handlePointyBrace(), '*', '_', '\'', '"' => new_inl = try self.handleDelim(c), '-' => new_inl = try self.handleHyphen(), '.' => new_inl = try self.handlePeriod(), '[' => { self.pos += 1; const inl = try self.makeInline(.{ .Text = try self.allocator.dupe(u8, "[") }); try self.pushBracket(.Link, inl); new_inl = inl; }, ']' => new_inl = try self.handleCloseBracket(), '!' => { self.pos += 1; if (self.peekChar() orelse 0 == '[') { self.pos += 1; const inl = try self.makeInline(.{ .Text = try self.allocator.dupe(u8, "![") }); try self.pushBracket(.Image, inl); new_inl = inl; } else { new_inl = try self.makeInline(.{ .Text = try self.allocator.dupe(u8, "!") }); } }, else => { if (self.options.extensions.strikethrough and c == '~') { new_inl = try self.handleDelim(c); } else { const endpos = self.findSpecialChar(); var contents = self.input[self.pos..endpos]; self.pos = endpos; if (self.peekChar()) |n| { if (strings.isLineEndChar(n)) { contents = strings.rtrim(contents); } } new_inl = try self.makeInline(.{ .Text = try self.allocator.dupe(u8, contents) }); } }, } if (new_inl) |inl| { node.append(inl); } return true; } fn makeInline(self: *Subject, value: nodes.NodeValue) !*nodes.AstNode { return nodes.AstNode.create(self.allocator, .{ .value = value, .content = std.ArrayList(u8).init(self.allocator), }); } fn makeAutolink(self: *Subject, url: []const u8, kind: nodes.AutolinkType) !*nodes.AstNode { var inl = try self.makeInline(.{ .Link = .{ .url = try strings.cleanAutolink(self.allocator, url, kind), .title = "", }, }); inl.append(try self.makeInline(.{ .Text = try strings.unescapeHtml(self.allocator, url) })); return inl; } pub fn processEmphasis(self: *Subject, stack_bottom: ?*Delimiter) !void { var closer = self.last_delimiter; var openers_bottom: [3][128]?*Delimiter = [_][128]?*Delimiter{[_]?*Delimiter{null} ** 128} ** 3; for (&openers_bottom) |*i| { i['*'] = stack_bottom; i['_'] = stack_bottom; i['\''] = stack_bottom; i['"'] = stack_bottom; if (self.options.extensions.strikethrough) i['~'] = stack_bottom; } while (closer != null and closer.?.prev != stack_bottom) { closer = closer.?.prev; } while (closer != null) { if (closer.?.can_close) { var opener = closer.?.prev; var opener_found = false; while (opener != null and opener != openers_bottom[closer.?.length % 3][closer.?.delim_char]) { if (opener.?.can_open and opener.?.delim_char == closer.?.delim_char) { const odd_match = (closer.?.can_open or opener.?.can_close) and ((opener.?.length + closer.?.length) % 3 == 0) and !(opener.?.length % 3 == 0 and closer.?.length % 3 == 0); if (!odd_match) { opener_found = true; break; } } opener = opener.?.prev; } const old_closer = closer; if (closer.?.delim_char == '*' or closer.?.delim_char == '_' or (self.options.extensions.strikethrough and closer.?.delim_char == '~')) { if (opener_found) { closer = try self.insertEmph(opener.?, closer.?); } else { closer = closer.?.next; } } else if (closer.?.delim_char == '\'') { var al = closer.?.inl.data.value.text_mut().?; self.allocator.free(al.*); al.* = try self.allocator.dupe(u8, "’"); if (opener_found) { al = opener.?.inl.data.value.text_mut().?; self.allocator.free(al.*); al.* = try self.allocator.dupe(u8, "‘"); } closer = closer.?.next; } else if (closer.?.delim_char == '"') { var al = closer.?.inl.data.value.text_mut().?; self.allocator.free(al.*); al.* = try self.allocator.dupe(u8, "”"); if (opener_found) { al = opener.?.inl.data.value.text_mut().?; self.allocator.free(al.*); al.* = try self.allocator.dupe(u8, "“"); } closer = closer.?.next; } if (!opener_found) { const ix = old_closer.?.length % 3; openers_bottom[ix][old_closer.?.delim_char] = old_closer.?.prev; if (!old_closer.?.can_open) { self.removeDelimiter(old_closer.?); } } } else { closer = closer.?.next; } } while (self.last_delimiter != null and self.last_delimiter != stack_bottom) { self.removeDelimiter(self.last_delimiter.?); } } fn removeDelimiter(self: *Subject, delimiter: *Delimiter) void { if (delimiter.next == null) { assert(delimiter == self.last_delimiter.?); self.last_delimiter = delimiter.prev; } else { delimiter.next.?.prev = delimiter.prev; } if (delimiter.prev != null) { delimiter.prev.?.next = delimiter.next; } self.allocator.destroy(delimiter); } pub fn popBracket(self: *Subject) bool { return self.brackets.popOrNull() != null; } fn eof(self: *Subject) bool { return self.pos >= self.input.len; } pub fn peekChar(self: *Subject) ?u8 { return self.peekCharN(0); } fn peekCharN(self: *Subject, n: usize) ?u8 { if (self.pos + n >= self.input.len) { return null; } const c = self.input[self.pos + n]; assert(c > 0); return c; } pub fn spnl(self: *Subject) void { self.skipSpaces(); if (self.skipLineEnd()) self.skipSpaces(); } fn findSpecialChar(self: *Subject) usize { var n = self.pos; while (n < self.input.len) : (n += 1) { if (self.special_chars[self.input[n]]) return n; } return n; } fn handleNewLine(self: *Subject) !*nodes.AstNode { const nlpos = self.pos; if (self.input[self.pos] == '\r') self.pos += 1; if (self.input[self.pos] == '\n') self.pos += 1; self.skipSpaces(); const line_break = nlpos > 1 and self.input[nlpos - 1] == ' ' and self.input[nlpos - 2] == ' '; return self.makeInline(if (line_break) .LineBreak else .SoftBreak); } fn takeWhile(self: *Subject, c: u8) usize { const start_pos = self.pos; while (self.peekChar() == c) { self.pos += 1; } return self.pos - start_pos; } fn scanToClosingBacktick(self: *Subject, openticklength: usize) ?usize { if (openticklength > MAX_BACKTICKS) { return null; } if (self.scanned_for_backticks and self.backticks[openticklength] <= self.pos) { return null; } while (true) { var peeked = self.peekChar(); while (peeked != null and peeked.? != '`') { self.pos += 1; peeked = self.peekChar(); } if (self.pos >= self.input.len) { self.scanned_for_backticks = true; return null; } const numticks = self.takeWhile('`'); if (numticks <= MAX_BACKTICKS) { self.backticks[numticks] = self.pos - numticks; } if (numticks == openticklength) { return self.pos; } } } fn handleBackticks(self: *Subject) !*nodes.AstNode { const openticks = self.takeWhile('`'); const startpos = self.pos; const endpos = self.scanToClosingBacktick(openticks); if (endpos) |end| { const buf = self.input[startpos .. end - openticks]; const code = try strings.normalizeCode(self.allocator, buf); return try self.makeInline(.{ .Code = code }); } else { self.pos = startpos; const contents = try self.allocator.alloc(u8, openticks); @memset(contents, '`'); return try self.makeInline(.{ .Text = contents }); } } pub fn skipSpaces(self: *Subject) void { while (self.peekChar()) |c| { if (!(c == ' ' or c == '\t')) break; self.pos += 1; } } fn handleBackslash(self: *Subject) !*nodes.AstNode { self.pos += 1; if (strings.isPunct(self.peekChar() orelse 0)) { self.pos += 1; const contents = try self.allocator.dupe(u8, self.input[self.pos - 1 .. self.pos]); return try self.makeInline(.{ .Text = contents }); } else if (!self.eof() and self.skipLineEnd()) { return try self.makeInline(.LineBreak); } else { return try self.makeInline(.{ .Text = try self.allocator.dupe(u8, "\\") }); } } pub fn skipLineEnd(self: *Subject) bool { const old_pos = self.pos; if (self.peekChar() orelse 0 == '\r') self.pos += 1; if (self.peekChar() orelse 0 == '\n') self.pos += 1; return self.pos > old_pos or self.eof(); } fn handleEntity(self: *Subject) !*nodes.AstNode { self.pos += 1; var out = std.ArrayList(u8).init(self.allocator); if (try strings.unescapeInto(self.input[self.pos..], &out)) |len| { self.pos += len; return try self.makeInline(.{ .Text = try out.toOwnedSlice() }); } try out.append('&'); return try self.makeInline(.{ .Text = try out.toOwnedSlice() }); } fn handlePointyBrace(self: *Subject) !*nodes.AstNode { self.pos += 1; if (try scanners.autolinkUri(self.input[self.pos..])) |match_len| { const inl = try self.makeAutolink(self.input[self.pos .. self.pos + match_len - 1], .URI); self.pos += match_len; return inl; } if (try scanners.autolinkEmail(self.input[self.pos..])) |match_len| { const inl = try self.makeAutolink(self.input[self.pos .. self.pos + match_len - 1], .Email); self.pos += match_len; return inl; } if (try scanners.htmlTag(self.input[self.pos..])) |match_len| { const contents = self.input[self.pos - 1 .. self.pos + match_len]; const inl = try self.makeInline(.{ .HtmlInline = try self.allocator.dupe(u8, contents) }); self.pos += match_len; return inl; } return try self.makeInline(.{ .Text = try self.allocator.dupe(u8, "<") }); } fn handleDelim(self: *Subject, c: u8) !*nodes.AstNode { const scan = try self.scanDelims(c); const contents = if (c == '\'' and self.options.parse.smart) "’" else if (c == '"' and self.options.parse.smart and scan.can_close) "”" else if (c == '"' and self.options.parse.smart and !scan.can_close) "“" else self.input[self.pos - scan.num_delims .. self.pos]; const inl = try self.makeInline(.{ .Text = try self.allocator.dupe(u8, contents) }); if ((scan.can_open or scan.can_close) and (!(c == '\'' or c == '"') or self.options.parse.smart)) { try self.pushDelimiter(c, scan.can_open, scan.can_close, inl); } return inl; } fn handleHyphen(self: *Subject) !*nodes.AstNode { self.pos += 1; var num_hyphens: usize = 1; if (!self.options.parse.smart or (self.peekChar() orelse 0) != '-') { return try self.makeInline(.{ .Text = try self.allocator.dupe(u8, "-") }); } while (self.options.parse.smart and (self.peekChar() orelse 0) == '-') { self.pos += 1; num_hyphens += 1; } var ens_ems = if (num_hyphens % 3 == 0) [2]usize{ 0, num_hyphens / 3 } else if (num_hyphens % 2 == 0) [2]usize{ num_hyphens / 2, 0 } else if (num_hyphens % 3 == 2) [2]usize{ 1, (num_hyphens - 2) / 3 } else [2]usize{ 2, (num_hyphens - 4) / 3 }; var text = std.ArrayList(u8).init(self.allocator); while (ens_ems[1] > 0) : (ens_ems[1] -= 1) try text.appendSlice("—"); while (ens_ems[0] > 0) : (ens_ems[0] -= 1) try text.appendSlice("–"); return try self.makeInline(.{ .Text = try text.toOwnedSlice() }); } fn handlePeriod(self: *Subject) !*nodes.AstNode { self.pos += 1; if (self.options.parse.smart and (self.peekChar() orelse 0) == @as(u8, '.')) { self.pos += 1; if (self.peekChar() == @as(u8, '.')) { self.pos += 1; return try self.makeInline(.{ .Text = try self.allocator.dupe(u8, "…") }); } return try self.makeInline(.{ .Text = try self.allocator.dupe(u8, "..") }); } return try self.makeInline(.{ .Text = try self.allocator.dupe(u8, ".") }); } const ScanResult = struct { num_delims: usize, can_open: bool, can_close: bool, }; fn scanDelims(self: *Subject, c: u8) !ScanResult { var before_char: u21 = '\n'; if (self.pos > 0) { var before_char_pos = self.pos - 1; while (before_char_pos > 0 and (self.input[before_char_pos] >> 6 == 2 or self.skip_chars[self.input[before_char_pos]])) { before_char_pos -= 1; } var utf8 = std.unicode.Utf8View.initUnchecked(self.input[before_char_pos..self.pos]).iterator(); if (utf8.nextCodepoint()) |codepoint| { if (codepoint >= 256 or !self.skip_chars[codepoint]) { before_char = codepoint; } } } var num_delims: usize = 0; if (c == '\'' or c == '"') { num_delims += 1; self.pos += 1; } else while (self.peekChar() == c) { num_delims += 1; self.pos += 1; } var after_char: u21 = '\n'; if (!self.eof()) { var after_char_pos = self.pos; while (after_char_pos < self.input.len - 1 and self.skip_chars[self.input[after_char_pos]]) { after_char_pos += 1; } var utf8 = std.unicode.Utf8View.initUnchecked(self.input[after_char_pos..]).iterator(); if (utf8.nextCodepoint()) |codepoint| { if (codepoint >= 256 or !self.skip_chars[codepoint]) { after_char = codepoint; } } } const left_flanking = num_delims > 0 and !zunicode.isSpace(after_char) and !(zunicode.isPunct(after_char) and !zunicode.isSpace(before_char) and !zunicode.isPunct(before_char)); const right_flanking = num_delims > 0 and !zunicode.isSpace(before_char) and !(zunicode.isPunct(before_char) and !zunicode.isSpace(after_char) and !zunicode.isPunct(after_char)); if (c == '_') { return ScanResult{ .num_delims = num_delims, .can_open = left_flanking and (!right_flanking or zunicode.isPunct(before_char)), .can_close = right_flanking and (!left_flanking or zunicode.isPunct(after_char)), }; } else if (c == '\'' or c == '"') { return ScanResult{ .num_delims = num_delims, .can_open = left_flanking and !right_flanking and before_char != ']' and before_char != ')', .can_close = right_flanking, }; } else { return ScanResult{ .num_delims = num_delims, .can_open = left_flanking, .can_close = right_flanking, }; } } fn pushDelimiter(self: *Subject, c: u8, can_open: bool, can_close: bool, inl: *nodes.AstNode) !void { const delimiter = try self.allocator.create(Delimiter); delimiter.* = .{ .inl = inl, .length = inl.data.value.text().?.len, .delim_char = c, .can_open = can_open, .can_close = can_close, .prev = self.last_delimiter, .next = null, }; if (delimiter.prev) |prev| { prev.next = delimiter; } self.last_delimiter = delimiter; } fn insertEmph(self: *Subject, opener: *Delimiter, closer: *Delimiter) !?*Delimiter { const opener_char = opener.inl.data.value.text().?[0]; var opener_num_chars = opener.inl.data.value.text().?.len; var closer_num_chars = closer.inl.data.value.text().?.len; const use_delims: u8 = if (closer_num_chars >= 2 and opener_num_chars >= 2) 2 else 1; opener_num_chars -= use_delims; closer_num_chars -= use_delims; if (self.options.extensions.strikethrough and opener_char == '~' and (opener_num_chars != closer_num_chars or opener_num_chars > 0)) return null; const opener_text = opener.inl.data.value.text_mut().?; opener_text.* = try self.allocator.realloc(opener_text.*, opener_num_chars); const closer_text = closer.inl.data.value.text_mut().?; closer_text.* = try self.allocator.realloc(closer_text.*, closer_num_chars); var delim = closer.prev; while (delim != null and delim != opener) { const prev = delim.?.prev; self.removeDelimiter(delim.?); delim = prev; } var value: nodes.NodeValue = undefined; if (self.options.extensions.strikethrough and opener_char == '~') { value = .Strikethrough; } else if (use_delims == 1) { value = .Emph; } else { value = .Strong; } var emph = try self.makeInline(value); var tmp = opener.inl.next.?; while (tmp != closer.inl) { const next = tmp.next; emph.append(tmp); if (next) |n| { tmp = n; } else { break; } } opener.inl.insertAfter(emph); if (opener_num_chars == 0) { opener.inl.detachDeinit(); self.removeDelimiter(opener); } if (closer_num_chars == 0) { closer.inl.detachDeinit(); const next = closer.next; self.removeDelimiter(closer); return next; } else { return closer; } } fn pushBracket(self: *Subject, kind: BracketKind, inl_text: *nodes.AstNode) !void { const len = self.brackets.items.len; if (len > 0) self.brackets.items[len - 1].bracket_after = true; try self.brackets.append(.{ .previous_delimiter = self.last_delimiter, .inl_text = inl_text, .position = self.pos, .kind = kind, .active = true, .bracket_after = false, }); } fn handleCloseBracket(self: *Subject) !?*nodes.AstNode { self.pos += 1; const initial_pos = self.pos; const brackets_len = self.brackets.items.len; if (brackets_len == 0) { return try self.makeInline(.{ .Text = try self.allocator.dupe(u8, "]") }); } if (!self.brackets.items[brackets_len - 1].active) { _ = self.brackets.pop(); return try self.makeInline(.{ .Text = try self.allocator.dupe(u8, "]") }); } const kind = self.brackets.items[brackets_len - 1].kind; const after_link_text_pos = self.pos; var sps: usize = 0; var url: []const u8 = ""; var n: usize = 0; if (self.peekChar() orelse 0 == '(' and blk: { sps = (try scanners.spacechars(self.input[self.pos + 1 ..])) orelse 0; break :blk manualScanLinkUrl(self.input[self.pos + 1 + sps ..], &url, &n); }) { const starturl = self.pos + 1 + sps; const endurl = starturl + n; const starttitle = endurl + ((try scanners.spacechars(self.input[endurl..])) orelse 0); const endtitle = if (starttitle == endurl) starttitle else starttitle + ((try scanners.linkTitle(self.input[starttitle..])) orelse 0); const endall = endtitle + ((try scanners.spacechars(self.input[endtitle..])) orelse 0); if (endall < self.input.len and self.input[endall] == ')') { self.pos = endall + 1; const cleanUrl = try strings.cleanUrl(self.allocator, url); const cleanTitle = try strings.cleanTitle(self.allocator, self.input[starttitle..endtitle]); try self.closeBracketMatch(kind, cleanUrl, cleanTitle); return null; } else { self.pos = after_link_text_pos; } } var label: ?[]const u8 = null; if (self.linkLabel()) |lab| { label = lab; } if (label == null) { self.pos = initial_pos; } if ((label == null or label.?.len == 0) and !self.brackets.items[brackets_len - 1].bracket_after) { label = self.input[self.brackets.items[brackets_len - 1].position .. initial_pos - 1]; } const normalized = try strings.normalizeLabel(self.allocator, label orelse ""); defer self.allocator.free(normalized); const maybe_ref = if (label != null) self.refmap.get(normalized) else null; if (maybe_ref) |ref| { try self.closeBracketMatch(kind, try self.allocator.dupe(u8, ref.url), try self.allocator.dupe(u8, ref.title)); return null; } _ = self.brackets.pop(); self.pos = initial_pos; return try self.makeInline(.{ .Text = try self.allocator.dupe(u8, "]") }); } pub fn linkLabel(self: *Subject) ?[]const u8 { const startpos = self.pos; if (self.peekChar() orelse 0 != '[') { return null; } self.pos += 1; var length: usize = 0; var c: u8 = 0; while (true) { c = self.peekChar() orelse 0; if (c == '[' or c == ']') { break; } if (c == '\\') { self.pos += 1; length += 1; if (strings.isPunct(self.peekChar() orelse 0)) { self.pos += 1; length += 1; } } else { self.pos += 1; length += 1; } if (length > MAX_LINK_LABEL_LENGTH) { self.pos = startpos; return null; } } if (c == ']') { const raw_label = strings.trim(self.input[startpos + 1 .. self.pos]); self.pos += 1; return raw_label; } else { self.pos = startpos; return null; } } /// Takes ownership of `url' and `title'. fn closeBracketMatch(self: *Subject, kind: BracketKind, url: []u8, title: []u8) !void { const nl = nodes.NodeLink{ .url = url, .title = title }; var inl = try self.makeInline(switch (kind) { .Link => .{ .Link = nl }, .Image => .{ .Image = nl }, }); var brackets_len = self.brackets.items.len; self.brackets.items[brackets_len - 1].inl_text.insertBefore(inl); var tmpch = self.brackets.items[brackets_len - 1].inl_text.next; while (tmpch) |tmp| { tmpch = tmp.next; inl.append(tmp); } self.brackets.items[brackets_len - 1].inl_text.detachDeinit(); const previous_delimiter = self.brackets.items[brackets_len - 1].previous_delimiter; try self.processEmphasis(previous_delimiter); _ = self.brackets.pop(); brackets_len -= 1; if (kind == .Link) { var i: i32 = @intCast(brackets_len); i -= 1; while (i >= 0) : (i -= 1) { if (self.brackets.items[@intCast(i)].kind == .Link) { if (!self.brackets.items[@intCast(i)].active) { break; } else { self.brackets.items[@intCast(i)].active = false; } } } } } pub fn manualScanLinkUrl(input: []const u8, url: *[]const u8, n: *usize) bool { const len = input.len; var i: usize = 0; if (i < len and input[i] == '<') { i += 1; while (i < len) { switch (input[i]) { '>' => { i += 1; break; }, '\\' => { i += 2; }, '\n', '<' => { return false; }, else => { i += 1; }, } } } else { return manualScanLinkUrl2(input, url, n); } if (i >= len) { return false; } else { url.* = input[1 .. i - 1]; n.* = i; return true; } } fn manualScanLinkUrl2(input: []const u8, url: *[]const u8, n: *usize) bool { const len = input.len; var i: usize = 0; var nb_p: usize = 0; while (i < len) { if (input[i] == '\\' and i + 1 < len and strings.isPunct(input[i + 1])) { i += 2; } else if (input[i] == '(') { nb_p += 1; i += 1; if (nb_p > 32) return false; } else if (input[i] == ')') { if (nb_p == 0) break; nb_p -= 1; i += 1; } else if (ascii.isWhitespace(input[i])) { if (i == 0) return false; break; } else { i += 1; } } if (i >= len) { return false; } else { url.* = input[0..i]; n.* = i; return true; } } }; const Delimiter = struct { inl: *nodes.AstNode, length: usize, delim_char: u8, can_open: bool, can_close: bool, prev: ?*Delimiter, next: ?*Delimiter, }; const Bracket = struct { previous_delimiter: ?*Delimiter, inl_text: *nodes.AstNode, position: usize, kind: BracketKind, active: bool, bracket_after: bool, }; const BracketKind = enum { Link, Image };
0
repos/koino
repos/koino/src/main.zig
const std = @import("std"); const builtin = @import("builtin"); const assert = std.debug.assert; const clap = @import("clap"); const koino = @import("./koino.zig"); const Parser = koino.parser.Parser; const Options = koino.Options; const nodes = koino.nodes; const html = koino.html; pub fn main() !void { // In debug, use the GeneralPurposeAllocator as the Parser internal allocator // to shake out memory issues. There should be no leaks in normal operation. // In release, use an arena and reset it at the end. var gpa: std.heap.GeneralPurposeAllocator(.{}) = undefined; var arena: std.heap.ArenaAllocator = undefined; var allocator: std.mem.Allocator = undefined; if (builtin.mode == .Debug) { gpa = std.heap.GeneralPurposeAllocator(.{}){}; allocator = gpa.allocator(); } else { arena = std.heap.ArenaAllocator.init(std.heap.page_allocator); allocator = arena.allocator(); } defer { if (builtin.mode == .Debug) { _ = gpa.deinit(); } else { arena.deinit(); } } var options: Options = undefined; var args = try parseArgs(&options, allocator); var parser = try Parser.init(allocator, options); if (args.positionals.len > 0) { for (args.positionals) |pos| { const markdown = try std.fs.cwd().readFileAlloc(allocator, pos, 1024 * 1024 * 1024); defer allocator.free(markdown); try parser.feed(markdown); } } else { const markdown = try std.io.getStdIn().reader().readAllAlloc(allocator, 1024 * 1024 * 1024); defer allocator.free(markdown); try parser.feed(markdown); } var doc = try parser.finish(); const output = blk: { var arr = std.ArrayList(u8).init(allocator); errdefer arr.deinit(); try html.print(arr.writer(), allocator, options, doc); break :blk try arr.toOwnedSlice(); }; defer allocator.free(output); if (builtin.mode == .Debug) { args.deinit(); parser.deinit(); doc.deinit(); } try std.io.getStdOut().writer().writeAll(output); } const params = clap.parseParamsComptime("-h, --help Display this help and exit\n" ++ "-u, --unsafe Render raw HTML and dangerous URLs\n" ++ "-e, --extension <str>... Enable an extension (" ++ extensionsFriendly ++ ")\n" ++ " --header-anchors Generate anchors for headers\n" ++ " --smart Use smart punctuation\n" ++ "<str>"); const ClapResult = clap.Result(clap.Help, &params, clap.parsers.default); fn parseArgs(options: *Options, allocator: std.mem.Allocator) !ClapResult { var stderr = std.io.getStdErr().writer(); const res = try clap.parse(clap.Help, &params, clap.parsers.default, .{ .allocator = allocator }); if (res.args.help != 0) { try stderr.writeAll("Usage: koino "); try clap.usage(stderr, clap.Help, &params); try stderr.writeAll("\n\nOptions:\n"); try clap.help(stderr, clap.Help, &params, .{}); std.process.exit(0); } options.* = .{}; if (res.args.unsafe != 0) options.render.unsafe = true; if (res.args.smart != 0) options.parse.smart = true; if (res.args.@"header-anchors" != 0) options.render.header_anchors = true; for (res.args.extension) |extension| try enableExtension(extension, options); return res; } const extensions = blk: { var exts: []const []const u8 = &[_][]const u8{}; for (@typeInfo(Options.Extensions).Struct.fields) |field| { exts = exts ++ [_][]const u8{field.name}; } break :blk exts; }; const extensionsFriendly = blk: { var extsFriendly: []const u8 = &[_]u8{}; var first = true; for (extensions) |extension| { if (first) { first = false; } else { extsFriendly = extsFriendly ++ ","; } extsFriendly = extsFriendly ++ extension; } break :blk extsFriendly; }; fn enableExtension(extension: []const u8, options: *Options) !void { inline for (extensions) |valid_extension| { if (std.mem.eql(u8, valid_extension, extension)) { @field(options.extensions, valid_extension) = true; return; } } try std.fmt.format(std.io.getStdErr().writer(), "unknown extension: {s}\n", .{extension}); std.process.exit(1); } /// Performs work using internalAllocator, and writes the result to a Writer. fn markdownToHtmlInternal(writer: anytype, internalAllocator: std.mem.Allocator, options: Options, markdown: []const u8) !void { var doc = try parse(internalAllocator, options, markdown); defer doc.deinit(); try html.print(writer, internalAllocator, options, doc); } /// Parses Markdown into an AST. Use `deinit()' on the returned document to free memory. pub fn parse(internalAllocator: std.mem.Allocator, options: Options, markdown: []const u8) !*nodes.AstNode { var p = try Parser.init(internalAllocator, options); defer p.deinit(); try p.feed(markdown); return try p.finish(); } /// Performs work with an ArenaAllocator backed by the page allocator, and allocates the result HTML with resultAllocator. pub fn markdownToHtml(resultAllocator: std.mem.Allocator, options: Options, markdown: []const u8) ![]u8 { var result = std.ArrayList(u8).init(resultAllocator); errdefer result.deinit(); try markdownToHtmlWriter(result.writer(), options, markdown); return result.toOwnedSlice(); } /// Performs work with an ArenaAllocator backed by the page allocator, and writes the result to a Writer. pub fn markdownToHtmlWriter(writer: anytype, options: Options, markdown: []const u8) !void { var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator); defer arena.deinit(); try markdownToHtmlInternal(writer, arena.allocator(), options, markdown); } /// Uses a GeneralPurposeAllocator for scratch work instead of an ArenaAllocator to aid in locating memory leaks. /// Result HTML is allocated by std.testing.allocator. pub fn testMarkdownToHtml(options: Options, markdown: []const u8) ![]u8 { var gpa = std.heap.GeneralPurposeAllocator(.{}){}; defer _ = gpa.deinit(); var doc = try parse(gpa.allocator(), options, markdown); defer doc.deinit(); var result = std.ArrayList(u8).init(std.testing.allocator); errdefer result.deinit(); try html.print(result.writer(), gpa.allocator(), options, doc); return result.toOwnedSlice(); } test { std.testing.refAllDecls(@This()); }
0
repos/koino
repos/koino/src/parser.zig
const std = @import("std"); const assert = std.debug.assert; const ascii = std.ascii; const main = @import("main.zig"); const strings = @import("strings.zig"); const nodes = @import("nodes.zig"); const scanners = @import("scanners.zig"); const inlines = @import("inlines.zig"); const Options = @import("options.zig").Options; const table = @import("table.zig"); const AutolinkProcessor = @import("autolink.zig").AutolinkProcessor; const TAB_STOP = 4; const CODE_INDENT = 4; pub const Reference = struct { url: []u8, title: []u8, }; pub const Parser = struct { allocator: std.mem.Allocator, refmap: std.StringHashMap(Reference), hack_refmapKeys: std.ArrayList([]u8), root: *nodes.AstNode, current: *nodes.AstNode, options: Options, line_number: u32 = 0, offset: usize = 0, column: usize = 0, first_nonspace: usize = 0, first_nonspace_column: usize = 0, indent: usize = 0, blank: bool = false, partially_consumed_tab: bool = false, last_line_length: usize = 0, special_chars: [256]bool = [_]bool{false} ** 256, skip_chars: [256]bool = [_]bool{false} ** 256, pub fn init(allocator: std.mem.Allocator, options: Options) !Parser { const root = try nodes.AstNode.create(allocator, .{ .value = .Document, .content = std.ArrayList(u8).init(allocator), }); var parser = Parser{ .allocator = allocator, .refmap = std.StringHashMap(Reference).init(allocator), .hack_refmapKeys = std.ArrayList([]u8).init(allocator), .root = root, .current = root, .options = options, }; inlines.Subject.setCharsForOptions(&options, &parser.special_chars, &parser.skip_chars); return parser; } pub fn deinit(self: *Parser) void { var it = self.refmap.iterator(); while (it.next()) |entry| { self.allocator.free(entry.key_ptr.*); self.allocator.free(entry.value_ptr.url); self.allocator.free(entry.value_ptr.title); } self.refmap.deinit(); } pub fn feed(self: *Parser, s: []const u8) !void { var i: usize = 0; const sz = s.len; var linebuf = std.ArrayList(u8).init(self.allocator); defer linebuf.deinit(); while (i < sz) { var process = true; var eol = i; while (eol < sz) { if (strings.isLineEndChar(s[eol])) break; if (s[eol] == 0) { process = false; break; } eol += 1; } if (process) { if (linebuf.items.len != 0) { try linebuf.appendSlice(s[i..eol]); try self.processLine(linebuf.items); linebuf.items.len = 0; } else if (sz > eol and s[eol] == '\n') { try self.processLine(s[i .. eol + 1]); } else { try self.processLine(s[i..eol]); } i = eol; if (i < sz and s[i] == '\r') i += 1; if (i < sz and s[i] == '\n') i += 1; } else { assert(eol < sz and s[eol] == 0); try linebuf.appendSlice(s[i..eol]); try linebuf.appendSlice("\u{fffd}"); i = eol + 1; } } } pub fn finish(self: *Parser) !*nodes.AstNode { try self.finalizeDocument(); try self.postprocessTextNodes(); return self.root; } fn findFirstNonspace(self: *Parser, line: []const u8) void { self.first_nonspace = self.offset; self.first_nonspace_column = self.column; var chars_to_tab = TAB_STOP - (self.column % TAB_STOP); while (true) { if (self.first_nonspace >= line.len) { break; } switch (line[self.first_nonspace]) { ' ' => { self.first_nonspace += 1; self.first_nonspace_column += 1; chars_to_tab -= 1; if (chars_to_tab == 0) { chars_to_tab = TAB_STOP; } }, 9 => { self.first_nonspace += 1; self.first_nonspace_column += chars_to_tab; chars_to_tab = TAB_STOP; }, else => break, } } self.indent = self.first_nonspace_column - self.column; self.blank = self.first_nonspace < line.len and strings.isLineEndChar(line[self.first_nonspace]); } fn processLine(self: *Parser, input: []const u8) !void { var line: []const u8 = undefined; var new_line: ?[]u8 = null; if (input.len == 0 or !strings.isLineEndChar(input[input.len - 1])) { new_line = try self.allocator.alloc(u8, input.len + 1); std.mem.copyForwards(u8, new_line.?, input); new_line.?[input.len] = '\n'; line = new_line.?; } else { line = input; } self.offset = 0; self.column = 0; self.blank = false; self.partially_consumed_tab = false; if (self.line_number == 0 and line.len >= 3 and std.mem.eql(u8, line[0..3], "\u{feff}")) { self.offset += 3; } self.line_number += 1; const result = try self.checkOpenBlocks(line); if (result.container) |last_matched_container| { const current = self.current; const container = try self.openNewBlocks(last_matched_container, line, result.all_matched); if (current == self.current) { try self.addTextToContainer(container, last_matched_container, line); } } self.last_line_length = line.len; if (self.last_line_length > 0 and line[self.last_line_length - 1] == '\n') { self.last_line_length -= 1; } if (self.last_line_length > 0 and line[self.last_line_length - 1] == '\r') { self.last_line_length -= 1; } if (new_line) |nl| self.allocator.free(nl); } const CheckOpenBlocksResult = struct { all_matched: bool = false, container: ?*nodes.AstNode, }; fn checkOpenBlocks(self: *Parser, line: []const u8) !CheckOpenBlocksResult { const result = try self.checkOpenBlocksInner(self.root, line); if (result.container) |container| { return CheckOpenBlocksResult{ .all_matched = result.all_matched, .container = if (result.all_matched) container else container.parent.?, }; } return result; } fn checkOpenBlocksInner(self: *Parser, start_container: *nodes.AstNode, line: []const u8) !CheckOpenBlocksResult { var container = start_container; while (container.lastChildIsOpen()) { container = container.last_child.?; self.findFirstNonspace(line); switch (container.data.value) { .BlockQuote => { if (!self.parseBlockQuotePrefix(line)) { return CheckOpenBlocksResult{ .container = container }; } }, .Item => |*nl| { if (!self.parseNodeItemPrefix(line, container, nl)) { return CheckOpenBlocksResult{ .container = container }; } }, .CodeBlock => { switch (try self.parseCodeBlockPrefix(line, container)) { .DoNotContinue => { return CheckOpenBlocksResult{ .container = null }; }, .NoMatch => { return CheckOpenBlocksResult{ .container = container }; }, .Match => {}, } }, .HtmlBlock => |nhb| { if (!self.parseHtmlBlockPrefix(nhb.block_type)) { return CheckOpenBlocksResult{ .container = container }; } }, .Paragraph => { if (self.blank) { return CheckOpenBlocksResult{ .container = container }; } }, .Table => { if (!(try table.matches(self.allocator, line[self.first_nonspace..]))) { return CheckOpenBlocksResult{ .container = container }; } }, .Heading, .TableRow, .TableCell => { return CheckOpenBlocksResult{ .container = container }; }, .Document, .List, .ThematicBreak, .Text, .SoftBreak, .LineBreak, .Code, .HtmlInline, .Emph, .Strong, .Strikethrough, .Link, .Image => {}, } } return CheckOpenBlocksResult{ .all_matched = true, .container = container, }; } fn openNewBlocks(self: *Parser, input_container: *nodes.AstNode, line: []const u8, all_matched: bool) !*nodes.AstNode { var container = input_container; var maybe_lazy = switch (self.current.data.value) { .Paragraph => true, else => false, }; var matched: usize = undefined; var nl: nodes.NodeList = undefined; var sc: scanners.SetextChar = undefined; while (switch (container.data.value) { .CodeBlock, .HtmlBlock => false, else => true, }) { self.findFirstNonspace(line); const indented = self.indent >= CODE_INDENT; if (!indented and line[self.first_nonspace] == '>') { const offset = self.first_nonspace + 1 - self.offset; self.advanceOffset(line, offset, false); if (strings.isSpaceOrTab(line[self.offset])) { self.advanceOffset(line, 1, true); } container = try self.addChild(container, .BlockQuote); } else if (!indented and try scanners.unwrap(scanners.atxHeadingStart(line[self.first_nonspace..]), &matched)) { const heading_startpos = self.first_nonspace; const offset = self.offset; self.advanceOffset(line, heading_startpos + matched - offset, false); container = try self.addChild(container, .{ .Heading = .{} }); var hashpos = std.mem.indexOfScalar(u8, line[self.first_nonspace..], '#').? + self.first_nonspace; var level: u8 = 0; while (line[hashpos] == '#') { if (level < 6) level += 1; hashpos += 1; } container.data.value = .{ .Heading = .{ .level = level, .setext = false } }; } else if (!indented and try scanners.unwrap(scanners.openCodeFence(line[self.first_nonspace..]), &matched)) { const first_nonspace = self.first_nonspace; const offset = self.offset; const ncb = nodes.NodeCodeBlock{ .fenced = true, .fence_char = line[first_nonspace], .fence_length = matched, .fence_offset = first_nonspace - offset, .info = null, .literal = std.ArrayList(u8).init(self.allocator), }; container = try self.addChild(container, .{ .CodeBlock = ncb }); self.advanceOffset(line, first_nonspace + matched - offset, false); } else if (!indented and ((try scanners.htmlBlockStart(line[self.first_nonspace..], &matched)) or switch (container.data.value) { .Paragraph => false, else => try scanners.htmlBlockStart7(line[self.first_nonspace..], &matched), })) { const nhb = nodes.NodeHtmlBlock{ .block_type = @truncate(matched), .literal = std.ArrayList(u8).init(self.allocator), }; container = try self.addChild(container, .{ .HtmlBlock = nhb }); } else if (!indented and switch (container.data.value) { .Paragraph => try scanners.setextHeadingLine(line[self.first_nonspace..], &sc), else => false, }) { const has_content = try self.resolveReferenceLinkDefinitions(&container.data.content); if (has_content) { container.data.value = .{ .Heading = .{ .level = switch (sc) { .Equals => 1, .Hyphen => 2, }, .setext = true, }, }; const adv = line.len - 1 - self.offset; self.advanceOffset(line, adv, false); } } else if (!indented and !(switch (container.data.value) { .Paragraph => !all_matched, else => false, }) and try scanners.unwrap(scanners.thematicBreak(line[self.first_nonspace..]), &matched)) { container = try self.addChild(container, .ThematicBreak); const adv = line.len - 1 - self.offset; self.advanceOffset(line, adv, false); } else if ((!indented or switch (container.data.value) { .List => true, else => false, }) and self.indent < 4 and parseListMarker(line, self.first_nonspace, switch (container.data.value) { .Paragraph => true, else => false, }, &matched, &nl)) { const offset = self.first_nonspace + matched - self.offset; self.advanceOffset(line, offset, false); const save_partially_consumed_tab = self.partially_consumed_tab; const save_offset = self.offset; const save_column = self.column; while (self.column - save_column <= 5 and strings.isSpaceOrTab(line[self.offset])) { self.advanceOffset(line, 1, true); } const i = self.column - save_column; if (i >= 5 or i < 1 or strings.isLineEndChar(line[self.offset])) { nl.padding = matched + 1; self.partially_consumed_tab = save_partially_consumed_tab; self.offset = save_offset; self.column = save_column; if (i > 0) self.advanceOffset(line, 1, true); } else { nl.padding = matched + i; } nl.marker_offset = self.indent; if (switch (container.data.value) { .List => |*mnl| !listsMatch(&nl, mnl), else => true, }) { container = try self.addChild(container, .{ .List = nl }); } container = try self.addChild(container, .{ .Item = nl }); } else if (indented and !maybe_lazy and !self.blank) { self.advanceOffset(line, CODE_INDENT, true); container = try self.addChild(container, .{ .CodeBlock = .{ .fenced = false, .fence_char = 0, .fence_length = 0, .fence_offset = 0, .info = null, .literal = std.ArrayList(u8).init(self.allocator), }, }); } else { var replace: bool = undefined; const new_container = if (!indented and self.options.extensions.table) try table.tryOpeningBlock(self, container, line, &replace) else null; if (new_container) |new| { if (replace) { container.insertAfter(new); container.detachDeinit(); container = new; } else { container = new; } } else { break; } } if (container.data.value.acceptsLines()) { break; } maybe_lazy = false; } return container; } pub fn addChild(self: *Parser, input_parent: *nodes.AstNode, value: nodes.NodeValue) !*nodes.AstNode { var parent = input_parent; while (!parent.data.value.canContainType(value)) { parent = (try self.finalize(parent)).?; } const node = try nodes.AstNode.create(self.allocator, .{ .value = value, .start_line = self.line_number, .content = std.ArrayList(u8).init(self.allocator), }); parent.append(node); return node; } fn addTextToContainer(self: *Parser, input_container: *nodes.AstNode, last_matched_container: *nodes.AstNode, line: []const u8) !void { var container = input_container; self.findFirstNonspace(line); if (self.blank) { if (container.last_child) |last_child| { last_child.data.last_line_blank = true; } } container.data.last_line_blank = self.blank and switch (container.data.value) { .BlockQuote, .Heading, .ThematicBreak => false, .CodeBlock => |ncb| !ncb.fenced, .Item => container.first_child != null or container.data.start_line != self.line_number, else => true, }; var tmp = container; while (tmp.parent) |parent| { parent.data.last_line_blank = false; tmp = parent; } if (self.current != last_matched_container and container == last_matched_container and !self.blank and self.current.data.value == .Paragraph) { try self.addLine(self.current, line); return; } while (self.current != last_matched_container) { self.current = (try self.finalize(self.current)).?; } switch (container.data.value) { .CodeBlock => { try self.addLine(container, line); }, .HtmlBlock => |nhb| { try self.addLine(container, line); const matches_end_condition = switch (nhb.block_type) { 1 => scanners.htmlBlockEnd1(line[self.first_nonspace..]), 2 => scanners.htmlBlockEnd2(line[self.first_nonspace..]), 3 => scanners.htmlBlockEnd3(line[self.first_nonspace..]), 4 => scanners.htmlBlockEnd4(line[self.first_nonspace..]), 5 => scanners.htmlBlockEnd5(line[self.first_nonspace..]), else => false, }; if (matches_end_condition) { container = (try self.finalize(container)).?; } }, else => { if (self.blank) { // do nothing } else if (container.data.value.acceptsLines()) { var consider_line: []const u8 = line; switch (container.data.value) { .Heading => |nh| if (!nh.setext) { consider_line = strings.chopTrailingHashtags(line); }, else => {}, } const count = self.first_nonspace - self.offset; if (self.first_nonspace <= consider_line.len) { self.advanceOffset(consider_line, count, false); try self.addLine(container, consider_line); } } else { container = try self.addChild(container, .Paragraph); const count = self.first_nonspace - self.offset; self.advanceOffset(line, count, false); try self.addLine(container, line); } }, } self.current = container; } fn addLine(self: *Parser, node: *nodes.AstNode, line: []const u8) !void { assert(node.data.open); if (self.partially_consumed_tab) { self.offset += 1; var chars_to_tab = TAB_STOP - (self.column % TAB_STOP); while (chars_to_tab > 0) : (chars_to_tab -= 1) { try node.data.content.append(' '); } } if (self.offset < line.len) { try node.data.content.appendSlice(line[self.offset..]); } } fn finalizeDocument(self: *Parser) !void { while (self.current != self.root) { self.current = (try self.finalize(self.current)).?; } _ = try self.finalize(self.root); try self.processInlines(); } fn finalize(self: *Parser, node: *nodes.AstNode) !?*nodes.AstNode { assert(node.data.open); node.data.open = false; const parent = node.parent; switch (node.data.value) { .Paragraph => { const has_content = try self.resolveReferenceLinkDefinitions(&node.data.content); if (!has_content) { node.detachDeinit(); } }, .CodeBlock => |*ncb| { if (!ncb.fenced) { strings.removeTrailingBlankLines(&node.data.content); try node.data.content.append('\n'); } else { var pos: usize = 0; while (pos < node.data.content.items.len) : (pos += 1) { if (strings.isLineEndChar(node.data.content.items[pos])) break; } assert(pos < node.data.content.items.len); const info = try strings.cleanUrl(self.allocator, node.data.content.items[0..pos]); if (info.len != 0) { ncb.info = info; } if (node.data.content.items[pos] == '\r') pos += 1; if (node.data.content.items[pos] == '\n') pos += 1; try node.data.content.replaceRange(0, pos, ""); } std.mem.swap(std.ArrayList(u8), &ncb.literal, &node.data.content); }, .HtmlBlock => |*nhb| { std.mem.swap(std.ArrayList(u8), &nhb.literal, &node.data.content); }, .List => |*nl| { nl.tight = true; var it = node.first_child; while (it) |item| { if (item.data.last_line_blank and item.next != null) { nl.tight = false; break; } var subit = item.first_child; while (subit) |subitem| { if (subitem.endsWithBlankLine() and (item.next != null or subitem.next != null)) { nl.tight = false; break; } subit = subitem.next; } if (!nl.tight) { break; } it = item.next; } }, else => {}, } return parent; } fn postprocessTextNodes(self: *Parser) !void { var stack = try std.ArrayList(*nodes.AstNode).initCapacity(self.allocator, 1); defer stack.deinit(); var children = std.ArrayList(*nodes.AstNode).init(self.allocator); defer children.deinit(); try stack.append(self.root); while (stack.popOrNull()) |node| { var nch = node.first_child; while (nch) |n| { var this_bracket = false; while (true) { switch (n.data.value) { .Text => |*root| { var ns = n.next orelse { try self.postprocessTextNode(n, root); break; }; switch (ns.data.value) { .Text => |adj| { const old_len = root.len; root.* = try self.allocator.realloc(root.*, old_len + adj.len); @memcpy(root.*[old_len..], adj); ns.detachDeinit(); }, else => { try self.postprocessTextNode(n, root); break; }, } }, .Link, .Image => { this_bracket = true; break; }, else => break, } } if (!this_bracket) { try children.append(n); } nch = n.next; } while (children.popOrNull()) |child| try stack.append(child); } } fn postprocessTextNode(self: *Parser, node: *nodes.AstNode, text: *[]u8) !void { if (self.options.extensions.autolink) { try AutolinkProcessor.init(self.allocator, text).process(node); } } fn resolveReferenceLinkDefinitions(self: *Parser, content: *std.ArrayList(u8)) !bool { var seeked: usize = 0; var pos: usize = undefined; var seek = content.items; while (seek.len > 0 and seek[0] == '[' and try self.parseReferenceInline(seek, &pos)) { seek = seek[pos..]; seeked += pos; } try content.replaceRange(0, seeked, ""); return !strings.isBlank(content.items); } fn parseReferenceInline(self: *Parser, content: []const u8, pos: *usize) !bool { var subj = inlines.Subject.init(self.allocator, &self.refmap, &self.options, &self.special_chars, &self.skip_chars, content); defer subj.deinit(); const lab = if (subj.linkLabel()) |l| lab: { if (l.len == 0) return false; break :lab l; } else return false; if (subj.peekChar() orelse 0 != ':') return false; subj.pos += 1; subj.spnl(); var url: []const u8 = undefined; var match_len: usize = undefined; if (!inlines.Subject.manualScanLinkUrl(subj.input[subj.pos..], &url, &match_len)) return false; subj.pos += match_len; const beforetitle = subj.pos; subj.spnl(); const title_search: ?usize = if (subj.pos == beforetitle) null else try scanners.linkTitle(subj.input[subj.pos..]); const title = if (title_search) |title_match| title: { const t = subj.input[subj.pos .. subj.pos + title_match]; subj.pos += title_match; break :title try self.allocator.dupe(u8, t); } else title: { subj.pos = beforetitle; break :title &[_]u8{}; }; defer self.allocator.free(title); subj.skipSpaces(); if (!subj.skipLineEnd()) { if (title.len > 0) { subj.pos = beforetitle; subj.skipSpaces(); if (!subj.skipLineEnd()) { return false; } } else { return false; } } const normalized = try strings.normalizeLabel(self.allocator, lab); if (normalized.len > 0) { // refmap takes ownership of `normalized'. const result = try subj.refmap.getOrPut(normalized); if (!result.found_existing) { result.value_ptr.* = Reference{ .url = try strings.cleanUrl(self.allocator, url), .title = try strings.cleanTitle(self.allocator, title), }; } else { self.allocator.free(normalized); } } pos.* = subj.pos; return true; } fn processInlines(self: *Parser) !void { try self.processInlinesNode(self.root); } fn processInlinesNode(self: *Parser, node: *nodes.AstNode) inlines.ParseError!void { var it = node.descendantsIterator(); while (it.next()) |descendant| { if (descendant.data.value.containsInlines()) { try self.parseInlines(descendant); } } } fn parseInlines(self: *Parser, node: *nodes.AstNode) inlines.ParseError!void { const content = strings.rtrim(node.data.content.items); var subj = inlines.Subject.init(self.allocator, &self.refmap, &self.options, &self.special_chars, &self.skip_chars, content); defer subj.deinit(); while (try subj.parseInline(node)) {} try subj.processEmphasis(null); while (subj.popBracket()) {} } pub fn advanceOffset(self: *Parser, line: []const u8, in_count: usize, columns: bool) void { var count = in_count; while (count > 0) { switch (line[self.offset]) { '\t' => { const chars_to_tab = TAB_STOP - (self.column % TAB_STOP); if (columns) { self.partially_consumed_tab = chars_to_tab > count; const chars_to_advance = @min(count, chars_to_tab); self.column += chars_to_advance; self.offset += @as(u8, if (self.partially_consumed_tab) 0 else 1); count -= chars_to_advance; } else { self.partially_consumed_tab = false; self.column += chars_to_tab; self.offset += 1; count -= 1; } }, else => { self.partially_consumed_tab = false; self.offset += 1; self.column += 1; count -= 1; }, } } } fn parseBlockQuotePrefix(self: *Parser, line: []const u8) bool { const indent = self.indent; if (indent <= 3 and line[self.first_nonspace] == '>') { self.advanceOffset(line, indent + 1, true); if (strings.isSpaceOrTab(line[self.offset])) { self.advanceOffset(line, 1, true); } return true; } return false; } fn parseNodeItemPrefix(self: *Parser, line: []const u8, container: *nodes.AstNode, nl: *const nodes.NodeList) bool { if (self.indent >= nl.marker_offset + nl.padding) { self.advanceOffset(line, nl.marker_offset + nl.padding, true); return true; } else if (self.blank and container.first_child != null) { const offset = self.first_nonspace - self.offset; self.advanceOffset(line, offset, false); return true; } return false; } const CodeBlockPrefixParseResult = enum { DoNotContinue, NoMatch, Match, }; fn parseCodeBlockPrefix(self: *Parser, line: []const u8, container: *nodes.AstNode) !CodeBlockPrefixParseResult { const ncb = switch (container.data.value) { .CodeBlock => |i| i, else => unreachable, }; if (!ncb.fenced) { if (self.indent >= CODE_INDENT) { self.advanceOffset(line, CODE_INDENT, true); return .Match; } else if (self.blank) { const offset = self.first_nonspace - self.offset; self.advanceOffset(line, offset, false); return .Match; } return .NoMatch; } const matched = if (self.indent <= 3 and line[self.first_nonspace] == ncb.fence_char) (try scanners.closeCodeFence(line[self.first_nonspace..])) orelse 0 else 0; if (matched >= ncb.fence_length) { self.advanceOffset(line, matched, false); self.current = (try self.finalize(container)).?; return .DoNotContinue; } var i = ncb.fence_offset; while (i > 0 and strings.isSpaceOrTab(line[self.offset])) : (i -= 1) { self.advanceOffset(line, 1, true); } return .Match; } fn parseHtmlBlockPrefix(self: *Parser, t: u8) bool { return switch (t) { 1, 2, 3, 4, 5 => true, 6, 7 => !self.blank, else => unreachable, }; } fn parseListMarker(line: []const u8, input_pos: usize, interrupts_paragraph: bool, matched: *usize, nl: *nodes.NodeList) bool { var pos = input_pos; var c = line[pos]; const startpos = pos; if (c == '*' or c == '-' or c == '+') { pos += 1; if (!ascii.isWhitespace(line[pos])) { return false; } if (interrupts_paragraph) { var i = pos; while (strings.isSpaceOrTab(line[i])) : (i += 1) {} if (line[i] == '\n') { return false; } } matched.* = pos - startpos; nl.* = .{ .list_type = .Bullet, .marker_offset = 0, .padding = 0, .start = 1, .delimiter = .Period, .bullet_char = c, .tight = false, }; return true; } if (ascii.isDigit(c)) { var start: usize = 0; var digits: u8 = 0; while (digits < 9 and ascii.isDigit(line[pos])) { start = (10 * start) + (line[pos] - '0'); pos += 1; digits += 1; } if (interrupts_paragraph and start != 1) { return false; } c = line[pos]; if (c != '.' and c != ')') { return false; } pos += 1; if (!ascii.isWhitespace(line[pos])) { return false; } if (interrupts_paragraph) { var i = pos; while (strings.isSpaceOrTab(line[i])) : (i += 1) {} if (strings.isLineEndChar(line[i])) { return false; } } matched.* = pos - startpos; nl.* = .{ .list_type = .Ordered, .marker_offset = 0, .padding = 0, .start = start, .delimiter = if (c == '.') .Period else .Paren, .bullet_char = 0, .tight = false, }; return true; } return false; } fn listsMatch(list_data: *const nodes.NodeList, item_data: *const nodes.NodeList) bool { return list_data.list_type == item_data.list_type and list_data.delimiter == item_data.delimiter and list_data.bullet_char == item_data.bullet_char; } }; fn expectMarkdownHTML(options: Options, markdown: []const u8, html: []const u8) !void { const output = try main.testMarkdownToHtml(options, markdown); defer std.testing.allocator.free(output); try std.testing.expectEqualStrings(html, output); } test "convert simple emphases" { try expectMarkdownHTML(.{}, \\hello, _world_ __world__ ___world___ *_world_* **_world_** *__world__* \\ \\this is `yummy` \\ , \\<p>hello, <em>world</em> <strong>world</strong> <em><strong>world</strong></em> <em><em>world</em></em> <strong><em>world</em></strong> <em><strong>world</strong></em></p> \\<p>this is <code>yummy</code></p> \\ ); } test "smart quotes" { try expectMarkdownHTML(.{ .parse = .{ .smart = true } }, "\"Hey,\" she said. \"What's 'up'?\"\n", "<p>“Hey,” she said. “What’s ‘up’?”</p>\n"); } test "handles EOF without EOL" { try expectMarkdownHTML(.{}, "hello", "<p>hello</p>\n"); } test "accepts multiple lines" { try expectMarkdownHTML(.{}, "hello\nthere\n", "<p>hello\nthere</p>\n"); try expectMarkdownHTML(.{ .render = .{ .hard_breaks = true } }, "hello\nthere\n", "<p>hello<br />\nthere</p>\n"); } test "smart hyphens" { try expectMarkdownHTML(.{ .parse = .{ .smart = true } }, "hyphen - en -- em --- four ---- five ----- six ------ seven -------\n", "<p>hyphen - en – em — four –– five —– six —— seven —––</p>\n"); } test "handles tabs" { try expectMarkdownHTML(.{}, "\tfoo\tbaz\t\tbim\n", "<pre><code>foo\tbaz\t\tbim\n</code></pre>\n"); try expectMarkdownHTML(.{}, " \tfoo\tbaz\t\tbim\n", "<pre><code>foo\tbaz\t\tbim\n</code></pre>\n"); try expectMarkdownHTML(.{}, " - foo\n\n\tbar\n", "<ul>\n<li>\n<p>foo</p>\n<p>bar</p>\n</li>\n</ul>\n"); try expectMarkdownHTML(.{}, "#\tFoo\n", "<h1>Foo</h1>\n"); try expectMarkdownHTML(.{}, "*\t*\t*\t\n", "<hr />\n"); } test "escapes" { try expectMarkdownHTML(.{}, "\\## foo\n", "<p>## foo</p>\n"); } test "setext heading override pointy" { try expectMarkdownHTML(.{}, "<a title=\"a lot\n---\nof dashes\"/>\n", "<h2>&lt;a title=&quot;a lot</h2>\n<p>of dashes&quot;/&gt;</p>\n"); } test "fenced code blocks" { try expectMarkdownHTML(.{}, "```\n<\n >\n```\n", "<pre><code>&lt;\n &gt;\n</code></pre>\n"); try expectMarkdownHTML(.{}, "````\naaa\n```\n``````\n", "<pre><code>aaa\n```\n</code></pre>\n"); } test "html blocks" { try expectMarkdownHTML(.{ .render = .{ .unsafe = true } }, \\_world_. \\</pre> , \\<p><em>world</em>. \\</pre></p> \\ ); try expectMarkdownHTML(.{ .render = .{ .unsafe = true } }, \\<table><tr><td> \\<pre> \\**Hello**, \\ \\_world_. \\</pre> \\</td></tr></table> , \\<table><tr><td> \\<pre> \\**Hello**, \\<p><em>world</em>. \\</pre></p> \\</td></tr></table> \\ ); try expectMarkdownHTML(.{ .render = .{ .unsafe = true } }, \\<DIV CLASS="foo"> \\ \\*Markdown* \\ \\</DIV> , \\<DIV CLASS="foo"> \\<p><em>Markdown</em></p> \\</DIV> \\ ); try expectMarkdownHTML(.{ .render = .{ .unsafe = true } }, \\<pre language="haskell"><code> \\import Text.HTML.TagSoup \\ \\main :: IO () \\main = print $ parseTags tags \\</code></pre> \\okay \\ , \\<pre language="haskell"><code> \\import Text.HTML.TagSoup \\ \\main :: IO () \\main = print $ parseTags tags \\</code></pre> \\<p>okay</p> \\ ); } test "links" { try expectMarkdownHTML(.{}, "[foo](/url)\n", "<p><a href=\"/url\">foo</a></p>\n"); try expectMarkdownHTML(.{}, "[foo](/url \"title\")\n", "<p><a href=\"/url\" title=\"title\">foo</a></p>\n"); } test "link reference definitions" { try expectMarkdownHTML(.{}, "[foo]: /url \"title\"\n\n[foo]\n", "<p><a href=\"/url\" title=\"title\">foo</a></p>\n"); try expectMarkdownHTML(.{}, "[foo]: /url\\bar\\*baz \"foo\\\"bar\\baz\"\n\n[foo]\n", "<p><a href=\"/url%5Cbar*baz\" title=\"foo&quot;bar\\baz\">foo</a></p>\n"); } test "tables" { try expectMarkdownHTML(.{ .extensions = .{ .table = true } }, \\| foo | bar | \\| --- | --- | \\| baz | bim | \\ , \\<table> \\<thead> \\<tr> \\<th>foo</th> \\<th>bar</th> \\</tr> \\</thead> \\<tbody> \\<tr> \\<td>baz</td> \\<td>bim</td> \\</tr> \\</tbody> \\</table> \\ ); } test "strikethroughs" { try expectMarkdownHTML(.{ .extensions = .{ .strikethrough = true } }, "Hello ~world~ there.\n", "<p>Hello <del>world</del> there.</p>\n"); } test "images" { try expectMarkdownHTML(.{}, "[![moon](moon.jpg)](/uri)\n", "<p><a href=\"/uri\"><img src=\"moon.jpg\" alt=\"moon\" /></a></p>\n"); } test "autolink" { try expectMarkdownHTML(.{ .extensions = .{ .autolink = true } }, "www.commonmark.org\n", "<p><a href=\"http://www.commonmark.org\">www.commonmark.org</a></p>\n"); try expectMarkdownHTML(.{ .extensions = .{ .autolink = true } }, "http://commonmark.org\n", "<p><a href=\"http://commonmark.org\">http://commonmark.org</a></p>\n"); try expectMarkdownHTML(.{ .extensions = .{ .autolink = true } }, "[email protected]\n", "<p><a href=\"mailto:[email protected]\">[email protected]</a></p>\n"); } test "header anchors" { try expectMarkdownHTML(.{ .render = .{ .header_anchors = true } }, \\# Hi. \\## Hi 1. \\### Hi. \\#### Hello. \\##### Hi. \\###### Hello. \\# Isn't it grand? \\ , \\<h1><a href="#hi" id="hi"></a>Hi.</h1> \\<h2><a href="#hi-1" id="hi-1"></a>Hi 1.</h2> \\<h3><a href="#hi-2" id="hi-2"></a>Hi.</h3> \\<h4><a href="#hello" id="hello"></a>Hello.</h4> \\<h5><a href="#hi-3" id="hi-3"></a>Hi.</h5> \\<h6><a href="#hello-1" id="hello-1"></a>Hello.</h6> \\<h1><a href="#isnt-it-grand" id="isnt-it-grand"></a>Isn't it grand?</h1> \\ ); }
0
repos/koino
repos/koino/src/autolink.zig
const std = @import("std"); const ascii = std.ascii; const assert = std.debug.assert; const nodes = @import("nodes.zig"); const strings = @import("strings.zig"); const zunicode = @import("zunicode"); pub const AutolinkProcessor = struct { allocator: std.mem.Allocator, text: *[]u8, pub fn init(allocator: std.mem.Allocator, text: *[]u8) AutolinkProcessor { return .{ .allocator = allocator, .text = text, }; } const Match = struct { post: *nodes.AstNode, reverse: usize, skip: usize, }; pub fn process(self: AutolinkProcessor, node: *nodes.AstNode) !void { const len = self.text.len; var i: usize = 0; while (i < len) { const post_org: ?Match = blk: { while (i < len) : (i += 1) { switch (self.text.*[i]) { 'w' => if (try self.wwwMatch(i)) |match| { break :blk match; }, ':' => if (try self.urlMatch(i)) |match| { break :blk match; }, '@' => if (try self.emailMatch(i)) |match| { break :blk match; }, else => {}, } } break :blk null; }; if (post_org) |org| { i -= org.reverse; node.insertAfter(org.post); if (i + org.skip < len) { const remain = self.text.*[i + org.skip ..]; assert(remain.len > 0); org.post.insertAfter(try self.makeInline(.{ .Text = try self.allocator.dupe(u8, remain) })); } self.text.* = try self.allocator.realloc(self.text.*, i); return; } } } const WWW_DELIMS = strings.createMap("*_~(["); fn wwwMatch(self: AutolinkProcessor, i: usize) !?Match { if (i > 0 and !ascii.isWhitespace(self.text.*[i - 1]) and !WWW_DELIMS[self.text.*[i - 1]]) { return null; } if (!std.mem.startsWith(u8, self.text.*[i..], "www.")) { return null; } var link_end = (try checkDomain(self.text.*[i..], false)) orelse return null; while (i + link_end < self.text.len and !ascii.isWhitespace(self.text.*[i + link_end])) : (link_end += 1) {} link_end = autolinkDelim(self.text.*[i..], link_end); var url = try std.ArrayList(u8).initCapacity(self.allocator, 7 + link_end); try url.appendSlice("http://"); try url.appendSlice(self.text.*[i .. link_end + i]); var inl = try self.makeInline(.{ .Link = .{ .url = try url.toOwnedSlice(), .title = &[_]u8{}, }, }); inl.append(try self.makeInline(.{ .Text = try self.allocator.dupe(u8, self.text.*[i .. link_end + i]), })); return Match{ .post = inl, .reverse = 0, .skip = link_end, }; } const SCHEMES = [_][]const u8{ "http", "https", "ftp" }; fn urlMatch(self: AutolinkProcessor, i: usize) !?Match { const size = self.text.len; if (size - i < 4 or self.text.*[i + 1] != '/' or self.text.*[i + 2] != '/') { return null; } var rewind: usize = 0; while (rewind < i and ascii.isAlphabetic(self.text.*[i - rewind - 1])) : (rewind += 1) {} if (!scheme_matched: { for (SCHEMES) |scheme| { if (size - i + rewind >= scheme.len and std.mem.eql(u8, self.text.*[i - rewind .. i], scheme)) { break :scheme_matched true; } } break :scheme_matched false; }) { return null; } var link_end = (try checkDomain(self.text.*[i + 3 ..], true)) orelse return null; while (link_end < size - i and !ascii.isWhitespace(self.text.*[i + link_end])) : (link_end += 1) {} link_end = autolinkDelim(self.text.*[i..], link_end); const url = self.text.*[i - rewind .. i + link_end]; var inl = try self.makeInline(.{ .Link = .{ .url = try self.allocator.dupe(u8, url), .title = &[_]u8{}, }, }); inl.append(try self.makeInline(.{ .Text = try self.allocator.dupe(u8, url) })); return Match{ .post = inl, .reverse = rewind, .skip = rewind + link_end, }; } const EMAIL_OK_SET = strings.createMap(".+-_"); fn emailMatch(self: AutolinkProcessor, i: usize) !?Match { const size = self.text.len; var rewind: usize = 0; var ns: usize = 0; while (rewind < i) { const c = self.text.*[i - rewind - 1]; if (ascii.isAlphanumeric(c) or EMAIL_OK_SET[c]) { rewind += 1; continue; } if (c == '/') { ns += 1; } break; } if (rewind == 0 or ns > 0) { return null; } var link_end: usize = 0; var nb: usize = 0; var np: usize = 0; while (link_end < size - i) { const c = self.text.*[i + link_end]; if (ascii.isAlphanumeric(c)) { // empty } else if (c == '@') { nb += 1; } else if (c == '.' and link_end < size - i - 1 and ascii.isAlphanumeric(self.text.*[i + link_end + 1])) { np += 1; } else if (c != '-' and c != '_') { break; } link_end += 1; } if (link_end < 2 or nb != 1 or np == 0 or (!ascii.isAlphabetic(self.text.*[i + link_end - 1]) and self.text.*[i + link_end - 1] != '.')) { return null; } link_end = autolinkDelim(self.text.*[i..], link_end); var url = try std.ArrayList(u8).initCapacity(self.allocator, 7 + link_end - rewind); try url.appendSlice("mailto:"); try url.appendSlice(self.text.*[i - rewind .. link_end + i]); var inl = try self.makeInline(.{ .Link = .{ .url = try url.toOwnedSlice(), .title = &[_]u8{}, }, }); inl.append(try self.makeInline(.{ .Text = try self.allocator.dupe(u8, self.text.*[i - rewind .. link_end + i]) })); return Match{ .post = inl, .reverse = rewind, .skip = rewind + link_end, }; } fn checkDomain(data: []const u8, allow_short: bool) !?usize { var np: usize = 0; var uscore1: usize = 0; var uscore2: usize = 0; var view = std.unicode.Utf8View.initUnchecked(data); var it = view.iterator(); var last_i = it.i; while (it.nextCodepoint()) |c| { if (c == '_') { uscore2 += 1; } else if (c == '.') { uscore1 = uscore2; uscore2 = 0; np += 1; } else if (!isValidHostchar(c) and c != '-') { if (uscore1 == 0 and uscore2 == 0 and np > 0) { return last_i; } return null; } last_i = it.i; } if (uscore1 > 0 or uscore2 > 0) { return null; } else if (allow_short or np > 0) { return data.len; } else { return null; } } fn isValidHostchar(c: u21) bool { return !zunicode.isSpace(c) and !zunicode.isPunct(c); } const LINK_END_ASSORTMENT = strings.createMap("?!.,:*_~'\""); fn autolinkDelim(data: []const u8, in_link_end: usize) usize { var link_end = in_link_end; for (data[0..link_end], 0..) |c, i| { if (c == '<') { link_end = i; break; } } while (link_end > 0) { const cclose = data[link_end - 1]; const copen: ?u8 = if (cclose == ')') '(' else null; if (LINK_END_ASSORTMENT[cclose]) { link_end -= 1; } else if (cclose == ';') { var new_end = link_end - 2; while (new_end > 0 and ascii.isAlphanumeric(data[new_end])) : (new_end -= 1) {} if (new_end < link_end - 2 and data[new_end] == '&') { link_end = new_end; } else { link_end -= 1; } } else if (copen) |c| { var opening: usize = 0; var closing: usize = 0; for (data[0..link_end]) |b| { if (b == c) { opening += 1; } else if (b == cclose) { closing += 1; } } if (closing <= opening) break; link_end -= 1; } else { break; } } return link_end; } fn makeInline(self: AutolinkProcessor, value: nodes.NodeValue) !*nodes.AstNode { return nodes.AstNode.create(self.allocator, .{ .value = value, .content = std.ArrayList(u8).init(self.allocator), }); } };
0
repos/koino
repos/koino/src/options.zig
pub const Options = struct { pub const Extensions = struct { table: bool = false, strikethrough: bool = false, autolink: bool = false, tagfilter: bool = false, }; pub const Parse = struct { smart: bool = false, }; pub const Render = struct { hard_breaks: bool = false, unsafe: bool = false, header_anchors: bool = false, /// when anchors are enabled, render this icon in front of each heading so people can click it anchor_icon: []const u8 = "", }; extensions: Extensions = .{}, parse: Parse = .{}, render: Render = .{}, };
0
repos/koino
repos/koino/src/html.zig
const std = @import("std"); const ascii = std.ascii; const assert = std.debug.assert; const Options = @import("options.zig").Options; const nodes = @import("nodes.zig"); const strings = @import("strings.zig"); const scanners = @import("scanners.zig"); pub fn print(writer: anytype, allocator: std.mem.Allocator, options: Options, root: *nodes.AstNode) !void { var formatter = makeHtmlFormatter(writer, allocator, options); defer formatter.deinit(); try formatter.format(root, false); } pub fn makeHtmlFormatter(writer: anytype, allocator: std.mem.Allocator, options: Options) HtmlFormatter(@TypeOf(writer)) { return HtmlFormatter(@TypeOf(writer)).init(writer, allocator, options); } pub fn HtmlFormatter(comptime Writer: type) type { return struct { writer: Writer, allocator: std.mem.Allocator, options: Options, last_was_lf: bool = true, anchor_map: std.StringHashMap(void), anchor_node_map: std.AutoHashMap(*nodes.AstNode, []const u8), const Self = @This(); pub fn init(writer: Writer, allocator: std.mem.Allocator, options: Options) Self { return .{ .writer = writer, .allocator = allocator, .options = options, .anchor_map = std.StringHashMap(void).init(allocator), .anchor_node_map = std.AutoHashMap(*nodes.AstNode, []const u8).init(allocator), }; } pub fn deinit(self: *Self) void { var it = self.anchor_map.iterator(); while (it.next()) |entry| { self.allocator.free(entry.key_ptr.*); } self.anchor_map.deinit(); self.anchor_node_map.deinit(); } const NEEDS_ESCAPED = strings.createMap("\"&<>"); const HREF_SAFE = strings.createMap("-_.+!*'(),%#@?=;:/,+&$~abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"); fn dangerousUrl(input: []const u8) !bool { return (try scanners.dangerousUrl(input)) != null; } fn cr(self: *Self) !void { if (!self.last_was_lf) { try self.writeAll("\n"); } } fn escape(self: *Self, s: []const u8) !void { var offset: usize = 0; for (s, 0..) |c, i| { if (NEEDS_ESCAPED[c]) { try self.writeAll(s[offset..i]); try self.writeAll(switch (c) { '"' => "&quot;", '&' => "&amp;", '<' => "&lt;", '>' => "&gt;", else => unreachable, }); offset = i + 1; } } try self.writeAll(s[offset..]); } fn escapeHref(self: *Self, s: []const u8) !void { var i: usize = 0; const size = s.len; while (i < size) : (i += 1) { const org = i; while (i < size and HREF_SAFE[s[i]]) i += 1; if (i > org) { try self.writeAll(s[org..i]); } if (i >= size) { break; } switch (s[i]) { '&' => try self.writeAll("&amp;"), '\'' => try self.writeAll("&#x27;"), else => try self.writer.print("%{X:0>2}", .{s[i]}), } } } pub fn writeAll(self: *Self, s: []const u8) !void { if (s.len == 0) { return; } try self.writer.writeAll(s); self.last_was_lf = s[s.len - 1] == '\n'; } pub fn format(self: *Self, input_node: *nodes.AstNode, plain: bool) !void { const Phase = enum { Pre, Post }; const StackEntry = struct { node: *nodes.AstNode, plain: bool, phase: Phase, }; var stack = std.ArrayList(StackEntry).init(self.allocator); defer stack.deinit(); try stack.append(.{ .node = input_node, .plain = plain, .phase = .Pre }); while (stack.popOrNull()) |entry| { switch (entry.phase) { .Pre => { var new_plain: bool = undefined; if (entry.plain) { switch (entry.node.data.value) { .Text, .HtmlInline, .Code => |literal| { try self.escape(literal); }, .LineBreak, .SoftBreak => { try self.writeAll(" "); }, else => {}, } new_plain = entry.plain; } else { try stack.append(.{ .node = entry.node, .plain = false, .phase = .Post }); new_plain = try self.fnode(entry.node, true); } var it = entry.node.reverseChildrenIterator(); while (it.next()) |ch| { try stack.append(.{ .node = ch, .plain = new_plain, .phase = .Pre }); } }, .Post => { assert(!entry.plain); _ = try self.fnode(entry.node, false); }, } } } fn fnode(self: *Self, node: *nodes.AstNode, entering: bool) !bool { switch (node.data.value) { .Document => {}, .BlockQuote => { try self.cr(); try self.writeAll(if (entering) "<blockquote>\n" else "</blockquote>"); }, .List => |nl| { if (entering) { try self.cr(); if (nl.list_type == .Bullet) { try self.writeAll("<ul>\n"); } else if (nl.start == 1) { try self.writeAll("<ol>\n"); } else { try self.writer.print("<ol start=\"{}\">", .{nl.start}); } } else if (nl.list_type == .Bullet) { try self.writeAll("</ul>\n"); } else { try self.writeAll("</ol>\n"); } }, .Item => { if (entering) { try self.cr(); try self.writeAll("<li>"); } else { try self.writeAll("</li>\n"); } }, .Heading => |nch| { if (entering) { try self.cr(); try self.writer.print("<h{}>", .{nch.level}); if (self.options.render.header_anchors) { const id = try self.getNodeAnchor(node); try self.writeAll("<a href=\"#"); try self.writeAll(id); try self.writeAll("\" id=\""); try self.writeAll(id); try self.writeAll("\">"); try self.writeAll(self.options.render.anchor_icon); try self.writeAll("</a>"); } } else { try self.writer.print("</h{}>\n", .{nch.level}); self.last_was_lf = true; } }, .CodeBlock => |ncb| { if (entering) { try self.cr(); if (ncb.info == null or ncb.info.?.len == 0) { try self.writeAll("<pre><code>"); } else { var first_tag: usize = 0; while (first_tag < ncb.info.?.len and !ascii.isWhitespace(ncb.info.?[first_tag])) first_tag += 1; try self.writeAll("<pre><code class=\"language-"); try self.escape(ncb.info.?[0..first_tag]); try self.writeAll("\">"); } try self.escape(ncb.literal.items); try self.writeAll("</code></pre>\n"); } }, .HtmlBlock => |nhb| { if (entering) { try self.cr(); if (!self.options.render.unsafe) { try self.writeAll("<!-- raw HTML omitted -->"); } else if (self.options.extensions.tagfilter) { try self.tagfilterBlock(nhb.literal.items); } else { try self.writeAll(nhb.literal.items); } try self.cr(); } }, .ThematicBreak => { if (entering) { try self.cr(); try self.writeAll("<hr />\n"); } }, .Paragraph => { const tight = node.parent != null and node.parent.?.parent != null and switch (node.parent.?.parent.?.data.value) { .List => |nl| nl.tight, else => false, }; if (!tight) { if (entering) { try self.cr(); try self.writeAll("<p>"); } else { try self.writeAll("</p>\n"); } } }, .Text => |literal| { if (entering) { try self.escape(literal); } }, .LineBreak => { if (entering) { try self.writeAll("<br />\n"); } }, .SoftBreak => { if (entering) { try self.writeAll(if (self.options.render.hard_breaks) "<br />\n" else "\n"); } }, .Code => |literal| { if (entering) { try self.writeAll("<code>"); try self.escape(literal); try self.writeAll("</code>"); } }, .HtmlInline => |literal| { if (entering) { if (!self.options.render.unsafe) { try self.writeAll("<!-- raw HTML omitted -->"); } else if (self.options.extensions.tagfilter and tagfilter(literal)) { try self.writeAll("&lt;"); try self.writeAll(literal[1..]); } else { try self.writeAll(literal); } } }, .Strong => { try self.writeAll(if (entering) "<strong>" else "</strong>"); }, .Emph => { try self.writeAll(if (entering) "<em>" else "</em>"); }, .Strikethrough => { if (entering) { try self.writeAll("<del>"); } else { try self.writeAll("</del>"); } }, .Link => |nl| { if (entering) { try self.writeAll("<a href=\""); if (self.options.render.unsafe or !(try dangerousUrl(nl.url))) { try self.escapeHref(nl.url); } if (nl.title.len > 0) { try self.writeAll("\" title=\""); try self.escape(nl.title); } try self.writeAll("\">"); } else { try self.writeAll("</a>"); } }, .Image => |nl| { if (entering) { try self.writeAll("<img src=\""); if (self.options.render.unsafe or !(try dangerousUrl(nl.url))) { try self.escapeHref(nl.url); } try self.writeAll("\" alt=\""); return true; } else { if (nl.title.len > 0) { try self.writeAll("\" title=\""); try self.escape(nl.title); } try self.writeAll("\" />"); } }, .Table => { if (entering) { try self.cr(); try self.writeAll("<table>\n"); } else { if (node.last_child.? != node.first_child.?) { try self.cr(); try self.writeAll("</tbody>\n"); } try self.cr(); try self.writeAll("</table>\n"); } }, .TableRow => |kind| { if (entering) { try self.cr(); if (kind == .Header) { try self.writeAll("<thead>\n"); } else if (node.prev) |prev| { switch (prev.data.value) { .TableRow => |k| { if (k == .Header) try self.writeAll("<tbody>\n"); }, else => {}, } } try self.writeAll("<tr>"); } else { try self.cr(); try self.writeAll("</tr>"); if (kind == .Header) { try self.cr(); try self.writeAll("</thead>"); } } }, .TableCell => { const kind = node.parent.?.data.value.TableRow; const alignments = node.parent.?.parent.?.data.value.Table; if (entering) { try self.cr(); if (kind == .Header) { try self.writeAll("<th"); } else { try self.writeAll("<td"); } var start = node.parent.?.first_child.?; var i: usize = 0; while (start != node) { i += 1; start = start.next.?; } switch (alignments[i]) { .Left => try self.writeAll(" align=\"left\""), .Right => try self.writeAll(" align=\"right\""), .Center => try self.writeAll(" align=\"center\""), .None => {}, } try self.writeAll(">"); } else if (kind == .Header) { try self.writeAll("</th>"); } else { try self.writeAll("</td>"); } }, } return false; } fn collectText(self: *Self, node: *nodes.AstNode) ![]u8 { var out = std.ArrayList(u8).init(self.allocator); try collectTextInto(&out, node); return out.toOwnedSlice(); } fn collectTextInto(out: *std.ArrayList(u8), node: *nodes.AstNode) std.mem.Allocator.Error!void { switch (node.data.value) { .Text, .Code => |literal| { try out.appendSlice(literal); }, .LineBreak, .SoftBreak => try out.append(' '), else => { var it = node.first_child; while (it) |child| { try collectTextInto(out, child); it = child.next; } }, } } /// Return the anchor for a given Heading node. If it does not exist yet, it will be generated. pub fn getNodeAnchor(self: *Self, node: *nodes.AstNode) ![]const u8 { std.debug.assert(node.data.value == .Heading); const gop = try self.anchor_node_map.getOrPut(node); if (!gop.found_existing) { errdefer _ = self.anchor_node_map.remove(node); const text_content = try self.collectText(node); defer self.allocator.free(text_content); gop.value_ptr.* = try self.anchorize(text_content); } return gop.value_ptr.*; } fn anchorize(self: *Self, header: []const u8) ![]const u8 { const lower = try strings.toLower(self.allocator, header); defer self.allocator.free(lower); const removed = try scanners.removeAnchorizeRejectedChars(self.allocator, lower); defer self.allocator.free(removed); for (removed) |*c| { if (c.* == ' ') c.* = '-'; } var uniq: usize = 0; while (true) { const anchor = if (uniq == 0) try self.allocator.dupe(u8, removed) else try std.fmt.allocPrint(self.allocator, "{s}-{}", .{ removed, uniq }); errdefer self.allocator.free(anchor); const getPut = try self.anchor_map.getOrPut(anchor); if (!getPut.found_existing) { // anchor now belongs in anchor_map. return anchor; } self.allocator.free(anchor); uniq += 1; } } const TAGFILTER_BLACKLIST = [_][]const u8{ "title", "textarea", "style", "xmp", "iframe", "noembed", "noframes", "script", "plaintext", }; fn tagfilter(literal: []const u8) bool { if (literal.len < 3 or literal[0] != '<') return false; var i: usize = 1; if (literal[i] == '/') i += 1; for (TAGFILTER_BLACKLIST) |t| { const j = i + t.len; if (literal.len > j and std.ascii.eqlIgnoreCase(t, literal[i..j])) { return ascii.isWhitespace(literal[j]) or literal[j] == '>' or (literal[j] == '/' and literal.len >= j + 2 and literal[j + 1] == '>'); } } return false; } fn tagfilterBlock(self: *Self, input: []const u8) !void { const size = input.len; var i: usize = 0; while (i < size) { const org = i; while (i < size and input[i] != '<') : (i += 1) {} if (i > org) { try self.writeAll(input[org..i]); } if (i >= size) { break; } if (tagfilter(input[i..])) { try self.writeAll("&lt;"); } else { try self.writeAll("<"); } i += 1; } } }; } test "escaping works as expected" { var buffer = std.ArrayList(u8).init(std.testing.allocator); defer buffer.deinit(); var formatter = makeHtmlFormatter(buffer.writer(), std.testing.allocator, .{}); defer formatter.deinit(); try formatter.escape("<hello & goodbye>"); try std.testing.expectEqualStrings("&lt;hello &amp; goodbye&gt;", buffer.items); } test "lowercase anchor generation" { var formatter = makeHtmlFormatter(std.io.null_writer, std.testing.allocator, .{}); defer formatter.deinit(); try std.testing.expectEqualStrings("yés", try formatter.anchorize("YÉS")); }
0
repos/koino
repos/koino/src/scanners.zig
const std = @import("std"); const testing = std.testing; const Regex = @import("libpcre").Regex; const Error = error{OutOfMemory}; const MemoizedRegexes = struct { atxHeadingStart: ?Regex = null, thematicBreak: ?Regex = null, setextHeadingLine: ?Regex = null, autolinkUri: ?Regex = null, autolinkEmail: ?Regex = null, openCodeFence: ?Regex = null, closeCodeFence: ?Regex = null, htmlBlockStart1: ?Regex = null, htmlBlockStart4: ?Regex = null, htmlBlockStart6: ?Regex = null, htmlBlockStart7: ?Regex = null, htmlTag: ?Regex = null, spacechars: ?Regex = null, linkTitle: ?Regex = null, dangerousUrl: ?Regex = null, tableStart: ?Regex = null, tableCell: ?Regex = null, tableCellEnd: ?Regex = null, tableRowEnd: ?Regex = null, removeAnchorizeRejectedChars: ?Regex = null, }; var memoized = MemoizedRegexes{}; // pub fn deinitRegexes() void { // inline for (@typeInfo(MemoizedRegexes).Struct.fields) |field| { // if (@field(memoized, field.name)) |re| { // re.deinit(); // @field(memoized, field.name) = null; // } // } // } fn acquire(comptime name: []const u8, regex: [:0]const u8) Error!Regex { const field_name = comptime if (std.mem.lastIndexOf(u8, name, ".")) |i| name[i + 1 ..] else name; if (@field(memoized, field_name)) |re| { return re; } @field(memoized, field_name) = Regex.compile(regex, .{ .Utf8 = true }) catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, else => unreachable, }; return @field(memoized, field_name).?; } fn search(re: Regex, line: []const u8) ?usize { if (re.matches(line, .{ .Anchored = true }) catch null) |cap| { return cap.end; } return null; } pub fn unwrap(value: Error!?usize, out: *usize) Error!bool { if (value) |maybe_val| { if (maybe_val) |val| { out.* = val; return true; } return false; } else |err| { return err; } } var searchFirstCaptureBuffer: [1024]u8 = [_]u8{undefined} ** 1024; var searchFirstCaptureBufferAllocator = std.heap.FixedBufferAllocator.init(&searchFirstCaptureBuffer); fn searchFirstCapture(re: Regex, line: []const u8) Error!?usize { searchFirstCaptureBufferAllocator.reset(); const result = re.captures(searchFirstCaptureBufferAllocator.allocator(), line, .{ .Anchored = true }) catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, else => return null, }; if (result) |caps| { var i: usize = 1; while (i < caps.len) : (i += 1) { if (caps[i]) |cap| { return cap.end; } } @panic("no matching capture group"); } return null; } pub fn atxHeadingStart(line: []const u8) Error!?usize { if (line[0] != '#') { return null; } const re = try acquire(@src().fn_name, "#{1,6}[ \t\r\n]"); return search(re, line); } pub fn thematicBreak(line: []const u8) Error!?usize { if (line[0] != '*' and line[0] != '-' and line[0] != '_') { return null; } const re = try acquire(@src().fn_name, "(?:(?:\\*[ \t]*){3,}|(?:_[ \t]*){3,}|(?:-[ \t]*){3,})[ \t]*[\r\n]"); return search(re, line); } test "thematicBreak" { try testing.expectEqual(@as(?usize, null), try thematicBreak("hello")); try testing.expectEqual(@as(?usize, 4), try thematicBreak("***\n")); try testing.expectEqual(@as(?usize, 21), try thematicBreak("- - - \r")); try testing.expectEqual(@as(?usize, 21), try thematicBreak("- - - \r\nxyz")); } pub const SetextChar = enum { Equals, Hyphen, }; pub fn setextHeadingLine(line: []const u8, sc: *SetextChar) Error!bool { const re = try acquire(@src().fn_name, "(?:=+|-+)[ \t]*[\r\n]"); if ((line[0] == '=' or line[0] == '-') and search(re, line) != null) { sc.* = if (line[0] == '=') .Equals else .Hyphen; return true; } return false; } const scheme = "[A-Za-z][A-Za-z0-9.+-]{1,31}"; pub fn autolinkUri(line: []const u8) Error!?usize { const re = try acquire(@src().fn_name, scheme ++ ":[^\\x00-\\x20<>]*>"); return search(re, line); } test "autolinkUri" { try testing.expectEqual(@as(?usize, null), try autolinkUri("www.google.com>")); try testing.expectEqual(@as(?usize, 23), try autolinkUri("https://www.google.com>")); try testing.expectEqual(@as(?usize, 7), try autolinkUri("a+b-c:>")); try testing.expectEqual(@as(?usize, null), try autolinkUri("a+b-c:")); } pub fn autolinkEmail(line: []const u8) Error!?usize { const re = try acquire(@src().fn_name, \\[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*> ); return search(re, line); } test "autolinkEmail" { try testing.expectEqual(@as(?usize, null), try autolinkEmail("abc>")); try testing.expectEqual(@as(?usize, null), try autolinkEmail("abc.def>")); try testing.expectEqual(@as(?usize, null), try autolinkEmail("abc@def")); try testing.expectEqual(@as(?usize, 8), try autolinkEmail("abc@def>")); try testing.expectEqual(@as(?usize, 16), try autolinkEmail("abc+123!?@96--1>")); } pub fn openCodeFence(line: []const u8) Error!?usize { if (line[0] != '`' and line[0] != '~') return null; const re = try acquire(@src().fn_name, "(?:(`{3,})[^`\r\n\\x00]*|(~{3,})[^\r\n\\x00]*)[\r\n]"); return searchFirstCapture(re, line); } test "openCodeFence" { try testing.expectEqual(@as(?usize, null), try openCodeFence("```m")); try testing.expectEqual(@as(?usize, 3), try openCodeFence("```m\n")); try testing.expectEqual(@as(?usize, 6), try openCodeFence("~~~~~~m\n")); } pub fn closeCodeFence(line: []const u8) Error!?usize { if (line[0] != '`' and line[0] != '~') return null; const re = try acquire(@src().fn_name, "(`{3,}|~{3,})[\t ]*[\r\n]"); return searchFirstCapture(re, line); } test "closeCodeFence" { try testing.expectEqual(@as(?usize, null), try closeCodeFence("```m")); try testing.expectEqual(@as(?usize, 3), try closeCodeFence("```\n")); try testing.expectEqual(@as(?usize, 6), try closeCodeFence("~~~~~~\r\n")); } pub fn htmlBlockEnd1(line: []const u8) bool { return std.ascii.indexOfIgnoreCase(line, "</script>") != null or std.ascii.indexOfIgnoreCase(line, "</pre>") != null or std.ascii.indexOfIgnoreCase(line, "</style>") != null; } test "htmlBlockEnd1" { try testing.expect(htmlBlockEnd1(" xyz </script> ")); try testing.expect(htmlBlockEnd1(" xyz </SCRIPT> ")); try testing.expect(!htmlBlockEnd1(" xyz </ script> ")); } pub fn htmlBlockEnd2(line: []const u8) bool { return std.mem.indexOf(u8, line, "-->") != null; } pub fn htmlBlockEnd3(line: []const u8) bool { return std.mem.indexOf(u8, line, "?>") != null; } pub fn htmlBlockEnd4(line: []const u8) bool { return std.mem.indexOfScalar(u8, line, '>') != null; } pub fn htmlBlockEnd5(line: []const u8) bool { return std.mem.indexOf(u8, line, "]]>") != null; } pub fn htmlBlockStart(line: []const u8, sc: *usize) Error!bool { if (line[0] != '<') return false; const re1 = try acquire("htmlBlockStart1", "<(?i:script|pre|style)[ \t\\x0b\\x0c\r\n>]"); const re4 = try acquire("htmlBlockStart4", "<![A-Z]"); const re6 = try acquire("htmlBlockStart6", "</?(?i:address|article|aside|base|basefont|blockquote|body|caption|center|col|colgroup|dd|details|dialog|dir|div|dl|dt|fieldset|figcaption|figure|footer|form|frame|frameset|h1|h2|h3|h4|h5|h6|head|header|hr|html|iframe|legend|li|link|main|menu|menuitem|nav|noframes|ol|optgroup|option|p|param|section|source|title|summary|table|tbody|td|tfoot|th|thead|title|tr|track|ul)(?:[ \t\\x0b\\x0c\r\n>]|/>)"); if (search(re1, line) != null) { sc.* = 1; } else if (std.mem.startsWith(u8, line, "<!--")) { sc.* = 2; } else if (std.mem.startsWith(u8, line, "<?")) { sc.* = 3; } else if (search(re4, line) != null) { sc.* = 4; } else if (std.mem.startsWith(u8, line, "<![CDATA[")) { sc.* = 5; } else if (search(re6, line) != null) { sc.* = 6; } else { return false; } return true; } test "htmlBlockStart" { var sc: usize = undefined; try testing.expect(!try htmlBlockStart("<xyz", &sc)); try testing.expect(try htmlBlockStart("<Script\r", &sc)); try testing.expectEqual(@as(usize, 1), sc); try testing.expect(try htmlBlockStart("<pre>", &sc)); try testing.expectEqual(@as(usize, 1), sc); try testing.expect(try htmlBlockStart("<!-- h", &sc)); try testing.expectEqual(@as(usize, 2), sc); try testing.expect(try htmlBlockStart("<?m", &sc)); try testing.expectEqual(@as(usize, 3), sc); try testing.expect(try htmlBlockStart("<!Q", &sc)); try testing.expectEqual(@as(usize, 4), sc); try testing.expect(try htmlBlockStart("<![CDATA[\n", &sc)); try testing.expectEqual(@as(usize, 5), sc); try testing.expect(try htmlBlockStart("</ul>", &sc)); try testing.expectEqual(@as(usize, 6), sc); try testing.expect(try htmlBlockStart("<figcaption/>", &sc)); try testing.expectEqual(@as(usize, 6), sc); try testing.expect(!try htmlBlockStart("<xhtml>", &sc)); } const space_char = "[ \t\\x0b\\x0c\r\n]"; const tag_name = "(?:[A-Za-z][A-Za-z0-9-]*)"; const close_tag = "(?:/" ++ tag_name ++ space_char ++ "*>)"; const attribute_name = "(?:[a-zA_Z_:][a-zA-Z0-9:._-]*)"; const attribute_value = "(?:(?:[^ \t\r\n\\x0b\\x0c\"'=<>`\\x00]+)|(?:'[^\\x00']*')|(?:\"[^\\x00\"]*\"))"; const attribute_value_spec = "(?:" ++ space_char ++ "*=" ++ space_char ++ "*" ++ attribute_value ++ ")"; const attribute = "(?:" ++ space_char ++ "+" ++ attribute_name ++ attribute_value_spec ++ "?)"; const open_tag = "(?:" ++ tag_name ++ attribute ++ "*" ++ space_char ++ "*/?>)"; pub fn htmlBlockStart7(line: []const u8, sc: *usize) Error!bool { const re = try acquire(@src().fn_name, "<(?:" ++ open_tag ++ "|" ++ close_tag ++ ")[\t\\x0c ]*[\r\n]"); if (search(re, line) != null) { sc.* = 7; return true; } return false; } test "htmlBlockStart7" { var sc: usize = 1; try testing.expect(!try htmlBlockStart7("<a", &sc)); try testing.expect(try htmlBlockStart7("<a> \n", &sc)); try testing.expectEqual(@as(usize, 7), sc); try testing.expect(try htmlBlockStart7("<b2/>\r", &sc)); try testing.expect(try htmlBlockStart7("<b2\ndata=\"foo\" >\t\x0c\n", &sc)); try testing.expect(try htmlBlockStart7("<a foo=\"bar\" bam = 'baz <em>\"</em>'\n_boolean zoop:33=zoop:33 />\n", &sc)); try testing.expect(!try htmlBlockStart7("<a h*#ref=\"hi\">\n", &sc)); } const html_comment = "(?:!---->|(?:!---?[^\\x00>-](?:-?[^\\x00-])*-->))"; const processing_instruction = "(?:\\?(?:[^?>\\x00]+|\\?[^>\\x00]|>)*\\?>)"; const declaration = "(?:![A-Z]+" ++ space_char ++ "+[^>\\x00]*>)"; const cdata = "(?:!\\[CDATA\\[(?:[^\\]\\x00]+|\\][^\\]\\x00]|\\]\\][^>\\x00])*]]>)"; pub fn htmlTag(line: []const u8) Error!?usize { const re = try acquire(@src().fn_name, "(?:" ++ open_tag ++ "|" ++ close_tag ++ "|" ++ html_comment ++ "|" ++ processing_instruction ++ "|" ++ declaration ++ "|" ++ cdata ++ ")"); return search(re, line); } test "htmlTag" { try testing.expectEqual(@as(?usize, 6), try htmlTag("!---->")); try testing.expectEqual(@as(?usize, 9), try htmlTag("!--x-y-->")); try testing.expectEqual(@as(?usize, 5), try htmlTag("?zy?>")); try testing.expectEqual(@as(?usize, 6), try htmlTag("?z?y?>")); try testing.expectEqual(@as(?usize, 14), try htmlTag("!ABCD aoea@#&>")); try testing.expectEqual(@as(?usize, 11), try htmlTag("![CDATA[]]>")); try testing.expectEqual(@as(?usize, 20), try htmlTag("![CDATA[a b\n c d ]]>")); try testing.expectEqual(@as(?usize, 23), try htmlTag("![CDATA[\r]abc]].>\n]>]]>")); } pub fn spacechars(line: []const u8) Error!?usize { const re = try acquire(@src().fn_name, space_char ++ "+"); return search(re, line); } const link_title = "(?:\"(?:\\\\.|[^\"\\x00])*\"|'(?:\\\\.|[^'\\x00])*'|\\((?:\\\\.|[^()\\x00])*\\))"; pub fn linkTitle(line: []const u8) Error!?usize { const re = try acquire(@src().fn_name, link_title); return search(re, line); } test "linkTitle" { try testing.expectEqual(@as(?usize, null), try linkTitle("\"xyz")); try testing.expectEqual(@as(?usize, 5), try linkTitle("\"xyz\"")); try testing.expectEqual(@as(?usize, 7), try linkTitle("\"x\\\"yz\"")); try testing.expectEqual(@as(?usize, null), try linkTitle("'xyz")); try testing.expectEqual(@as(?usize, 5), try linkTitle("'xyz'")); try testing.expectEqual(@as(?usize, null), try linkTitle("(xyz")); try testing.expectEqual(@as(?usize, 5), try linkTitle("(xyz)")); } const dangerous_url = "(?:data:(?!png|gif|jpeg|webp)|javascript:|vbscript:|file:)"; pub fn dangerousUrl(line: []const u8) Error!?usize { const re = try acquire(@src().fn_name, dangerous_url); return search(re, line); } test "dangerousUrl" { try testing.expectEqual(@as(?usize, null), try dangerousUrl("http://thing")); try testing.expectEqual(@as(?usize, 5), try dangerousUrl("data:xyz")); try testing.expectEqual(@as(?usize, null), try dangerousUrl("data:png")); try testing.expectEqual(@as(?usize, null), try dangerousUrl("data:webp")); try testing.expectEqual(@as(?usize, 5), try dangerousUrl("data:a")); try testing.expectEqual(@as(?usize, 11), try dangerousUrl("javascript:")); } const table_spacechar = "[ \t\\x0b\\x0c]"; const table_newline = "(?:\r?\n)"; const table_marker = "(?:" ++ table_spacechar ++ "*:?-+:?" ++ table_spacechar ++ "*)"; const table_cell = "(?:(\\\\.|[^|\r\n])*)"; pub fn tableStart(line: []const u8) Error!?usize { const re = try acquire(@src().fn_name, "\\|?" ++ table_marker ++ "(?:\\|" ++ table_marker ++ ")*\\|?" ++ table_spacechar ++ "*" ++ table_newline); return search(re, line); } test "tableStart" { try testing.expectEqual(@as(?usize, null), try tableStart(" \r\n")); try testing.expectEqual(@as(?usize, 7), try tableStart(" -- |\r\n")); try testing.expectEqual(@as(?usize, 14), try tableStart("| :-- | -- |\r\n")); try testing.expectEqual(@as(?usize, null), try tableStart("| -:- | -- |\r\n")); } pub fn tableCell(line: []const u8) Error!?usize { const re = try acquire(@src().fn_name, table_cell); return search(re, line); } test "tableCell" { try testing.expectEqual(@as(?usize, 3), try tableCell("abc|def")); try testing.expectEqual(@as(?usize, 8), try tableCell("abc\\|def")); try testing.expectEqual(@as(?usize, 5), try tableCell("abc\\\\|def")); } pub fn tableCellEnd(line: []const u8) Error!?usize { const re = try acquire(@src().fn_name, "\\|" ++ table_spacechar ++ "*" ++ table_newline ++ "?"); return search(re, line); } test "tableCellEnd" { try testing.expectEqual(@as(?usize, 1), try tableCellEnd("|")); try testing.expectEqual(@as(?usize, null), try tableCellEnd(" |")); try testing.expectEqual(@as(?usize, 1), try tableCellEnd("|a")); try testing.expectEqual(@as(?usize, 3), try tableCellEnd("| \r")); try testing.expectEqual(@as(?usize, 4), try tableCellEnd("| \n")); try testing.expectEqual(@as(?usize, 5), try tableCellEnd("| \r\n")); } pub fn tableRowEnd(line: []const u8) Error!?usize { const re = try acquire(@src().fn_name, table_spacechar ++ "*" ++ table_newline); return search(re, line); } test "tableRowEnd" { try testing.expectEqual(@as(?usize, null), try tableRowEnd("a")); try testing.expectEqual(@as(?usize, 1), try tableRowEnd("\na")); try testing.expectEqual(@as(?usize, null), try tableRowEnd(" a")); try testing.expectEqual(@as(?usize, 4), try tableRowEnd(" \na")); try testing.expectEqual(@as(?usize, 5), try tableRowEnd(" \r\na")); } pub fn removeAnchorizeRejectedChars(allocator: std.mem.Allocator, src: []const u8) Error![]u8 { const re = try acquire(@src().fn_name, "[^\\p{L}\\p{M}\\p{N}\\p{Pc} -]"); var output = std.ArrayList(u8).init(allocator); errdefer output.deinit(); var org: usize = 0; while (re.matches(src[org..], .{}) catch null) |cap| { try output.appendSlice(src[org .. org + cap.start]); org += cap.end; if (org >= src.len) break; } try output.appendSlice(src[org..]); return output.toOwnedSlice(); } test "removeAnchorizeRejectedChars" { for ([_][]const u8{ "abc", "'abc", "''abc", "a'bc", "'a'''b'c'" }) |abc| { const result = try removeAnchorizeRejectedChars(std.testing.allocator, abc); try testing.expectEqualStrings("abc", result); std.testing.allocator.free(result); } }
0
repos/koino
repos/koino/src/koino.zig
pub const parser = @import("parser.zig"); pub const Options = @import("options.zig").Options; pub const nodes = @import("nodes.zig"); pub const html = @import("html.zig");
0
repos/koino
repos/koino/src/table.zig
const std = @import("std"); const Parser = @import("parser.zig").Parser; const nodes = @import("nodes.zig"); const scanners = @import("scanners.zig"); const strings = @import("strings.zig"); pub fn matches(allocator: std.mem.Allocator, line: []const u8) !bool { const r = try row(allocator, line); const result = r != null; if (r) |v| freeNested(allocator, v); return result; } pub fn freeNested(allocator: std.mem.Allocator, v: [][]u8) void { for (v) |e| allocator.free(e); allocator.free(v); } fn row(allocator: std.mem.Allocator, line: []const u8) !?[][]u8 { const len = line.len; var v = std.ArrayList([]u8).init(allocator); errdefer freeNested(allocator, v.toOwnedSlice() catch unreachable); var offset: usize = 0; if (len > 0 and line[0] == '|') offset += 1; while (true) { const cell_matched = (try scanners.tableCell(line[offset..])) orelse 0; var pipe_matched = (try scanners.tableCellEnd(line[offset + cell_matched ..])) orelse 0; if (cell_matched > 0 or pipe_matched > 0) { var cell = try unescapePipes(allocator, line[offset .. offset + cell_matched]); strings.trimIt(&cell); try v.append(try cell.toOwnedSlice()); } offset += cell_matched + pipe_matched; if (pipe_matched == 0) { pipe_matched = (try scanners.tableRowEnd(line[offset..])) orelse 0; offset += pipe_matched; } if (!((cell_matched > 0 or pipe_matched > 0) and offset < len)) { break; } } if (offset != len or v.items.len == 0) { freeNested(allocator, try v.toOwnedSlice()); return null; } else { return try v.toOwnedSlice(); } } pub fn tryOpeningBlock(parser: *Parser, container: *nodes.AstNode, line: []const u8, replace: *bool) !?*nodes.AstNode { return switch (container.data.value) { .Paragraph => try tryOpeningHeader(parser, container, line, replace), .Table => |aligns| tryOpeningRow(parser, container, aligns, line, replace), else => null, }; } fn tryOpeningHeader(parser: *Parser, container: *nodes.AstNode, line: []const u8, replace: *bool) !?*nodes.AstNode { if ((try scanners.tableStart(line[parser.first_nonspace..])) == null) { replace.* = false; return container; } const header_row = (try row(parser.allocator, container.data.content.items)) orelse { replace.* = false; return container; }; defer freeNested(parser.allocator, header_row); const marker_row = (try row(parser.allocator, line[parser.first_nonspace..])).?; defer freeNested(parser.allocator, marker_row); if (header_row.len != marker_row.len) { replace.* = false; return container; } var alignments = try parser.allocator.alloc(nodes.TableAlignment, marker_row.len); errdefer parser.allocator.free(alignments); for (marker_row, 0..) |cell, i| { const left = cell.len > 0 and cell[0] == ':'; const right = cell.len > 0 and cell[cell.len - 1] == ':'; alignments[i] = if (left and right) nodes.TableAlignment.Center else if (left) nodes.TableAlignment.Left else if (right) nodes.TableAlignment.Right else nodes.TableAlignment.None; } const table = try nodes.AstNode.create(parser.allocator, .{ .value = .{ .Table = alignments }, .start_line = parser.line_number, .content = std.ArrayList(u8).init(parser.allocator), }); container.append(table); const header = try parser.addChild(table, .{ .TableRow = .Header }); for (header_row) |header_str| { var header_cell = try parser.addChild(header, .TableCell); try header_cell.data.content.appendSlice(header_str); } const offset = line.len - 1 - parser.offset; parser.advanceOffset(line, offset, false); replace.* = true; return table; } fn tryOpeningRow(parser: *Parser, container: *nodes.AstNode, aligns: []nodes.TableAlignment, line: []const u8, replace: *bool) !?*nodes.AstNode { if (parser.blank) return null; const this_row = (try row(parser.allocator, line[parser.first_nonspace..])).?; defer freeNested(parser.allocator, this_row); const new_row = try parser.addChild(container, .{ .TableRow = .Body }); var i: usize = 0; while (i < @min(aligns.len, this_row.len)) : (i += 1) { var cell = try parser.addChild(new_row, .TableCell); try cell.data.content.appendSlice(this_row[i]); } while (i < aligns.len) : (i += 1) { _ = try parser.addChild(new_row, .TableCell); } const offset = line.len - 1 - parser.offset; parser.advanceOffset(line, offset, false); replace.* = false; return new_row; } fn unescapePipes(allocator: std.mem.Allocator, string: []const u8) !std.ArrayList(u8) { var v = try std.ArrayList(u8).initCapacity(allocator, string.len); for (string, 0..) |c, i| { if (c == '\\' and i + 1 < string.len and string[i + 1] == '|') { continue; } else { try v.append(c); } } return v; }
0
repos/koino
repos/koino/src/ast.zig
const std = @import("std"); const mem = std.mem; const assert = std.debug.assert; pub fn Ast(comptime T: type) type { return struct { const Self = @This(); allocator: mem.Allocator, data: T, parent: ?*Self = null, prev: ?*Self = null, next: ?*Self = null, first_child: ?*Self = null, last_child: ?*Self = null, pub fn create(allocator: mem.Allocator, data: T) !*Self { const obj = try allocator.create(Self); obj.* = .{ .allocator = allocator, .data = data, }; return obj; } pub fn deinit(self: *Self) void { self.data.deinit(self.allocator); var it = self.first_child; while (it) |child| { const next = child.next; child.deinit(); it = next; } self.allocator.destroy(self); } pub fn append(self: *Self, child: *Self) void { child.detach(); child.parent = self; if (self.last_child) |last_child| { child.prev = last_child; assert(last_child.next == null); last_child.next = child; } else { assert(self.first_child == null); self.first_child = child; } self.last_child = child; } pub fn insertAfter(self: *Self, sibling: *Self) void { sibling.detach(); sibling.parent = self.parent; sibling.prev = self; if (self.next) |next| { assert(next.prev.? == self); next.prev = sibling; sibling.next = next; } else if (self.parent) |parent| { assert(parent.last_child.? == self); parent.last_child = sibling; } self.next = sibling; } pub fn insertBefore(self: *Self, sibling: *Self) void { sibling.detach(); sibling.parent = self.parent; sibling.next = self; if (self.prev) |prev| { sibling.prev = prev; assert(prev.next.? == self); prev.next = sibling; } else if (self.parent) |parent| { assert(parent.first_child.? == self); parent.first_child = sibling; } self.prev = sibling; } pub fn detach(self: *Self) void { if (self.next) |next| { next.prev = self.prev; } else if (self.parent) |parent| { parent.last_child = self.prev; } if (self.prev) |prev| { prev.next = self.next; } else if (self.parent) |parent| { parent.first_child = self.next; } self.parent = null; self.prev = null; self.next = null; } pub fn detachDeinit(self: *Self) void { self.detach(); self.deinit(); } pub const ReverseChildrenIterator = struct { next_value: ?*Self, pub fn next(self: *@This()) ?*Self { const to_return = self.next_value; if (to_return) |n| { self.next_value = n.prev; } return to_return; } }; pub fn reverseChildrenIterator(self: *Self) ReverseChildrenIterator { return .{ .next_value = self.last_child }; } pub const TraverseIterator = struct { root: *Self, upcoming: ?NodeEdge, const NodeEdge = union(enum) { Start: *Self, End: *Self, }; pub fn next(self: *@This()) ?NodeEdge { const item = self.upcoming orelse return null; self.upcoming = switch (item) { .Start => |node| if (node.first_child) |child| NodeEdge{ .Start = child } else NodeEdge{ .End = node }, .End => |node| if (node == self.root) null else if (node.next) |sibling| NodeEdge{ .Start = sibling } else if (node.parent) |parent| NodeEdge{ .End = parent } else unreachable, }; return item; } }; pub fn traverseIterator(self: *Self) TraverseIterator { return .{ .root = self, .upcoming = .{ .Start = self } }; } pub const DescendantsIterator = struct { traverse: TraverseIterator, pub fn next(self: *@This()) ?*Self { while (true) { if (self.traverse.next()) |edge| switch (edge) { .Start => |node| return node, .End => {}, } else { return null; } } } }; pub fn descendantsIterator(self: *Self) DescendantsIterator { return .{ .traverse = traverseIterator(self) }; } // These don't quite belong. pub fn lastChildIsOpen(self: *Self) bool { if (self.last_child) |n| { return n.data.open; } return false; } pub fn endsWithBlankLine(self: *Self) bool { var it: ?*Self = self; while (it) |cur| { if (cur.data.last_line_blank) return true; switch (cur.data.value) { .List, .Item => it = cur.last_child, else => it = null, } } return false; } }; }
0
repos/koino
repos/koino/src/strings.zig
const std = @import("std"); const mem = std.mem; const testing = std.testing; const ascii = std.ascii; const nodes = @import("nodes.zig"); const htmlentities = @import("htmlentities"); const zunicode = @import("zunicode"); pub fn isLineEndChar(ch: u8) bool { return switch (ch) { '\n', '\r' => true, else => false, }; } pub fn isSpaceOrTab(ch: u8) bool { return switch (ch) { ' ', '\t' => true, else => false, }; } pub fn isBlank(s: []const u8) bool { for (s) |c| { switch (c) { '\n', '\r' => return true, ' ', '\t' => {}, else => return false, } } return true; } test "isBlank" { try testing.expect(isBlank("")); try testing.expect(isBlank("\nx")); try testing.expect(isBlank(" \t\t \r")); try testing.expect(!isBlank("e")); try testing.expect(!isBlank(" \t e ")); } const SPACES = "\t\n\x0b\x0c\r "; pub fn ltrim(s: []const u8) []const u8 { return mem.trimLeft(u8, s, SPACES); } test "ltrim" { try testing.expectEqualStrings("abc", ltrim("abc")); try testing.expectEqualStrings("abc", ltrim(" abc")); try testing.expectEqualStrings("abc", ltrim(" \n\n \t\r abc")); try testing.expectEqualStrings("abc \n zz \n ", ltrim("\nabc \n zz \n ")); } pub fn rtrim(s: []const u8) []const u8 { return mem.trimRight(u8, s, SPACES); } test "rtrim" { try testing.expectEqualStrings("abc", rtrim("abc")); try testing.expectEqualStrings("abc", rtrim("abc ")); try testing.expectEqualStrings("abc", rtrim("abc \n\n \t\r ")); try testing.expectEqualStrings(" \nabc \n zz", rtrim(" \nabc \n zz \n")); } pub fn trim(s: []const u8) []const u8 { return mem.trim(u8, s, SPACES); } test "trim" { try testing.expectEqualStrings("abc", trim("abc")); try testing.expectEqualStrings("abc", trim(" abc ")); try testing.expectEqualStrings("abc", trim(" abc \n\n \t\r ")); try testing.expectEqualStrings("abc \n zz", trim(" \nabc \n zz \n")); } pub fn trimIt(al: *std.ArrayList(u8)) void { const trimmed = trim(al.items); if (al.items.ptr == trimmed.ptr and al.items.len == trimmed.len) return; if (&al.items != &trimmed) { mem.copyForwards(u8, al.items, trimmed); } al.items.len = trimmed.len; } test "trimIt" { var buf = std.ArrayList(u8).init(std.testing.allocator); defer buf.deinit(); try buf.appendSlice("abc"); trimIt(&buf); try std.testing.expectEqualStrings("abc", buf.items); buf.items.len = 0; try buf.appendSlice(" \tabc"); trimIt(&buf); try std.testing.expectEqualStrings("abc", buf.items); buf.items.len = 0; try buf.appendSlice(" \r abc \n "); trimIt(&buf); try std.testing.expectEqualStrings("abc", buf.items); } pub fn chopTrailingHashtags(s: []const u8) []const u8 { var r = rtrim(s); if (r.len == 0) return r; const orig_n = r.len - 1; var n = orig_n; while (r[n] == '#') : (n -= 1) { if (n == 0) return r; } if (n != orig_n and isSpaceOrTab(r[n])) { return rtrim(r[0..n]); } else { return r; } } test "chopTrailingHashtags" { try testing.expectEqualStrings("xyz", chopTrailingHashtags("xyz")); try testing.expectEqualStrings("xyz#", chopTrailingHashtags("xyz#")); try testing.expectEqualStrings("xyz###", chopTrailingHashtags("xyz###")); try testing.expectEqualStrings("xyz###", chopTrailingHashtags("xyz### ")); try testing.expectEqualStrings("xyz###", chopTrailingHashtags("xyz### #")); try testing.expectEqualStrings("xyz", chopTrailingHashtags("xyz ")); try testing.expectEqualStrings("xyz", chopTrailingHashtags("xyz ##")); try testing.expectEqualStrings("xyz", chopTrailingHashtags("xyz ##")); } pub fn normalizeCode(allocator: mem.Allocator, s: []const u8) mem.Allocator.Error![]u8 { var code = try std.ArrayList(u8).initCapacity(allocator, s.len); errdefer code.deinit(); var i: usize = 0; var contains_nonspace = false; while (i < s.len) { switch (s[i]) { '\r' => { if (i + 1 == s.len or s[i + 1] != '\n') { try code.append(' '); } }, '\n' => { try code.append(' '); }, else => try code.append(s[i]), } if (s[i] != ' ') { contains_nonspace = true; } i += 1; } if (contains_nonspace and code.items.len != 0 and code.items[0] == ' ' and code.items[code.items.len - 1] == ' ') { _ = code.orderedRemove(0); _ = code.pop(); } return code.toOwnedSlice(); } const Case = struct { in: []const u8, out: []const u8, }; fn testCases(comptime function: fn (mem.Allocator, []const u8) anyerror![]u8, cases: []const Case) !void { for (cases) |case| { const result = try function(std.testing.allocator, case.in); defer std.testing.allocator.free(result); try testing.expectEqualStrings(case.out, result); } } test "normalizeCode" { try testCases(normalizeCode, &[_]Case{ .{ .in = "qwe", .out = "qwe" }, .{ .in = " qwe ", .out = "qwe" }, .{ .in = " qwe ", .out = " qwe " }, .{ .in = " abc\rdef'\r\ndef ", .out = "abc def' def" }, }); } pub fn removeTrailingBlankLines(line: *std.ArrayList(u8)) void { var i = line.items.len - 1; while (true) : (i -= 1) { const c = line.items[i]; if (c != ' ' and c != '\t' and !isLineEndChar(c)) { break; } if (i == 0) { line.items.len = 0; return; } } while (i < line.items.len) : (i += 1) { if (!isLineEndChar(line.items[i])) continue; line.items.len = i; break; } } test "removeTrailingBlankLines" { const cases = [_]Case{ .{ .in = "\n\n \r\t\n ", .out = "" }, .{ .in = "yep\nok\n\n ", .out = "yep\nok" }, .{ .in = "yep ", .out = "yep " }, }; var line = std.ArrayList(u8).init(std.testing.allocator); defer line.deinit(); for (cases) |case| { line.items.len = 0; try line.appendSlice(case.in); removeTrailingBlankLines(&line); try testing.expectEqualStrings(case.out, line.items); } } pub fn isPunct(char: u8) bool { return switch (char) { '!', '\"', '#', '$', '%', '&', '\'', '(', ')', '*', '+', ',', '-', '.', '/', ':', ';', '<', '=', '>', '?', '@', '[', '\\', ']', '^', '_', '`', '{', '|', '}', '~' => true, else => false, }; } fn encodeUtf8Into(in_cp: u21, al: *std.ArrayList(u8)) !void { // utf8Encode throws: // - Utf8CannotEncodeSurrogateHalf, which we guard against that by // rewriting 0xd800..0xe0000 to 0xfffd. // - CodepointTooLarge, which we guard against by rewriting 0x110000+ // to 0xfffd. var cp = in_cp; if (cp == 0 or (cp >= 0xd800 and cp <= 0xdfff) or cp >= 0x110000) { cp = 0xFFFD; } var sequence = [4]u8{ 0, 0, 0, 0 }; const len = std.unicode.utf8Encode(cp, &sequence) catch unreachable; try al.appendSlice(sequence[0..len]); } const ENTITY_MIN_LENGTH: u8 = 2; const ENTITY_MAX_LENGTH: u8 = 32; pub fn unescapeInto(text: []const u8, out: *std.ArrayList(u8)) !?usize { if (text.len >= 3 and text[0] == '#') { var codepoint: u32 = 0; var i: usize = 0; const num_digits = block: { if (ascii.isDigit(text[1])) { i = 1; while (i < text.len and ascii.isDigit(text[i])) { codepoint = (codepoint * 10) + (@as(u32, text[i]) - '0'); codepoint = @min(codepoint, 0x11_0000); i += 1; } break :block i - 1; } else if (text[1] == 'x' or text[1] == 'X') { i = 2; while (i < text.len and ascii.isHex(text[i])) { codepoint = (codepoint * 16) + (@as(u32, text[i]) | 32) % 39 - 9; codepoint = @min(codepoint, 0x11_0000); i += 1; } break :block i - 2; } break :block 0; }; if (num_digits >= 1 and num_digits <= 8 and i < text.len and text[i] == ';') { try encodeUtf8Into(@truncate(codepoint), out); return i + 1; } } const size = @min(text.len, ENTITY_MAX_LENGTH); var i = ENTITY_MIN_LENGTH; while (i < size) : (i += 1) { if (text[i] == ' ') return null; if (text[i] == ';') { var key = [_]u8{'&'} ++ [_]u8{';'} ** (ENTITY_MAX_LENGTH + 1); @memcpy(key[1 .. i + 1], text[0..i]); if (htmlentities.lookup(key[0 .. i + 2])) |item| { try out.appendSlice(item.characters); return i + 1; } } } return null; } fn unescapeHtmlInto(html: []const u8, out: *std.ArrayList(u8)) !void { const size = html.len; var i: usize = 0; while (i < size) { const org = i; while (i < size and html[i] != '&') : (i += 1) {} if (i > org) { if (org == 0 and i >= size) { try out.appendSlice(html); return; } try out.appendSlice(html[org..i]); } if (i >= size) return; i += 1; if (try unescapeInto(html[i..], out)) |unescaped_size| { i += unescaped_size; } else { try out.append('&'); } } } pub fn unescapeHtml(allocator: mem.Allocator, html: []const u8) ![]u8 { var al = std.ArrayList(u8).init(allocator); errdefer al.deinit(); try unescapeHtmlInto(html, &al); return al.toOwnedSlice(); } test "unescapeHtml" { try testCases(unescapeHtml, &[_]Case{ .{ .in = "&#116;&#101;&#115;&#116;", .out = "test" }, .{ .in = "&#12486;&#12473;&#12488;", .out = "テスト" }, .{ .in = "&#x74;&#x65;&#X73;&#X74;", .out = "test" }, .{ .in = "&#x30c6;&#x30b9;&#X30c8;", .out = "テスト" }, // "Although HTML5 does accept some entity references without a trailing semicolon // (such as &copy), these are not recognized here, because it makes the grammar too // ambiguous:" .{ .in = "&hellip;&eacute&Eacute;&rrarr;&oS;", .out = "…&eacuteÉ⇉Ⓢ" }, }); } pub fn cleanAutolink(allocator: mem.Allocator, url: []const u8, kind: nodes.AutolinkType) ![]u8 { const trimmed = trim(url); if (trimmed.len == 0) return &[_]u8{}; var buf = try std.ArrayList(u8).initCapacity(allocator, trimmed.len); errdefer buf.deinit(); if (kind == .Email) try buf.appendSlice("mailto:"); try unescapeHtmlInto(trimmed, &buf); return buf.toOwnedSlice(); } test "cleanAutolink" { const email = try cleanAutolink(std.testing.allocator, " hello&#x40;world.example ", .Email); defer std.testing.allocator.free(email); try testing.expectEqualStrings("mailto:[email protected]", email); const uri = try cleanAutolink(std.testing.allocator, " www&#46;com ", .URI); defer std.testing.allocator.free(uri); try testing.expectEqualStrings("www.com", uri); } fn unescape(allocator: mem.Allocator, s: []const u8) ![]u8 { var buffer = try std.ArrayList(u8).initCapacity(allocator, s.len); errdefer buffer.deinit(); var r: usize = 0; while (r < s.len) : (r += 1) { if (s[r] == '\\' and r + 1 < s.len and isPunct(s[r + 1])) r += 1; try buffer.append(s[r]); } return buffer.toOwnedSlice(); } pub fn cleanUrl(allocator: mem.Allocator, url: []const u8) ![]u8 { const trimmed = trim(url); if (trimmed.len == 0) return &[_]u8{}; const b = try unescapeHtml(allocator, trimmed); defer allocator.free(b); return unescape(allocator, b); } test "cleanUrl" { const url = try cleanUrl(std.testing.allocator, " \\(hello\\)&#x40;world "); defer std.testing.allocator.free(url); try testing.expectEqualStrings("(hello)@world", url); } pub fn cleanTitle(allocator: mem.Allocator, title: []const u8) ![]u8 { if (title.len == 0) return &[_]u8{}; const first = title[0]; const last = title[title.len - 1]; const b = if ((first == '\'' and last == '\'') or (first == '(' and last == ')') or (first == '"' and last == '"')) try unescapeHtml(allocator, title[1 .. title.len - 1]) else try unescapeHtml(allocator, title); defer allocator.free(b); return unescape(allocator, b); } test "cleanTitle" { try testCases(cleanTitle, &[_]Case{ .{ .in = "\\'title", .out = "'title" }, .{ .in = "'title'", .out = "title" }, .{ .in = "(&#x74;&#x65;&#X73;&#X74;)", .out = "test" }, .{ .in = "\"&#x30c6;&#x30b9;&#X30c8;\"", .out = "テスト" }, .{ .in = "'&hellip;&eacute&Eacute;&rrarr;&oS;'", .out = "…&eacuteÉ⇉Ⓢ" }, }); } pub fn normalizeLabel(allocator: mem.Allocator, s: []const u8) ![]u8 { const trimmed = trim(s); var buffer = try std.ArrayList(u8).initCapacity(allocator, trimmed.len); errdefer buffer.deinit(); var last_was_whitespace = false; var view = std.unicode.Utf8View.initUnchecked(trimmed); var it = view.iterator(); while (it.nextCodepoint()) |cp| { const rune: i32 = @intCast(cp); if (zunicode.isSpace(rune)) { if (!last_was_whitespace) { last_was_whitespace = true; try buffer.append(' '); } } else { last_was_whitespace = false; const lower = zunicode.toLower(rune); try encodeUtf8Into(@intCast(lower), &buffer); } } return buffer.toOwnedSlice(); } test "normalizeLabel" { try testCases(normalizeLabel, &[_]Case{ .{ .in = "Hello", .out = "hello" }, .{ .in = " Y E S ", .out = "y e s" }, .{ .in = "yÉs", .out = "yés" }, }); } pub fn toLower(allocator: mem.Allocator, s: []const u8) ![]u8 { var buffer = try std.ArrayList(u8).initCapacity(allocator, s.len); errdefer buffer.deinit(); var view = try std.unicode.Utf8View.init(s); var it = view.iterator(); while (it.nextCodepoint()) |cp| { const rune: i32 = @intCast(cp); const lower = zunicode.toLower(rune); try encodeUtf8Into(@intCast(lower), &buffer); } return buffer.toOwnedSlice(); } test "toLower" { try testCases(toLower, &[_]Case{ .{ .in = "Hello", .out = "hello" }, .{ .in = "ΑαΒβΓγΔδΕεΖζΗηΘθΙιΚκΛλΜμ", .out = "ααββγγδδεεζζηηθθιικκλλμμ" }, .{ .in = "АаБбВвГгДдЕеЁёЖжЗзИиЙйКкЛлМмНнОоПпРрСсТтУуФфХхЦцЧчШшЩщЪъЫыЬьЭэЮюЯя", .out = "ааббввггддееёёжжззииййккллммннооппррссттууффххццччшшщщъъыыььээююяя" }, }); } pub fn createMap(chars: []const u8) [256]bool { var arr = [_]bool{false} ** 256; for (chars) |c| { arr[c] = true; } return arr; } test "createMap" { comptime { const m = createMap("abcxyz"); try testing.expect(m['a']); try testing.expect(m['b']); try testing.expect(m['c']); try testing.expect(!m['d']); try testing.expect(!m['e']); try testing.expect(!m['f']); try testing.expect(m['x']); try testing.expect(!m[0]); } }
0
repos/koino
repos/koino/src/nodes.zig
const std = @import("std"); const mem = std.mem; const ast = @import("ast.zig"); pub const Node = struct { value: NodeValue, start_line: u32 = 0, content: std.ArrayList(u8), open: bool = true, last_line_blank: bool = false, pub fn deinit(self: *Node, allocator: mem.Allocator) void { self.content.deinit(); self.value.deinit(allocator); } }; pub const AstNode = ast.Ast(Node); pub const NodeValue = union(enum) { Document, BlockQuote, List: NodeList, Item: NodeList, // DescriptionList // DescriptionItem // DescriptionTerm // DescriptionDetails CodeBlock: NodeCodeBlock, HtmlBlock: NodeHtmlBlock, Paragraph, Heading: NodeHeading, ThematicBreak, // FootnoteDefinition Table: []TableAlignment, TableRow: TableHeader, TableCell, Text: []u8, // TaskItem SoftBreak, LineBreak, Code: []u8, HtmlInline: []u8, Emph, Strong, Strikethrough, Link: NodeLink, Image: NodeLink, // FootnoteReference pub fn deinit(self: *NodeValue, allocator: mem.Allocator) void { switch (self.*) { .Text, .HtmlInline, .Code => |content| { allocator.free(content); }, .CodeBlock => |ncb| { if (ncb.info) |info| { allocator.free(info); } ncb.literal.deinit(); }, .HtmlBlock => |nhb| { nhb.literal.deinit(); }, .Table => |aligns| { allocator.free(aligns); }, .Link, .Image => |nl| { allocator.free(nl.title); allocator.free(nl.url); }, else => {}, } } pub fn acceptsLines(self: NodeValue) bool { return switch (self) { .Paragraph, .Heading, .CodeBlock => true, else => false, }; } pub fn canContainType(self: NodeValue, child: NodeValue) bool { if (child == .Document) { return false; } return switch (self) { .Document, .BlockQuote, .Item => child.block() and switch (child) { .Item => false, else => true, }, .List => switch (child) { .Item => true, else => false, }, .Paragraph, .Heading, .Emph, .Strong, .Link, .Image => !child.block(), .Table => switch (child) { .TableRow => true, else => false, }, .TableRow => switch (child) { .TableCell => true, else => false, }, .TableCell => switch (child) { .Text, .Code, .Emph, .Strong, .Link, .Image, .Strikethrough, .HtmlInline => true, else => false, }, else => false, }; } pub fn containsInlines(self: NodeValue) bool { return switch (self) { .Paragraph, .Heading, .TableCell => true, else => false, }; } pub fn block(self: NodeValue) bool { return switch (self) { .Document, .BlockQuote, .List, .Item, .CodeBlock, .HtmlBlock, .Paragraph, .Heading, .ThematicBreak, .Table, .TableRow, .TableCell => true, else => false, }; } pub fn text(self: NodeValue) ?[]const u8 { return switch (self) { .Text => |t| t, else => null, }; } pub fn text_mut(self: *NodeValue) ?*[]u8 { return switch (self.*) { .Text => |*t| t, else => null, }; } }; pub const NodeLink = struct { url: []u8, title: []u8, }; pub const ListType = enum { Bullet, Ordered, }; pub const ListDelimType = enum { Period, Paren, }; pub const NodeList = struct { list_type: ListType, marker_offset: usize, padding: usize, start: usize, delimiter: ListDelimType, bullet_char: u8, tight: bool, }; pub const NodeHtmlBlock = struct { block_type: u8, literal: std.ArrayList(u8), }; pub const NodeCodeBlock = struct { fenced: bool, fence_char: u8, fence_length: usize, fence_offset: usize, info: ?[]u8, literal: std.ArrayList(u8), }; pub const NodeHeading = struct { level: u8 = 0, setext: bool = false, }; pub const AutolinkType = enum { URI, Email, }; pub const TableAlignment = enum { None, Left, Center, Right, }; pub const TableHeader = enum { Header, Body, };
0
repos
repos/routez/test.zig
comptime { _ = @import("src/routez.zig"); }
0
repos
repos/routez/README.md
# Routez HTTP server for Zig. ## Example ### [basic](examples/basic.zig) Run with `zig build basic` ```Zig const std = @import("std"); const Address = std.net.Address; usingnamespace @import("routez"); const allocator = std.heap.page_allocator; pub const io_mode = .evented; pub fn main() !void { var server = Server.init( allocator, .{}, .{ all("/", indexHandler), get("/about", aboutHandler), get("/about/more", aboutHandler2), get("/post/{post_num}/?", postHandler), static("./", "/static"), all("/counter", counterHandler), }, ); var addr = try Address.parseIp("127.0.0.1", 8080); try server.listen(addr); } fn indexHandler(req: Request, res: Response) !void { try res.sendFile("examples/index.html"); } fn aboutHandler(req: Request, res: Response) !void { try res.write("Hello from about\n"); } fn aboutHandler2(req: Request, res: Response) !void { try res.write("Hello from about2\n"); } fn postHandler(req: Request, res: Response, args: *const struct { post_num: []const u8, }) !void { try res.print("Hello from post, post_num is {}\n", args.post_num); } var counter = std.atomic.Int(usize).init(0); fn counterHandler(req: Request, res: Response) !void { try res.print("Page loaded {} times\n", counter.fetchAdd(1)); } ```
0
repos
repos/routez/build.zig
const std = @import("std"); const Builder = std.build.Builder; pub fn build(b: *Builder) void { const mode = b.standardReleaseOptions(); const tests = b.addTest("test.zig"); tests.setBuildMode(mode); tests.addPackagePath("zuri", "zuri/src/zuri.zig"); const test_step = b.step("test", "Run library tests"); test_step.dependOn(&tests.step); var basic = b.addExecutable("basic", "examples/basic.zig"); basic.setBuildMode(mode); basic.addPackage(.{ .name = "routez", .source = .{ .path = "src/routez.zig" }, .dependencies = &[_]std.build.Pkg{.{ .name = "zuri", .source = .{ .path = "zuri/src/zuri.zig" }, }}, }); basic.setOutputDir("zig-cache"); basic.install(); const basic_step = b.step("basic", "Basic example"); basic_step.dependOn(&basic.run().step); }
0
repos/routez
repos/routez/assets/example-file.txt
Some text
0
repos/routez
repos/routez/src/routez.zig
pub usingnamespace @import("routez/http.zig"); const s = @import("routez/server.zig"); pub const Server = s.Server; pub const mime = @import("routez/mime.zig"); const r = @import("routez/router.zig"); pub const ErrorHandler = r.ErrorHandler; pub const Route = r.Route; pub usingnamespace @import("routez/routes.zig"); test "routez" { _ = @import("routez/http.zig"); _ = @import("routez/mime.zig"); _ = @import("routez/router.zig"); _ = @import("routez/routes.zig"); _ = @import("routez/server.zig"); }
0
repos/routez/src
repos/routez/src/routez/server.zig
const std = @import("std"); const mem = std.mem; const Allocator = mem.Allocator; const ArenaAllocator = std.heap.ArenaAllocator; const StreamServer = std.net.StreamServer; const Address = std.net.Address; const File = std.fs.File; const builtin = @import("builtin"); const request = @import("http/request.zig"); const response = @import("http/response.zig"); const parser = @import("http/parser.zig"); const http = @import("http.zig"); const router = @import("router.zig"); const Version = @import("http/common.zig").Version; const Method = http.Method; const Request = http.Request; const Response = http.Response; const Headers = http.Headers; const Router = router.Router; const HandlerFn = router.HandlerFn; pub const Server = struct { server: StreamServer, handler: HandlerFn, allocator: Allocator, config: Config, discards: DiscardStack, const DiscardStack = std.atomic.Stack(*Context); pub const Config = struct { keepalive_time: u64 = 5000, max_request_size: u32 = 1024 * 1024, stack_size: usize = 4 * 1024 * 1024, }; pub const Context = struct { stack: []align(16) u8, buf: []u8, index: usize = 0, count: usize = 0, writer: std.io.BufferedWriter(4096, std.net.Stream.Writer), server: *Server, stream: std.net.Stream, frame: @Frame(handleRequest), node: DiscardStack.Node, pub fn init(server: *Server, stream: std.net.Stream) !*Context { var ctx = try server.allocator.create(Context); errdefer server.allocator.destroy(ctx); var stack = try server.allocator.alignedAlloc(u8, 16, server.config.stack_size); errdefer server.allocator.free(stack); var buf = try server.allocator.alloc(u8, server.config.max_request_size); errdefer server.allocator.free(buf); ctx.* = .{ .stack = stack, .buf = buf, .writer = std.io.bufferedWriter(stream.writer()), .server = server, .stream = stream, .frame = undefined, .node = .{ .next = null, .data = ctx, }, }; return ctx; } pub fn deinit(context: *Context) void { context.stream.close(); context.server.allocator.free(context.stack); context.server.allocator.free(context.buf); } pub fn read(context: *Context) !void { context.index = 0; context.count = try context.stream.read(context.buf); } }; const Upgrade = enum { webSocket, http2, none, }; pub fn init(allocator: Allocator, config: Config, handlers: anytype) Server { return .{ .server = StreamServer.init(.{}), .handler = Router(handlers), .allocator = allocator, .config = config, .discards = DiscardStack.init(), }; } pub const ListenError = error{ AddressInUse, AddressNotAvailable, ListenError, AcceptError, BlockedByFirewall, }; pub fn listen(server: *Server, address: Address) ListenError!void { defer server.server.deinit(); server.server.listen(address) catch |err| switch (err) { error.AddressInUse, error.AddressNotAvailable, => |e| return e, else => return error.ListenError, }; while (true) { var conn = server.server.accept() catch |err| switch (err) { error.ConnectionAborted, error.ProcessFdQuotaExceeded, error.SystemFdQuotaExceeded, error.SystemResources, error.ProtocolFailure, error.Unexpected, error.ConnectionResetByPeer, error.NetworkSubsystemFailed, => continue, error.BlockedByFirewall => |e| return e, error.FileDescriptorNotASocket, error.SocketNotListening, error.OperationNotSupported, => return error.ListenError, }; var context = Context.init(server, conn.stream) catch { conn.stream.close(); continue; }; context.frame = async handleRequest(context); while (server.discards.pop()) |c| { c.data.deinit(); server.allocator.destroy(c.data); } } } fn handleRequest(context: *Context) callconv(.Async) void { defer context.server.discards.push(&context.node); const up = handleHttp(context) catch |e| { std.debug.print("error in http handler: {}\n", .{e}); return; }; switch (up) { .webSocket => { // handleWs(self, socket.handle) catch |e| {}; }, .http2 => {}, .none => {}, } } fn handleHttp(ctx: *Context) callconv(.Async) !Upgrade { var buf = std.ArrayList(u8).init(ctx.server.allocator); defer buf.deinit(); // for use in headers and allocations in handlers var arena = ArenaAllocator.init(ctx.server.allocator); defer arena.deinit(); const alloc = arena.allocator(); while (true) { var req = request.Request{ .method = "", .headers = Headers.init(alloc), .path = "", .query = "", .body = "", .version = .Http11, }; var res = response.Response{ .status_code = null, .headers = Headers.init(alloc), .body = buf.writer(), .allocator = alloc, }; try ctx.read(); if (ctx.count == 0) { return .none; } if (parser.parse(&req, ctx)) { var frame = @asyncCall(ctx.stack, {}, ctx.server.handler, .{ &req, &res, req.path }); await frame catch |e| { try defaultErrorHandler(e, &req, &res); }; } else |e| { try defaultErrorHandler(e, &req, &res); try writeResponse(ctx.server, ctx.writer.writer(), &req, &res); try ctx.writer.flush(); return .none; } try writeResponse(ctx.server, ctx.writer.writer(), &req, &res); try ctx.writer.flush(); // reset for next request arena.deinit(); arena = ArenaAllocator.init(ctx.server.allocator); buf.resize(0) catch unreachable; } return .none; } fn writeResponse(_: *Server, writer: anytype, req: Request, res: Response) !void { const body = res.body.context.items; const is_head = mem.eql(u8, req.method, Method.Head); try writer.print("{s} {} {s}\r\n", .{ req.version.toString(), @enumToInt(res.status_code.?), res.status_code.?.toString() }); for (res.headers.list.items) |header| { try writer.print("{s}: {s}\r\n", .{ header.name, header.value }); } const keep_alive = switch (req.version) { Version.Http09 => false, Version.Http10 => req.headers.hasTokenIgnoreCase("connection", "keep-alive"), else => !req.headers.hasTokenIgnoreCase("connection", "close"), }; if (keep_alive) { try writer.writeAll("connection: keep-alive\r\n"); } else { try writer.writeAll("connection: close\r\n"); } if (is_head) { try writer.writeAll("content-length: 0\r\n\r\n"); } else { try writer.print("content-length: {}\r\n\r\n", .{body.len}); } if (!is_head) { try writer.writeAll(body); } } fn defaultErrorHandler(err: anyerror, req: Request, res: Response) !void { switch (err) { error.FileNotFound => { res.status_code = .NotFound; try res.print( \\<!DOCTYPE html> \\<html> \\<head> \\ <title>404 - Not Found</title> \\</head> \\<body> \\ <h1>Not Found</h1> \\ <p>Requested URL {s} was not found.</p> \\</body> \\</html> , .{req.path}); }, else => { if (builtin.mode == .Debug) { res.status_code = .InternalServerError; try res.print( \\<!DOCTYPE html> \\<html> \\<head> \\ <title>500 - Internal Server Error</title> \\</head> \\<body> \\ <h1>Internal Server Error</h1> \\ <p>Debug info - Error: {s}</p> \\</body> \\</html> , .{@errorName(err)}); } else { res.status_code = .InternalServerError; try res.write( \\<!DOCTYPE html> \\<html> \\<head> \\ <title>500 - Internal Server Error</title> \\</head> \\<body> \\ <h1>Internal Server Error</h1> \\</body> \\</html> ); } }, } } };
0
repos/routez/src
repos/routez/src/routez/routes.zig
const std = @import("std"); const expect = std.testing.expect; const Route = @import("router.zig").Route; const router = @import("router.zig"); const http = @import("http.zig"); const Router = router.Router; const HandlerFn = router.HandlerFn; const Request = http.Request; const Response = http.Response; const Headers = http.Headers; const Method = http.Method; pub fn all(path: []const u8, handler: anytype) Route { return createRoute(null, path, handler); } pub fn get(path: []const u8, handler: anytype) Route { return createRoute(Method.Get, path, handler); } pub fn head(path: []const u8, handler: anytype) Route { return createRoute(Method.Head, path, handler); } pub fn post(path: []const u8, handler: anytype) Route { return createRoute(Method.Post, path, handler); } pub fn put(path: []const u8, handler: anytype) Route { return createRoute(Method.Put, path, handler); } pub fn delete(path: []const u8, handler: anytype) Route { return createRoute(Method.Delete, path, handler); } pub fn connect(path: []const u8, handler: anytype) Route { return createRoute(Method.Connect, path, handler); } pub fn options(path: []const u8, handler: anytype) Route { return createRoute(Method.Options, path, handler); } pub fn trace(path: []const u8, handler: anytype) Route { return createRoute(Method.Trace, path, handler); } pub fn patch(path: []const u8, handler: anytype) Route { return createRoute(Method.Patch, path, handler); } pub fn custom(method: []const u8, path: []const u8, handler: anytype) Route { return createRoute(method, path, handler); } /// add route with given method fn createRoute(method: ?[]const u8, path: []const u8, handler: anytype) Route { const t = @typeInfo(@TypeOf(handler)); if (t != .Fn) { @compileError("handler must be a function"); } const f = t.Fn; if (f.args.len != 2 and f.args.len != 3) { @compileError("handler must take 2 or 3 arguments"); } if (f.args[0].arg_type orelse void != Request) { @compileError("first argument of a handler must be a HTTP Request"); } if (f.args[1].arg_type orelse void != Response) { @compileError("second argument of a handler must be a HTTP Response"); } if (f.args.len == 3) { const arg_type = f.args[2].arg_type orelse void; if (@typeInfo(arg_type) != .Pointer or blk: { const ptr = @typeInfo(arg_type).Pointer; break :blk !ptr.is_const or ptr.size != .One or @typeInfo(ptr.child) != .Struct; }) { @compileError("third argument of a handler must be a const pointer to a struct containing all path arguments it takes"); } } const ret = f.return_type.?; if (ret != void and (@typeInfo(ret) != .ErrorUnion or @typeInfo(ret).ErrorUnion.payload != void)) { @compileError("handler must return void which may be in an error union"); } return Route{ .path = path, .method = method, .handler = @ptrCast(fn () void, handler), .handler_ty = @TypeOf(handler), }; } pub fn subRoute(route: []const u8, handlers: anytype) Route { const h = Router(handlers); const handler = struct { fn handle(req: Request, res: Response, args: *const struct { path: []const u8, }) !void { return h(req, res, args.path); } }.handle; const path = (if (route[route.len - 1] == '/') route[0 .. route.len - 2] else route) ++ "{path;}"; return createRoute(Method.Get, path, handler); } // todo static cofig // todo uri decode path pub fn static(local_path: []const u8, remote_path: ?[]const u8) Route { const handler = struct { fn staticHandler(_: Request, res: Response, args: *const struct { path: []const u8, }) !void { const allocator = res.allocator; const path = if (local_path[local_path.len - 1] == '/') local_path else local_path ++ "/"; const full_path = try std.fs.path.join(allocator, &[_][]const u8{ path, args.path }); try res.sendFile(full_path); } }.staticHandler; var path = if (remote_path) |r| if (r[r.len - 1] == '/') r ++ "{path;}" else r ++ "/{path;}" else "/{path;}"; return createRoute(Method.Get, path, handler); } // for tests const request = @import("http/request.zig").Request; const response = @import("http/response.zig").Response; const alloc = std.heap.page_allocator; test "index" { const handler = comptime Router(.{get("/", indexHandler)}); var req = request{ .method = Method.Get, .headers = undefined, .path = "/", .query = undefined, .body = undefined, .version = .Http11, }; var res: response = undefined; try nosuspend handler(&req, &res, req.path); try expect(res.status_code.? == .Ok); } fn indexHandler(_: Request, res: Response) void { res.status_code = .Ok; } test "custom status code" { const handler = comptime Router(.{get("/", customStatusCode)}); var req = request{ .method = Method.Get, .headers = undefined, .path = "/", .query = undefined, .body = undefined, .version = .Http11, }; var res: response = undefined; try nosuspend handler(&req, &res, req.path); try expect(res.status_code.? == .BadRequest); } fn customStatusCode(_: Request, res: Response) void { res.status_code = .BadRequest; } test "args" { const handler = comptime Router(.{get("/a/{num}", argHandler)}); var req = request{ .method = Method.Get, .headers = undefined, .path = "/a/14", .query = undefined, .body = undefined, .version = .Http11, }; var res: response = undefined; try nosuspend handler(&req, &res, req.path); } fn argHandler(_: Request, _: Response, args: *const struct { num: u32, }) !void { try expect(args.num == 14); } test "delim string" { const handler = comptime Router(.{get("/{str;}", delimHandler)}); var req = request{ .method = Method.Get, .headers = undefined, .path = "/all/of/this.html", .query = undefined, .body = undefined, .version = .Http11, }; var res: response = undefined; try nosuspend handler(&req, &res, req.path); } fn delimHandler(_: Request, _: Response, args: *const struct { str: []const u8, }) !void { try expect(std.mem.eql(u8, args.str, "all/of/this.html")); } test "subRoute" { const handler = comptime Router(.{subRoute("/sub", .{get("/other", indexHandler)})}); var req = request{ .method = Method.Get, .path = "/sub/other", .query = undefined, .body = undefined, .version = .Http11, .headers = undefined, }; var res: response = undefined; try nosuspend handler(&req, &res, req.path); try expect(res.status_code.? == .Ok); } test "static files" { const handler = comptime Router(.{static( "assets", "/static", )}); var req = request{ .method = Method.Get, .path = "/static/example-file.txt", .query = undefined, .body = undefined, .version = .Http11, .headers = undefined, }; var buf = std.ArrayList(u8).init(alloc); defer buf.deinit(); var res = response{ .status_code = .Processing, .headers = Headers.init(alloc), .body = .{ .context = &buf }, .allocator = alloc, }; // ignore file not found error nosuspend handler(&req, &res, req.path) catch |e| switch (e) { error.FileNotFound => return, else => return e, }; try expect(std.mem.eql(u8, (try res.headers.get(alloc, "content-type")).?[0].value, "text/plain;charset=UTF-8")); try expect(std.mem.eql(u8, res.body.context.items, "Some text\n")); } test "optional char" { const handler = comptime Router(.{get("/about/?", indexHandler)}); var req = request{ .method = Method.Get, .headers = undefined, .path = "/about", .query = undefined, .body = undefined, .version = .Http11, }; var res: response = undefined; try nosuspend handler(&req, &res, req.path); try expect(res.status_code.? == .Ok); }
0
repos/routez/src
repos/routez/src/routez/mime.zig
const std = @import("std"); const mem = std.mem; const hashString = std.hash_map.hashString; /// Maps extensions to their mimetypes pub const map = std.ComptimeStringMap([]const u8, .{ .{ "js", js }, .{ "json", json }, .{ "css", css }, .{ "html", html }, .{ "png", png }, .{ "jpeg", jpeg }, .{ "gif", gif }, .{ "webp", webp }, .{ "svg", svg }, .{ "ico", icon }, .{ "txt", text }, .{ "wav", wav }, .{ "ogg", ogg }, .{ "webm", webm }, .{ "zig", text }, }); pub const js = "application/javascript;charset=UTF-8"; pub const css = "text/css;charset=UTF-8"; pub const html = "text/html;charset=UTF-8"; pub const json = "application/json"; pub const png = "image/png"; pub const jpeg = "image/jpeg"; pub const gif = "image/gif"; pub const webp = "image/webp"; pub const svg = "image/svg+xml;charset=UTF-8"; pub const icon = "image/x-icon"; pub const text = "text/plain;charset=UTF-8"; pub const wav = "audio/wav"; pub const ogg = "audio/ogg"; pub const webm = "video/webm"; pub const default = "application/octet-stream";
0
repos/routez/src
repos/routez/src/routez/router.zig
const std = @import("std"); const Allocator = std.mem.Allocator; const mem = std.mem; const math = std.math; const assert = std.debug.assert; const meta = std.meta; const http = @import("http.zig"); const Request = http.Request; const Response = http.Response; pub const HandlerFn = fn handle(Request, Response, []const u8) callconv(.Async) anyerror!void; pub const ErrorHandler = struct { handler: fn (Request, Response) void, err: anyerror, }; pub fn Router(comptime handlers: anytype) HandlerFn { comptime var routes: []const Route = &[_]Route{}; comptime var err_handlers: []const ErrorHandler = &[_]ErrorHandler{}; inline for (handlers) |handler| { switch (@TypeOf(handler)) { ErrorHandler => { err_handlers = (err_handlers ++ &[_]ErrorHandler{handler}); }, Route => { routes = (routes ++ &[_]Route{handler}); }, else => |f_type| @compileError("unsupported handler type " ++ @typeName(f_type)), } } if (routes.len == 0) { @compileError("Router must have at least one route"); } return struct { fn handle(req: Request, res: Response, path: []const u8) callconv(.Async) !void { if (req.path[0] == '*') { @panic("Todo server request"); } inline for (routes) |route| { comptime var type_info = @typeInfo(route.handler_ty).Fn; comptime var err: ?type = switch (@typeInfo(type_info.return_type.?)) { .ErrorUnion => @typeInfo(type_info.return_type.?).ErrorUnion.error_set, else => null, }; // try matching path to route if (err == null) { if (match(route, err, req, res, path)) { if (res.status_code == null) res.status_code = .Ok; return; } } else { if (match(route, err, req, res, path) catch |e| { if (err_handlers.len == 0) { return e; } else { return handleError(e, req, res); } }) { if (res.status_code == null) res.status_code = .Ok; return; } } } // not found return if (err_handlers.len == 0) error.FileNotFound else handleError(error.FileNotFound, req, res); } fn handleError(err: anyerror, req: Request, res: Response) !void { inline for (err_handlers) |e| { if (err == e.err) { return e.handler(req, res); } } return err; } }.handle; } pub const Route = struct { path: []const u8, method: ?[]const u8, handler: fn () void, handler_ty: type, }; /// returns true if request matched route pub fn match( comptime route: Route, comptime Errs: ?type, req: Request, res: Response, path: []const u8, ) if (Errs != null) Errs.?!bool else bool { // TODO this can be improved const handler = @ptrCast(route.handler_ty, route.handler); const has_args = @typeInfo(route.handler_ty).Fn.args.len == 3; const Args = if (has_args) @typeInfo(@typeInfo(route.handler_ty).Fn.args[2].arg_type.?).Pointer.child else void; var args: Args = undefined; comptime var used: if (has_args) [@typeInfo(Args).Struct.fields.len]bool else void = undefined; if (has_args) { comptime mem.set(bool, &used, false); } const State = enum { Start, Path, AmperStart, AmperFirst, Format, }; comptime var state = State.Start; comptime var index = 0; comptime var begin = 0; comptime var fmt_begin = 0; // worst-case scenario every byte in route needs to be percentage encoded comptime var pathbuf: [route.path.len * 3]u8 = undefined; comptime var optional = false; var path_index: usize = 0; var len: usize = undefined; inline for (route.path) |c, i| { switch (state) { .Start => comptime switch (c) { '/' => { pathbuf[index] = '/'; state = .Path; index += 1; }, '*' => { state = .Path; break; }, else => @compileError("route must begin with a '/'"), }, .Path => switch (c) { '?' => { if (!optional) { @compileError("previous character is not optional"); } else { optional = false; index -= 1; const r = pathbuf[begin..index]; begin = index; if (path.len < r.len or !mem.eql(u8, r, path[path_index .. path_index + r.len])) { return false; } path_index += r.len; if (path.len > path_index and path[path_index] == pathbuf[begin]) { path_index += 1; } } }, 'a'...'z', 'A'...'Z', '0'...'9', '-', '.', '_', '~', '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=', ':', '@', '%', '/' => comptime { pathbuf[index] = c; index += 1; if (c == '%') { state = .AmperStart; } optional = true; }, '{' => { if (!has_args) { @compileError("handler does not take path arguments"); } optional = false; state = .Format; fmt_begin = i + 1; const r = pathbuf[begin..index]; begin = index; if (path.len < r.len or !mem.eql(u8, r, path[path_index .. path_index + r.len])) { return false; } path_index += r.len; }, else => comptime { const hex_digits = "0123456789ABCDEF"; pathbuf[index] = '%'; pathbuf[index + 1] = hex_digits[(c & 0xF0) >> 4]; pathbuf[index + 2] = hex_digits[c & 0x0F]; index += 3; optional = true; }, }, .AmperStart, .AmperFirst => comptime switch (c) { '0'...'9', 'a'...'f', 'A'...'F' => { pathbuf[index] = c; index += 1; if (state == .AmperStart) { state = .AmperFirst; } else { state = .Path; } }, else => @compileError("'%' must be followed by two hexadecimal digits"), }, .Format => switch (c) { '}' => { comptime var radix = 10; comptime var number = true; comptime var field_name: []const u8 = undefined; comptime var field_type: type = undefined; comptime var delim: []const u8 = "/."; comptime { const Fstate = enum { Name, Radix, Done, Fmt, }; _ = Fstate; var fstate = .Name; var fmt = route.path[fmt_begin..i]; if (fmt.len == 0) { @compileError("path argument's name must at least one character"); } for (fmt) |fc, fi| { switch (fstate) { .Name => switch (fc) { ';' => { if (fi == 0) { @compileError("path argument's name must at least one character"); } field_name = fmt[0..fi]; canUse(Args, field_name, &used); field_type = @TypeOf(@field(args, field_name)); verifyField(field_type, &number); if (number) { fstate = .Fmt; } else { delim = fmt[fi + 1 ..]; fstate = .Done; break; } }, else => {}, }, .Radix => switch (fc) { '0'...'9' => { radix *= 10; radix += fc - '0'; }, else => @compileError("radix must be a number"), }, .Fmt => switch (fc) { 'r', 'R' => { radix = 0; fstate = .Radix; }, 'x', 'X' => { radix = 16; fstate = .Done; }, else => @compileError("invalid format character"), }, .Done => @compileError("unexpected character after format '" ++ fmt[fi .. fi + 1] ++ "'"), else => unreachable, } } if (fstate == .Name) { field_name = fmt[0..]; canUse(Args, field_name, &used); field_type = @TypeOf(@field(args, field_name)); verifyField(field_type, &number); } if (radix < 2 or radix > 36) { @compileError("radix must be in range [2,36]"); } } len = 0; if (number) { @field(args, field_name) = getNum(field_type, path[path_index..], radix, &len); } else { if (delim.len != 0) { @field(args, field_name) = getString(path[path_index..], delim, &len); } else { @field(args, field_name) = path[path_index..]; len += path[path_index..].len; } } // route is incorrect if the argument given is zero sized if (len == 0) { return false; } path_index += len; state = .Path; }, else => {}, }, } } if (state != .Path) { @compileError("Invalid route"); } comptime if (has_args) { for (used) |u, i| { if (!u) { @compileError("handler argument '" ++ @typeInfo(Args).Struct.fields[i].name ++ "' is not given in the path"); } } }; const r = pathbuf[begin..index]; if (route.path[0] != '*' and !mem.eql(u8, r, path[path_index..])) { return false; } if (route.method) |m| { if (!mem.eql(u8, req.method, m)) { res.status_code = .MethodNotAllowed; // routing was successful but method was not allowed return true; // todo return false and try to find a route with correct method } } if (has_args) { if (Errs != null) { try handler(req, res, &args); } else { handler(req, res, &args); } } else { if (Errs != null) { try handler(req, res); } else { handler(req, res); } } return true; } fn canUse(comptime Args: type, comptime field_name: []const u8, used: []bool) void { const index = meta.fieldIndex(Args, field_name) orelse { @compileError("handler does not take argument '" ++ field_name ++ "'"); }; if (used[index]) { @compileError("argument '" ++ field_name ++ "' already used"); } else { used[index] = true; } } fn verifyField(comptime field: type, number: *bool) void { number.* = @typeInfo(field) == .Int; if (!number.*) { assert(@typeInfo(field) == .Pointer); const ptr = @typeInfo(field).Pointer; assert(ptr.is_const and ptr.size == .Slice and ptr.child == u8); } } fn getNum(comptime T: type, path: []const u8, radix: u8, len: *usize) T { const signed = @typeInfo(T).Int.signedness == .signed; var sign = if (signed) false; var res: T = 0; for (path) |c, i| { if (signed and c == '-' and i == 1) { sign = true; } const value = switch (c) { '0'...'9' => c - '0', 'A'...'Z' => c - 'A' + 10, 'a'...'z' => c - 'a' + 10, else => break, }; if (value >= radix) break; if (res != 0) res = math.mul(T, res, @intCast(T, radix)) catch break; res = math.add(T, res, @intCast(T, value)) catch break; len.* += 1; } if (signed and sign) { res = -res; } return res; } fn getString(path: []const u8, delim: []const u8, len: *usize) []const u8 { for (path) |c, i| { var done = false; for (delim) |d| { done = done or c == d; } if (done) { len.* = i; return path[0..i]; } } len.* = path.len; return path; }
0
repos/routez/src
repos/routez/src/routez/http.zig
const req = @import("http/request.zig"); const res = @import("http/response.zig"); pub usingnamespace @import("http/common.zig"); pub usingnamespace @import("http/headers.zig"); pub const Request = *const req.Request; pub const Response = *res.Response; test "http" { _ = @import("http/headers.zig"); _ = @import("http/parser.zig"); _ = @import("http/request.zig"); _ = @import("http/response.zig"); }
0
repos/routez/src/routez
repos/routez/src/routez/http/response.zig
const std = @import("std"); const mime = @import("../mime.zig"); const Headers = @import("headers.zig").Headers; const StatusCode = @import("common.zig").StatusCode; pub const Response = struct { status_code: ?StatusCode, headers: Headers, body: std.ArrayList(u8).Writer, /// arena allocator that frees everything when response has been sent allocator: std.mem.Allocator, pub fn setType(res: *Response, mimetype: []const u8) Headers.Error!void { try res.headers.put("content-type", mimetype); } pub const SendFileError = error{ SystemError, AccessDenied, FileNotFound, } || WriteError; // todo improve, cache control pub fn sendFile(res: *Response, path: []const u8) SendFileError!void { var in_file = (std.fs.cwd().openFile(path, .{}) catch |err| switch (err) { error.AccessDenied, error.FileNotFound, => |e| return e, else => return error.SystemError, }); defer in_file.close(); const content = in_file.reader().readAllAlloc(res.allocator, 1024 * 1024) catch |err| switch (err) { error.OutOfMemory => |e| return e, else => return error.SystemError, }; defer res.allocator.free(content); try res.body.writeAll(content); var mimetype: []const u8 = mime.default; if (std.mem.lastIndexOfScalar(u8, path, '.')) |i| { if (mime.map.get(path[i + 1 ..])) |m| mimetype = m; } try res.setType(mimetype); } pub const WriteError = std.mem.Allocator.Error || Headers.Error; pub fn write(res: *Response, bytes: []const u8) WriteError!void { try res.setType(mime.html); try res.body.writeAll(bytes); } pub fn print(res: *Response, comptime format: []const u8, args: anytype) WriteError!void { try res.setType(mime.html); try res.body.print(format, args); } };
0
repos/routez/src/routez
repos/routez/src/routez/http/parser.zig
const std = @import("std"); const mem = std.mem; const zuri = @import("zuri"); const Uri = zuri.Uri; const Headers = @import("headers.zig").Headers; const Request = @import("request.zig").Request; const Version = @import("common.zig").Version; const Context = @import("../server.zig").Server.Context; const Method = @import("../http.zig").Method; const t = std.testing; pub fn parse(req: *Request, ctx: *Context) !void { var cur = ctx.index; // method if (!seek(ctx, ' ')) { return error.NoMethod; } req.method = ctx.buf[cur .. ctx.index - 1]; cur = ctx.index; // path if (!seek(ctx, ' ')) { return error.NoPath; } const uri = try Uri.parse(ctx.buf[cur .. ctx.index - 1], true); req.path = try Uri.resolvePath(req.headers.list.allocator, uri.path); req.query = uri.query; cur = ctx.index; // version if (!seek(ctx, '\r')) { return error.NoVersion; } req.version = try Version.fromString(ctx.buf[cur .. ctx.index - 1]); if (req.version == .Http30) { return error.UnsupportedVersion; } try expect(ctx, '\n'); // HTTP/0.9 allows request with no headers to end after "METHOD PATH HTTP/0.9\r\n" if (req.version == .Http09 and ctx.index == ctx.count) { return; } try parseHeaders(&req.headers, ctx); try expect(ctx, '\r'); try expect(ctx, '\n'); req.body = ctx.buf[ctx.index..ctx.count]; } fn parseHeaders(h: *Headers, ctx: *Context) !void { var name: []u8 = ""; var cur = ctx.index; while (ctx.buf[cur] != '\r') { if (!seek(ctx, ':')) { return error.NoName; } name = ctx.buf[cur .. ctx.index - 1]; cur = ctx.index; if (!seek(ctx, '\r')) { return error.NoValue; } try expect(ctx, '\n'); switch (ctx.buf[ctx.index]) { '\t', ' ' => { // obs-fold if (!seek(ctx, '\r')) { return error.InvalidObsFold; } try expect(ctx, '\n'); }, else => {}, } try h.put(name, ctx.buf[cur .. ctx.index - 2]); cur = ctx.index; } } // index is after first `c` fn seek(ctx: *Context, c: u8) bool { while (true) { if (ctx.index >= ctx.count) { return false; } else if (ctx.buf[ctx.index] == c) { ctx.index += 1; return true; } else { ctx.index += 1; } } } // index is after `c` fn expect(ctx: *Context, c: u8) !void { if (ctx.count < ctx.index + 1) { return error.UnexpectedEof; } if (ctx.buf[ctx.index] == c) { ctx.index += 1; } else { return error.InvalidChar; } } const alloc = std.heap.page_allocator; test "parse headers" { var b = try alloc.dupe(u8, "User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:67.0) Gecko/20100101 Firefox/67.0\r\n" ++ "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n" ++ "Accept-Language: en-US,en;q=0.5\r\n" ++ "Accept-Encoding: gzip, deflate\r\n" ++ "DNT: 1\r\n" ++ "Connection: keep-alive\r\n" ++ "Upgrade-Insecure-Requests: 1\r\n\r\n"); defer alloc.free(b); var h = Headers.init(alloc); defer h.list.deinit(); var ctx = Context{ .buf = b, .count = b.len, .stack = undefined, .writer = undefined, .server = undefined, .stream = undefined, .frame = undefined, .node = undefined, }; try parseHeaders(&h, &ctx); var slice = h.list.items; try t.expect(mem.eql(u8, slice[0].name, "user-agent")); try t.expect(mem.eql(u8, slice[0].value, "Mozilla/5.0 (X11; Linux x86_64; rv:67.0) Gecko/20100101 Firefox/67.0")); try t.expect(mem.eql(u8, slice[1].name, "accept")); try t.expect(mem.eql(u8, slice[1].value, "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")); try t.expect(mem.eql(u8, slice[2].name, "accept-language")); try t.expect(mem.eql(u8, slice[2].value, "en-US,en;q=0.5")); try t.expect(mem.eql(u8, slice[3].name, "accept-encoding")); try t.expect(mem.eql(u8, slice[3].value, "gzip, deflate")); try t.expect(mem.eql(u8, slice[4].name, "dnt")); try t.expect(mem.eql(u8, slice[4].value, "1")); try t.expect(mem.eql(u8, slice[5].name, "connection")); try t.expect(mem.eql(u8, slice[5].value, "keep-alive")); try t.expect(mem.eql(u8, slice[6].name, "upgrade-insecure-requests")); try t.expect(mem.eql(u8, slice[6].value, "1")); } test "HTTP/0.9" { var b = try alloc.dupe(u8, "GET / HTTP/0.9\r\n"); defer alloc.free(b); var req: Request = undefined; req.headers = Headers.init(alloc); defer req.headers.deinit(); var ctx = Context{ .buf = b, .count = b.len, .stack = undefined, .writer = undefined, .server = undefined, .stream = undefined, .frame = undefined, .node = undefined, }; try parse(&req, &ctx); try t.expect(mem.eql(u8, req.method, Method.Get)); try t.expect(mem.eql(u8, req.path, "/")); try t.expect(req.version == .Http09); } test "HTTP/1.1" { var b = try alloc.dupe(u8, "POST /about HTTP/1.1\r\n" ++ "expires: Mon, 08 Jul 2019 11:49:03 GMT\r\n" ++ "last-modified: Fri, 09 Nov 2018 06:15:00 GMT\r\n" ++ "X-Test: test\r\n" ++ " obs-fold\r\n" ++ "\r\na body\n"); defer alloc.free(b); var req: Request = undefined; req.headers = Headers.init(alloc); defer req.headers.deinit(); var ctx = Context{ .buf = b, .count = b.len, .stack = undefined, .writer = undefined, .server = undefined, .stream = undefined, .frame = undefined, .node = undefined, }; try parse(&req, &ctx); try t.expect(mem.eql(u8, req.method, Method.Post)); try t.expect(mem.eql(u8, req.path, "/about")); try t.expect(req.version == .Http11); try t.expect(mem.eql(u8, req.body, "a body\n")); try t.expect(mem.eql(u8, (try req.headers.get(alloc, "expires")).?[0].value, "Mon, 08 Jul 2019 11:49:03 GMT")); try t.expect(mem.eql(u8, (try req.headers.get(alloc, "last-modified")).?[0].value, "Fri, 09 Nov 2018 06:15:00 GMT")); const val = try req.headers.get(alloc, "x-test"); _ = val; try t.expect(mem.eql(u8, (try req.headers.get(alloc, "x-test")).?[0].value, "test obs-fold")); } test "HTTP/3.0" { var b = try alloc.dupe(u8, "POST /about HTTP/3.0\r\n\r\n"); defer alloc.free(b); var req: Request = undefined; req.headers = Headers.init(alloc); defer req.headers.deinit(); var ctx = Context{ .buf = b, .count = b.len, .stack = undefined, .writer = undefined, .server = undefined, .stream = undefined, .frame = undefined, .node = undefined, }; try t.expectError(error.UnsupportedVersion, parse(&req, &ctx)); }
0
repos/routez/src/routez
repos/routez/src/routez/http/request.zig
const Version = @import("common.zig").Version; const Headers = @import("headers.zig").Headers; pub const Request = struct { method: []const u8, headers: Headers, path: []const u8, query: []const u8, body: []const u8, version: Version, };
0
repos/routez/src/routez
repos/routez/src/routez/http/headers.zig
const std = @import("std"); const ascii = std.ascii; const mem = std.mem; const Allocator = mem.Allocator; const ArrayList = std.ArrayList; const Context = @import("../server.zig").Server.Context; // TODO use std.http.Headers pub const Headers = struct { list: HeaderList, pub const Error = error{ InvalidChar, OutOfMemory, }; const HeaderList = ArrayList(Header); const Header = struct { name: []const u8, value: []const u8, fn from(allocator: Allocator, name: []const u8, value: []const u8) Error!Header { var copy_name = try allocator.alloc(u8, name.len); var copy_value = try allocator.alloc(u8, value.len); errdefer allocator.free(copy_name); errdefer allocator.free(copy_value); for (name) |c, i| { copy_name[i] = switch (c) { 'a'...'z', '0'...'9', '!', '#', '$', '%', '&', '\'', '*', '+', '-', '.', '/', '^', '_', '`', '|', '~' => c, 'A'...'Z' => c | 0x20, else => return Error.InvalidChar, }; } var i: usize = 0; for (mem.trim(u8, value, " \t")) |c| { if (c == '\r' or c == '\n') { // obs-fold } else if (c < ' ' or c > '~') { return Error.InvalidChar; } else { copy_value[i] = c; i += 1; } } copy_value = allocator.shrink(copy_value, i); return Header{ .name = copy_name, .value = copy_value, }; } }; pub fn init(allocator: Allocator) Headers { return Headers{ .list = HeaderList.init(allocator), }; } pub fn deinit(headers: Headers) void { const a = headers.list.allocator; for (headers.list.items) |h| { a.free(h.name); a.free(h.value); } headers.list.deinit(); } pub fn get(headers: *const Headers, allocator: Allocator, name: []const u8) Error!?[]const *Header { var list = ArrayList(*Header).init(allocator); errdefer list.deinit(); for (headers.list.items) |*h| { if (mem.eql(u8, h.name, name)) { const new = try list.addOne(); new.* = h; } } if (list.items.len == 0) { return null; } else { return list.toOwnedSlice(); } } // pub fn set(h: *Headers, name: []const u8, value: []const u8) Error!?[]const u8 { // // var old = get() // } pub fn has(headers: *Headers, name: []const u8) bool { for (headers.list.items) |*h| { if (mem.eql(u8, h.name, name)) { return true; } } return false; } pub fn hasTokenIgnoreCase(headers: *const Headers, name: []const u8, token: []const u8) bool { for (headers.list.items) |*h| { if (ascii.eqlIgnoreCase(h.name, name) and ascii.eqlIgnoreCase(h.value, token)) { return true; } } return false; } pub fn put(h: *Headers, name: []const u8, value: []const u8) Error!void { try h.list.append(try Header.from(h.list.allocator, name, value)); } };
0
repos/routez/src/routez
repos/routez/src/routez/http/common.zig
const std = @import("std"); const mem = std.mem; pub const StatusCode = enum(u16) { // informational Continue = 100, SwitchingProtocols = 101, Processing = 102, EarlyHints = 103, // success Ok = 200, Created = 201, Accepted = 202, NonAuthoritativeInformation = 203, NoContent = 204, ResetContent = 205, PartialContent = 206, MultiStatus = 207, AlreadyReported = 208, ImUsed = 226, // redirection MultipleChoices = 300, MovedPermanently = 301, Found = 302, SeeOther = 303, NotModified = 304, UseProxy = 305, SwitchProxy = 306, TemporaryRedirect = 307, PermanentRedirect = 308, // client error BadRequest = 400, Unauthorized = 401, PaymentRequired = 402, Forbidden = 403, NotFound = 404, MethodNotAllowed = 405, NotAcceptable = 406, ProxyAuthenticationRequired = 407, RequestTimeout = 408, Conflict = 409, Gone = 410, LengthRequired = 411, PreconditionFailed = 412, PayloadTooLarge = 413, UriTooLong = 414, UnsupportedMediaType = 415, RangeNotSatisfiable = 416, ExpectationFailed = 417, ImATeapot = 418, MisdirectedRequest = 421, UnprocessableEntity = 422, Locked = 423, FailedDependency = 424, TooEarly = 425, UpgradeRequired = 426, PreconditionRequired = 428, TooManyRequests = 429, RequestHeaderFieldsTooLarge = 431, UnavailableForLegalReasons = 451, // server error InternalServerError = 500, NotImplemented = 501, BadGateway = 502, ServiceUnavailable = 503, GatewayTimeout = 504, HttpVersionNotSupported = 505, VariantAlsoNegotiates = 506, InsufficientStorage = 507, LoopDetected = 508, NotExtended = 510, NetworkAuthenticationRequired = 511, pub fn toString(self: StatusCode) []const u8 { return switch (self) { .Continue => "Continue", .SwitchingProtocols => "Switching Protocols", .Processing => "Processing", .EarlyHints => "Early Hints", .Ok => "OK", .Created => "Created", .Accepted => "Accepted", .NonAuthoritativeInformation => "Non-Authoritative Information", .NoContent => "No Content", .ResetContent => "Reset Content", .PartialContent => "Partial Content", .MultiStatus => "Multi Status", .AlreadyReported => "Already Reported", .ImUsed => "IM Used", .MultipleChoices => "Multiple Choices", .MovedPermanently => "Moved Permanently", .Found => "Found", .SeeOther => "See Other", .NotModified => "Not Modified", .UseProxy => "Use Proxy", .SwitchProxy => "Switch Proxy", .TemporaryRedirect => "Temporary Redirect", .PermanentRedirect => "Permanent Redirect", .BadRequest => "Bad Request", .Unauthorized => "Unauthorized", .PaymentRequired => "Payment Required", .Forbidden => "Forbidden", .NotFound => "Not Found", .MethodNotAllowed => "Method Not Allowed", .NotAcceptable => "Not Acceptable", .ProxyAuthenticationRequired => "Proxy Authentication Required", .RequestTimeout => "Request Timeout", .Conflict => "Conflict", .Gone => "Gone", .LengthRequired => "Length Required", .PreconditionFailed => "Precondition Failed", .PayloadTooLarge => "Payload Too Large", .UriTooLong => "URI Too Long", .UnsupportedMediaType => "Unsupported Media Type", .RangeNotSatisfiable => "Range Not Satisfiable", .ExpectationFailed => "Expectation Failed", .ImATeapot => "I'm a teapot", .MisdirectedRequest => "Misdirected Request", .UnprocessableEntity => "Unprocessable Entity", .Locked => "Locked", .FailedDependency => "Failed Dependency", .TooEarly => "Too Early", .UpgradeRequired => "Upgrade Required", .PreconditionRequired => "Precondition Required", .TooManyRequests => "Too Many Requests", .RequestHeaderFieldsTooLarge => "Request Header Fields Too Large", .UnavailableForLegalReasons => "Unavailable For Legal Reasons", .InternalServerError => "Internal Server Error", .NotImplemented => "Not Implemented", .BadGateway => "Bad Gateway", .ServiceUnavailable => "Service Unavailable", .GatewayTimeout => "Gateway Timeout", .HttpVersionNotSupported => "HTTP Version Not Supported", .VariantAlsoNegotiates => "Variant Also Negotiates", .InsufficientStorage => "Insufficient Storage", .LoopDetected => "Loop Detected", .NotExtended => "Not Extended", .NetworkAuthenticationRequired => "Network Authentication Required", }; } }; pub const Method = struct { pub const Get = "GET"; pub const Head = "HEAD"; pub const Post = "POST"; pub const Put = "PUT"; pub const Delete = "DELETE"; pub const Connect = "CONNECT"; pub const Options = "OPTIONS"; pub const Trace = "TRACE"; pub const Patch = "PATCH"; }; pub const Version = enum { Http09, Http10, Http11, Http20, Http30, const vers = [_][]const u8{ "HTTP/0.9", "HTTP/1.0", "HTTP/1.1", "HTTP/2.0", "HTTP/3.0", }; pub fn toString(self: Version) []const u8 { return vers[@enumToInt(self)]; } pub fn fromString(str: []const u8) !Version { for (vers) |v, i| { if (mem.eql(u8, v, str)) { return @intToEnum(Version, @truncate(u3, i)); } } return error.Unsupported; } };
0
repos/routez
repos/routez/examples/basic.zig
const std = @import("std"); const Address = std.net.Address; const r = @import("routez"); const allocator = std.heap.page_allocator; pub const io_mode = .evented; pub fn main() !void { var server = r.Server.init( allocator, .{}, .{ r.all("/", indexHandler), r.get("/about", aboutHandler), r.get("/about/more", aboutHandler2), r.get("/post/{post_num}/?", postHandler), r.static("./", "/static"), r.all("/counter", counterHandler), }, ); var addr = try Address.parseIp("127.0.0.1", 8080); try server.listen(addr); } fn indexHandler(_: r.Request, res: r.Response) !void { try res.sendFile("examples/index.html"); } fn aboutHandler(_: r.Request, res: r.Response) !void { try res.write("Hello from about\n"); } fn aboutHandler2(_: r.Request, res: r.Response) !void { try res.write("Hello from about2\n"); } fn postHandler(_: r.Request, res: r.Response, args: *const struct { post_num: []const u8, }) !void { try res.print("Hello from post, post_num is {s}\n", .{args.post_num}); } var counter = std.atomic.Atomic(usize).init(0); fn counterHandler(_: r.Request, res: r.Response) !void { try res.print("Page loaded {d} times\n", .{counter.fetchAdd(1, .SeqCst)}); }
0
repos/routez
repos/routez/examples/index.html
<!DOCTYPE html> <html> <head> <title>Routez!</title> </head> <body> <h1>Page served with <a href="https://github.com/Vexu/routez">Routez</a> and <a href="https://github.com/ziglang/zig">Zig</a></h1> <a href="/about">about</a> <a href="/about/more">about more</a> <a href="/post/1234">post 1234</a> <a href="/static/assets/example-file.txt">example-file.txt</a> <a href="/counter">page load counter</a> </body> </html>