diff --git a/src-self-hosted/main.zig b/src-self-hosted/main.zig
index a264b5484a0c..3c4afaf511dd 100644
--- a/src-self-hosted/main.zig
+++ b/src-self-hosted/main.zig
@@ -687,12 +687,13 @@ const usage_fmt =
     \\Options:
     \\  --help                 Print this help and exit
     \\  --color [auto|off|on]  Enable or disable colored error messages
-    \\
+    \\  -r                     Recursive (searches within directories)
     \\
 ;

 const args_fmt_spec = []Flag{
     Flag.Bool("--help"),
+    Flag.Bool("-r"),
     Flag.Option("--color", []const []const u8{
         "auto",
         "off",
@@ -700,6 +701,47 @@
     }),
 };

+// Returns false if any errors occur
+fn fmtHandleFile(allocator: *Allocator, file_path: []const u8, color: errmsg.Color) !bool {
+    var file = try os.File.openRead(allocator, file_path);
+    defer file.close();
+
+    return blk: {
+        const source_code = io.readFileAlloc(allocator, file_path) catch |err| {
+            try stderr.print("unable to open '{}': {}\n", file_path, err);
+            break :blk false;
+        };
+        defer allocator.free(source_code);
+
+        var tree = std.zig.parse(allocator, source_code) catch |err| {
+            try stderr.print("error parsing file '{}': {}\n", file_path, err);
+            break :blk false;
+        };
+        defer tree.deinit();
+
+        var error_it = tree.errors.iterator(0);
+        while (error_it.next()) |parse_error| {
+            const msg = try errmsg.createFromParseError(allocator, parse_error, &tree, file_path);
+            defer allocator.destroy(msg);
+
+            try errmsg.printToFile(&stderr_file, msg, color);
+        }
+        if (tree.errors.len != 0) {
+            break :blk false;
+        }
+
+        const baf = try io.BufferedAtomicFile.create(allocator, file_path);
+        defer baf.destroy();
+
+        const anything_changed = try std.zig.render(allocator, baf.stream(), &tree);
+        if (anything_changed) {
+            try stderr.print("{}\n", file_path);
+            try baf.finish();
+        }
+        break :blk true;
+    };
+}
+
 fn cmdFmt(allocator: *Allocator, args: []const []const u8) !void {
     var flags = try Args.parse(allocator, args_fmt_spec, args);
     defer flags.deinit();
@@ -729,43 +771,39 @@ fn cmdFmt(allocator: *Allocator, args: []const []const u8) !void {
     };

     var fmt_errors = false;
-    for (flags.positionals.toSliceConst()) |file_path| {
-        var file = try os.File.openRead(allocator, file_path);
-        defer file.close();
-
-        const source_code = io.readFileAlloc(allocator, file_path) catch |err| {
-            try stderr.print("unable to open '{}': {}\n", file_path, err);
-            fmt_errors = true;
-            continue;
-        };
-        defer allocator.free(source_code);
-
-        var tree = std.zig.parse(allocator, source_code) catch |err| {
-            try stderr.print("error parsing file '{}': {}\n", file_path, err);
-            fmt_errors = true;
-            continue;
-        };
-        defer tree.deinit();
-
-        var error_it = tree.errors.iterator(0);
-        while (error_it.next()) |parse_error| {
-            const msg = try errmsg.createFromParseError(allocator, parse_error, &tree, file_path);
-            defer allocator.destroy(msg);
-
-            try errmsg.printToFile(&stderr_file, msg, color);
-        }
-        if (tree.errors.len != 0) {
-            fmt_errors = true;
-            continue;
-        }
-
-        const baf = try io.BufferedAtomicFile.create(allocator, file_path);
-        defer baf.destroy();
-
-        const anything_changed = try std.zig.render(allocator, baf.stream(), &tree);
-        if (anything_changed) {
-            try stderr.print("{}\n", file_path);
-            try baf.finish();
+    const positionals = flags.positionals.toSliceConst();
+
+    const recursive = flags.present("r");
+
+    for (positionals) |file_path| {
+        if (recursive) {
+            var dir = try std.os.Dir.open(allocator, file_path);
+            defer dir.close();
+            var full_entry_buf = ArrayList(u8).init(allocator);
+            defer full_entry_buf.deinit();
+
+            while (try dir.next()) |entry| {
+                if (mem.endsWith(u8, entry.name, ".zig")) {
+                    // Join the directory path and the entry name with an explicit '/' separator.
+                    try full_entry_buf.resize(file_path.len + entry.name.len + 1);
+                    const full_entry_path = full_entry_buf.toSlice();
+                    mem.copy(u8, full_entry_path, file_path);
+                    full_entry_path[file_path.len] = '/';
+                    mem.copy(u8, full_entry_path[file_path.len + 1 ..], entry.name);
+                    if (!try fmtHandleFile(allocator, full_entry_path, color)) {
+                        fmt_errors = true;
+                    }
+                }
+            }
+        } else {
+            if (!try fmtHandleFile(allocator, file_path, color)) {
+                fmt_errors = true;
+                var dir = std.os.Dir.open(allocator, file_path) catch null;
+                if (dir != null) {
+                    (??dir).close();
+                    try stderr.write("To fmt recursively pass the '-r' flag.\n");
+                    os.exit(1);
+                }
+            }
         }
     }
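Both the `-r` path and the single-file path funnel through `fmtHandleFile`, which reports a per-file failure through its `bool` return value instead of aborting the run; `cmdFmt` only flips `fmt_errors` and keeps going, so one unparseable file does not stop the others. Note that the recursive branch only visits the immediate `.zig` entries of each directory passed on the command line. A minimal sketch of that accumulation pattern, assuming it lives inside main.zig where `fmtHandleFile`, `errmsg`, and `os` are already in scope (`fmtAll` is an illustrative name, not part of this patch):

```zig
// Illustrative sketch only (not part of the patch): format a list of paths,
// remember whether any of them failed, and exit non-zero at the end.
fn fmtAll(allocator: *Allocator, paths: []const []const u8, color: errmsg.Color) !void {
    var any_errors = false;
    for (paths) |path| {
        // fmtHandleFile returns false when the file could not be read, parsed, or rendered.
        if (!try fmtHandleFile(allocator, path, color)) {
            any_errors = true;
        }
    }
    if (any_errors) {
        os.exit(1);
    }
}
```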
diff --git a/std/buffer.zig b/std/buffer.zig
index 469f81709b6e..3b2936d22362 100644
--- a/std/buffer.zig
+++ b/std/buffer.zig
@@ -28,6 +28,7 @@ pub const Buffer = struct {
     /// Must deinitialize with deinit.
     /// None of the other operations are valid until you do one of these:
     /// * ::replaceContents
+    /// * ::replaceContentsBuffer
     /// * ::resize
     pub fn initNull(allocator: *Allocator) Buffer {
         return Buffer{ .list = ArrayList(u8).init(allocator) };
@@ -115,7 +116,7 @@ pub const Buffer = struct {
         return mem.eql(u8, self.list.items[start..l], m);
     }

-    pub fn replaceContents(self: *Buffer, m: []const u8) !void {
+    pub fn replaceContents(self: *const Buffer, m: []const u8) !void {
         try self.resize(m.len);
         mem.copy(u8, self.list.toSlice(), m);
     }
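For context, the initialization contract that the doc comment describes: a `Buffer` created with `initNull` owns no memory until its contents are set. A minimal sketch, assuming `Buffer` is re-exported as `std.Buffer` and exposes `toSliceConst` as elsewhere in this tree (`replaceContentsBuffer` is referenced by the new comment but is not part of this diff):

```zig
// Illustrative sketch only: a null Buffer must be given contents (or resized)
// before any other operation is used on it.
const std = @import("std");

test "Buffer.initNull then replaceContents" {
    var buf = std.Buffer.initNull(std.debug.global_allocator);
    defer buf.deinit();

    buf.replaceContents("hello") catch unreachable;
    std.debug.assert(std.mem.eql(u8, buf.toSliceConst(), "hello"));
}
```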
diff --git a/std/json.zig b/std/json.zig
index 6cf83eef1aec..71673ad20fd0 100644
--- a/std/json.zig
+++ b/std/json.zig
@@ -3,7 +3,6 @@
 // https://tools.ietf.org/html/rfc8259

 const std = @import("index.zig");
-const debug = std.debug;
 const mem = std.mem;

 const u1 = @IntType(false, 1);
@@ -87,9 +86,7 @@ pub const Token = struct {
 // parsing state requires ~40-50 bytes of stack space.
 //
 // Conforms strictly to RFC8529.
-//
-// For a non-byte based wrapper, consider using TokenStream instead.
-pub const StreamingParser = struct {
+pub const StreamingJsonParser = struct {
     // Current state
     state: State,
     // How many bytes we have counted for the current token
@@ -112,13 +109,13 @@
     const array_bit = 1;
     const max_stack_size = @maxValue(u8);

-    pub fn init() StreamingParser {
-        var p: StreamingParser = undefined;
+    pub fn init() StreamingJsonParser {
+        var p: StreamingJsonParser = undefined;
         p.reset();
         return p;
     }

-    pub fn reset(p: *StreamingParser) void {
+    pub fn reset(p: *StreamingJsonParser) void {
         p.state = State.TopLevelBegin;
         p.count = 0;
         // Set before ever read in main transition function
@@ -178,7 +175,7 @@

         // Only call this function to generate array/object final state.
         pub fn fromInt(x: var) State {
-            debug.assert(x == 0 or x == 1);
+            std.debug.assert(x == 0 or x == 1);
             const T = @TagType(State);
             return State(T(x));
         }
@@ -208,7 +205,7 @@
     // tokens. token2 is always null if token1 is null.
     //
     // There is currently no error recovery on a bad stream.
-    pub fn feed(p: *StreamingParser, c: u8, token1: *?Token, token2: *?Token) Error!void {
+    pub fn feed(p: *StreamingJsonParser, c: u8, token1: *?Token, token2: *?Token) Error!void {
         token1.* = null;
         token2.* = null;
         p.count += 1;
@@ -220,7 +217,7 @@
     }

     // Perform a single transition on the state machine and return any possible token.
-    fn transition(p: *StreamingParser, c: u8, token: *?Token) Error!bool {
+    fn transition(p: *StreamingJsonParser, c: u8, token: *?Token) Error!bool {
         switch (p.state) {
             State.TopLevelBegin => switch (c) {
                 '{' => {
@@ -855,116 +852,10 @@
     }
 };

-// A small wrapper over a StreamingParser for full slices. Returns a stream of json Tokens.
-pub const TokenStream = struct {
-    i: usize,
-    slice: []const u8,
-    parser: StreamingParser,
-    token: ?Token,
-
-    pub fn init(slice: []const u8) TokenStream {
-        return TokenStream{
-            .i = 0,
-            .slice = slice,
-            .parser = StreamingParser.init(),
-            .token = null,
-        };
-    }
-
-    pub fn next(self: *TokenStream) !?Token {
-        if (self.token) |token| {
-            self.token = null;
-            return token;
-        }
-
-        var t1: ?Token = undefined;
-        var t2: ?Token = undefined;
-
-        while (self.i < self.slice.len) {
-            try self.parser.feed(self.slice[self.i], &t1, &t2);
-            self.i += 1;
-
-            if (t1) |token| {
-                self.token = t2;
-                return token;
-            }
-        }
-
-        if (self.i > self.slice.len) {
-            try self.parser.feed(' ', &t1, &t2);
-            self.i += 1;
-
-            if (t1) |token| {
-                return token;
-            }
-        }
-
-        return null;
-    }
-};
-
-fn checkNext(p: *TokenStream, id: Token.Id) void {
-    const token = ??(p.next() catch unreachable);
-    debug.assert(token.id == id);
-}
-
-test "token" {
-    const s =
-        \\{
-        \\  "Image": {
-        \\      "Width":  800,
-        \\      "Height": 600,
-        \\      "Title":  "View from 15th Floor",
-        \\      "Thumbnail": {
-        \\          "Url":    "http://www.example.com/image/481989943",
-        \\          "Height": 125,
-        \\          "Width":  100
-        \\      },
-        \\      "Animated" : false,
-        \\      "IDs": [116, 943, 234, 38793]
-        \\  }
-        \\}
-    ;
-
-    var p = TokenStream.init(s);
-
-    checkNext(&p, Token.Id.ObjectBegin);
-    checkNext(&p, Token.Id.String); // Image
-    checkNext(&p, Token.Id.ObjectBegin);
-    checkNext(&p, Token.Id.String); // Width
-    checkNext(&p, Token.Id.Number);
-    checkNext(&p, Token.Id.String); // Height
-    checkNext(&p, Token.Id.Number);
-    checkNext(&p, Token.Id.String); // Title
-    checkNext(&p, Token.Id.String);
-    checkNext(&p, Token.Id.String); // Thumbnail
-    checkNext(&p, Token.Id.ObjectBegin);
-    checkNext(&p, Token.Id.String); // Url
-    checkNext(&p, Token.Id.String);
-    checkNext(&p, Token.Id.String); // Height
-    checkNext(&p, Token.Id.Number);
-    checkNext(&p, Token.Id.String); // Width
-    checkNext(&p, Token.Id.Number);
-    checkNext(&p, Token.Id.ObjectEnd);
-    checkNext(&p, Token.Id.String); // Animated
-    checkNext(&p, Token.Id.False);
-    checkNext(&p, Token.Id.String); // IDs
-    checkNext(&p, Token.Id.ArrayBegin);
-    checkNext(&p, Token.Id.Number);
-    checkNext(&p, Token.Id.Number);
-    checkNext(&p, Token.Id.Number);
-    checkNext(&p, Token.Id.Number);
-    checkNext(&p, Token.Id.ArrayEnd);
-    checkNext(&p, Token.Id.ObjectEnd);
-    checkNext(&p, Token.Id.ObjectEnd);
-
-    debug.assert((try p.next()) == null);
-}
-
 // Validate a JSON string. This does not limit number precision so a decoder may not necessarily
 // be able to decode the string even if this returns true.
 pub fn validate(s: []const u8) bool {
-    var p = StreamingParser.init();
+    var p = StreamingJsonParser.init();

     for (s) |c, i| {
         var token1: ?Token = undefined;
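With `TokenStream` gone, slice-oriented callers drive `StreamingJsonParser.feed` one byte at a time; a single byte can complete up to two tokens, and a trailing space is fed at the end so that a number terminating at end-of-input is still emitted (`validate` above and `JsonParser.parse` below both follow this shape). A minimal sketch of that loop, using only the API shown in this file (`countTokens` is an illustrative name, not part of the patch):

```zig
// Illustrative sketch only: count the tokens in a JSON slice by feeding the
// streaming parser byte by byte.
fn countTokens(s: []const u8) !usize {
    var p = StreamingJsonParser.init();
    var n: usize = 0;

    for (s) |c| {
        var token1: ?Token = undefined;
        var token2: ?Token = undefined;
        // feed() may emit zero, one, or two tokens for a single input byte.
        try p.feed(c, &token1, &token2);
        if (token1 != null) n += 1;
        if (token2 != null) n += 1;
    }

    // A trailing space flushes a number that ends at the very end of the input,
    // mirroring what validate() and JsonParser.parse() do.
    var final1: ?Token = undefined;
    var final2: ?Token = undefined;
    try p.feed(' ', &final1, &final2);
    if (final1 != null) n += 1;
    if (final2 != null) n += 1;

    return n;
}
```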
@@ -1006,46 +897,46 @@ pub const Value = union(enum) {

     pub fn dump(self: *const Value) void {
         switch (self.*) {
             Value.Null => {
-                debug.warn("null");
+                std.debug.warn("null");
             },
             Value.Bool => |inner| {
-                debug.warn("{}", inner);
+                std.debug.warn("{}", inner);
             },
             Value.Integer => |inner| {
-                debug.warn("{}", inner);
+                std.debug.warn("{}", inner);
             },
             Value.Float => |inner| {
-                debug.warn("{.5}", inner);
+                std.debug.warn("{.5}", inner);
             },
             Value.String => |inner| {
-                debug.warn("\"{}\"", inner);
+                std.debug.warn("\"{}\"", inner);
             },
             Value.Array => |inner| {
                 var not_first = false;
-                debug.warn("[");
+                std.debug.warn("[");
                 for (inner.toSliceConst()) |value| {
                     if (not_first) {
-                        debug.warn(",");
+                        std.debug.warn(",");
                     }
                     not_first = true;
                     value.dump();
                 }
-                debug.warn("]");
+                std.debug.warn("]");
             },
             Value.Object => |inner| {
                 var not_first = false;
-                debug.warn("{{");
+                std.debug.warn("{{");
                 var it = inner.iterator();
                 while (it.next()) |entry| {
                     if (not_first) {
-                        debug.warn(",");
+                        std.debug.warn(",");
                     }
                     not_first = true;
-                    debug.warn("\"{}\":", entry.key);
+                    std.debug.warn("\"{}\":", entry.key);
                     entry.value.dump();
                 }
-                debug.warn("}}");
+                std.debug.warn("}}");
             },
         }
     }
@@ -1061,53 +952,53 @@ pub const Value = union(enum) {

     fn dumpIndentLevel(self: *const Value, indent: usize, level: usize) void {
         switch (self.*) {
             Value.Null => {
-                debug.warn("null");
+                std.debug.warn("null");
             },
             Value.Bool => |inner| {
-                debug.warn("{}", inner);
+                std.debug.warn("{}", inner);
             },
             Value.Integer => |inner| {
-                debug.warn("{}", inner);
+                std.debug.warn("{}", inner);
             },
             Value.Float => |inner| {
-                debug.warn("{.5}", inner);
+                std.debug.warn("{.5}", inner);
             },
             Value.String => |inner| {
-                debug.warn("\"{}\"", inner);
+                std.debug.warn("\"{}\"", inner);
             },
             Value.Array => |inner| {
                 var not_first = false;
-                debug.warn("[\n");
+                std.debug.warn("[\n");
                 for (inner.toSliceConst()) |value| {
                     if (not_first) {
-                        debug.warn(",\n");
+                        std.debug.warn(",\n");
                     }
                     not_first = true;
                     padSpace(level + indent);
                     value.dumpIndentLevel(indent, level + indent);
                 }
-                debug.warn("\n");
+                std.debug.warn("\n");
                 padSpace(level);
-                debug.warn("]");
+                std.debug.warn("]");
             },
             Value.Object => |inner| {
                 var not_first = false;
-                debug.warn("{{\n");
+                std.debug.warn("{{\n");
                 var it = inner.iterator();
                 while (it.next()) |entry| {
                     if (not_first) {
-                        debug.warn(",\n");
+                        std.debug.warn(",\n");
                     }
                     not_first = true;
                     padSpace(level + indent);
-                    debug.warn("\"{}\": ", entry.key);
+                    std.debug.warn("\"{}\": ", entry.key);
                     entry.value.dumpIndentLevel(indent, level + indent);
                 }
-                debug.warn("\n");
+                std.debug.warn("\n");
                 padSpace(level);
-                debug.warn("}}");
+                std.debug.warn("}}");
             },
         }
     }
@@ -1115,13 +1006,13 @@
     fn padSpace(indent: usize) void {
         var i: usize = 0;
         while (i < indent) : (i += 1) {
-            debug.warn(" ");
+            std.debug.warn(" ");
         }
     }
 };

 // A non-stream JSON parser which constructs a tree of Value's.
-pub const Parser = struct {
+pub const JsonParser = struct {
     allocator: *Allocator,
     state: State,
     copy_strings: bool,
@@ -1135,8 +1026,8 @@
         Simple,
     };

-    pub fn init(allocator: *Allocator, copy_strings: bool) Parser {
-        return Parser{
+    pub fn init(allocator: *Allocator, copy_strings: bool) JsonParser {
+        return JsonParser{
             .allocator = allocator,
             .state = State.Simple,
             .copy_strings = copy_strings,
@@ -1144,26 +1035,52 @@
         };
     }

-    pub fn deinit(p: *Parser) void {
+    pub fn deinit(p: *JsonParser) void {
         p.stack.deinit();
     }

-    pub fn reset(p: *Parser) void {
+    pub fn reset(p: *JsonParser) void {
         p.state = State.Simple;
         p.stack.shrink(0);
     }

-    pub fn parse(p: *Parser, input: []const u8) !ValueTree {
-        var s = TokenStream.init(input);
+    pub fn parse(p: *JsonParser, input: []const u8) !ValueTree {
+        var mp = StreamingJsonParser.init();

         var arena = ArenaAllocator.init(p.allocator);
         errdefer arena.deinit();

-        while (try s.next()) |token| {
-            try p.transition(&arena.allocator, input, s.i - 1, token);
+        for (input) |c, i| {
+            var mt1: ?Token = undefined;
+            var mt2: ?Token = undefined;
+
+            try mp.feed(c, &mt1, &mt2);
+            if (mt1) |t1| {
+                try p.transition(&arena.allocator, input, i, t1);
+
+                if (mt2) |t2| {
+                    try p.transition(&arena.allocator, input, i, t2);
+                }
+            }
         }

-        debug.assert(p.stack.len == 1);
+        // Handle top-level lonely number values.
+        {
+            const i = input.len;
+            var mt1: ?Token = undefined;
+            var mt2: ?Token = undefined;
+
+            try mp.feed(' ', &mt1, &mt2);
+            if (mt1) |t1| {
+                try p.transition(&arena.allocator, input, i, t1);
+            }
+        }
+
+        if (!mp.complete) {
+            return error.IncompleteJsonInput;
+        }
+
+        std.debug.assert(p.stack.len == 1);

         return ValueTree{
             .arena = arena,
@@ -1173,7 +1090,7 @@

     // Even though p.allocator exists, we take an explicit allocator so that allocation state
     // can be cleaned up on error correctly during a `parse` on call.
-    fn transition(p: *Parser, allocator: *Allocator, input: []const u8, i: usize, token: *const Token) !void {
+    fn transition(p: *JsonParser, allocator: *Allocator, input: []const u8, i: usize, token: *const Token) !void {
         switch (p.state) {
             State.ObjectKey => switch (token.id) {
                 Token.Id.ObjectEnd => {
@@ -1306,7 +1223,7 @@
         }
     }

-    fn pushToParent(p: *Parser, value: *const Value) !void {
+    fn pushToParent(p: *JsonParser, value: *const Value) !void {
         switch (p.stack.at(p.stack.len - 1)) {
             // Object Parent -> [ ..., object, <key>, value ]
             Value.String => |key| {
@@ -1327,7 +1244,7 @@
         }
     }

-    fn parseString(p: *Parser, allocator: *Allocator, token: *const Token, input: []const u8, i: usize) !Value {
+    fn parseString(p: *JsonParser, allocator: *Allocator, token: *const Token, input: []const u8, i: usize) !Value {
         // TODO: We don't strictly have to copy values which do not contain any escape
         // characters if flagged with the option.
         const slice = token.slice(input, i);
         return Value{ .String = try mem.dupe(p.allocator, u8, slice) };
     }

-    fn parseNumber(p: *Parser, token: *const Token, input: []const u8, i: usize) !Value {
+    fn parseNumber(p: *JsonParser, token: *const Token, input: []const u8, i: usize) !Value {
         return if (token.number_is_integer)
             Value{ .Integer = try std.fmt.parseInt(i64, token.slice(input, i), 10) }
         else
@@ -1342,8 +1259,10 @@
     }
 };

+const debug = std.debug;
+
 test "json parser dynamic" {
-    var p = Parser.init(debug.global_allocator, false);
+    var p = JsonParser.init(std.debug.global_allocator, false);
     defer p.deinit();

     const s =
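The renamed `JsonParser` now feeds `StreamingJsonParser` directly instead of going through `TokenStream`, and it rejects truncated documents with `error.IncompleteJsonInput`. A minimal usage sketch in the style of the "json parser dynamic" test above, assuming `ValueTree` keeps its `root` field and `deinit()` from the existing std/json.zig:

```zig
// Illustrative sketch only: parse a document into a Value tree and print it.
test "json parser example" {
    var p = JsonParser.init(std.debug.global_allocator, false);
    defer p.deinit();

    var tree = p.parse("{\"answer\": 42}") catch unreachable;
    defer tree.deinit();

    // tree.root is a Value; dump() prints it as JSON to stderr.
    tree.root.dump();
}
```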
diff --git a/std/os/file.zig b/std/os/file.zig
index f15fa77688be..d5af55b5e41b 100644
--- a/std/os/file.zig
+++ b/std/os/file.zig
@@ -123,8 +123,7 @@ pub const File = struct {
             }
             return true;
         } else if (is_windows) {
-            // TODO do not depend on shlwapi.dll
-            if (os.windows.PathFileExistsA(path_with_null.ptr) == os.windows.TRUE) {
+            if (os.windows.PathFileExists(path_with_null.ptr) == os.windows.TRUE) {
                 return true;
             }

diff --git a/std/os/windows/index.zig b/std/os/windows/index.zig
index 0934c3fd900f..53e12500e764 100644
--- a/std/os/windows/index.zig
+++ b/std/os/windows/index.zig
@@ -123,6 +123,8 @@ pub extern "kernel32" stdcallcc fn QueryPerformanceCounter(lpPerformanceCount: *LARGE_INTEGER) BOOL;

 pub extern "kernel32" stdcallcc fn QueryPerformanceFrequency(lpFrequency: *LARGE_INTEGER) BOOL;

+pub extern "kernel32" stdcallcc fn PathFileExists(pszPath: ?LPCTSTR) BOOL;
+
 pub extern "kernel32" stdcallcc fn ReadFile(
     in_hFile: HANDLE,
     out_lpBuffer: *c_void,
@@ -161,8 +163,6 @@ pub extern "kernel32" stdcallcc fn FreeLibrary(hModule: HMODULE) BOOL;

 pub extern "user32" stdcallcc fn MessageBoxA(hWnd: ?HANDLE, lpText: ?LPCTSTR, lpCaption: ?LPCTSTR, uType: UINT) c_int;

-pub extern "shlwapi" stdcallcc fn PathFileExistsA(pszPath: ?LPCTSTR) BOOL;
-
 pub const PROV_RSA_FULL = 1;

 pub const BOOL = c_int;