Node compat: Add path member to fs.Dirent #7649

Closed · wants to merge 16 commits
11 changes: 11 additions & 0 deletions packages/bun-types/fs.d.ts
@@ -176,6 +176,17 @@ declare module "fs" {
* @since v0.0.67
*/
name: string;
/**
* The base path that this `fs.Dirent` object refers to.
* Deprecated in favor of `parentPath`.
* @since v??????

[Review comment] @paperclover (Member), Jan 5, 2024:
since ???? should be removed

[Review comment] Contributor Author (reply):
Seems like this entire file gets deleted to resolve the merge conflict -- if I'm understanding things correctly, type definitions are now in /~https://github.com/DefinitelyTyped/DefinitelyTyped per #7670.

*/
path: string;
/**
* The base path that this `fs.Dirent` object refers to.
* @since v??????
*/
parentPath: string;
}

/**
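For illustration only (this snippet is not part of the diff, and the scratch-directory name is made up): with the declarations above, calling code is expected to read the new fields roughly like this.

import { mkdirSync, writeFileSync, readdirSync } from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";

// Hypothetical scratch directory, used only for this example.
const root = join(tmpdir(), "dirent-path-example");
mkdirSync(join(root, "nested"), { recursive: true });
writeFileSync(join(root, "nested", "file.txt"), "hello");

for (const entry of readdirSync(root, { recursive: true, withFileTypes: true })) {
  // Both fields hold the directory containing the entry,
  // not the entry's own full path.
  console.log(entry.name, entry.path, entry.parentPath);
}

Note that recursive readdir requires a runtime that supports it; the non-recursive form reads the same fields, just without nested entries.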
8 changes: 8 additions & 0 deletions src/bun.js/node/node.classes.ts
@@ -429,6 +429,14 @@ export default [
getter: "getName",
cache: true,
},
path: {
getter: "getPath",
cache: true,
},
parentPath: {
getter: "getPath",
cache: true,
},
},
}),
define({
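Since both `path` and `parentPath` are wired to the same native getter (`getPath`) and cached, the two properties should report the same string for any given entry. A minimal sanity check, assuming the current directory is non-empty:

import { readdirSync } from "node:fs";

const entries = readdirSync(".", { withFileTypes: true });
if (entries.length > 0) {
  // Same underlying getter, so the values should match.
  console.log(entries[0].path === entries[0].parentPath); // expected: true
}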
55 changes: 47 additions & 8 deletions src/bun.js/node/node_fs.zig
@@ -349,7 +349,7 @@ pub const AsyncReaddirRecursiveTask = struct {
/// All the subtasks will use this fd to open files
root_fd: FileDescriptor = bun.invalid_fd,

/// This isued when joining the file paths for error messages
/// This is used when joining the file paths for error messages
root_path: PathString = PathString.empty,

pending_err: ?Syscall.Error = null,
@@ -366,6 +366,7 @@
.with_file_types => |*res| {
for (res.items) |item| {
item.name.deref();
item.path.deref();
}
res.clearAndFree();
},
@@ -403,7 +404,8 @@
bun.default_allocator.destroy(this);
}
var buf: [bun.MAX_PATH_BYTES]u8 = undefined;
this.readdir_task.performWork(this.basename.sliceAssumeZ(), &buf, false);
var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
this.readdir_task.performWork(this.basename.sliceAssumeZ(), &buf, &path_buf, false);
}
};

@@ -454,7 +456,13 @@ pub const AsyncReaddirRecursiveTask = struct {
return task.promise.value();
}

pub fn performWork(this: *AsyncReaddirRecursiveTask, basename: [:0]const u8, buf: *[bun.MAX_PATH_BYTES]u8, comptime is_root: bool) void {
pub fn performWork(
this: *AsyncReaddirRecursiveTask,
basename: [:0]const u8,
buf: *[bun.MAX_PATH_BYTES]u8,
path_buf: *[bun.MAX_PATH_BYTES]u8,
comptime is_root: bool
) void {
switch (this.args.tag()) {
inline else => |tag| {
const ResultType = comptime switch (tag) {
@@ -471,6 +479,7 @@

switch (NodeFS.readdirWithEntriesRecursiveAsync(
buf,
path_buf,
this.args,
this,
basename,
@@ -482,7 +491,10 @@
for (entries.items) |*item| {
switch (comptime ResultType) {
bun.String => item.deref(),
Dirent => item.name.deref(),
Dirent => {
item.name.deref();
item.path.deref();
},
Buffer => bun.default_allocator.free(item.buffer.byteSlice()),
else => unreachable,
}
@@ -512,7 +524,8 @@
fn workPoolCallback(task: *JSC.WorkPoolTask) void {
var this: *AsyncReaddirRecursiveTask = @fieldParentPtr(AsyncReaddirRecursiveTask, "task", task);
var buf: [bun.MAX_PATH_BYTES]u8 = undefined;
this.performWork(this.root_path.sliceAssumeZ(), &buf, true);
var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
this.performWork(this.root_path.sliceAssumeZ(), &buf, &path_buf, true);
}

pub fn writeResults(this: *AsyncReaddirRecursiveTask, comptime ResultType: type, result: *std.ArrayList(ResultType)) void {
@@ -4620,6 +4633,7 @@ pub const NodeFS = struct {
switch (comptime ExpectedType) {
Dirent => {
item.name.deref();
item.path.deref();
},
Buffer => {
item.destroy();
@@ -4644,6 +4658,7 @@
Dirent => {
entries.append(.{
.name = bun.String.create(utf8_name),
.path = bun.String.create(args.path.slice()),
.kind = current.kind,
}) catch bun.outOfMemory();
},
@@ -4662,6 +4677,7 @@

pub fn readdirWithEntriesRecursiveAsync(
buf: *[bun.MAX_PATH_BYTES]u8,
path_buf: *[bun.MAX_PATH_BYTES]u8,
args: Arguments.Readdir,
async_task: *AsyncReaddirRecursiveTask,
basename: [:0]const u8,
@@ -4736,6 +4752,15 @@
break :brk bun.path.joinZBuf(buf, &path_parts, .auto);
};

const dirent_path_to_copy = brk: {
if (async_task.root_path.sliceAssumeZ().ptr == basename.ptr) {
break :brk args.path.slice();
}

const path_parts = [_]string{ args.path.slice(), name_to_copy };
break :brk std.fs.path.dirname(bun.path.joinZBuf(path_buf, &path_parts, .auto)) orelse args.path.slice();
};

enqueue: {
switch (current.kind) {
// a symlink might be a directory or might not be
@@ -4762,7 +4787,8 @@
switch (comptime ExpectedType) {
Dirent => {
entries.append(.{
.name = bun.String.create(name_to_copy),
.name = bun.String.create(utf8_name),
.path = bun.String.create(dirent_path_to_copy),
.kind = current.kind,
}) catch bun.outOfMemory();
},
@@ -4781,6 +4807,7 @@

fn readdirWithEntriesRecursiveSync(
buf: *[bun.MAX_PATH_BYTES]u8,
path_buf: *[bun.MAX_PATH_BYTES]u8,
args: Arguments.Readdir,
root_basename: [:0]const u8,
comptime ExpectedType: type,
@@ -4872,6 +4899,15 @@
break :brk bun.path.joinZBuf(buf, &path_parts, .auto);
};

const dirent_path_to_copy = brk: {
if (root_basename.ptr == basename.ptr) {
break :brk args.path.slice();
}

const path_parts = [_]string{ args.path.slice(), name_to_copy };
break :brk std.fs.path.dirname(bun.path.joinZBuf(path_buf, &path_parts, .auto)) orelse args.path.slice();
};

enqueue: {
switch (current.kind) {
// a symlink might be a directory or might not be
@@ -4891,7 +4927,8 @@
switch (comptime ExpectedType) {
Dirent => {
entries.append(.{
.name = bun.String.create(name_to_copy),
.name = bun.String.create(utf8_name),
.path = bun.String.create(dirent_path_to_copy),
.kind = current.kind,
}) catch bun.outOfMemory();
},
@@ -4927,14 +4964,16 @@

if (comptime recursive and flavor == .sync) {
var buf_to_pass: [bun.MAX_PATH_BYTES]u8 = undefined;
var path_buf_to_pass: [bun.MAX_PATH_BYTES]u8 = undefined;

var entries = std.ArrayList(ExpectedType).init(bun.default_allocator);
return switch (readdirWithEntriesRecursiveSync(&buf_to_pass, args, path, ExpectedType, &entries)) {
return switch (readdirWithEntriesRecursiveSync(&buf_to_pass, &path_buf_to_pass, args, path, ExpectedType, &entries)) {
.err => |err| {
for (entries.items) |*result| {
switch (comptime ExpectedType) {
Dirent => {
result.name.deref();
result.path.deref();
},
Buffer => {
result.destroy();
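The `dirent_path_to_copy` blocks above decide what each entry of a recursive readdir reports as its parent directory: entries directly under the requested root report the root itself, while deeper entries report the directory component of the root joined with the entry's relative name. A rough TypeScript analogue, with illustrative names that are not part of this PR:

import { dirname, join } from "node:path";

// `rootPath` is the path passed to readdir; `relativeName` is the entry's
// path relative to that root (what the Zig code calls `name_to_copy`).
function direntParentPath(rootPath: string, relativeName: string, isRootLevel: boolean): string {
  if (isRootLevel) {
    // Entries directly under the root: the parent is the root itself.
    return rootPath;
  }
  const parent = dirname(join(rootPath, relativeName));
  // Fall back to the root when no directory component can be derived
  // (the Zig code's `orelse args.path.slice()`).
  return parent === "." ? rootPath : parent;
}

// Example: direntParentPath("/tmp/root", "directory/subdirectory", false)
// evaluates to "/tmp/root/directory".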
5 changes: 5 additions & 0 deletions src/bun.js/node/types.zig
@@ -1720,6 +1720,7 @@ pub const Stats = union(enum) {
/// @since v12.12.0
pub const Dirent = struct {
name: bun.String,
path: bun.String,
// not publicly exposed
kind: Kind,

@@ -1735,6 +1736,10 @@
return this.name.toJS(globalObject);
}

pub fn getPath(this: *Dirent, globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue {
return this.path.toJS(globalObject);
}

pub fn isBlockDevice(
this: *Dirent,
_: *JSC.JSGlobalObject,
60 changes: 58 additions & 2 deletions test/js/node/fs/fs.test.ts
@@ -1,4 +1,4 @@
import { describe, expect, it } from "bun:test";
import { describe, expect, it, beforeEach } from "bun:test";
import { dirname, resolve, relative } from "node:path";
import { promisify } from "node:util";
import { bunEnv, bunExe, gc } from "harness";
@@ -66,6 +66,20 @@ it("Dirent.name setter", () => {
expect(dirent.name).toBe("hello");
});

it("Dirent.path setter", () => {
const dirent = Object.create(Dirent.prototype);
expect(dirent.path).toBeUndefined();
dirent.path = "hello";
expect(dirent.path).toBe("hello");
});

it("Dirent.parentPath setter", () => {
const dirent = Object.create(Dirent.prototype);
expect(dirent.parentPath).toBeUndefined();
dirent.parentPath = "hello";
expect(dirent.parentPath).toBe("hello");
});

it("writeFileSync in append should not truncate the file", () => {
const path = join(tmpdir(), "writeFileSync-should-not-append-" + (Date.now() * 10000).toString(16));
var str = "";
@@ -991,7 +1005,7 @@ describe("stat", () => {

it("stat returns ENOENT", () => {
try {
statSync("${tmpdir()}/doesntexist");
statSync(`${tmpdir()}/doesntexist`);
throw "statSync should throw";
} catch (e: any) {
expect(e.code).toBe("ENOENT");
@@ -2469,3 +2483,45 @@ it.if(isWindows)("writing to windows hidden file is possible", () => {
const content = readFileSync("file.txt", "utf8");
expect(content).toBe("Hello World");
});

describe("Dirent name and path values", () => {
const dirPath = `${tmpdir()}/Bun-fs-readdir-dirent`;
beforeEach(() => {
rmSync(dirPath, { recursive: true });
mkdirSync(dirPath, { recursive: true });
});
describe("nonrecursive readdir", () => {
it("should return the correct name and path values for directory", () => {
const path = join(dirPath, "directory");
mkdirSync(path, { recursive: true });
const entries = readdirSync(dirPath, { withFileTypes: true });
expect(entries[0].name).toBe("directory");
expect(entries[0].path).toBe(dirPath);
});
it("should return the correct name and path values for file", () => {
const path = join(dirPath, "file.txt");
writeFileSync(path, "Hello World");
const entries = readdirSync(dirPath, { withFileTypes: true });
expect(entries[0].name).toBe("file.txt");
expect(entries[0].path).toBe(dirPath);
});
});
describe("recursive readdirSync", () => {
it("should return the correct path values for directory and subdirectory", () => {
const path = join(dirPath, "directory", "subdirectory");
mkdirSync(path, { recursive: true });
const entries = readdirSync(dirPath, { recursive: true, withFileTypes: true });
expect(entries.find(e => e.name === "directory").path).toBe(dirPath);
expect(entries.find(e => e.name === "subdirectory").path).toBe(join(dirPath, "directory"));
});
});
describe("recursive readdir async", () => {
it("should return the correct path values for directory and subdirectory", async () => {
const path = join(dirPath, "directory", "subdirectory");
mkdirSync(path, { recursive: true });
const entries = await promises.readdir(dirPath, { recursive: true, withFileTypes: true });
expect(entries.find(e => e.name === "directory").path).toBe(dirPath);
expect(entries.find(e => e.name === "subdirectory").path).toBe(join(dirPath, "directory"));
});
});
});