Remove use of Bun.file (#14215)

This commit is contained in:
Dax
2026-02-19 11:32:32 -05:00
committed by GitHub
parent 0fcba68d4c
commit 02a9495063
44 changed files with 634 additions and 473 deletions

View File

@@ -1,42 +0,0 @@
---
name: bun-file-io
description: Use this when you are working on file operations like reading, writing, scanning, or deleting files. It summarizes the preferred file APIs and patterns used in this repo. It also notes when to use filesystem helpers for directories.
---
## Use this when
- Editing file I/O or scans in `packages/opencode`
- Handling directory operations or external tools
## Bun file APIs (from Bun docs)
- `Bun.file(path)` is lazy; call `text`, `json`, `stream`, `arrayBuffer`, `bytes`, `exists` to read.
- Metadata: `file.size`, `file.type`, `file.name`.
- `Bun.write(dest, input)` writes strings, buffers, Blobs, Responses, or files.
- `Bun.file(...).delete()` deletes a file.
- `file.writer()` returns a FileSink for incremental writes.
- `Bun.Glob` + `Array.fromAsync(glob.scan({ cwd, absolute, onlyFiles, dot }))` for scans.
- Use `Bun.which` to find a binary, then `Bun.spawn` to run it.
- `Bun.readableStreamToText/Bytes/JSON` for stream output.
## When to use node:fs
- Use `node:fs/promises` for directories (`mkdir`, `readdir`, recursive operations).
## Repo patterns
- Prefer Bun APIs over Node `fs` for file access.
- Check `Bun.file(...).exists()` before reading.
- For binary/large files use `arrayBuffer()` and MIME checks via `file.type`.
- Use `Bun.Glob` + `Array.fromAsync` for scans.
- Decode tool stderr with `Bun.readableStreamToText`.
- For large writes, use `Bun.write(Bun.file(path), text)`.
NOTE: `Bun.file(...).exists()` will return `false` if the path refers to a directory.
Use `Filesystem.exists(...)` instead when the path may be either a file or a directory.
## Quick checklist
- Use Bun APIs first.
- Use `path.join`/`path.resolve` for paths.
- Prefer promise `.catch(...)` over `try/catch` when possible.

View File

@@ -1,9 +1,10 @@
import path from "path" import path from "path"
import { Global } from "@/global" import { Global } from "@/global"
import { Filesystem } from "@/util/filesystem"
import { onMount } from "solid-js" import { onMount } from "solid-js"
import { createStore } from "solid-js/store" import { createStore } from "solid-js/store"
import { createSimpleContext } from "../../context/helper" import { createSimpleContext } from "../../context/helper"
import { appendFile } from "fs/promises" import { appendFile, writeFile } from "fs/promises"
function calculateFrecency(entry?: { frequency: number; lastOpen: number }): number { function calculateFrecency(entry?: { frequency: number; lastOpen: number }): number {
if (!entry) return 0 if (!entry) return 0
@@ -17,9 +18,9 @@ const MAX_FRECENCY_ENTRIES = 1000
export const { use: useFrecency, provider: FrecencyProvider } = createSimpleContext({ export const { use: useFrecency, provider: FrecencyProvider } = createSimpleContext({
name: "Frecency", name: "Frecency",
init: () => { init: () => {
const frecencyFile = Bun.file(path.join(Global.Path.state, "frecency.jsonl")) const frecencyPath = path.join(Global.Path.state, "frecency.jsonl")
onMount(async () => { onMount(async () => {
const text = await frecencyFile.text().catch(() => "") const text = await Filesystem.readText(frecencyPath).catch(() => "")
const lines = text const lines = text
.split("\n") .split("\n")
.filter(Boolean) .filter(Boolean)
@@ -53,7 +54,7 @@ export const { use: useFrecency, provider: FrecencyProvider } = createSimpleCont
if (sorted.length > 0) { if (sorted.length > 0) {
const content = sorted.map((entry) => JSON.stringify(entry)).join("\n") + "\n" const content = sorted.map((entry) => JSON.stringify(entry)).join("\n") + "\n"
Bun.write(frecencyFile, content).catch(() => {}) writeFile(frecencyPath, content).catch(() => {})
} }
}) })
@@ -68,7 +69,7 @@ export const { use: useFrecency, provider: FrecencyProvider } = createSimpleCont
lastOpen: Date.now(), lastOpen: Date.now(),
} }
setStore("data", absolutePath, newEntry) setStore("data", absolutePath, newEntry)
appendFile(frecencyFile.name!, JSON.stringify({ path: absolutePath, ...newEntry }) + "\n").catch(() => {}) appendFile(frecencyPath, JSON.stringify({ path: absolutePath, ...newEntry }) + "\n").catch(() => {})
if (Object.keys(store.data).length > MAX_FRECENCY_ENTRIES) { if (Object.keys(store.data).length > MAX_FRECENCY_ENTRIES) {
const sorted = Object.entries(store.data) const sorted = Object.entries(store.data)
@@ -76,7 +77,7 @@ export const { use: useFrecency, provider: FrecencyProvider } = createSimpleCont
.slice(0, MAX_FRECENCY_ENTRIES) .slice(0, MAX_FRECENCY_ENTRIES)
setStore("data", Object.fromEntries(sorted)) setStore("data", Object.fromEntries(sorted))
const content = sorted.map(([path, entry]) => JSON.stringify({ path, ...entry })).join("\n") + "\n" const content = sorted.map(([path, entry]) => JSON.stringify({ path, ...entry })).join("\n") + "\n"
Bun.write(frecencyFile, content).catch(() => {}) writeFile(frecencyPath, content).catch(() => {})
} }
} }

View File

@@ -1,5 +1,6 @@
import path from "path" import path from "path"
import { Global } from "@/global" import { Global } from "@/global"
import { Filesystem } from "@/util/filesystem"
import { onMount } from "solid-js" import { onMount } from "solid-js"
import { createStore, produce } from "solid-js/store" import { createStore, produce } from "solid-js/store"
import { clone } from "remeda" import { clone } from "remeda"
@@ -30,9 +31,9 @@ const MAX_HISTORY_ENTRIES = 50
export const { use: usePromptHistory, provider: PromptHistoryProvider } = createSimpleContext({ export const { use: usePromptHistory, provider: PromptHistoryProvider } = createSimpleContext({
name: "PromptHistory", name: "PromptHistory",
init: () => { init: () => {
const historyFile = Bun.file(path.join(Global.Path.state, "prompt-history.jsonl")) const historyPath = path.join(Global.Path.state, "prompt-history.jsonl")
onMount(async () => { onMount(async () => {
const text = await historyFile.text().catch(() => "") const text = await Filesystem.readText(historyPath).catch(() => "")
const lines = text const lines = text
.split("\n") .split("\n")
.filter(Boolean) .filter(Boolean)
@@ -51,7 +52,7 @@ export const { use: usePromptHistory, provider: PromptHistoryProvider } = create
// Rewrite file with only valid entries to self-heal corruption // Rewrite file with only valid entries to self-heal corruption
if (lines.length > 0) { if (lines.length > 0) {
const content = lines.map((line) => JSON.stringify(line)).join("\n") + "\n" const content = lines.map((line) => JSON.stringify(line)).join("\n") + "\n"
writeFile(historyFile.name!, content).catch(() => {}) writeFile(historyPath, content).catch(() => {})
} }
}) })
@@ -97,11 +98,11 @@ export const { use: usePromptHistory, provider: PromptHistoryProvider } = create
if (trimmed) { if (trimmed) {
const content = store.history.map((line) => JSON.stringify(line)).join("\n") + "\n" const content = store.history.map((line) => JSON.stringify(line)).join("\n") + "\n"
writeFile(historyFile.name!, content).catch(() => {}) writeFile(historyPath, content).catch(() => {})
return return
} }
appendFile(historyFile.name!, JSON.stringify(entry) + "\n").catch(() => {}) appendFile(historyPath, JSON.stringify(entry) + "\n").catch(() => {})
}, },
} }
}, },

View File

@@ -1,6 +1,8 @@
import { BoxRenderable, TextareaRenderable, MouseEvent, PasteEvent, t, dim, fg } from "@opentui/core" import { BoxRenderable, TextareaRenderable, MouseEvent, PasteEvent, t, dim, fg } from "@opentui/core"
import { createEffect, createMemo, type JSX, onMount, createSignal, onCleanup, on, Show, Switch, Match } from "solid-js" import { createEffect, createMemo, type JSX, onMount, createSignal, onCleanup, on, Show, Switch, Match } from "solid-js"
import "opentui-spinner/solid" import "opentui-spinner/solid"
import path from "path"
import { Filesystem } from "@/util/filesystem"
import { useLocal } from "@tui/context/local" import { useLocal } from "@tui/context/local"
import { useTheme } from "@tui/context/theme" import { useTheme } from "@tui/context/theme"
import { EmptyBorder } from "@tui/component/border" import { EmptyBorder } from "@tui/component/border"
@@ -931,26 +933,26 @@ export function Prompt(props: PromptProps) {
const isUrl = /^(https?):\/\//.test(filepath) const isUrl = /^(https?):\/\//.test(filepath)
if (!isUrl) { if (!isUrl) {
try { try {
const file = Bun.file(filepath) const mime = Filesystem.mimeType(filepath)
const filename = path.basename(filepath)
// Handle SVG as raw text content, not as base64 image // Handle SVG as raw text content, not as base64 image
if (file.type === "image/svg+xml") { if (mime === "image/svg+xml") {
event.preventDefault() event.preventDefault()
const content = await file.text().catch(() => {}) const content = await Filesystem.readText(filepath).catch(() => {})
if (content) { if (content) {
pasteText(content, `[SVG: ${file.name ?? "image"}]`) pasteText(content, `[SVG: ${filename ?? "image"}]`)
return return
} }
} }
if (file.type.startsWith("image/")) { if (mime.startsWith("image/")) {
event.preventDefault() event.preventDefault()
const content = await file const content = await Filesystem.readArrayBuffer(filepath)
.arrayBuffer()
.then((buffer) => Buffer.from(buffer).toString("base64")) .then((buffer) => Buffer.from(buffer).toString("base64"))
.catch(() => {}) .catch(() => {})
if (content) { if (content) {
await pasteImage({ await pasteImage({
filename: file.name, filename,
mime: file.type, mime,
content, content,
}) })
return return

View File

@@ -1,5 +1,6 @@
import path from "path" import path from "path"
import { Global } from "@/global" import { Global } from "@/global"
import { Filesystem } from "@/util/filesystem"
import { onMount } from "solid-js" import { onMount } from "solid-js"
import { createStore, produce } from "solid-js/store" import { createStore, produce } from "solid-js/store"
import { clone } from "remeda" import { clone } from "remeda"
@@ -18,9 +19,9 @@ const MAX_STASH_ENTRIES = 50
export const { use: usePromptStash, provider: PromptStashProvider } = createSimpleContext({ export const { use: usePromptStash, provider: PromptStashProvider } = createSimpleContext({
name: "PromptStash", name: "PromptStash",
init: () => { init: () => {
const stashFile = Bun.file(path.join(Global.Path.state, "prompt-stash.jsonl")) const stashPath = path.join(Global.Path.state, "prompt-stash.jsonl")
onMount(async () => { onMount(async () => {
const text = await stashFile.text().catch(() => "") const text = await Filesystem.readText(stashPath).catch(() => "")
const lines = text const lines = text
.split("\n") .split("\n")
.filter(Boolean) .filter(Boolean)
@@ -39,7 +40,7 @@ export const { use: usePromptStash, provider: PromptStashProvider } = createSimp
// Rewrite file with only valid entries to self-heal corruption // Rewrite file with only valid entries to self-heal corruption
if (lines.length > 0) { if (lines.length > 0) {
const content = lines.map((line) => JSON.stringify(line)).join("\n") + "\n" const content = lines.map((line) => JSON.stringify(line)).join("\n") + "\n"
writeFile(stashFile.name!, content).catch(() => {}) writeFile(stashPath, content).catch(() => {})
} }
}) })
@@ -66,11 +67,11 @@ export const { use: usePromptStash, provider: PromptStashProvider } = createSimp
if (trimmed) { if (trimmed) {
const content = store.entries.map((line) => JSON.stringify(line)).join("\n") + "\n" const content = store.entries.map((line) => JSON.stringify(line)).join("\n") + "\n"
writeFile(stashFile.name!, content).catch(() => {}) writeFile(stashPath, content).catch(() => {})
return return
} }
appendFile(stashFile.name!, JSON.stringify(stash) + "\n").catch(() => {}) appendFile(stashPath, JSON.stringify(stash) + "\n").catch(() => {})
}, },
pop() { pop() {
if (store.entries.length === 0) return undefined if (store.entries.length === 0) return undefined
@@ -82,7 +83,7 @@ export const { use: usePromptStash, provider: PromptStashProvider } = createSimp
) )
const content = const content =
store.entries.length > 0 ? store.entries.map((line) => JSON.stringify(line)).join("\n") + "\n" : "" store.entries.length > 0 ? store.entries.map((line) => JSON.stringify(line)).join("\n") + "\n" : ""
writeFile(stashFile.name!, content).catch(() => {}) writeFile(stashPath, content).catch(() => {})
return entry return entry
}, },
remove(index: number) { remove(index: number) {
@@ -94,7 +95,7 @@ export const { use: usePromptStash, provider: PromptStashProvider } = createSimp
) )
const content = const content =
store.entries.length > 0 ? store.entries.map((line) => JSON.stringify(line)).join("\n") + "\n" : "" store.entries.length > 0 ? store.entries.map((line) => JSON.stringify(line)).join("\n") + "\n" : ""
writeFile(stashFile.name!, content).catch(() => {}) writeFile(stashPath, content).catch(() => {})
}, },
} }
}, },

View File

@@ -1,4 +1,5 @@
import { Global } from "@/global" import { Global } from "@/global"
import { Filesystem } from "@/util/filesystem"
import { createSignal, type Setter } from "solid-js" import { createSignal, type Setter } from "solid-js"
import { createStore } from "solid-js/store" import { createStore } from "solid-js/store"
import { createSimpleContext } from "./helper" import { createSimpleContext } from "./helper"
@@ -9,10 +10,9 @@ export const { use: useKV, provider: KVProvider } = createSimpleContext({
init: () => { init: () => {
const [ready, setReady] = createSignal(false) const [ready, setReady] = createSignal(false)
const [store, setStore] = createStore<Record<string, any>>() const [store, setStore] = createStore<Record<string, any>>()
const file = Bun.file(path.join(Global.Path.state, "kv.json")) const filePath = path.join(Global.Path.state, "kv.json")
file Filesystem.readJson(filePath)
.json()
.then((x) => { .then((x) => {
setStore(x) setStore(x)
}) })
@@ -44,7 +44,7 @@ export const { use: useKV, provider: KVProvider } = createSimpleContext({
}, },
set(key: string, value: any) { set(key: string, value: any) {
setStore(key, value) setStore(key, value)
Bun.write(file, JSON.stringify(store, null, 2)) Filesystem.writeJson(filePath, store)
}, },
} }
return result return result

View File

@@ -12,6 +12,7 @@ import { Provider } from "@/provider/provider"
import { useArgs } from "./args" import { useArgs } from "./args"
import { useSDK } from "./sdk" import { useSDK } from "./sdk"
import { RGBA } from "@opentui/core" import { RGBA } from "@opentui/core"
import { Filesystem } from "@/util/filesystem"
export const { use: useLocal, provider: LocalProvider } = createSimpleContext({ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
name: "Local", name: "Local",
@@ -119,7 +120,7 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
variant: {}, variant: {},
}) })
const file = Bun.file(path.join(Global.Path.state, "model.json")) const filePath = path.join(Global.Path.state, "model.json")
const state = { const state = {
pending: false, pending: false,
} }
@@ -130,19 +131,15 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
return return
} }
state.pending = false state.pending = false
Bun.write( Filesystem.writeJson(filePath, {
file, recent: modelStore.recent,
JSON.stringify({ favorite: modelStore.favorite,
recent: modelStore.recent, variant: modelStore.variant,
favorite: modelStore.favorite, })
variant: modelStore.variant,
}),
)
} }
file Filesystem.readJson(filePath)
.json() .then((x: any) => {
.then((x) => {
if (Array.isArray(x.recent)) setModelStore("recent", x.recent) if (Array.isArray(x.recent)) setModelStore("recent", x.recent)
if (Array.isArray(x.favorite)) setModelStore("favorite", x.favorite) if (Array.isArray(x.favorite)) setModelStore("favorite", x.favorite)
if (typeof x.variant === "object" && x.variant !== null) setModelStore("variant", x.variant) if (typeof x.variant === "object" && x.variant !== null) setModelStore("variant", x.variant)

View File

@@ -412,7 +412,7 @@ async function getCustomThemes() {
cwd: dir, cwd: dir,
})) { })) {
const name = path.basename(item, ".json") const name = path.basename(item, ".json")
result[name] = await Bun.file(item).json() result[name] = await Filesystem.readJson(item)
} }
} }
return result return result

View File

@@ -3,10 +3,12 @@ import { tui } from "./app"
import { Rpc } from "@/util/rpc" import { Rpc } from "@/util/rpc"
import { type rpc } from "./worker" import { type rpc } from "./worker"
import path from "path" import path from "path"
import { fileURLToPath } from "url"
import { UI } from "@/cli/ui" import { UI } from "@/cli/ui"
import { iife } from "@/util/iife" import { iife } from "@/util/iife"
import { Log } from "@/util/log" import { Log } from "@/util/log"
import { withNetworkOptions, resolveNetworkOptions } from "@/cli/network" import { withNetworkOptions, resolveNetworkOptions } from "@/cli/network"
import { Filesystem } from "@/util/filesystem"
import type { Event } from "@opencode-ai/sdk/v2" import type { Event } from "@opencode-ai/sdk/v2"
import type { EventSource } from "./context/sdk" import type { EventSource } from "./context/sdk"
import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32" import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32"
@@ -99,7 +101,7 @@ export const TuiThreadCommand = cmd({
const distWorker = new URL("./cli/cmd/tui/worker.js", import.meta.url) const distWorker = new URL("./cli/cmd/tui/worker.js", import.meta.url)
const workerPath = await iife(async () => { const workerPath = await iife(async () => {
if (typeof OPENCODE_WORKER_PATH !== "undefined") return OPENCODE_WORKER_PATH if (typeof OPENCODE_WORKER_PATH !== "undefined") return OPENCODE_WORKER_PATH
if (await Bun.file(distWorker).exists()) return distWorker if (await Filesystem.exists(fileURLToPath(distWorker))) return distWorker
return localWorker return localWorker
}) })
try { try {

View File

@@ -147,8 +147,7 @@ export namespace LSPClient {
notify: { notify: {
async open(input: { path: string }) { async open(input: { path: string }) {
input.path = path.isAbsolute(input.path) ? input.path : path.resolve(Instance.directory, input.path) input.path = path.isAbsolute(input.path) ? input.path : path.resolve(Instance.directory, input.path)
const file = Bun.file(input.path) const text = await Filesystem.readText(input.path)
const text = await file.text()
const extension = path.extname(input.path) const extension = path.extname(input.path)
const languageId = LANGUAGE_EXTENSIONS[extension] ?? "plaintext" const languageId = LANGUAGE_EXTENSIONS[extension] ?? "plaintext"

View File

@@ -131,7 +131,7 @@ export namespace LSPServer {
"bin", "bin",
"vue-language-server.js", "vue-language-server.js",
) )
if (!(await Bun.file(js).exists())) { if (!(await Filesystem.exists(js))) {
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
await Bun.spawn([BunProc.which(), "install", "@vue/language-server"], { await Bun.spawn([BunProc.which(), "install", "@vue/language-server"], {
cwd: Global.Path.bin, cwd: Global.Path.bin,
@@ -173,14 +173,14 @@ export namespace LSPServer {
if (!eslint) return if (!eslint) return
log.info("spawning eslint server") log.info("spawning eslint server")
const serverPath = path.join(Global.Path.bin, "vscode-eslint", "server", "out", "eslintServer.js") const serverPath = path.join(Global.Path.bin, "vscode-eslint", "server", "out", "eslintServer.js")
if (!(await Bun.file(serverPath).exists())) { if (!(await Filesystem.exists(serverPath))) {
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
log.info("downloading and building VS Code ESLint server") log.info("downloading and building VS Code ESLint server")
const response = await fetch("https://github.com/microsoft/vscode-eslint/archive/refs/heads/main.zip") const response = await fetch("https://github.com/microsoft/vscode-eslint/archive/refs/heads/main.zip")
if (!response.ok) return if (!response.ok) return
const zipPath = path.join(Global.Path.bin, "vscode-eslint.zip") const zipPath = path.join(Global.Path.bin, "vscode-eslint.zip")
await Bun.file(zipPath).write(response) if (response.body) await Filesystem.writeStream(zipPath, response.body)
const ok = await Archive.extractZip(zipPath, Global.Path.bin) const ok = await Archive.extractZip(zipPath, Global.Path.bin)
.then(() => true) .then(() => true)
@@ -242,7 +242,7 @@ export namespace LSPServer {
const resolveBin = async (target: string) => { const resolveBin = async (target: string) => {
const localBin = path.join(root, target) const localBin = path.join(root, target)
if (await Bun.file(localBin).exists()) return localBin if (await Filesystem.exists(localBin)) return localBin
const candidates = Filesystem.up({ const candidates = Filesystem.up({
targets: [target], targets: [target],
@@ -326,7 +326,7 @@ export namespace LSPServer {
async spawn(root) { async spawn(root) {
const localBin = path.join(root, "node_modules", ".bin", "biome") const localBin = path.join(root, "node_modules", ".bin", "biome")
let bin: string | undefined let bin: string | undefined
if (await Bun.file(localBin).exists()) bin = localBin if (await Filesystem.exists(localBin)) bin = localBin
if (!bin) { if (!bin) {
const found = Bun.which("biome") const found = Bun.which("biome")
if (found) bin = found if (found) bin = found
@@ -467,7 +467,7 @@ export namespace LSPServer {
const potentialPythonPath = isWindows const potentialPythonPath = isWindows
? path.join(venvPath, "Scripts", "python.exe") ? path.join(venvPath, "Scripts", "python.exe")
: path.join(venvPath, "bin", "python") : path.join(venvPath, "bin", "python")
if (await Bun.file(potentialPythonPath).exists()) { if (await Filesystem.exists(potentialPythonPath)) {
initialization["pythonPath"] = potentialPythonPath initialization["pythonPath"] = potentialPythonPath
break break
} }
@@ -479,7 +479,7 @@ export namespace LSPServer {
const potentialTyPath = isWindows const potentialTyPath = isWindows
? path.join(venvPath, "Scripts", "ty.exe") ? path.join(venvPath, "Scripts", "ty.exe")
: path.join(venvPath, "bin", "ty") : path.join(venvPath, "bin", "ty")
if (await Bun.file(potentialTyPath).exists()) { if (await Filesystem.exists(potentialTyPath)) {
binary = potentialTyPath binary = potentialTyPath
break break
} }
@@ -511,7 +511,7 @@ export namespace LSPServer {
const args = [] const args = []
if (!binary) { if (!binary) {
const js = path.join(Global.Path.bin, "node_modules", "pyright", "dist", "pyright-langserver.js") const js = path.join(Global.Path.bin, "node_modules", "pyright", "dist", "pyright-langserver.js")
if (!(await Bun.file(js).exists())) { if (!(await Filesystem.exists(js))) {
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
await Bun.spawn([BunProc.which(), "install", "pyright"], { await Bun.spawn([BunProc.which(), "install", "pyright"], {
cwd: Global.Path.bin, cwd: Global.Path.bin,
@@ -536,7 +536,7 @@ export namespace LSPServer {
const potentialPythonPath = isWindows const potentialPythonPath = isWindows
? path.join(venvPath, "Scripts", "python.exe") ? path.join(venvPath, "Scripts", "python.exe")
: path.join(venvPath, "bin", "python") : path.join(venvPath, "bin", "python")
if (await Bun.file(potentialPythonPath).exists()) { if (await Filesystem.exists(potentialPythonPath)) {
initialization["pythonPath"] = potentialPythonPath initialization["pythonPath"] = potentialPythonPath
break break
} }
@@ -571,7 +571,7 @@ export namespace LSPServer {
process.platform === "win32" ? "language_server.bat" : "language_server.sh", process.platform === "win32" ? "language_server.bat" : "language_server.sh",
) )
if (!(await Bun.file(binary).exists())) { if (!(await Filesystem.exists(binary))) {
const elixir = Bun.which("elixir") const elixir = Bun.which("elixir")
if (!elixir) { if (!elixir) {
log.error("elixir is required to run elixir-ls") log.error("elixir is required to run elixir-ls")
@@ -584,7 +584,7 @@ export namespace LSPServer {
const response = await fetch("https://github.com/elixir-lsp/elixir-ls/archive/refs/heads/master.zip") const response = await fetch("https://github.com/elixir-lsp/elixir-ls/archive/refs/heads/master.zip")
if (!response.ok) return if (!response.ok) return
const zipPath = path.join(Global.Path.bin, "elixir-ls.zip") const zipPath = path.join(Global.Path.bin, "elixir-ls.zip")
await Bun.file(zipPath).write(response) if (response.body) await Filesystem.writeStream(zipPath, response.body)
const ok = await Archive.extractZip(zipPath, Global.Path.bin) const ok = await Archive.extractZip(zipPath, Global.Path.bin)
.then(() => true) .then(() => true)
@@ -692,7 +692,7 @@ export namespace LSPServer {
} }
const tempPath = path.join(Global.Path.bin, assetName) const tempPath = path.join(Global.Path.bin, assetName)
await Bun.file(tempPath).write(downloadResponse) if (downloadResponse.body) await Filesystem.writeStream(tempPath, downloadResponse.body)
if (ext === "zip") { if (ext === "zip") {
const ok = await Archive.extractZip(tempPath, Global.Path.bin) const ok = await Archive.extractZip(tempPath, Global.Path.bin)
@@ -710,7 +710,7 @@ export namespace LSPServer {
bin = path.join(Global.Path.bin, "zls" + (platform === "win32" ? ".exe" : "")) bin = path.join(Global.Path.bin, "zls" + (platform === "win32" ? ".exe" : ""))
if (!(await Bun.file(bin).exists())) { if (!(await Filesystem.exists(bin))) {
log.error("Failed to extract zls binary") log.error("Failed to extract zls binary")
return return
} }
@@ -857,7 +857,7 @@ export namespace LSPServer {
// Stop at filesystem root // Stop at filesystem root
const cargoTomlPath = path.join(currentDir, "Cargo.toml") const cargoTomlPath = path.join(currentDir, "Cargo.toml")
try { try {
const cargoTomlContent = await Bun.file(cargoTomlPath).text() const cargoTomlContent = await Filesystem.readText(cargoTomlPath)
if (cargoTomlContent.includes("[workspace]")) { if (cargoTomlContent.includes("[workspace]")) {
return currentDir return currentDir
} }
@@ -907,7 +907,7 @@ export namespace LSPServer {
const ext = process.platform === "win32" ? ".exe" : "" const ext = process.platform === "win32" ? ".exe" : ""
const direct = path.join(Global.Path.bin, "clangd" + ext) const direct = path.join(Global.Path.bin, "clangd" + ext)
if (await Bun.file(direct).exists()) { if (await Filesystem.exists(direct)) {
return { return {
process: spawn(direct, args, { process: spawn(direct, args, {
cwd: root, cwd: root,
@@ -920,7 +920,7 @@ export namespace LSPServer {
if (!entry.isDirectory()) continue if (!entry.isDirectory()) continue
if (!entry.name.startsWith("clangd_")) continue if (!entry.name.startsWith("clangd_")) continue
const candidate = path.join(Global.Path.bin, entry.name, "bin", "clangd" + ext) const candidate = path.join(Global.Path.bin, entry.name, "bin", "clangd" + ext)
if (await Bun.file(candidate).exists()) { if (await Filesystem.exists(candidate)) {
return { return {
process: spawn(candidate, args, { process: spawn(candidate, args, {
cwd: root, cwd: root,
@@ -990,7 +990,7 @@ export namespace LSPServer {
log.error("Failed to write clangd archive") log.error("Failed to write clangd archive")
return return
} }
await Bun.write(archive, buf) await Filesystem.write(archive, Buffer.from(buf))
const zip = name.endsWith(".zip") const zip = name.endsWith(".zip")
const tar = name.endsWith(".tar.xz") const tar = name.endsWith(".tar.xz")
@@ -1014,7 +1014,7 @@ export namespace LSPServer {
await fs.rm(archive, { force: true }) await fs.rm(archive, { force: true })
const bin = path.join(Global.Path.bin, "clangd_" + tag, "bin", "clangd" + ext) const bin = path.join(Global.Path.bin, "clangd_" + tag, "bin", "clangd" + ext)
if (!(await Bun.file(bin).exists())) { if (!(await Filesystem.exists(bin))) {
log.error("Failed to extract clangd binary") log.error("Failed to extract clangd binary")
return return
} }
@@ -1045,7 +1045,7 @@ export namespace LSPServer {
const args: string[] = [] const args: string[] = []
if (!binary) { if (!binary) {
const js = path.join(Global.Path.bin, "node_modules", "svelte-language-server", "bin", "server.js") const js = path.join(Global.Path.bin, "node_modules", "svelte-language-server", "bin", "server.js")
if (!(await Bun.file(js).exists())) { if (!(await Filesystem.exists(js))) {
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
await Bun.spawn([BunProc.which(), "install", "svelte-language-server"], { await Bun.spawn([BunProc.which(), "install", "svelte-language-server"], {
cwd: Global.Path.bin, cwd: Global.Path.bin,
@@ -1092,7 +1092,7 @@ export namespace LSPServer {
const args: string[] = [] const args: string[] = []
if (!binary) { if (!binary) {
const js = path.join(Global.Path.bin, "node_modules", "@astrojs", "language-server", "bin", "nodeServer.js") const js = path.join(Global.Path.bin, "node_modules", "@astrojs", "language-server", "bin", "nodeServer.js")
if (!(await Bun.file(js).exists())) { if (!(await Filesystem.exists(js))) {
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
await Bun.spawn([BunProc.which(), "install", "@astrojs/language-server"], { await Bun.spawn([BunProc.which(), "install", "@astrojs/language-server"], {
cwd: Global.Path.bin, cwd: Global.Path.bin,
@@ -1248,7 +1248,7 @@ export namespace LSPServer {
const distPath = path.join(Global.Path.bin, "kotlin-ls") const distPath = path.join(Global.Path.bin, "kotlin-ls")
const launcherScript = const launcherScript =
process.platform === "win32" ? path.join(distPath, "kotlin-lsp.cmd") : path.join(distPath, "kotlin-lsp.sh") process.platform === "win32" ? path.join(distPath, "kotlin-lsp.cmd") : path.join(distPath, "kotlin-lsp.sh")
const installed = await Bun.file(launcherScript).exists() const installed = await Filesystem.exists(launcherScript)
if (!installed) { if (!installed) {
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
log.info("Downloading Kotlin Language Server from GitHub.") log.info("Downloading Kotlin Language Server from GitHub.")
@@ -1307,7 +1307,7 @@ export namespace LSPServer {
} }
log.info("Installed Kotlin Language Server", { path: launcherScript }) log.info("Installed Kotlin Language Server", { path: launcherScript })
} }
if (!(await Bun.file(launcherScript).exists())) { if (!(await Filesystem.exists(launcherScript))) {
log.error(`Failed to locate the Kotlin LS launcher script in the installed directory: ${distPath}.`) log.error(`Failed to locate the Kotlin LS launcher script in the installed directory: ${distPath}.`)
return return
} }
@@ -1336,7 +1336,7 @@ export namespace LSPServer {
"src", "src",
"server.js", "server.js",
) )
const exists = await Bun.file(js).exists() const exists = await Filesystem.exists(js)
if (!exists) { if (!exists) {
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
await Bun.spawn([BunProc.which(), "install", "yaml-language-server"], { await Bun.spawn([BunProc.which(), "install", "yaml-language-server"], {
@@ -1443,7 +1443,7 @@ export namespace LSPServer {
} }
const tempPath = path.join(Global.Path.bin, assetName) const tempPath = path.join(Global.Path.bin, assetName)
await Bun.file(tempPath).write(downloadResponse) if (downloadResponse.body) await Filesystem.writeStream(tempPath, downloadResponse.body)
// Unlike zls which is a single self-contained binary, // Unlike zls which is a single self-contained binary,
// lua-language-server needs supporting files (meta/, locale/, etc.) // lua-language-server needs supporting files (meta/, locale/, etc.)
@@ -1482,7 +1482,7 @@ export namespace LSPServer {
// Binary is located in bin/ subdirectory within the extracted archive // Binary is located in bin/ subdirectory within the extracted archive
bin = path.join(installDir, "bin", "lua-language-server" + (platform === "win32" ? ".exe" : "")) bin = path.join(installDir, "bin", "lua-language-server" + (platform === "win32" ? ".exe" : ""))
if (!(await Bun.file(bin).exists())) { if (!(await Filesystem.exists(bin))) {
log.error("Failed to extract lua-language-server binary") log.error("Failed to extract lua-language-server binary")
return return
} }
@@ -1516,7 +1516,7 @@ export namespace LSPServer {
const args: string[] = [] const args: string[] = []
if (!binary) { if (!binary) {
const js = path.join(Global.Path.bin, "node_modules", "intelephense", "lib", "intelephense.js") const js = path.join(Global.Path.bin, "node_modules", "intelephense", "lib", "intelephense.js")
if (!(await Bun.file(js).exists())) { if (!(await Filesystem.exists(js))) {
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
await Bun.spawn([BunProc.which(), "install", "intelephense"], { await Bun.spawn([BunProc.which(), "install", "intelephense"], {
cwd: Global.Path.bin, cwd: Global.Path.bin,
@@ -1613,7 +1613,7 @@ export namespace LSPServer {
const args: string[] = [] const args: string[] = []
if (!binary) { if (!binary) {
const js = path.join(Global.Path.bin, "node_modules", "bash-language-server", "out", "cli.js") const js = path.join(Global.Path.bin, "node_modules", "bash-language-server", "out", "cli.js")
if (!(await Bun.file(js).exists())) { if (!(await Filesystem.exists(js))) {
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
await Bun.spawn([BunProc.which(), "install", "bash-language-server"], { await Bun.spawn([BunProc.which(), "install", "bash-language-server"], {
cwd: Global.Path.bin, cwd: Global.Path.bin,
@@ -1654,22 +1654,17 @@ export namespace LSPServer {
if (!bin) { if (!bin) {
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
log.info("downloading terraform-ls from GitHub releases") log.info("downloading terraform-ls from HashiCorp releases")
const releaseResponse = await fetch("https://api.github.com/repos/hashicorp/terraform-ls/releases/latest") const releaseResponse = await fetch("https://api.releases.hashicorp.com/v1/releases/terraform-ls/latest")
if (!releaseResponse.ok) { if (!releaseResponse.ok) {
log.error("Failed to fetch terraform-ls release info") log.error("Failed to fetch terraform-ls release info")
return return
} }
const release = (await releaseResponse.json()) as { const release = (await releaseResponse.json()) as {
tag_name?: string version?: string
assets?: { name?: string; browser_download_url?: string }[] builds?: { arch?: string; os?: string; url?: string }[]
}
const version = release.tag_name?.replace("v", "")
if (!version) {
log.error("terraform-ls release did not include a version tag")
return
} }
const platform = process.platform const platform = process.platform
@@ -1678,23 +1673,21 @@ export namespace LSPServer {
const tfArch = arch === "arm64" ? "arm64" : "amd64" const tfArch = arch === "arm64" ? "arm64" : "amd64"
const tfPlatform = platform === "win32" ? "windows" : platform const tfPlatform = platform === "win32" ? "windows" : platform
const assetName = `terraform-ls_${version}_${tfPlatform}_${tfArch}.zip` const builds = release.builds ?? []
const build = builds.find((b) => b.arch === tfArch && b.os === tfPlatform)
const assets = release.assets ?? [] if (!build?.url) {
const asset = assets.find((a) => a.name === assetName) log.error(`Could not find build for ${tfPlatform}/${tfArch} terraform-ls release version ${release.version}`)
if (!asset?.browser_download_url) {
log.error(`Could not find asset ${assetName} in terraform-ls release`)
return return
} }
const downloadResponse = await fetch(asset.browser_download_url) const downloadResponse = await fetch(build.url)
if (!downloadResponse.ok) { if (!downloadResponse.ok) {
log.error("Failed to download terraform-ls") log.error("Failed to download terraform-ls")
return return
} }
const tempPath = path.join(Global.Path.bin, assetName) const tempPath = path.join(Global.Path.bin, "terraform-ls.zip")
await Bun.file(tempPath).write(downloadResponse) if (downloadResponse.body) await Filesystem.writeStream(tempPath, downloadResponse.body)
const ok = await Archive.extractZip(tempPath, Global.Path.bin) const ok = await Archive.extractZip(tempPath, Global.Path.bin)
.then(() => true) .then(() => true)
@@ -1707,7 +1700,7 @@ export namespace LSPServer {
bin = path.join(Global.Path.bin, "terraform-ls" + (platform === "win32" ? ".exe" : "")) bin = path.join(Global.Path.bin, "terraform-ls" + (platform === "win32" ? ".exe" : ""))
if (!(await Bun.file(bin).exists())) { if (!(await Filesystem.exists(bin))) {
log.error("Failed to extract terraform-ls binary") log.error("Failed to extract terraform-ls binary")
return return
} }
@@ -1784,7 +1777,7 @@ export namespace LSPServer {
} }
const tempPath = path.join(Global.Path.bin, assetName) const tempPath = path.join(Global.Path.bin, assetName)
await Bun.file(tempPath).write(downloadResponse) if (downloadResponse.body) await Filesystem.writeStream(tempPath, downloadResponse.body)
if (ext === "zip") { if (ext === "zip") {
const ok = await Archive.extractZip(tempPath, Global.Path.bin) const ok = await Archive.extractZip(tempPath, Global.Path.bin)
@@ -1803,7 +1796,7 @@ export namespace LSPServer {
bin = path.join(Global.Path.bin, "texlab" + (platform === "win32" ? ".exe" : "")) bin = path.join(Global.Path.bin, "texlab" + (platform === "win32" ? ".exe" : ""))
if (!(await Bun.file(bin).exists())) { if (!(await Filesystem.exists(bin))) {
log.error("Failed to extract texlab binary") log.error("Failed to extract texlab binary")
return return
} }
@@ -1832,7 +1825,7 @@ export namespace LSPServer {
const args: string[] = [] const args: string[] = []
if (!binary) { if (!binary) {
const js = path.join(Global.Path.bin, "node_modules", "dockerfile-language-server-nodejs", "lib", "server.js") const js = path.join(Global.Path.bin, "node_modules", "dockerfile-language-server-nodejs", "lib", "server.js")
if (!(await Bun.file(js).exists())) { if (!(await Filesystem.exists(js))) {
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
await Bun.spawn([BunProc.which(), "install", "dockerfile-language-server-nodejs"], { await Bun.spawn([BunProc.which(), "install", "dockerfile-language-server-nodejs"], {
cwd: Global.Path.bin, cwd: Global.Path.bin,
@@ -1990,7 +1983,7 @@ export namespace LSPServer {
} }
const tempPath = path.join(Global.Path.bin, assetName) const tempPath = path.join(Global.Path.bin, assetName)
await Bun.file(tempPath).write(downloadResponse) if (downloadResponse.body) await Filesystem.writeStream(tempPath, downloadResponse.body)
if (ext === "zip") { if (ext === "zip") {
const ok = await Archive.extractZip(tempPath, Global.Path.bin) const ok = await Archive.extractZip(tempPath, Global.Path.bin)
@@ -2008,7 +2001,7 @@ export namespace LSPServer {
bin = path.join(Global.Path.bin, "tinymist" + (platform === "win32" ? ".exe" : "")) bin = path.join(Global.Path.bin, "tinymist" + (platform === "win32" ? ".exe" : ""))
if (!(await Bun.file(bin).exists())) { if (!(await Filesystem.exists(bin))) {
log.error("Failed to extract tinymist binary") log.error("Failed to extract tinymist binary")
return return
} }

View File

@@ -1,6 +1,7 @@
import path from "path" import path from "path"
import z from "zod" import z from "zod"
import { Global } from "../global" import { Global } from "../global"
import { Filesystem } from "../util/filesystem"
export namespace McpAuth { export namespace McpAuth {
export const Tokens = z.object({ export const Tokens = z.object({
@@ -53,25 +54,22 @@ export namespace McpAuth {
} }
export async function all(): Promise<Record<string, Entry>> { export async function all(): Promise<Record<string, Entry>> {
const file = Bun.file(filepath) return Filesystem.readJson<Record<string, Entry>>(filepath).catch(() => ({}))
return file.json().catch(() => ({}))
} }
export async function set(mcpName: string, entry: Entry, serverUrl?: string): Promise<void> { export async function set(mcpName: string, entry: Entry, serverUrl?: string): Promise<void> {
const file = Bun.file(filepath)
const data = await all() const data = await all()
// Always update serverUrl if provided // Always update serverUrl if provided
if (serverUrl) { if (serverUrl) {
entry.serverUrl = serverUrl entry.serverUrl = serverUrl
} }
await Bun.write(file, JSON.stringify({ ...data, [mcpName]: entry }, null, 2), { mode: 0o600 }) await Filesystem.writeJson(filepath, { ...data, [mcpName]: entry }, 0o600)
} }
export async function remove(mcpName: string): Promise<void> { export async function remove(mcpName: string): Promise<void> {
const file = Bun.file(filepath)
const data = await all() const data = await all()
delete data[mcpName] delete data[mcpName]
await Bun.write(file, JSON.stringify(data, null, 2), { mode: 0o600 }) await Filesystem.writeJson(filepath, data, 0o600)
} }
export async function updateTokens(mcpName: string, tokens: Tokens, serverUrl?: string): Promise<void> { export async function updateTokens(mcpName: string, tokens: Tokens, serverUrl?: string): Promise<void> {

View File

@@ -86,8 +86,7 @@ export namespace Project {
const gitBinary = Bun.which("git") const gitBinary = Bun.which("git")
// cached id calculation // cached id calculation
let id = await Bun.file(path.join(dotgit, "opencode")) let id = await Filesystem.readText(path.join(dotgit, "opencode"))
.text()
.then((x) => x.trim()) .then((x) => x.trim())
.catch(() => undefined) .catch(() => undefined)
@@ -125,9 +124,7 @@ export namespace Project {
id = roots[0] id = roots[0]
if (id) { if (id) {
void Bun.file(path.join(dotgit, "opencode")) void Filesystem.write(path.join(dotgit, "opencode"), id).catch(() => undefined)
.write(id)
.catch(() => undefined)
} }
} }
@@ -277,10 +274,9 @@ export namespace Project {
) )
const shortest = matches.sort((a, b) => a.length - b.length)[0] const shortest = matches.sort((a, b) => a.length - b.length)[0]
if (!shortest) return if (!shortest) return
const file = Bun.file(shortest) const buffer = await Filesystem.readBytes(shortest)
const buffer = await file.arrayBuffer() const base64 = buffer.toString("base64")
const base64 = Buffer.from(buffer).toString("base64") const mime = Filesystem.mimeType(shortest) || "image/png"
const mime = file.type || "image/png"
const url = `data:${mime};base64,${base64}` const url = `data:${mime};base64,${base64}`
await update({ await update({
projectID: input.id, projectID: input.id,
@@ -381,10 +377,8 @@ export namespace Project {
const data = fromRow(row) const data = fromRow(row)
const valid: string[] = [] const valid: string[] = []
for (const dir of data.sandboxes) { for (const dir of data.sandboxes) {
const stat = await Bun.file(dir) const s = Filesystem.stat(dir)
.stat() if (s?.isDirectory()) valid.push(dir)
.catch(() => undefined)
if (stat?.isDirectory()) valid.push(dir)
} }
return valid return valid
} }

View File

@@ -5,6 +5,7 @@ import z from "zod"
import { Installation } from "../installation" import { Installation } from "../installation"
import { Flag } from "../flag/flag" import { Flag } from "../flag/flag"
import { lazy } from "@/util/lazy" import { lazy } from "@/util/lazy"
import { Filesystem } from "../util/filesystem"
// Try to import bundled snapshot (generated at build time) // Try to import bundled snapshot (generated at build time)
// Falls back to undefined in dev mode when snapshot doesn't exist // Falls back to undefined in dev mode when snapshot doesn't exist
@@ -85,8 +86,7 @@ export namespace ModelsDev {
} }
export const Data = lazy(async () => { export const Data = lazy(async () => {
const file = Bun.file(Flag.OPENCODE_MODELS_PATH ?? filepath) const result = await Filesystem.readJson(Flag.OPENCODE_MODELS_PATH ?? filepath).catch(() => {})
const result = await file.json().catch(() => {})
if (result) return result if (result) return result
// @ts-ignore // @ts-ignore
const snapshot = await import("./models-snapshot") const snapshot = await import("./models-snapshot")
@@ -104,7 +104,6 @@ export namespace ModelsDev {
} }
export async function refresh() { export async function refresh() {
const file = Bun.file(filepath)
const result = await fetch(`${url()}/api.json`, { const result = await fetch(`${url()}/api.json`, {
headers: { headers: {
"User-Agent": Installation.USER_AGENT, "User-Agent": Installation.USER_AGENT,
@@ -116,7 +115,7 @@ export namespace ModelsDev {
}) })
}) })
if (result && result.ok) { if (result && result.ok) {
await Bun.write(file, await result.text()) await Filesystem.write(filepath, await result.text())
ModelsDev.Data.reset() ModelsDev.Data.reset()
} }
} }

View File

@@ -16,6 +16,7 @@ import { Flag } from "../flag/flag"
import { iife } from "@/util/iife" import { iife } from "@/util/iife"
import { Global } from "../global" import { Global } from "../global"
import path from "path" import path from "path"
import { Filesystem } from "../util/filesystem"
// Direct imports for bundled providers // Direct imports for bundled providers
import { createAmazonBedrock, type AmazonBedrockProviderSettings } from "@ai-sdk/amazon-bedrock" import { createAmazonBedrock, type AmazonBedrockProviderSettings } from "@ai-sdk/amazon-bedrock"
@@ -1289,8 +1290,9 @@ export namespace Provider {
if (cfg.model) return parseModel(cfg.model) if (cfg.model) return parseModel(cfg.model)
const providers = await list() const providers = await list()
const recent = (await Bun.file(path.join(Global.Path.state, "model.json")) const recent = (await Filesystem.readJson<{ recent?: { providerID: string; modelID: string }[] }>(
.json() path.join(Global.Path.state, "model.json"),
)
.then((x) => (Array.isArray(x.recent) ? x.recent : [])) .then((x) => (Array.isArray(x.recent) ? x.recent : []))
.catch(() => [])) as { providerID: string; modelID: string }[] .catch(() => [])) as { providerID: string; modelID: string }[]
for (const entry of recent) { for (const entry of recent) {

View File

@@ -85,7 +85,7 @@ export namespace InstructionPrompt {
} }
for (const file of globalFiles()) { for (const file of globalFiles()) {
if (await Bun.file(file).exists()) { if (await Filesystem.exists(file)) {
paths.add(path.resolve(file)) paths.add(path.resolve(file))
break break
} }
@@ -120,9 +120,7 @@ export namespace InstructionPrompt {
const paths = await systemPaths() const paths = await systemPaths()
const files = Array.from(paths).map(async (p) => { const files = Array.from(paths).map(async (p) => {
const content = await Bun.file(p) const content = await Filesystem.readText(p).catch(() => "")
.text()
.catch(() => "")
return content ? "Instructions from: " + p + "\n" + content : "" return content ? "Instructions from: " + p + "\n" + content : ""
}) })
@@ -164,7 +162,7 @@ export namespace InstructionPrompt {
export async function find(dir: string) { export async function find(dir: string) {
for (const file of FILES) { for (const file of FILES) {
const filepath = path.resolve(path.join(dir, file)) const filepath = path.resolve(path.join(dir, file))
if (await Bun.file(filepath).exists()) return filepath if (await Filesystem.exists(filepath)) return filepath
} }
} }
@@ -182,9 +180,7 @@ export namespace InstructionPrompt {
if (found && found !== target && !system.has(found) && !already.has(found) && !isClaimed(messageID, found)) { if (found && found !== target && !system.has(found) && !already.has(found) && !isClaimed(messageID, found)) {
claim(messageID, found) claim(messageID, found)
const content = await Bun.file(found) const content = await Filesystem.readText(found).catch(() => undefined)
.text()
.catch(() => undefined)
if (content) { if (content) {
results.push({ filepath: found, content: "Instructions from: " + found + "\n" + content }) results.push({ filepath: found, content: "Instructions from: " + found + "\n" + content })
} }

View File

@@ -2,6 +2,7 @@ import path from "path"
import os from "os" import os from "os"
import fs from "fs/promises" import fs from "fs/promises"
import z from "zod" import z from "zod"
import { Filesystem } from "../util/filesystem"
import { Identifier } from "../id/id" import { Identifier } from "../id/id"
import { MessageV2 } from "./message-v2" import { MessageV2 } from "./message-v2"
import { Log } from "../util/log" import { Log } from "../util/log"
@@ -1082,11 +1083,9 @@ export namespace SessionPrompt {
// have to normalize, symbol search returns absolute paths // have to normalize, symbol search returns absolute paths
// Decode the pathname since URL constructor doesn't automatically decode it // Decode the pathname since URL constructor doesn't automatically decode it
const filepath = fileURLToPath(part.url) const filepath = fileURLToPath(part.url)
const stat = await Bun.file(filepath) const s = Filesystem.stat(filepath)
.stat()
.catch(() => undefined)
if (stat?.isDirectory()) { if (s?.isDirectory()) {
part.mime = "application/x-directory" part.mime = "application/x-directory"
} }
@@ -1233,14 +1232,13 @@ export namespace SessionPrompt {
] ]
} }
const file = Bun.file(filepath)
FileTime.read(input.sessionID, filepath) FileTime.read(input.sessionID, filepath)
return [ return [
{ {
messageID: info.id, messageID: info.id,
sessionID: input.sessionID, sessionID: input.sessionID,
type: "text", type: "text",
text: `Called the Read tool with the following input: {\"filePath\":\"${filepath}\"}`, text: `Called the Read tool with the following input: {"filePath":"${filepath}"}`,
synthetic: true, synthetic: true,
}, },
{ {
@@ -1248,7 +1246,7 @@ export namespace SessionPrompt {
messageID: info.id, messageID: info.id,
sessionID: input.sessionID, sessionID: input.sessionID,
type: "file", type: "file",
url: `data:${part.mime};base64,` + Buffer.from(await file.bytes()).toString("base64"), url: `data:${part.mime};base64,` + (await Filesystem.readBytes(filepath)).toString("base64"),
mime: part.mime, mime: part.mime,
filename: part.filename!, filename: part.filename!,
source: part.source, source: part.source,
@@ -1354,7 +1352,7 @@ export namespace SessionPrompt {
// Switching from plan mode to build mode // Switching from plan mode to build mode
if (input.agent.name !== "plan" && assistantMessage?.info.agent === "plan") { if (input.agent.name !== "plan" && assistantMessage?.info.agent === "plan") {
const plan = Session.plan(input.session) const plan = Session.plan(input.session)
const exists = await Bun.file(plan).exists() const exists = await Filesystem.exists(plan)
if (exists) { if (exists) {
const part = await Session.updatePart({ const part = await Session.updatePart({
id: Identifier.ascending("part"), id: Identifier.ascending("part"),
@@ -1373,7 +1371,7 @@ export namespace SessionPrompt {
// Entering plan mode // Entering plan mode
if (input.agent.name === "plan" && assistantMessage?.info.agent !== "plan") { if (input.agent.name === "plan" && assistantMessage?.info.agent !== "plan") {
const plan = Session.plan(input.session) const plan = Session.plan(input.session)
const exists = await Bun.file(plan).exists() const exists = await Filesystem.exists(plan)
if (!exists) await fs.mkdir(path.dirname(plan), { recursive: true }) if (!exists) await fs.mkdir(path.dirname(plan), { recursive: true })
const part = await Session.updatePart({ const part = await Session.updatePart({
id: Identifier.ascending("part"), id: Identifier.ascending("part"),

View File

@@ -1,5 +1,6 @@
import { Flag } from "@/flag/flag" import { Flag } from "@/flag/flag"
import { lazy } from "@/util/lazy" import { lazy } from "@/util/lazy"
import { Filesystem } from "@/util/filesystem"
import path from "path" import path from "path"
import { spawn, type ChildProcess } from "child_process" import { spawn, type ChildProcess } from "child_process"
@@ -43,7 +44,7 @@ export namespace Shell {
// git.exe is typically at: C:\Program Files\Git\cmd\git.exe // git.exe is typically at: C:\Program Files\Git\cmd\git.exe
// bash.exe is at: C:\Program Files\Git\bin\bash.exe // bash.exe is at: C:\Program Files\Git\bin\bash.exe
const bash = path.join(git, "..", "..", "bin", "bash.exe") const bash = path.join(git, "..", "..", "bin", "bash.exe")
if (Bun.file(bash).size) return bash if (Filesystem.stat(bash)?.size) return bash
} }
return process.env.COMSPEC || "cmd.exe" return process.env.COMSPEC || "cmd.exe"
} }

View File

@@ -2,6 +2,7 @@ import path from "path"
import { mkdir } from "fs/promises" import { mkdir } from "fs/promises"
import { Log } from "../util/log" import { Log } from "../util/log"
import { Global } from "../global" import { Global } from "../global"
import { Filesystem } from "../util/filesystem"
export namespace Discovery { export namespace Discovery {
const log = Log.create({ service: "skill-discovery" }) const log = Log.create({ service: "skill-discovery" })
@@ -19,14 +20,14 @@ export namespace Discovery {
} }
async function get(url: string, dest: string): Promise<boolean> { async function get(url: string, dest: string): Promise<boolean> {
if (await Bun.file(dest).exists()) return true if (await Filesystem.exists(dest)) return true
return fetch(url) return fetch(url)
.then(async (response) => { .then(async (response) => {
if (!response.ok) { if (!response.ok) {
log.error("failed to download", { url, status: response.status }) log.error("failed to download", { url, status: response.status })
return false return false
} }
await Bun.write(dest, await response.text()) if (response.body) await Filesystem.writeStream(dest, response.body)
return true return true
}) })
.catch((err) => { .catch((err) => {
@@ -88,7 +89,7 @@ export namespace Discovery {
) )
const md = path.join(root, "SKILL.md") const md = path.join(root, "SKILL.md")
if (await Bun.file(md).exists()) result.push(root) if (await Filesystem.exists(md)) result.push(root)
}), }),
) )

View File

@@ -10,7 +10,7 @@ import { Log } from "../util/log"
import { NamedError } from "@opencode-ai/util/error" import { NamedError } from "@opencode-ai/util/error"
import z from "zod" import z from "zod"
import path from "path" import path from "path"
import { readFileSync, readdirSync } from "fs" import { readFileSync, readdirSync, existsSync } from "fs"
import * as schema from "./schema" import * as schema from "./schema"
declare const OPENCODE_MIGRATIONS: { sql: string; timestamp: number }[] | undefined declare const OPENCODE_MIGRATIONS: { sql: string; timestamp: number }[] | undefined
@@ -54,7 +54,7 @@ export namespace Database {
const sql = dirs const sql = dirs
.map((name) => { .map((name) => {
const file = path.join(dir, name, "migration.sql") const file = path.join(dir, name, "migration.sql")
if (!Bun.file(file).size) return if (!existsSync(file)) return
return { return {
sql: readFileSync(file, "utf-8"), sql: readFileSync(file, "utf-8"),
timestamp: time(name), timestamp: time(name),

View File

@@ -7,6 +7,7 @@ import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } fro
import { SessionShareTable } from "../share/share.sql" import { SessionShareTable } from "../share/share.sql"
import path from "path" import path from "path"
import { existsSync } from "fs" import { existsSync } from "fs"
import { Filesystem } from "../util/filesystem"
export namespace JsonMigration { export namespace JsonMigration {
const log = Log.create({ service: "json-migration" }) const log = Log.create({ service: "json-migration" })
@@ -82,7 +83,7 @@ export namespace JsonMigration {
const count = end - start const count = end - start
const tasks = new Array(count) const tasks = new Array(count)
for (let i = 0; i < count; i++) { for (let i = 0; i < count; i++) {
tasks[i] = Bun.file(files[start + i]).json() tasks[i] = Filesystem.readJson(files[start + i])
} }
const results = await Promise.allSettled(tasks) const results = await Promise.allSettled(tasks)
const items = new Array(count) const items = new Array(count)

View File

@@ -39,7 +39,7 @@ export namespace Storage {
cwd: path.join(project, projectDir), cwd: path.join(project, projectDir),
absolute: true, absolute: true,
})) { })) {
const json = await Bun.file(msgFile).json() const json = await Filesystem.readJson<any>(msgFile)
worktree = json.path?.root worktree = json.path?.root
if (worktree) break if (worktree) break
} }
@@ -60,18 +60,15 @@ export namespace Storage {
if (!id) continue if (!id) continue
projectID = id projectID = id
await Bun.write( await Filesystem.writeJson(path.join(dir, "project", projectID + ".json"), {
path.join(dir, "project", projectID + ".json"), id,
JSON.stringify({ vcs: "git",
id, worktree,
vcs: "git", time: {
worktree, created: Date.now(),
time: { initialized: Date.now(),
created: Date.now(), },
initialized: Date.now(), })
},
}),
)
log.info(`migrating sessions for project ${projectID}`) log.info(`migrating sessions for project ${projectID}`)
for await (const sessionFile of new Bun.Glob("storage/session/info/*.json").scan({ for await (const sessionFile of new Bun.Glob("storage/session/info/*.json").scan({
@@ -83,8 +80,8 @@ export namespace Storage {
sessionFile, sessionFile,
dest, dest,
}) })
const session = await Bun.file(sessionFile).json() const session = await Filesystem.readJson<any>(sessionFile)
await Bun.write(dest, JSON.stringify(session)) await Filesystem.writeJson(dest, session)
log.info(`migrating messages for session ${session.id}`) log.info(`migrating messages for session ${session.id}`)
for await (const msgFile of new Bun.Glob(`storage/session/message/${session.id}/*.json`).scan({ for await (const msgFile of new Bun.Glob(`storage/session/message/${session.id}/*.json`).scan({
cwd: fullProjectDir, cwd: fullProjectDir,
@@ -95,8 +92,8 @@ export namespace Storage {
msgFile, msgFile,
dest, dest,
}) })
const message = await Bun.file(msgFile).json() const message = await Filesystem.readJson<any>(msgFile)
await Bun.write(dest, JSON.stringify(message)) await Filesystem.writeJson(dest, message)
log.info(`migrating parts for message ${message.id}`) log.info(`migrating parts for message ${message.id}`)
for await (const partFile of new Bun.Glob(`storage/session/part/${session.id}/${message.id}/*.json`).scan( for await (const partFile of new Bun.Glob(`storage/session/part/${session.id}/${message.id}/*.json`).scan(
@@ -106,12 +103,12 @@ export namespace Storage {
}, },
)) { )) {
const dest = path.join(dir, "part", message.id, path.basename(partFile)) const dest = path.join(dir, "part", message.id, path.basename(partFile))
const part = await Bun.file(partFile).json() const part = await Filesystem.readJson(partFile)
log.info("copying", { log.info("copying", {
partFile, partFile,
dest, dest,
}) })
await Bun.write(dest, JSON.stringify(part)) await Filesystem.writeJson(dest, part)
} }
} }
} }
@@ -123,35 +120,32 @@ export namespace Storage {
cwd: dir, cwd: dir,
absolute: true, absolute: true,
})) { })) {
const session = await Bun.file(item).json() const session = await Filesystem.readJson<any>(item)
if (!session.projectID) continue if (!session.projectID) continue
if (!session.summary?.diffs) continue if (!session.summary?.diffs) continue
const { diffs } = session.summary const { diffs } = session.summary
await Bun.file(path.join(dir, "session_diff", session.id + ".json")).write(JSON.stringify(diffs)) await Filesystem.write(path.join(dir, "session_diff", session.id + ".json"), JSON.stringify(diffs))
await Bun.file(path.join(dir, "session", session.projectID, session.id + ".json")).write( await Filesystem.writeJson(path.join(dir, "session", session.projectID, session.id + ".json"), {
JSON.stringify({ ...session,
...session, summary: {
summary: { additions: diffs.reduce((sum: any, x: any) => sum + x.additions, 0),
additions: diffs.reduce((sum: any, x: any) => sum + x.additions, 0), deletions: diffs.reduce((sum: any, x: any) => sum + x.deletions, 0),
deletions: diffs.reduce((sum: any, x: any) => sum + x.deletions, 0), },
}, })
}),
)
} }
}, },
] ]
const state = lazy(async () => { const state = lazy(async () => {
const dir = path.join(Global.Path.data, "storage") const dir = path.join(Global.Path.data, "storage")
const migration = await Bun.file(path.join(dir, "migration")) const migration = await Filesystem.readJson<string>(path.join(dir, "migration"))
.json()
.then((x) => parseInt(x)) .then((x) => parseInt(x))
.catch(() => 0) .catch(() => 0)
for (let index = migration; index < MIGRATIONS.length; index++) { for (let index = migration; index < MIGRATIONS.length; index++) {
log.info("running migration", { index }) log.info("running migration", { index })
const migration = MIGRATIONS[index] const migration = MIGRATIONS[index]
await migration(dir).catch(() => log.error("failed to run migration", { index })) await migration(dir).catch(() => log.error("failed to run migration", { index }))
await Bun.write(path.join(dir, "migration"), (index + 1).toString()) await Filesystem.write(path.join(dir, "migration"), (index + 1).toString())
} }
return { return {
dir, dir,
@@ -171,7 +165,7 @@ export namespace Storage {
const target = path.join(dir, ...key) + ".json" const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => { return withErrorHandling(async () => {
using _ = await Lock.read(target) using _ = await Lock.read(target)
const result = await Bun.file(target).json() const result = await Filesystem.readJson<T>(target)
return result as T return result as T
}) })
} }
@@ -181,10 +175,10 @@ export namespace Storage {
const target = path.join(dir, ...key) + ".json" const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => { return withErrorHandling(async () => {
using _ = await Lock.write(target) using _ = await Lock.write(target)
const content = await Bun.file(target).json() const content = await Filesystem.readJson<T>(target)
fn(content) fn(content as T)
await Bun.write(target, JSON.stringify(content, null, 2)) await Filesystem.writeJson(target, content)
return content as T return content
}) })
} }
@@ -193,7 +187,7 @@ export namespace Storage {
const target = path.join(dir, ...key) + ".json" const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => { return withErrorHandling(async () => {
using _ = await Lock.write(target) using _ = await Lock.write(target)
await Bun.write(target, JSON.stringify(content, null, 2)) await Filesystem.writeJson(target, content)
}) })
} }

View File

@@ -49,7 +49,7 @@ export const EditTool = Tool.define("edit", {
let contentNew = "" let contentNew = ""
await FileTime.withLock(filePath, async () => { await FileTime.withLock(filePath, async () => {
if (params.oldString === "") { if (params.oldString === "") {
const existed = await Bun.file(filePath).exists() const existed = await Filesystem.exists(filePath)
contentNew = params.newString contentNew = params.newString
diff = trimDiff(createTwoFilesPatch(filePath, filePath, contentOld, contentNew)) diff = trimDiff(createTwoFilesPatch(filePath, filePath, contentOld, contentNew))
await ctx.ask({ await ctx.ask({
@@ -61,7 +61,7 @@ export const EditTool = Tool.define("edit", {
diff, diff,
}, },
}) })
await Bun.write(filePath, params.newString) await Filesystem.write(filePath, params.newString)
await Bus.publish(File.Event.Edited, { await Bus.publish(File.Event.Edited, {
file: filePath, file: filePath,
}) })
@@ -73,12 +73,11 @@ export const EditTool = Tool.define("edit", {
return return
} }
const file = Bun.file(filePath) const stats = Filesystem.stat(filePath)
const stats = await file.stat().catch(() => {})
if (!stats) throw new Error(`File ${filePath} not found`) if (!stats) throw new Error(`File ${filePath} not found`)
if (stats.isDirectory()) throw new Error(`Path is a directory, not a file: ${filePath}`) if (stats.isDirectory()) throw new Error(`Path is a directory, not a file: ${filePath}`)
await FileTime.assert(ctx.sessionID, filePath) await FileTime.assert(ctx.sessionID, filePath)
contentOld = await file.text() contentOld = await Filesystem.readText(filePath)
contentNew = replace(contentOld, params.oldString, params.newString, params.replaceAll) contentNew = replace(contentOld, params.oldString, params.newString, params.replaceAll)
diff = trimDiff( diff = trimDiff(
@@ -94,7 +93,7 @@ export const EditTool = Tool.define("edit", {
}, },
}) })
await file.write(contentNew) await Filesystem.write(filePath, contentNew)
await Bus.publish(File.Event.Edited, { await Bus.publish(File.Event.Edited, {
file: filePath, file: filePath,
}) })
@@ -102,7 +101,7 @@ export const EditTool = Tool.define("edit", {
file: filePath, file: filePath,
event: "change", event: "change",
}) })
contentNew = await file.text() contentNew = await Filesystem.readText(filePath)
diff = trimDiff( diff = trimDiff(
createTwoFilesPatch(filePath, filePath, normalizeLineEndings(contentOld), normalizeLineEndings(contentNew)), createTwoFilesPatch(filePath, filePath, normalizeLineEndings(contentOld), normalizeLineEndings(contentNew)),
) )

View File

@@ -1,6 +1,7 @@
import z from "zod" import z from "zod"
import path from "path" import path from "path"
import { Tool } from "./tool" import { Tool } from "./tool"
import { Filesystem } from "../util/filesystem"
import DESCRIPTION from "./glob.txt" import DESCRIPTION from "./glob.txt"
import { Ripgrep } from "../file/ripgrep" import { Ripgrep } from "../file/ripgrep"
import { Instance } from "../project/instance" import { Instance } from "../project/instance"
@@ -45,10 +46,7 @@ export const GlobTool = Tool.define("glob", {
break break
} }
const full = path.resolve(search, file) const full = path.resolve(search, file)
const stats = await Bun.file(full) const stats = Filesystem.stat(full)?.mtime.getTime() ?? 0
.stat()
.then((x) => x.mtime.getTime())
.catch(() => 0)
files.push({ files.push({
path: full, path: full,
mtime: stats, mtime: stats,

View File

@@ -1,5 +1,6 @@
import z from "zod" import z from "zod"
import { Tool } from "./tool" import { Tool } from "./tool"
import { Filesystem } from "../util/filesystem"
import { Ripgrep } from "../file/ripgrep" import { Ripgrep } from "../file/ripgrep"
import DESCRIPTION from "./grep.txt" import DESCRIPTION from "./grep.txt"
@@ -83,8 +84,7 @@ export const GrepTool = Tool.define("grep", {
const lineNum = parseInt(lineNumStr, 10) const lineNum = parseInt(lineNumStr, 10)
const lineText = lineTextParts.join("|") const lineText = lineTextParts.join("|")
const file = Bun.file(filePath) const stats = Filesystem.stat(filePath)
const stats = await file.stat().catch(() => null)
if (!stats) continue if (!stats) continue
matches.push({ matches.push({

View File

@@ -6,6 +6,7 @@ import DESCRIPTION from "./lsp.txt"
import { Instance } from "../project/instance" import { Instance } from "../project/instance"
import { pathToFileURL } from "url" import { pathToFileURL } from "url"
import { assertExternalDirectory } from "./external-directory" import { assertExternalDirectory } from "./external-directory"
import { Filesystem } from "../util/filesystem"
const operations = [ const operations = [
"goToDefinition", "goToDefinition",
@@ -47,7 +48,7 @@ export const LspTool = Tool.define("lsp", {
const relPath = path.relative(Instance.worktree, file) const relPath = path.relative(Instance.worktree, file)
const title = `${args.operation} ${relPath}:${args.line}:${args.character}` const title = `${args.operation} ${relPath}:${args.line}:${args.character}`
const exists = await Bun.file(file).exists() const exists = await Filesystem.exists(file)
if (!exists) { if (!exists) {
throw new Error(`File not found: ${file}`) throw new Error(`File not found: ${file}`)
} }

View File

@@ -10,6 +10,7 @@ import DESCRIPTION from "./read.txt"
import { Instance } from "../project/instance" import { Instance } from "../project/instance"
import { assertExternalDirectory } from "./external-directory" import { assertExternalDirectory } from "./external-directory"
import { InstructionPrompt } from "../session/instruction" import { InstructionPrompt } from "../session/instruction"
import { Filesystem } from "../util/filesystem"
const DEFAULT_READ_LIMIT = 2000 const DEFAULT_READ_LIMIT = 2000
const MAX_LINE_LENGTH = 2000 const MAX_LINE_LENGTH = 2000
@@ -34,8 +35,7 @@ export const ReadTool = Tool.define("read", {
} }
const title = path.relative(Instance.worktree, filepath) const title = path.relative(Instance.worktree, filepath)
const file = Bun.file(filepath) const stat = Filesystem.stat(filepath)
const stat = await file.stat().catch(() => undefined)
await assertExternalDirectory(ctx, filepath, { await assertExternalDirectory(ctx, filepath, {
bypass: Boolean(ctx.extra?.["bypassCwdCheck"]), bypass: Boolean(ctx.extra?.["bypassCwdCheck"]),
@@ -118,11 +118,10 @@ export const ReadTool = Tool.define("read", {
const instructions = await InstructionPrompt.resolve(ctx.messages, filepath, ctx.messageID) const instructions = await InstructionPrompt.resolve(ctx.messages, filepath, ctx.messageID)
// Exclude SVG (XML-based) and vnd.fastbidsheet (.fbs extension, commonly FlatBuffers schema files) // Exclude SVG (XML-based) and vnd.fastbidsheet (.fbs extension, commonly FlatBuffers schema files)
const isImage = const mime = Filesystem.mimeType(filepath)
file.type.startsWith("image/") && file.type !== "image/svg+xml" && file.type !== "image/vnd.fastbidsheet" const isImage = mime.startsWith("image/") && mime !== "image/svg+xml" && mime !== "image/vnd.fastbidsheet"
const isPdf = file.type === "application/pdf" const isPdf = mime === "application/pdf"
if (isImage || isPdf) { if (isImage || isPdf) {
const mime = file.type
const msg = `${isImage ? "Image" : "PDF"} read successfully` const msg = `${isImage ? "Image" : "PDF"} read successfully`
return { return {
title, title,
@@ -136,13 +135,13 @@ export const ReadTool = Tool.define("read", {
{ {
type: "file", type: "file",
mime, mime,
url: `data:${mime};base64,${Buffer.from(await file.bytes()).toString("base64")}`, url: `data:${mime};base64,${Buffer.from(await Filesystem.readBytes(filepath)).toString("base64")}`,
}, },
], ],
} }
} }
const isBinary = await isBinaryFile(filepath, stat.size) const isBinary = await isBinaryFile(filepath, Number(stat.size))
if (isBinary) throw new Error(`Cannot read binary file: ${filepath}`) if (isBinary) throw new Error(`Cannot read binary file: ${filepath}`)
const stream = createReadStream(filepath, { encoding: "utf8" }) const stream = createReadStream(filepath, { encoding: "utf8" })

View File

@@ -5,6 +5,7 @@ import { Identifier } from "../id/id"
import { PermissionNext } from "../permission/next" import { PermissionNext } from "../permission/next"
import type { Agent } from "../agent/agent" import type { Agent } from "../agent/agent"
import { Scheduler } from "../scheduler" import { Scheduler } from "../scheduler"
import { Filesystem } from "../util/filesystem"
export namespace Truncate { export namespace Truncate {
export const MAX_LINES = 2000 export const MAX_LINES = 2000
@@ -91,7 +92,7 @@ export namespace Truncate {
const id = Identifier.ascending("tool") const id = Identifier.ascending("tool")
const filepath = path.join(DIR, id) const filepath = path.join(DIR, id)
await Bun.write(Bun.file(filepath), text) await Filesystem.write(filepath, text)
const hint = hasTaskTool(agent) const hint = hasTaskTool(agent)
? `The tool call succeeded but the output was truncated. Full output saved to: ${filepath}\nUse the Task tool to have explore agent process this file with Grep and Read (with offset/limit). Do NOT read the full file yourself - delegate to save context.` ? `The tool call succeeded but the output was truncated. Full output saved to: ${filepath}\nUse the Task tool to have explore agent process this file with Grep and Read (with offset/limit). Do NOT read the full file yourself - delegate to save context.`

View File

@@ -26,9 +26,8 @@ export const WriteTool = Tool.define("write", {
const filepath = path.isAbsolute(params.filePath) ? params.filePath : path.join(Instance.directory, params.filePath) const filepath = path.isAbsolute(params.filePath) ? params.filePath : path.join(Instance.directory, params.filePath)
await assertExternalDirectory(ctx, filepath) await assertExternalDirectory(ctx, filepath)
const file = Bun.file(filepath) const exists = await Filesystem.exists(filepath)
const exists = await file.exists() const contentOld = exists ? await Filesystem.readText(filepath) : ""
const contentOld = exists ? await file.text() : ""
if (exists) await FileTime.assert(ctx.sessionID, filepath) if (exists) await FileTime.assert(ctx.sessionID, filepath)
const diff = trimDiff(createTwoFilesPatch(filepath, filepath, contentOld, params.content)) const diff = trimDiff(createTwoFilesPatch(filepath, filepath, contentOld, params.content))
@@ -42,7 +41,7 @@ export const WriteTool = Tool.define("write", {
}, },
}) })
await Bun.write(filepath, params.content) await Filesystem.write(filepath, params.content)
await Bus.publish(File.Event.Edited, { await Bus.publish(File.Event.Edited, {
file: filepath, file: filepath,
}) })

View File

@@ -1,8 +1,10 @@
import { mkdir, readFile, writeFile } from "fs/promises" import { chmod, mkdir, readFile, writeFile } from "fs/promises"
import { existsSync, statSync } from "fs" import { createWriteStream, existsSync, statSync } from "fs"
import { lookup } from "mime-types" import { lookup } from "mime-types"
import { realpathSync } from "fs" import { realpathSync } from "fs"
import { dirname, join, relative } from "path" import { dirname, join, relative } from "path"
import { Readable } from "stream"
import { pipeline } from "stream/promises"
export namespace Filesystem { export namespace Filesystem {
// Fast sync version for metadata checks // Fast sync version for metadata checks
@@ -39,11 +41,16 @@ export namespace Filesystem {
return readFile(p) return readFile(p)
} }
export async function readArrayBuffer(p: string): Promise<ArrayBuffer> {
const buf = await readFile(p)
return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength) as ArrayBuffer
}
function isEnoent(e: unknown): e is { code: "ENOENT" } { function isEnoent(e: unknown): e is { code: "ENOENT" } {
return typeof e === "object" && e !== null && "code" in e && (e as { code: string }).code === "ENOENT" return typeof e === "object" && e !== null && "code" in e && (e as { code: string }).code === "ENOENT"
} }
export async function write(p: string, content: string | Buffer, mode?: number): Promise<void> { export async function write(p: string, content: string | Buffer | Uint8Array, mode?: number): Promise<void> {
try { try {
if (mode) { if (mode) {
await writeFile(p, content, { mode }) await writeFile(p, content, { mode })
@@ -68,6 +75,25 @@ export namespace Filesystem {
return write(p, JSON.stringify(data, null, 2), mode) return write(p, JSON.stringify(data, null, 2), mode)
} }
export async function writeStream(
p: string,
stream: ReadableStream<Uint8Array> | Readable,
mode?: number,
): Promise<void> {
const dir = dirname(p)
if (!existsSync(dir)) {
await mkdir(dir, { recursive: true })
}
const nodeStream = stream instanceof ReadableStream ? Readable.fromWeb(stream as any) : stream
const writeStream = createWriteStream(p)
await pipeline(nodeStream, writeStream)
if (mode) {
await chmod(p, mode)
}
}
export function mimeType(p: string): string { export function mimeType(p: string): string {
return lookup(p) || "application/octet-stream" return lookup(p) || "application/octet-stream"
} }

View File

@@ -1,5 +1,6 @@
import path from "path" import path from "path"
import fs from "fs/promises" import fs from "fs/promises"
import { createWriteStream } from "fs"
import { Global } from "../global" import { Global } from "../global"
import z from "zod" import z from "zod"
@@ -63,13 +64,15 @@ export namespace Log {
Global.Path.log, Global.Path.log,
options.dev ? "dev.log" : new Date().toISOString().split(".")[0].replace(/:/g, "") + ".log", options.dev ? "dev.log" : new Date().toISOString().split(".")[0].replace(/:/g, "") + ".log",
) )
const logfile = Bun.file(logpath)
await fs.truncate(logpath).catch(() => {}) await fs.truncate(logpath).catch(() => {})
const writer = logfile.writer() const stream = createWriteStream(logpath, { flags: "a" })
write = async (msg: any) => { write = async (msg: any) => {
const num = writer.write(msg) return new Promise((resolve, reject) => {
writer.flush() stream.write(msg, (err) => {
return num if (err) reject(err)
else resolve(msg.length)
})
})
} }
} }

View File

@@ -7,6 +7,7 @@ import path from "path"
import fs from "fs/promises" import fs from "fs/promises"
import { pathToFileURL } from "url" import { pathToFileURL } from "url"
import { Global } from "../../src/global" import { Global } from "../../src/global"
import { Filesystem } from "../../src/util/filesystem"
// Get managed config directory from environment (set in preload.ts) // Get managed config directory from environment (set in preload.ts)
const managedConfigDir = process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR! const managedConfigDir = process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR!
@@ -17,11 +18,11 @@ afterEach(async () => {
async function writeManagedSettings(settings: object, filename = "opencode.json") { async function writeManagedSettings(settings: object, filename = "opencode.json") {
await fs.mkdir(managedConfigDir, { recursive: true }) await fs.mkdir(managedConfigDir, { recursive: true })
await Bun.write(path.join(managedConfigDir, filename), JSON.stringify(settings)) await Filesystem.write(path.join(managedConfigDir, filename), JSON.stringify(settings))
} }
async function writeConfig(dir: string, config: object, name = "opencode.json") { async function writeConfig(dir: string, config: object, name = "opencode.json") {
await Bun.write(path.join(dir, name), JSON.stringify(config)) await Filesystem.write(path.join(dir, name), JSON.stringify(config))
} }
test("loads config with defaults when no files exist", async () => { test("loads config with defaults when no files exist", async () => {
@@ -58,7 +59,7 @@ test("loads JSON config file", async () => {
test("loads JSONC config file", async () => { test("loads JSONC config file", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.jsonc"), path.join(dir, "opencode.jsonc"),
`{ `{
// This is a comment // This is a comment
@@ -144,7 +145,7 @@ test("preserves env variables when adding $schema to config", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
// Config without $schema - should trigger auto-add // Config without $schema - should trigger auto-add
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
theme: "{env:PRESERVE_VAR}", theme: "{env:PRESERVE_VAR}",
@@ -159,7 +160,7 @@ test("preserves env variables when adding $schema to config", async () => {
expect(config.theme).toBe("secret_value") expect(config.theme).toBe("secret_value")
// Read the file to verify the env variable was preserved // Read the file to verify the env variable was preserved
const content = await Bun.file(path.join(tmp.path, "opencode.json")).text() const content = await Filesystem.readText(path.join(tmp.path, "opencode.json"))
expect(content).toContain("{env:PRESERVE_VAR}") expect(content).toContain("{env:PRESERVE_VAR}")
expect(content).not.toContain("secret_value") expect(content).not.toContain("secret_value")
expect(content).toContain("$schema") expect(content).toContain("$schema")
@@ -177,7 +178,7 @@ test("preserves env variables when adding $schema to config", async () => {
test("handles file inclusion substitution", async () => { test("handles file inclusion substitution", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write(path.join(dir, "included.txt"), "test_theme") await Filesystem.write(path.join(dir, "included.txt"), "test_theme")
await writeConfig(dir, { await writeConfig(dir, {
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
theme: "{file:included.txt}", theme: "{file:included.txt}",
@@ -196,7 +197,7 @@ test("handles file inclusion substitution", async () => {
test("handles file inclusion with replacement tokens", async () => { test("handles file inclusion with replacement tokens", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write(path.join(dir, "included.md"), "const out = await Bun.$`echo hi`") await Filesystem.write(path.join(dir, "included.md"), "const out = await Bun.$`echo hi`")
await writeConfig(dir, { await writeConfig(dir, {
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
theme: "{file:included.md}", theme: "{file:included.md}",
@@ -233,7 +234,7 @@ test("validates config schema and throws on invalid fields", async () => {
test("throws error for invalid JSON", async () => { test("throws error for invalid JSON", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write(path.join(dir, "opencode.json"), "{ invalid json }") await Filesystem.write(path.join(dir, "opencode.json"), "{ invalid json }")
}, },
}) })
await Instance.provide({ await Instance.provide({
@@ -336,7 +337,7 @@ test("handles command configuration", async () => {
test("migrates autoshare to share field", async () => { test("migrates autoshare to share field", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -358,7 +359,7 @@ test("migrates autoshare to share field", async () => {
test("migrates mode field to agent field", async () => { test("migrates mode field to agent field", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -395,7 +396,7 @@ test("loads config from .opencode directory", async () => {
const agentDir = path.join(opencodeDir, "agent") const agentDir = path.join(opencodeDir, "agent")
await fs.mkdir(agentDir, { recursive: true }) await fs.mkdir(agentDir, { recursive: true })
await Bun.write( await Filesystem.write(
path.join(agentDir, "test.md"), path.join(agentDir, "test.md"),
`--- `---
model: test/model model: test/model
@@ -428,7 +429,7 @@ test("loads agents from .opencode/agents (plural)", async () => {
const agentsDir = path.join(opencodeDir, "agents") const agentsDir = path.join(opencodeDir, "agents")
await fs.mkdir(path.join(agentsDir, "nested"), { recursive: true }) await fs.mkdir(path.join(agentsDir, "nested"), { recursive: true })
await Bun.write( await Filesystem.write(
path.join(agentsDir, "helper.md"), path.join(agentsDir, "helper.md"),
`--- `---
model: test/model model: test/model
@@ -437,7 +438,7 @@ mode: subagent
Helper agent prompt`, Helper agent prompt`,
) )
await Bun.write( await Filesystem.write(
path.join(agentsDir, "nested", "child.md"), path.join(agentsDir, "nested", "child.md"),
`--- `---
model: test/model model: test/model
@@ -479,7 +480,7 @@ test("loads commands from .opencode/command (singular)", async () => {
const commandDir = path.join(opencodeDir, "command") const commandDir = path.join(opencodeDir, "command")
await fs.mkdir(path.join(commandDir, "nested"), { recursive: true }) await fs.mkdir(path.join(commandDir, "nested"), { recursive: true })
await Bun.write( await Filesystem.write(
path.join(commandDir, "hello.md"), path.join(commandDir, "hello.md"),
`--- `---
description: Test command description: Test command
@@ -487,7 +488,7 @@ description: Test command
Hello from singular command`, Hello from singular command`,
) )
await Bun.write( await Filesystem.write(
path.join(commandDir, "nested", "child.md"), path.join(commandDir, "nested", "child.md"),
`--- `---
description: Nested command description: Nested command
@@ -524,7 +525,7 @@ test("loads commands from .opencode/commands (plural)", async () => {
const commandsDir = path.join(opencodeDir, "commands") const commandsDir = path.join(opencodeDir, "commands")
await fs.mkdir(path.join(commandsDir, "nested"), { recursive: true }) await fs.mkdir(path.join(commandsDir, "nested"), { recursive: true })
await Bun.write( await Filesystem.write(
path.join(commandsDir, "hello.md"), path.join(commandsDir, "hello.md"),
`--- `---
description: Test command description: Test command
@@ -532,7 +533,7 @@ description: Test command
Hello from plural commands`, Hello from plural commands`,
) )
await Bun.write( await Filesystem.write(
path.join(commandsDir, "nested", "child.md"), path.join(commandsDir, "nested", "child.md"),
`--- `---
description: Nested command description: Nested command
@@ -568,7 +569,7 @@ test("updates config and writes to file", async () => {
const newConfig = { model: "updated/model" } const newConfig = { model: "updated/model" }
await Config.update(newConfig as any) await Config.update(newConfig as any)
const writtenConfig = JSON.parse(await Bun.file(path.join(tmp.path, "config.json")).text()) const writtenConfig = await Filesystem.readJson(path.join(tmp.path, "config.json"))
expect(writtenConfig.model).toBe("updated/model") expect(writtenConfig.model).toBe("updated/model")
}, },
}) })
@@ -639,8 +640,8 @@ test("installs dependencies in writable OPENCODE_CONFIG_DIR", async () => {
}, },
}) })
expect(await Bun.file(path.join(tmp.extra, "package.json")).exists()).toBe(true) expect(await Filesystem.exists(path.join(tmp.extra, "package.json"))).toBe(true)
expect(await Bun.file(path.join(tmp.extra, ".gitignore")).exists()).toBe(true) expect(await Filesystem.exists(path.join(tmp.extra, ".gitignore"))).toBe(true)
} finally { } finally {
if (prev === undefined) delete process.env.OPENCODE_CONFIG_DIR if (prev === undefined) delete process.env.OPENCODE_CONFIG_DIR
else process.env.OPENCODE_CONFIG_DIR = prev else process.env.OPENCODE_CONFIG_DIR = prev
@@ -653,12 +654,12 @@ test("resolves scoped npm plugins in config", async () => {
const pluginDir = path.join(dir, "node_modules", "@scope", "plugin") const pluginDir = path.join(dir, "node_modules", "@scope", "plugin")
await fs.mkdir(pluginDir, { recursive: true }) await fs.mkdir(pluginDir, { recursive: true })
await Bun.write( await Filesystem.write(
path.join(dir, "package.json"), path.join(dir, "package.json"),
JSON.stringify({ name: "config-fixture", version: "1.0.0", type: "module" }, null, 2), JSON.stringify({ name: "config-fixture", version: "1.0.0", type: "module" }, null, 2),
) )
await Bun.write( await Filesystem.write(
path.join(pluginDir, "package.json"), path.join(pluginDir, "package.json"),
JSON.stringify( JSON.stringify(
{ {
@@ -672,9 +673,9 @@ test("resolves scoped npm plugins in config", async () => {
), ),
) )
await Bun.write(path.join(pluginDir, "index.js"), "export default {}\n") await Filesystem.write(path.join(pluginDir, "index.js"), "export default {}\n")
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ $schema: "https://opencode.ai/config.json", plugin: ["@scope/plugin"] }, null, 2), JSON.stringify({ $schema: "https://opencode.ai/config.json", plugin: ["@scope/plugin"] }, null, 2),
) )
@@ -708,7 +709,7 @@ test("merges plugin arrays from global and local configs", async () => {
await fs.mkdir(opencodeDir, { recursive: true }) await fs.mkdir(opencodeDir, { recursive: true })
// Global config with plugins // Global config with plugins
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -717,7 +718,7 @@ test("merges plugin arrays from global and local configs", async () => {
) )
// Local .opencode config with different plugins // Local .opencode config with different plugins
await Bun.write( await Filesystem.write(
path.join(opencodeDir, "opencode.json"), path.join(opencodeDir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -753,7 +754,7 @@ test("does not error when only custom agent is a subagent", async () => {
const agentDir = path.join(opencodeDir, "agent") const agentDir = path.join(opencodeDir, "agent")
await fs.mkdir(agentDir, { recursive: true }) await fs.mkdir(agentDir, { recursive: true })
await Bun.write( await Filesystem.write(
path.join(agentDir, "helper.md"), path.join(agentDir, "helper.md"),
`--- `---
model: test/model model: test/model
@@ -784,7 +785,7 @@ test("merges instructions arrays from global and local configs", async () => {
const opencodeDir = path.join(projectDir, ".opencode") const opencodeDir = path.join(projectDir, ".opencode")
await fs.mkdir(opencodeDir, { recursive: true }) await fs.mkdir(opencodeDir, { recursive: true })
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -792,7 +793,7 @@ test("merges instructions arrays from global and local configs", async () => {
}), }),
) )
await Bun.write( await Filesystem.write(
path.join(opencodeDir, "opencode.json"), path.join(opencodeDir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -823,7 +824,7 @@ test("deduplicates duplicate instructions from global and local configs", async
const opencodeDir = path.join(projectDir, ".opencode") const opencodeDir = path.join(projectDir, ".opencode")
await fs.mkdir(opencodeDir, { recursive: true }) await fs.mkdir(opencodeDir, { recursive: true })
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -831,7 +832,7 @@ test("deduplicates duplicate instructions from global and local configs", async
}), }),
) )
await Bun.write( await Filesystem.write(
path.join(opencodeDir, "opencode.json"), path.join(opencodeDir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -867,7 +868,7 @@ test("deduplicates duplicate plugins from global and local configs", async () =>
await fs.mkdir(opencodeDir, { recursive: true }) await fs.mkdir(opencodeDir, { recursive: true })
// Global config with plugins // Global config with plugins
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -876,7 +877,7 @@ test("deduplicates duplicate plugins from global and local configs", async () =>
) )
// Local .opencode config with some overlapping plugins // Local .opencode config with some overlapping plugins
await Bun.write( await Filesystem.write(
path.join(opencodeDir, "opencode.json"), path.join(opencodeDir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -915,7 +916,7 @@ test("deduplicates duplicate plugins from global and local configs", async () =>
test("migrates legacy tools config to permissions - allow", async () => { test("migrates legacy tools config to permissions - allow", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -946,7 +947,7 @@ test("migrates legacy tools config to permissions - allow", async () => {
test("migrates legacy tools config to permissions - deny", async () => { test("migrates legacy tools config to permissions - deny", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -977,7 +978,7 @@ test("migrates legacy tools config to permissions - deny", async () => {
test("migrates legacy write tool to edit permission", async () => { test("migrates legacy write tool to edit permission", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1086,7 +1087,7 @@ test("missing managed settings file is not an error", async () => {
test("migrates legacy edit tool to edit permission", async () => { test("migrates legacy edit tool to edit permission", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1115,7 +1116,7 @@ test("migrates legacy edit tool to edit permission", async () => {
test("migrates legacy patch tool to edit permission", async () => { test("migrates legacy patch tool to edit permission", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1144,7 +1145,7 @@ test("migrates legacy patch tool to edit permission", async () => {
test("migrates legacy multiedit tool to edit permission", async () => { test("migrates legacy multiedit tool to edit permission", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1173,7 +1174,7 @@ test("migrates legacy multiedit tool to edit permission", async () => {
test("migrates mixed legacy tools config", async () => { test("migrates mixed legacy tools config", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1208,7 +1209,7 @@ test("migrates mixed legacy tools config", async () => {
test("merges legacy tools with existing permission config", async () => { test("merges legacy tools with existing permission config", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1241,7 +1242,7 @@ test("merges legacy tools with existing permission config", async () => {
test("permission config preserves key order", async () => { test("permission config preserves key order", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1289,7 +1290,7 @@ test("project config can override MCP server enabled status", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
// Simulates a base config (like from remote .well-known) with disabled MCP // Simulates a base config (like from remote .well-known) with disabled MCP
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.jsonc"), path.join(dir, "opencode.jsonc"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1308,7 +1309,7 @@ test("project config can override MCP server enabled status", async () => {
}), }),
) )
// Project config enables just jira // Project config enables just jira
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1347,7 +1348,7 @@ test("MCP config deep merges preserving base config properties", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
// Base config with full MCP definition // Base config with full MCP definition
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.jsonc"), path.join(dir, "opencode.jsonc"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1364,7 +1365,7 @@ test("MCP config deep merges preserving base config properties", async () => {
}), }),
) )
// Override just enables it, should preserve other properties // Override just enables it, should preserve other properties
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1399,7 +1400,7 @@ test("local .opencode config can override MCP from project config", async () =>
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
// Project config with disabled MCP // Project config with disabled MCP
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1415,7 +1416,7 @@ test("local .opencode config can override MCP from project config", async () =>
// Local .opencode directory config enables it // Local .opencode directory config enables it
const opencodeDir = path.join(dir, ".opencode") const opencodeDir = path.join(dir, ".opencode")
await fs.mkdir(opencodeDir, { recursive: true }) await fs.mkdir(opencodeDir, { recursive: true })
await Bun.write( await Filesystem.write(
path.join(opencodeDir, "opencode.json"), path.join(opencodeDir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1483,7 +1484,7 @@ test("project config overrides remote well-known config", async () => {
git: true, git: true,
init: async (dir) => { init: async (dir) => {
// Project config enables jira (overriding remote default) // Project config enables jira (overriding remote default)
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1576,7 +1577,7 @@ describe("deduplicatePlugins", () => {
const pluginDir = path.join(opencodeDir, "plugin") const pluginDir = path.join(opencodeDir, "plugin")
await fs.mkdir(pluginDir, { recursive: true }) await fs.mkdir(pluginDir, { recursive: true })
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1584,7 +1585,7 @@ describe("deduplicatePlugins", () => {
}), }),
) )
await Bun.write(path.join(pluginDir, "my-plugin.js"), "export default {}") await Filesystem.write(path.join(pluginDir, "my-plugin.js"), "export default {}")
}, },
}) })
@@ -1611,7 +1612,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
// Create a project config that would normally be loaded // Create a project config that would normally be loaded
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1649,7 +1650,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => {
// Create a .opencode directory with a command // Create a .opencode directory with a command
const opencodeDir = path.join(dir, ".opencode", "command") const opencodeDir = path.join(dir, ".opencode", "command")
await fs.mkdir(opencodeDir, { recursive: true }) await fs.mkdir(opencodeDir, { recursive: true })
await Bun.write(path.join(opencodeDir, "test-cmd.md"), "# Test Command\nThis is a test command.") await Filesystem.write(path.join(opencodeDir, "test-cmd.md"), "# Test Command\nThis is a test command.")
}, },
}) })
await Instance.provide({ await Instance.provide({
@@ -1706,7 +1707,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
// Create a config with relative instruction path // Create a config with relative instruction path
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1714,7 +1715,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => {
}), }),
) )
// Create the instruction file (should be skipped) // Create the instruction file (should be skipped)
await Bun.write(path.join(dir, "CUSTOM.md"), "# Custom Instructions") await Filesystem.write(path.join(dir, "CUSTOM.md"), "# Custom Instructions")
}, },
}) })
@@ -1752,7 +1753,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => {
await using configDirTmp = await tmpdir({ await using configDirTmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
// Create config in the custom config dir // Create config in the custom config dir
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -1765,7 +1766,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => {
await using projectTmp = await tmpdir({ await using projectTmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
// Create config in project (should be ignored) // Create config in project (should be ignored)
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",

View File

@@ -3,11 +3,12 @@ import path from "path"
import fs from "fs/promises" import fs from "fs/promises"
import { File } from "../../src/file" import { File } from "../../src/file"
import { Instance } from "../../src/project/instance" import { Instance } from "../../src/project/instance"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture" import { tmpdir } from "../fixture/fixture"
describe("file/index Bun.file patterns", () => { describe("file/index Filesystem patterns", () => {
describe("File.read() - text content", () => { describe("File.read() - text content", () => {
test("reads text file via Bun.file().text()", async () => { test("reads text file via Filesystem.readText()", async () => {
await using tmp = await tmpdir() await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "test.txt") const filepath = path.join(tmp.path, "test.txt")
await fs.writeFile(filepath, "Hello World", "utf-8") await fs.writeFile(filepath, "Hello World", "utf-8")
@@ -22,7 +23,7 @@ describe("file/index Bun.file patterns", () => {
}) })
}) })
test("reads with Bun.file().exists() check", async () => { test("reads with Filesystem.exists() check", async () => {
await using tmp = await tmpdir() await using tmp = await tmpdir()
await Instance.provide({ await Instance.provide({
@@ -81,7 +82,7 @@ describe("file/index Bun.file patterns", () => {
}) })
describe("File.read() - binary content", () => { describe("File.read() - binary content", () => {
test("reads binary file via Bun.file().arrayBuffer()", async () => { test("reads binary file via Filesystem.readArrayBuffer()", async () => {
await using tmp = await tmpdir() await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "image.png") const filepath = path.join(tmp.path, "image.png")
const binaryContent = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]) const binaryContent = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a])
@@ -115,8 +116,8 @@ describe("file/index Bun.file patterns", () => {
}) })
}) })
describe("File.read() - Bun.file().type", () => { describe("File.read() - Filesystem.mimeType()", () => {
test("detects MIME type via Bun.file().type", async () => { test("detects MIME type via Filesystem.mimeType()", async () => {
await using tmp = await tmpdir() await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "test.json") const filepath = path.join(tmp.path, "test.json")
await fs.writeFile(filepath, '{"key": "value"}', "utf-8") await fs.writeFile(filepath, '{"key": "value"}', "utf-8")
@@ -124,8 +125,7 @@ describe("file/index Bun.file patterns", () => {
await Instance.provide({ await Instance.provide({
directory: tmp.path, directory: tmp.path,
fn: async () => { fn: async () => {
const bunFile = Bun.file(filepath) expect(Filesystem.mimeType(filepath)).toContain("application/json")
expect(bunFile.type).toContain("application/json")
const result = await File.read("test.json") const result = await File.read("test.json")
expect(result.type).toBe("text") expect(result.type).toBe("text")
@@ -149,16 +149,15 @@ describe("file/index Bun.file patterns", () => {
await Instance.provide({ await Instance.provide({
directory: tmp.path, directory: tmp.path,
fn: async () => { fn: async () => {
const bunFile = Bun.file(filepath) expect(Filesystem.mimeType(filepath)).toContain(mime)
expect(bunFile.type).toContain(mime)
}, },
}) })
} }
}) })
}) })
describe("File.list() - Bun.file().exists() and .text()", () => { describe("File.list() - Filesystem.exists() and readText()", () => {
test("reads .gitignore via Bun.file().exists() and .text()", async () => { test("reads .gitignore via Filesystem.exists() and readText()", async () => {
await using tmp = await tmpdir({ git: true }) await using tmp = await tmpdir({ git: true })
await Instance.provide({ await Instance.provide({
@@ -168,10 +167,9 @@ describe("file/index Bun.file patterns", () => {
await fs.writeFile(gitignorePath, "node_modules\ndist\n", "utf-8") await fs.writeFile(gitignorePath, "node_modules\ndist\n", "utf-8")
// This is used internally in File.list() // This is used internally in File.list()
const bunFile = Bun.file(gitignorePath) expect(await Filesystem.exists(gitignorePath)).toBe(true)
expect(await bunFile.exists()).toBe(true)
const content = await bunFile.text() const content = await Filesystem.readText(gitignorePath)
expect(content).toContain("node_modules") expect(content).toContain("node_modules")
}, },
}) })
@@ -186,9 +184,8 @@ describe("file/index Bun.file patterns", () => {
const ignorePath = path.join(tmp.path, ".ignore") const ignorePath = path.join(tmp.path, ".ignore")
await fs.writeFile(ignorePath, "*.log\n.env\n", "utf-8") await fs.writeFile(ignorePath, "*.log\n.env\n", "utf-8")
const bunFile = Bun.file(ignorePath) expect(await Filesystem.exists(ignorePath)).toBe(true)
expect(await bunFile.exists()).toBe(true) expect(await Filesystem.readText(ignorePath)).toContain("*.log")
expect(await bunFile.text()).toContain("*.log")
}, },
}) })
}) })
@@ -200,8 +197,7 @@ describe("file/index Bun.file patterns", () => {
directory: tmp.path, directory: tmp.path,
fn: async () => { fn: async () => {
const gitignorePath = path.join(tmp.path, ".gitignore") const gitignorePath = path.join(tmp.path, ".gitignore")
const bunFile = Bun.file(gitignorePath) expect(await Filesystem.exists(gitignorePath)).toBe(false)
expect(await bunFile.exists()).toBe(false)
// File.list() should still work // File.list() should still work
const nodes = await File.list() const nodes = await File.list()
@@ -211,8 +207,8 @@ describe("file/index Bun.file patterns", () => {
}) })
}) })
describe("File.changed() - Bun.file().text() for untracked files", () => { describe("File.changed() - Filesystem.readText() for untracked files", () => {
test("reads untracked files via Bun.file().text()", async () => { test("reads untracked files via Filesystem.readText()", async () => {
await using tmp = await tmpdir({ git: true }) await using tmp = await tmpdir({ git: true })
await Instance.provide({ await Instance.provide({
@@ -222,8 +218,7 @@ describe("file/index Bun.file patterns", () => {
await fs.writeFile(untrackedPath, "new content\nwith multiple lines", "utf-8") await fs.writeFile(untrackedPath, "new content\nwith multiple lines", "utf-8")
// This is how File.changed() reads untracked files // This is how File.changed() reads untracked files
const bunFile = Bun.file(untrackedPath) const content = await Filesystem.readText(untrackedPath)
const content = await bunFile.text()
const lines = content.split("\n").length const lines = content.split("\n").length
expect(lines).toBe(2) expect(lines).toBe(2)
}, },
@@ -232,7 +227,7 @@ describe("file/index Bun.file patterns", () => {
}) })
describe("Error handling", () => { describe("Error handling", () => {
test("handles errors gracefully in Bun.file().text()", async () => { test("handles errors gracefully in Filesystem.readText()", async () => {
await using tmp = await tmpdir() await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "readonly.txt") const filepath = path.join(tmp.path, "readonly.txt")
await fs.writeFile(filepath, "content", "utf-8") await fs.writeFile(filepath, "content", "utf-8")
@@ -240,9 +235,9 @@ describe("file/index Bun.file patterns", () => {
await Instance.provide({ await Instance.provide({
directory: tmp.path, directory: tmp.path,
fn: async () => { fn: async () => {
const nonExistentFile = Bun.file(path.join(tmp.path, "does-not-exist.txt")) const nonExistentPath = path.join(tmp.path, "does-not-exist.txt")
// Bun.file().text() on non-existent file throws // Filesystem.readText() on non-existent file throws
await expect(nonExistentFile.text()).rejects.toThrow() await expect(Filesystem.readText(nonExistentPath)).rejects.toThrow()
// But File.read() handles this gracefully // But File.read() handles this gracefully
const result = await File.read("does-not-exist.txt") const result = await File.read("does-not-exist.txt")
@@ -251,14 +246,14 @@ describe("file/index Bun.file patterns", () => {
}) })
}) })
test("handles errors in Bun.file().arrayBuffer()", async () => { test("handles errors in Filesystem.readArrayBuffer()", async () => {
await using tmp = await tmpdir() await using tmp = await tmpdir()
await Instance.provide({ await Instance.provide({
directory: tmp.path, directory: tmp.path,
fn: async () => { fn: async () => {
const nonExistentFile = Bun.file(path.join(tmp.path, "does-not-exist.bin")) const nonExistentPath = path.join(tmp.path, "does-not-exist.bin")
const buffer = await nonExistentFile.arrayBuffer().catch(() => new ArrayBuffer(0)) const buffer = await Filesystem.readArrayBuffer(nonExistentPath).catch(() => new ArrayBuffer(0))
expect(buffer.byteLength).toBe(0) expect(buffer.byteLength).toBe(0)
}, },
}) })
@@ -272,7 +267,6 @@ describe("file/index Bun.file patterns", () => {
await Instance.provide({ await Instance.provide({
directory: tmp.path, directory: tmp.path,
fn: async () => { fn: async () => {
const bunFile = Bun.file(filepath)
// File.read() handles missing images gracefully // File.read() handles missing images gracefully
const result = await File.read("broken.png") const result = await File.read("broken.png")
expect(result.type).toBe("text") expect(result.type).toBe("text")

View File

@@ -3,6 +3,7 @@ import path from "path"
import fs from "fs/promises" import fs from "fs/promises"
import { FileTime } from "../../src/file/time" import { FileTime } from "../../src/file/time"
import { Instance } from "../../src/project/instance" import { Instance } from "../../src/project/instance"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture" import { tmpdir } from "../fixture/fixture"
describe("file/time", () => { describe("file/time", () => {
@@ -312,8 +313,8 @@ describe("file/time", () => {
}) })
}) })
describe("stat() Bun.file pattern", () => { describe("stat() Filesystem.stat pattern", () => {
test("reads file modification time via Bun.file().stat()", async () => { test("reads file modification time via Filesystem.stat()", async () => {
await using tmp = await tmpdir() await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "file.txt") const filepath = path.join(tmp.path, "file.txt")
await fs.writeFile(filepath, "content", "utf-8") await fs.writeFile(filepath, "content", "utf-8")
@@ -323,9 +324,9 @@ describe("file/time", () => {
fn: async () => { fn: async () => {
FileTime.read(sessionID, filepath) FileTime.read(sessionID, filepath)
const stats = await Bun.file(filepath).stat() const stats = Filesystem.stat(filepath)
expect(stats.mtime).toBeInstanceOf(Date) expect(stats?.mtime).toBeInstanceOf(Date)
expect(stats.mtime.getTime()).toBeGreaterThan(0) expect(stats!.mtime.getTime()).toBeGreaterThan(0)
// FileTime.assert uses this stat internally // FileTime.assert uses this stat internally
await FileTime.assert(sessionID, filepath) await FileTime.assert(sessionID, filepath)
@@ -343,14 +344,14 @@ describe("file/time", () => {
fn: async () => { fn: async () => {
FileTime.read(sessionID, filepath) FileTime.read(sessionID, filepath)
const originalStat = await Bun.file(filepath).stat() const originalStat = Filesystem.stat(filepath)
// Wait and modify // Wait and modify
await new Promise((resolve) => setTimeout(resolve, 100)) await new Promise((resolve) => setTimeout(resolve, 100))
await fs.writeFile(filepath, "modified", "utf-8") await fs.writeFile(filepath, "modified", "utf-8")
const newStat = await Bun.file(filepath).stat() const newStat = Filesystem.stat(filepath)
expect(newStat.mtime.getTime()).toBeGreaterThan(originalStat.mtime.getTime()) expect(newStat!.mtime.getTime()).toBeGreaterThan(originalStat!.mtime.getTime())
await expect(FileTime.assert(sessionID, filepath)).rejects.toThrow() await expect(FileTime.assert(sessionID, filepath)).rejects.toThrow()
}, },

View File

@@ -4,6 +4,7 @@ import { Log } from "../../src/util/log"
import { $ } from "bun" import { $ } from "bun"
import path from "path" import path from "path"
import { tmpdir } from "../fixture/fixture" import { tmpdir } from "../fixture/fixture"
import { Filesystem } from "../../src/util/filesystem"
import { GlobalBus } from "../../src/bus/global" import { GlobalBus } from "../../src/bus/global"
Log.init({ print: false }) Log.init({ print: false })
@@ -78,7 +79,7 @@ describe("Project.fromDirectory", () => {
expect(project.worktree).toBe(tmp.path) expect(project.worktree).toBe(tmp.path)
const opencodeFile = path.join(tmp.path, ".git", "opencode") const opencodeFile = path.join(tmp.path, ".git", "opencode")
const fileExists = await Bun.file(opencodeFile).exists() const fileExists = await Filesystem.exists(opencodeFile)
expect(fileExists).toBe(false) expect(fileExists).toBe(false)
}) })
@@ -94,7 +95,7 @@ describe("Project.fromDirectory", () => {
expect(project.worktree).toBe(tmp.path) expect(project.worktree).toBe(tmp.path)
const opencodeFile = path.join(tmp.path, ".git", "opencode") const opencodeFile = path.join(tmp.path, ".git", "opencode")
const fileExists = await Bun.file(opencodeFile).exists() const fileExists = await Filesystem.exists(opencodeFile)
expect(fileExists).toBe(true) expect(fileExists).toBe(true)
}) })

View File

@@ -4,6 +4,7 @@ import fs from "fs/promises"
import path from "path" import path from "path"
import { Instance } from "../../src/project/instance" import { Instance } from "../../src/project/instance"
import { Worktree } from "../../src/worktree" import { Worktree } from "../../src/worktree"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture" import { tmpdir } from "../fixture/fixture"
describe("Worktree.remove", () => { describe("Worktree.remove", () => {
@@ -53,7 +54,7 @@ describe("Worktree.remove", () => {
})() })()
expect(ok).toBe(true) expect(ok).toBe(true)
expect(await Bun.file(dir).exists()).toBe(false) expect(await Filesystem.exists(dir)).toBe(false)
const list = await $`git worktree list --porcelain`.cwd(root).quiet().text() const list = await $`git worktree list --porcelain`.cwd(root).quiet().text()
expect(list).not.toContain(`worktree ${dir}`) expect(list).not.toContain(`worktree ${dir}`)

View File

@@ -7,11 +7,12 @@ import { Instance } from "../../src/project/instance"
import { Provider } from "../../src/provider/provider" import { Provider } from "../../src/provider/provider"
import { Env } from "../../src/env" import { Env } from "../../src/env"
import { Global } from "../../src/global" import { Global } from "../../src/global"
import { Filesystem } from "../../src/util/filesystem"
test("Bedrock: config region takes precedence over AWS_REGION env var", async () => { test("Bedrock: config region takes precedence over AWS_REGION env var", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -43,7 +44,7 @@ test("Bedrock: config region takes precedence over AWS_REGION env var", async ()
test("Bedrock: falls back to AWS_REGION env var when no config region", async () => { test("Bedrock: falls back to AWS_REGION env var when no config region", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -68,7 +69,7 @@ test("Bedrock: falls back to AWS_REGION env var when no config region", async ()
test("Bedrock: loads when bearer token from auth.json is present", async () => { test("Bedrock: loads when bearer token from auth.json is present", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -89,14 +90,14 @@ test("Bedrock: loads when bearer token from auth.json is present", async () => {
// Save original auth.json if it exists // Save original auth.json if it exists
let originalAuth: string | undefined let originalAuth: string | undefined
try { try {
originalAuth = await Bun.file(authPath).text() originalAuth = await Filesystem.readText(authPath)
} catch { } catch {
// File doesn't exist, that's fine // File doesn't exist, that's fine
} }
try { try {
// Write test auth.json // Write test auth.json
await Bun.write( await Filesystem.write(
authPath, authPath,
JSON.stringify({ JSON.stringify({
"amazon-bedrock": { "amazon-bedrock": {
@@ -122,7 +123,7 @@ test("Bedrock: loads when bearer token from auth.json is present", async () => {
} finally { } finally {
// Restore original or delete // Restore original or delete
if (originalAuth !== undefined) { if (originalAuth !== undefined) {
await Bun.write(authPath, originalAuth) await Filesystem.write(authPath, originalAuth)
} else { } else {
try { try {
await unlink(authPath) await unlink(authPath)
@@ -136,7 +137,7 @@ test("Bedrock: loads when bearer token from auth.json is present", async () => {
test("Bedrock: config profile takes precedence over AWS_PROFILE env var", async () => { test("Bedrock: config profile takes precedence over AWS_PROFILE env var", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -169,7 +170,7 @@ test("Bedrock: config profile takes precedence over AWS_PROFILE env var", async
test("Bedrock: includes custom endpoint in options when specified", async () => { test("Bedrock: includes custom endpoint in options when specified", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -202,7 +203,7 @@ test("Bedrock: includes custom endpoint in options when specified", async () =>
test("Bedrock: autoloads when AWS_WEB_IDENTITY_TOKEN_FILE is present", async () => { test("Bedrock: autoloads when AWS_WEB_IDENTITY_TOKEN_FILE is present", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -240,7 +241,7 @@ test("Bedrock: autoloads when AWS_WEB_IDENTITY_TOKEN_FILE is present", async ()
test("Bedrock: model with us. prefix should not be double-prefixed", async () => { test("Bedrock: model with us. prefix should not be double-prefixed", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -277,7 +278,7 @@ test("Bedrock: model with us. prefix should not be double-prefixed", async () =>
test("Bedrock: model with global. prefix should not be prefixed", async () => { test("Bedrock: model with global. prefix should not be prefixed", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -313,7 +314,7 @@ test("Bedrock: model with global. prefix should not be prefixed", async () => {
test("Bedrock: model with eu. prefix should not be double-prefixed", async () => { test("Bedrock: model with eu. prefix should not be double-prefixed", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",
@@ -349,7 +350,7 @@ test("Bedrock: model with eu. prefix should not be double-prefixed", async () =>
test("Bedrock: model without prefix in US region should get us. prefix added", async () => { test("Bedrock: model without prefix in US region should get us. prefix added", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
await Bun.write( await Filesystem.write(
path.join(dir, "opencode.json"), path.join(dir, "opencode.json"),
JSON.stringify({ JSON.stringify({
$schema: "https://opencode.ai/config.json", $schema: "https://opencode.ai/config.json",

View File

@@ -7,6 +7,7 @@ import { Instance } from "../../src/project/instance"
import { Provider } from "../../src/provider/provider" import { Provider } from "../../src/provider/provider"
import { ProviderTransform } from "../../src/provider/transform" import { ProviderTransform } from "../../src/provider/transform"
import { ModelsDev } from "../../src/provider/models" import { ModelsDev } from "../../src/provider/models"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture" import { tmpdir } from "../fixture/fixture"
import type { Agent } from "../../src/agent/agent" import type { Agent } from "../../src/agent/agent"
import type { MessageV2 } from "../../src/session/message-v2" import type { MessageV2 } from "../../src/session/message-v2"
@@ -185,7 +186,7 @@ function createChatStream(text: string) {
async function loadFixture(providerID: string, modelID: string) { async function loadFixture(providerID: string, modelID: string) {
const fixturePath = path.join(import.meta.dir, "../tool/fixtures/models-api.json") const fixturePath = path.join(import.meta.dir, "../tool/fixtures/models-api.json")
const data = (await Bun.file(fixturePath).json()) as Record<string, ModelsDev.Provider> const data = await Filesystem.readJson<Record<string, ModelsDev.Provider>>(fixturePath)
const provider = data[providerID] const provider = data[providerID]
if (!provider) { if (!provider) {
throw new Error(`Missing provider in fixture: ${providerID}`) throw new Error(`Missing provider in fixture: ${providerID}`)

View File

@@ -1,5 +1,6 @@
import { describe, test, expect } from "bun:test" import { describe, test, expect } from "bun:test"
import { Discovery } from "../../src/skill/discovery" import { Discovery } from "../../src/skill/discovery"
import { Filesystem } from "../../src/util/filesystem"
import path from "path" import path from "path"
const CLOUDFLARE_SKILLS_URL = "https://developers.cloudflare.com/.well-known/skills/" const CLOUDFLARE_SKILLS_URL = "https://developers.cloudflare.com/.well-known/skills/"
@@ -11,7 +12,7 @@ describe("Discovery.pull", () => {
for (const dir of dirs) { for (const dir of dirs) {
expect(dir).toStartWith(Discovery.dir()) expect(dir).toStartWith(Discovery.dir())
const md = path.join(dir, "SKILL.md") const md = path.join(dir, "SKILL.md")
expect(await Bun.file(md).exists()).toBe(true) expect(await Filesystem.exists(md)).toBe(true)
} }
}, 30_000) }, 30_000)
@@ -20,7 +21,7 @@ describe("Discovery.pull", () => {
expect(dirs.length).toBeGreaterThan(0) expect(dirs.length).toBeGreaterThan(0)
for (const dir of dirs) { for (const dir of dirs) {
const md = path.join(dir, "SKILL.md") const md = path.join(dir, "SKILL.md")
expect(await Bun.file(md).exists()).toBe(true) expect(await Filesystem.exists(md)).toBe(true)
} }
}, 30_000) }, 30_000)
@@ -40,7 +41,7 @@ describe("Discovery.pull", () => {
const agentsSdk = dirs.find((d) => d.endsWith("/agents-sdk")) const agentsSdk = dirs.find((d) => d.endsWith("/agents-sdk"))
if (agentsSdk) { if (agentsSdk) {
const refs = path.join(agentsSdk, "references") const refs = path.join(agentsSdk, "references")
expect(await Bun.file(path.join(agentsSdk, "SKILL.md")).exists()).toBe(true) expect(await Filesystem.exists(path.join(agentsSdk, "SKILL.md"))).toBe(true)
// agents-sdk has reference files per the index // agents-sdk has reference files per the index
const refDir = await Array.fromAsync(new Bun.Glob("**/*.md").scan({ cwd: refs, onlyFiles: true })) const refDir = await Array.fromAsync(new Bun.Glob("**/*.md").scan({ cwd: refs, onlyFiles: true }))
expect(refDir.length).toBeGreaterThan(0) expect(refDir.length).toBeGreaterThan(0)

View File

@@ -1,7 +1,9 @@
import { test, expect } from "bun:test" import { test, expect } from "bun:test"
import { $ } from "bun" import { $ } from "bun"
import fs from "fs/promises"
import { Snapshot } from "../../src/snapshot" import { Snapshot } from "../../src/snapshot"
import { Instance } from "../../src/project/instance" import { Instance } from "../../src/project/instance"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture" import { tmpdir } from "../fixture/fixture"
async function bootstrap() { async function bootstrap() {
@@ -11,8 +13,8 @@ async function bootstrap() {
const unique = Math.random().toString(36).slice(2) const unique = Math.random().toString(36).slice(2)
const aContent = `A${unique}` const aContent = `A${unique}`
const bContent = `B${unique}` const bContent = `B${unique}`
await Bun.write(`${dir}/a.txt`, aContent) await Filesystem.write(`${dir}/a.txt`, aContent)
await Bun.write(`${dir}/b.txt`, bContent) await Filesystem.write(`${dir}/b.txt`, bContent)
await $`git add .`.cwd(dir).quiet() await $`git add .`.cwd(dir).quiet()
await $`git commit --no-gpg-sign -m init`.cwd(dir).quiet() await $`git commit --no-gpg-sign -m init`.cwd(dir).quiet()
return { return {
@@ -46,11 +48,16 @@ test("revert should remove new files", async () => {
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${tmp.path}/new.txt`, "NEW") await Filesystem.write(`${tmp.path}/new.txt`, "NEW")
await Snapshot.revert([await Snapshot.patch(before!)]) await Snapshot.revert([await Snapshot.patch(before!)])
expect(await Bun.file(`${tmp.path}/new.txt`).exists()).toBe(false) expect(
await fs
.access(`${tmp.path}/new.txt`)
.then(() => true)
.catch(() => false),
).toBe(false)
}, },
}) })
}) })
@@ -64,11 +71,16 @@ test("revert in subdirectory", async () => {
expect(before).toBeTruthy() expect(before).toBeTruthy()
await $`mkdir -p ${tmp.path}/sub`.quiet() await $`mkdir -p ${tmp.path}/sub`.quiet()
await Bun.write(`${tmp.path}/sub/file.txt`, "SUB") await Filesystem.write(`${tmp.path}/sub/file.txt`, "SUB")
await Snapshot.revert([await Snapshot.patch(before!)]) await Snapshot.revert([await Snapshot.patch(before!)])
expect(await Bun.file(`${tmp.path}/sub/file.txt`).exists()).toBe(false) expect(
await fs
.access(`${tmp.path}/sub/file.txt`)
.then(() => true)
.catch(() => false),
).toBe(false)
// Note: revert currently only removes files, not directories // Note: revert currently only removes files, not directories
// The empty subdirectory will remain // The empty subdirectory will remain
}, },
@@ -84,18 +96,23 @@ test("multiple file operations", async () => {
expect(before).toBeTruthy() expect(before).toBeTruthy()
await $`rm ${tmp.path}/a.txt`.quiet() await $`rm ${tmp.path}/a.txt`.quiet()
await Bun.write(`${tmp.path}/c.txt`, "C") await Filesystem.write(`${tmp.path}/c.txt`, "C")
await $`mkdir -p ${tmp.path}/dir`.quiet() await $`mkdir -p ${tmp.path}/dir`.quiet()
await Bun.write(`${tmp.path}/dir/d.txt`, "D") await Filesystem.write(`${tmp.path}/dir/d.txt`, "D")
await Bun.write(`${tmp.path}/b.txt`, "MODIFIED") await Filesystem.write(`${tmp.path}/b.txt`, "MODIFIED")
await Snapshot.revert([await Snapshot.patch(before!)]) await Snapshot.revert([await Snapshot.patch(before!)])
expect(await Bun.file(`${tmp.path}/a.txt`).text()).toBe(tmp.extra.aContent) expect(await fs.readFile(`${tmp.path}/a.txt`, "utf-8")).toBe(tmp.extra.aContent)
expect(await Bun.file(`${tmp.path}/c.txt`).exists()).toBe(false) expect(
await fs
.access(`${tmp.path}/c.txt`)
.then(() => true)
.catch(() => false),
).toBe(false)
// Note: revert currently only removes files, not directories // Note: revert currently only removes files, not directories
// The empty directory will remain // The empty directory will remain
expect(await Bun.file(`${tmp.path}/b.txt`).text()).toBe(tmp.extra.bContent) expect(await fs.readFile(`${tmp.path}/b.txt`, "utf-8")).toBe(tmp.extra.bContent)
}, },
}) })
}) })
@@ -123,13 +140,18 @@ test("binary file handling", async () => {
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${tmp.path}/image.png`, new Uint8Array([0x89, 0x50, 0x4e, 0x47])) await Filesystem.write(`${tmp.path}/image.png`, new Uint8Array([0x89, 0x50, 0x4e, 0x47]))
const patch = await Snapshot.patch(before!) const patch = await Snapshot.patch(before!)
expect(patch.files).toContain(`${tmp.path}/image.png`) expect(patch.files).toContain(`${tmp.path}/image.png`)
await Snapshot.revert([patch]) await Snapshot.revert([patch])
expect(await Bun.file(`${tmp.path}/image.png`).exists()).toBe(false) expect(
await fs
.access(`${tmp.path}/image.png`)
.then(() => true)
.catch(() => false),
).toBe(false)
}, },
}) })
}) })
@@ -157,7 +179,7 @@ test("large file handling", async () => {
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${tmp.path}/large.txt`, "x".repeat(1024 * 1024)) await Filesystem.write(`${tmp.path}/large.txt`, "x".repeat(1024 * 1024))
expect((await Snapshot.patch(before!)).files).toContain(`${tmp.path}/large.txt`) expect((await Snapshot.patch(before!)).files).toContain(`${tmp.path}/large.txt`)
}, },
@@ -173,11 +195,16 @@ test("nested directory revert", async () => {
expect(before).toBeTruthy() expect(before).toBeTruthy()
await $`mkdir -p ${tmp.path}/level1/level2/level3`.quiet() await $`mkdir -p ${tmp.path}/level1/level2/level3`.quiet()
await Bun.write(`${tmp.path}/level1/level2/level3/deep.txt`, "DEEP") await Filesystem.write(`${tmp.path}/level1/level2/level3/deep.txt`, "DEEP")
await Snapshot.revert([await Snapshot.patch(before!)]) await Snapshot.revert([await Snapshot.patch(before!)])
expect(await Bun.file(`${tmp.path}/level1/level2/level3/deep.txt`).exists()).toBe(false) expect(
await fs
.access(`${tmp.path}/level1/level2/level3/deep.txt`)
.then(() => true)
.catch(() => false),
).toBe(false)
}, },
}) })
}) })
@@ -190,9 +217,9 @@ test("special characters in filenames", async () => {
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${tmp.path}/file with spaces.txt`, "SPACES") await Filesystem.write(`${tmp.path}/file with spaces.txt`, "SPACES")
await Bun.write(`${tmp.path}/file-with-dashes.txt`, "DASHES") await Filesystem.write(`${tmp.path}/file-with-dashes.txt`, "DASHES")
await Bun.write(`${tmp.path}/file_with_underscores.txt`, "UNDERSCORES") await Filesystem.write(`${tmp.path}/file_with_underscores.txt`, "UNDERSCORES")
const files = (await Snapshot.patch(before!)).files const files = (await Snapshot.patch(before!)).files
expect(files).toContain(`${tmp.path}/file with spaces.txt`) expect(files).toContain(`${tmp.path}/file with spaces.txt`)
@@ -225,7 +252,7 @@ test("patch with invalid hash", async () => {
expect(before).toBeTruthy() expect(before).toBeTruthy()
// Create a change // Create a change
await Bun.write(`${tmp.path}/test.txt`, "TEST") await Filesystem.write(`${tmp.path}/test.txt`, "TEST")
// Try to patch with invalid hash - should handle gracefully // Try to patch with invalid hash - should handle gracefully
const patch = await Snapshot.patch("invalid-hash-12345") const patch = await Snapshot.patch("invalid-hash-12345")
@@ -273,7 +300,7 @@ test("unicode filenames", async () => {
] ]
for (const file of unicodeFiles) { for (const file of unicodeFiles) {
await Bun.write(file.path, file.content) await Filesystem.write(file.path, file.content)
} }
const patch = await Snapshot.patch(before!) const patch = await Snapshot.patch(before!)
@@ -286,7 +313,12 @@ test("unicode filenames", async () => {
await Snapshot.revert([patch]) await Snapshot.revert([patch])
for (const file of unicodeFiles) { for (const file of unicodeFiles) {
expect(await Bun.file(file.path).exists()).toBe(false) expect(
await fs
.access(file.path)
.then(() => true)
.catch(() => false),
).toBe(false)
} }
}, },
}) })
@@ -300,14 +332,14 @@ test.skip("unicode filenames modification and restore", async () => {
const chineseFile = `${tmp.path}/文件.txt` const chineseFile = `${tmp.path}/文件.txt`
const cyrillicFile = `${tmp.path}/файл.txt` const cyrillicFile = `${tmp.path}/файл.txt`
await Bun.write(chineseFile, "original chinese") await Filesystem.write(chineseFile, "original chinese")
await Bun.write(cyrillicFile, "original cyrillic") await Filesystem.write(cyrillicFile, "original cyrillic")
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(chineseFile, "modified chinese") await Filesystem.write(chineseFile, "modified chinese")
await Bun.write(cyrillicFile, "modified cyrillic") await Filesystem.write(cyrillicFile, "modified cyrillic")
const patch = await Snapshot.patch(before!) const patch = await Snapshot.patch(before!)
expect(patch.files).toContain(chineseFile) expect(patch.files).toContain(chineseFile)
@@ -315,8 +347,8 @@ test.skip("unicode filenames modification and restore", async () => {
await Snapshot.revert([patch]) await Snapshot.revert([patch])
expect(await Bun.file(chineseFile).text()).toBe("original chinese") expect(await fs.readFile(chineseFile, "utf-8")).toBe("original chinese")
expect(await Bun.file(cyrillicFile).text()).toBe("original cyrillic") expect(await fs.readFile(cyrillicFile, "utf-8")).toBe("original cyrillic")
}, },
}) })
}) })
@@ -331,13 +363,18 @@ test("unicode filenames in subdirectories", async () => {
await $`mkdir -p "${tmp.path}/目录/подкаталог"`.quiet() await $`mkdir -p "${tmp.path}/目录/подкаталог"`.quiet()
const deepFile = `${tmp.path}/目录/подкаталог/文件.txt` const deepFile = `${tmp.path}/目录/подкаталог/文件.txt`
await Bun.write(deepFile, "deep unicode content") await Filesystem.write(deepFile, "deep unicode content")
const patch = await Snapshot.patch(before!) const patch = await Snapshot.patch(before!)
expect(patch.files).toContain(deepFile) expect(patch.files).toContain(deepFile)
await Snapshot.revert([patch]) await Snapshot.revert([patch])
expect(await Bun.file(deepFile).exists()).toBe(false) expect(
await fs
.access(deepFile)
.then(() => true)
.catch(() => false),
).toBe(false)
}, },
}) })
}) })
@@ -353,13 +390,18 @@ test("very long filenames", async () => {
const longName = "a".repeat(200) + ".txt" const longName = "a".repeat(200) + ".txt"
const longFile = `${tmp.path}/${longName}` const longFile = `${tmp.path}/${longName}`
await Bun.write(longFile, "long filename content") await Filesystem.write(longFile, "long filename content")
const patch = await Snapshot.patch(before!) const patch = await Snapshot.patch(before!)
expect(patch.files).toContain(longFile) expect(patch.files).toContain(longFile)
await Snapshot.revert([patch]) await Snapshot.revert([patch])
expect(await Bun.file(longFile).exists()).toBe(false) expect(
await fs
.access(longFile)
.then(() => true)
.catch(() => false),
).toBe(false)
}, },
}) })
}) })
@@ -372,9 +414,9 @@ test("hidden files", async () => {
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${tmp.path}/.hidden`, "hidden content") await Filesystem.write(`${tmp.path}/.hidden`, "hidden content")
await Bun.write(`${tmp.path}/.gitignore`, "*.log") await Filesystem.write(`${tmp.path}/.gitignore`, "*.log")
await Bun.write(`${tmp.path}/.config`, "config content") await Filesystem.write(`${tmp.path}/.config`, "config content")
const patch = await Snapshot.patch(before!) const patch = await Snapshot.patch(before!)
expect(patch.files).toContain(`${tmp.path}/.hidden`) expect(patch.files).toContain(`${tmp.path}/.hidden`)
@@ -393,7 +435,7 @@ test("nested symlinks", async () => {
expect(before).toBeTruthy() expect(before).toBeTruthy()
await $`mkdir -p ${tmp.path}/sub/dir`.quiet() await $`mkdir -p ${tmp.path}/sub/dir`.quiet()
await Bun.write(`${tmp.path}/sub/dir/target.txt`, "target content") await Filesystem.write(`${tmp.path}/sub/dir/target.txt`, "target content")
await $`ln -s ${tmp.path}/sub/dir/target.txt ${tmp.path}/sub/dir/link.txt`.quiet() await $`ln -s ${tmp.path}/sub/dir/target.txt ${tmp.path}/sub/dir/link.txt`.quiet()
await $`ln -s ${tmp.path}/sub ${tmp.path}/sub-link`.quiet() await $`ln -s ${tmp.path}/sub ${tmp.path}/sub-link`.quiet()
@@ -450,9 +492,9 @@ test("gitignore changes", async () => {
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${tmp.path}/.gitignore`, "*.ignored") await Filesystem.write(`${tmp.path}/.gitignore`, "*.ignored")
await Bun.write(`${tmp.path}/test.ignored`, "ignored content") await Filesystem.write(`${tmp.path}/test.ignored`, "ignored content")
await Bun.write(`${tmp.path}/normal.txt`, "normal content") await Filesystem.write(`${tmp.path}/normal.txt`, "normal content")
const patch = await Snapshot.patch(before!) const patch = await Snapshot.patch(before!)
@@ -477,7 +519,7 @@ test("concurrent file operations during patch", async () => {
// Start creating files // Start creating files
const createPromise = (async () => { const createPromise = (async () => {
for (let i = 0; i < 10; i++) { for (let i = 0; i < 10; i++) {
await Bun.write(`${tmp.path}/concurrent${i}.txt`, `concurrent${i}`) await Filesystem.write(`${tmp.path}/concurrent${i}.txt`, `concurrent${i}`)
// Small delay to simulate concurrent operations // Small delay to simulate concurrent operations
await new Promise((resolve) => setTimeout(resolve, 1)) await new Promise((resolve) => setTimeout(resolve, 1))
} }
@@ -504,7 +546,7 @@ test("snapshot state isolation between projects", async () => {
directory: tmp1.path, directory: tmp1.path,
fn: async () => { fn: async () => {
const before1 = await Snapshot.track() const before1 = await Snapshot.track()
await Bun.write(`${tmp1.path}/project1.txt`, "project1 content") await Filesystem.write(`${tmp1.path}/project1.txt`, "project1 content")
const patch1 = await Snapshot.patch(before1!) const patch1 = await Snapshot.patch(before1!)
expect(patch1.files).toContain(`${tmp1.path}/project1.txt`) expect(patch1.files).toContain(`${tmp1.path}/project1.txt`)
}, },
@@ -514,7 +556,7 @@ test("snapshot state isolation between projects", async () => {
directory: tmp2.path, directory: tmp2.path,
fn: async () => { fn: async () => {
const before2 = await Snapshot.track() const before2 = await Snapshot.track()
await Bun.write(`${tmp2.path}/project2.txt`, "project2 content") await Filesystem.write(`${tmp2.path}/project2.txt`, "project2 content")
const patch2 = await Snapshot.patch(before2!) const patch2 = await Snapshot.patch(before2!)
expect(patch2.files).toContain(`${tmp2.path}/project2.txt`) expect(patch2.files).toContain(`${tmp2.path}/project2.txt`)
@@ -544,7 +586,7 @@ test("patch detects changes in secondary worktree", async () => {
expect(before).toBeTruthy() expect(before).toBeTruthy()
const worktreeFile = `${worktreePath}/worktree.txt` const worktreeFile = `${worktreePath}/worktree.txt`
await Bun.write(worktreeFile, "worktree content") await Filesystem.write(worktreeFile, "worktree content")
const patch = await Snapshot.patch(before!) const patch = await Snapshot.patch(before!)
expect(patch.files).toContain(worktreeFile) expect(patch.files).toContain(worktreeFile)
@@ -569,7 +611,7 @@ test("revert only removes files in invoking worktree", async () => {
}, },
}) })
const primaryFile = `${tmp.path}/worktree.txt` const primaryFile = `${tmp.path}/worktree.txt`
await Bun.write(primaryFile, "primary content") await Filesystem.write(primaryFile, "primary content")
await Instance.provide({ await Instance.provide({
directory: worktreePath, directory: worktreePath,
@@ -578,16 +620,21 @@ test("revert only removes files in invoking worktree", async () => {
expect(before).toBeTruthy() expect(before).toBeTruthy()
const worktreeFile = `${worktreePath}/worktree.txt` const worktreeFile = `${worktreePath}/worktree.txt`
await Bun.write(worktreeFile, "worktree content") await Filesystem.write(worktreeFile, "worktree content")
const patch = await Snapshot.patch(before!) const patch = await Snapshot.patch(before!)
await Snapshot.revert([patch]) await Snapshot.revert([patch])
expect(await Bun.file(worktreeFile).exists()).toBe(false) expect(
await fs
.access(worktreeFile)
.then(() => true)
.catch(() => false),
).toBe(false)
}, },
}) })
expect(await Bun.file(primaryFile).text()).toBe("primary content") expect(await fs.readFile(primaryFile, "utf-8")).toBe("primary content")
} finally { } finally {
await $`git worktree remove --force ${worktreePath}`.cwd(tmp.path).quiet().nothrow() await $`git worktree remove --force ${worktreePath}`.cwd(tmp.path).quiet().nothrow()
await $`rm -rf ${worktreePath}`.quiet() await $`rm -rf ${worktreePath}`.quiet()
@@ -614,10 +661,10 @@ test("diff reports worktree-only/shared edits and ignores primary-only", async (
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${worktreePath}/worktree-only.txt`, "worktree diff content") await Filesystem.write(`${worktreePath}/worktree-only.txt`, "worktree diff content")
await Bun.write(`${worktreePath}/shared.txt`, "worktree edit") await Filesystem.write(`${worktreePath}/shared.txt`, "worktree edit")
await Bun.write(`${tmp.path}/shared.txt`, "primary edit") await Filesystem.write(`${tmp.path}/shared.txt`, "primary edit")
await Bun.write(`${tmp.path}/primary-only.txt`, "primary change") await Filesystem.write(`${tmp.path}/primary-only.txt`, "primary change")
const diff = await Snapshot.diff(before!) const diff = await Snapshot.diff(before!)
expect(diff).toContain("worktree-only.txt") expect(diff).toContain("worktree-only.txt")
@@ -662,8 +709,8 @@ test("diff function with various changes", async () => {
// Make various changes // Make various changes
await $`rm ${tmp.path}/a.txt`.quiet() await $`rm ${tmp.path}/a.txt`.quiet()
await Bun.write(`${tmp.path}/new.txt`, "new content") await Filesystem.write(`${tmp.path}/new.txt`, "new content")
await Bun.write(`${tmp.path}/b.txt`, "modified content") await Filesystem.write(`${tmp.path}/b.txt`, "modified content")
const diff = await Snapshot.diff(before!) const diff = await Snapshot.diff(before!)
expect(diff).toContain("a.txt") expect(diff).toContain("a.txt")
@@ -683,16 +730,26 @@ test("restore function", async () => {
// Make changes // Make changes
await $`rm ${tmp.path}/a.txt`.quiet() await $`rm ${tmp.path}/a.txt`.quiet()
await Bun.write(`${tmp.path}/new.txt`, "new content") await Filesystem.write(`${tmp.path}/new.txt`, "new content")
await Bun.write(`${tmp.path}/b.txt`, "modified") await Filesystem.write(`${tmp.path}/b.txt`, "modified")
// Restore to original state // Restore to original state
await Snapshot.restore(before!) await Snapshot.restore(before!)
expect(await Bun.file(`${tmp.path}/a.txt`).exists()).toBe(true) expect(
expect(await Bun.file(`${tmp.path}/a.txt`).text()).toBe(tmp.extra.aContent) await fs
expect(await Bun.file(`${tmp.path}/new.txt`).exists()).toBe(true) // New files should remain .access(`${tmp.path}/a.txt`)
expect(await Bun.file(`${tmp.path}/b.txt`).text()).toBe(tmp.extra.bContent) .then(() => true)
.catch(() => false),
).toBe(true)
expect(await fs.readFile(`${tmp.path}/a.txt`, "utf-8")).toBe(tmp.extra.aContent)
expect(
await fs
.access(`${tmp.path}/new.txt`)
.then(() => true)
.catch(() => false),
).toBe(true) // New files should remain
expect(await fs.readFile(`${tmp.path}/b.txt`, "utf-8")).toBe(tmp.extra.bContent)
}, },
}) })
}) })
@@ -710,14 +767,19 @@ test("revert should not delete files that existed but were deleted in snapshot",
const snapshot2 = await Snapshot.track() const snapshot2 = await Snapshot.track()
expect(snapshot2).toBeTruthy() expect(snapshot2).toBeTruthy()
await Bun.write(`${tmp.path}/a.txt`, "recreated content") await Filesystem.write(`${tmp.path}/a.txt`, "recreated content")
const patch = await Snapshot.patch(snapshot2!) const patch = await Snapshot.patch(snapshot2!)
expect(patch.files).toContain(`${tmp.path}/a.txt`) expect(patch.files).toContain(`${tmp.path}/a.txt`)
await Snapshot.revert([patch]) await Snapshot.revert([patch])
expect(await Bun.file(`${tmp.path}/a.txt`).exists()).toBe(false) expect(
await fs
.access(`${tmp.path}/a.txt`)
.then(() => true)
.catch(() => false),
).toBe(false)
}, },
}) })
}) })
@@ -727,14 +789,14 @@ test("revert preserves file that existed in snapshot when deleted then recreated
await Instance.provide({ await Instance.provide({
directory: tmp.path, directory: tmp.path,
fn: async () => { fn: async () => {
await Bun.write(`${tmp.path}/existing.txt`, "original content") await Filesystem.write(`${tmp.path}/existing.txt`, "original content")
const snapshot = await Snapshot.track() const snapshot = await Snapshot.track()
expect(snapshot).toBeTruthy() expect(snapshot).toBeTruthy()
await $`rm ${tmp.path}/existing.txt`.quiet() await $`rm ${tmp.path}/existing.txt`.quiet()
await Bun.write(`${tmp.path}/existing.txt`, "recreated") await Filesystem.write(`${tmp.path}/existing.txt`, "recreated")
await Bun.write(`${tmp.path}/newfile.txt`, "new") await Filesystem.write(`${tmp.path}/newfile.txt`, "new")
const patch = await Snapshot.patch(snapshot!) const patch = await Snapshot.patch(snapshot!)
expect(patch.files).toContain(`${tmp.path}/existing.txt`) expect(patch.files).toContain(`${tmp.path}/existing.txt`)
@@ -742,9 +804,19 @@ test("revert preserves file that existed in snapshot when deleted then recreated
await Snapshot.revert([patch]) await Snapshot.revert([patch])
expect(await Bun.file(`${tmp.path}/newfile.txt`).exists()).toBe(false) expect(
expect(await Bun.file(`${tmp.path}/existing.txt`).exists()).toBe(true) await fs
expect(await Bun.file(`${tmp.path}/existing.txt`).text()).toBe("original content") .access(`${tmp.path}/newfile.txt`)
.then(() => true)
.catch(() => false),
).toBe(false)
expect(
await fs
.access(`${tmp.path}/existing.txt`)
.then(() => true)
.catch(() => false),
).toBe(true)
expect(await fs.readFile(`${tmp.path}/existing.txt`, "utf-8")).toBe("original content")
}, },
}) })
}) })
@@ -754,17 +826,17 @@ test("diffFull sets status based on git change type", async () => {
await Instance.provide({ await Instance.provide({
directory: tmp.path, directory: tmp.path,
fn: async () => { fn: async () => {
await Bun.write(`${tmp.path}/grow.txt`, "one\n") await Filesystem.write(`${tmp.path}/grow.txt`, "one\n")
await Bun.write(`${tmp.path}/trim.txt`, "line1\nline2\n") await Filesystem.write(`${tmp.path}/trim.txt`, "line1\nline2\n")
await Bun.write(`${tmp.path}/delete.txt`, "gone") await Filesystem.write(`${tmp.path}/delete.txt`, "gone")
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${tmp.path}/grow.txt`, "one\ntwo\n") await Filesystem.write(`${tmp.path}/grow.txt`, "one\ntwo\n")
await Bun.write(`${tmp.path}/trim.txt`, "line1\n") await Filesystem.write(`${tmp.path}/trim.txt`, "line1\n")
await $`rm ${tmp.path}/delete.txt`.quiet() await $`rm ${tmp.path}/delete.txt`.quiet()
await Bun.write(`${tmp.path}/added.txt`, "new") await Filesystem.write(`${tmp.path}/added.txt`, "new")
const after = await Snapshot.track() const after = await Snapshot.track()
expect(after).toBeTruthy() expect(after).toBeTruthy()
@@ -803,7 +875,7 @@ test("diffFull with new file additions", async () => {
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${tmp.path}/new.txt`, "new content") await Filesystem.write(`${tmp.path}/new.txt`, "new content")
const after = await Snapshot.track() const after = await Snapshot.track()
expect(after).toBeTruthy() expect(after).toBeTruthy()
@@ -829,7 +901,7 @@ test("diffFull with file modifications", async () => {
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${tmp.path}/b.txt`, "modified content") await Filesystem.write(`${tmp.path}/b.txt`, "modified content")
const after = await Snapshot.track() const after = await Snapshot.track()
expect(after).toBeTruthy() expect(after).toBeTruthy()
@@ -881,7 +953,7 @@ test("diffFull with multiple line additions", async () => {
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${tmp.path}/multi.txt`, "line1\nline2\nline3") await Filesystem.write(`${tmp.path}/multi.txt`, "line1\nline2\nline3")
const after = await Snapshot.track() const after = await Snapshot.track()
expect(after).toBeTruthy() expect(after).toBeTruthy()
@@ -907,7 +979,7 @@ test("diffFull with addition and deletion", async () => {
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${tmp.path}/added.txt`, "added content") await Filesystem.write(`${tmp.path}/added.txt`, "added content")
await $`rm ${tmp.path}/a.txt`.quiet() await $`rm ${tmp.path}/a.txt`.quiet()
const after = await Snapshot.track() const after = await Snapshot.track()
@@ -941,8 +1013,8 @@ test("diffFull with multiple additions and deletions", async () => {
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${tmp.path}/multi1.txt`, "line1\nline2\nline3") await Filesystem.write(`${tmp.path}/multi1.txt`, "line1\nline2\nline3")
await Bun.write(`${tmp.path}/multi2.txt`, "single line") await Filesystem.write(`${tmp.path}/multi2.txt`, "single line")
await $`rm ${tmp.path}/a.txt`.quiet() await $`rm ${tmp.path}/a.txt`.quiet()
await $`rm ${tmp.path}/b.txt`.quiet() await $`rm ${tmp.path}/b.txt`.quiet()
@@ -1000,7 +1072,7 @@ test("diffFull with binary file changes", async () => {
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${tmp.path}/binary.bin`, new Uint8Array([0x00, 0x01, 0x02, 0x03])) await Filesystem.write(`${tmp.path}/binary.bin`, new Uint8Array([0x00, 0x01, 0x02, 0x03]))
const after = await Snapshot.track() const after = await Snapshot.track()
expect(after).toBeTruthy() expect(after).toBeTruthy()
@@ -1020,11 +1092,11 @@ test("diffFull with whitespace changes", async () => {
await Instance.provide({ await Instance.provide({
directory: tmp.path, directory: tmp.path,
fn: async () => { fn: async () => {
await Bun.write(`${tmp.path}/whitespace.txt`, "line1\nline2") await Filesystem.write(`${tmp.path}/whitespace.txt`, "line1\nline2")
const before = await Snapshot.track() const before = await Snapshot.track()
expect(before).toBeTruthy() expect(before).toBeTruthy()
await Bun.write(`${tmp.path}/whitespace.txt`, "line1\n\nline2\n") await Filesystem.write(`${tmp.path}/whitespace.txt`, "line1\n\nline2\n")
const after = await Snapshot.track() const after = await Snapshot.track()
expect(after).toBeTruthy() expect(after).toBeTruthy()

View File

@@ -2,6 +2,7 @@ import { describe, expect, test } from "bun:test"
import path from "path" import path from "path"
import { BashTool } from "../../src/tool/bash" import { BashTool } from "../../src/tool/bash"
import { Instance } from "../../src/project/instance" import { Instance } from "../../src/project/instance"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture" import { tmpdir } from "../fixture/fixture"
import type { PermissionNext } from "../../src/permission/next" import type { PermissionNext } from "../../src/permission/next"
import { Truncate } from "../../src/tool/truncation" import { Truncate } from "../../src/tool/truncation"
@@ -388,7 +389,7 @@ describe("tool.bash truncation", () => {
const filepath = (result.metadata as any).outputPath const filepath = (result.metadata as any).outputPath
expect(filepath).toBeTruthy() expect(filepath).toBeTruthy()
const saved = await Bun.file(filepath).text() const saved = await Filesystem.readText(filepath)
const lines = saved.trim().split("\n") const lines = saved.trim().split("\n")
expect(lines.length).toBe(lineCount) expect(lines.length).toBe(lineCount)
expect(lines[0]).toBe("1") expect(lines[0]).toBe("1")

View File

@@ -2,6 +2,7 @@ import { describe, expect, test } from "bun:test"
import path from "path" import path from "path"
import { ReadTool } from "../../src/tool/read" import { ReadTool } from "../../src/tool/read"
import { Instance } from "../../src/project/instance" import { Instance } from "../../src/project/instance"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture" import { tmpdir } from "../fixture/fixture"
import { PermissionNext } from "../../src/permission/next" import { PermissionNext } from "../../src/permission/next"
import { Agent } from "../../src/agent/agent" import { Agent } from "../../src/agent/agent"
@@ -199,10 +200,10 @@ describe("tool.read truncation", () => {
test("truncates large file by bytes and sets truncated metadata", async () => { test("truncates large file by bytes and sets truncated metadata", async () => {
await using tmp = await tmpdir({ await using tmp = await tmpdir({
init: async (dir) => { init: async (dir) => {
const base = await Bun.file(path.join(FIXTURES_DIR, "models-api.json")).text() const base = await Filesystem.readText(path.join(FIXTURES_DIR, "models-api.json"))
const target = 60 * 1024 const target = 60 * 1024
const content = base.length >= target ? base : base.repeat(Math.ceil(target / base.length)) const content = base.length >= target ? base : base.repeat(Math.ceil(target / base.length))
await Bun.write(path.join(dir, "large.json"), content) await Filesystem.write(path.join(dir, "large.json"), content)
}, },
}) })
await Instance.provide({ await Instance.provide({

View File

@@ -1,6 +1,7 @@
import { describe, test, expect, afterAll } from "bun:test" import { describe, test, expect, afterAll } from "bun:test"
import { Truncate } from "../../src/tool/truncation" import { Truncate } from "../../src/tool/truncation"
import { Identifier } from "../../src/id/id" import { Identifier } from "../../src/id/id"
import { Filesystem } from "../../src/util/filesystem"
import fs from "fs/promises" import fs from "fs/promises"
import path from "path" import path from "path"
@@ -9,7 +10,7 @@ const FIXTURES_DIR = path.join(import.meta.dir, "fixtures")
describe("Truncate", () => { describe("Truncate", () => {
describe("output", () => { describe("output", () => {
test("truncates large json file by bytes", async () => { test("truncates large json file by bytes", async () => {
const content = await Bun.file(path.join(FIXTURES_DIR, "models-api.json")).text() const content = await Filesystem.readText(path.join(FIXTURES_DIR, "models-api.json"))
const result = await Truncate.output(content) const result = await Truncate.output(content)
expect(result.truncated).toBe(true) expect(result.truncated).toBe(true)
@@ -69,7 +70,7 @@ describe("Truncate", () => {
}) })
test("large single-line file truncates with byte message", async () => { test("large single-line file truncates with byte message", async () => {
const content = await Bun.file(path.join(FIXTURES_DIR, "models-api.json")).text() const content = await Filesystem.readText(path.join(FIXTURES_DIR, "models-api.json"))
const result = await Truncate.output(content) const result = await Truncate.output(content)
expect(result.truncated).toBe(true) expect(result.truncated).toBe(true)
@@ -88,7 +89,7 @@ describe("Truncate", () => {
expect(result.outputPath).toBeDefined() expect(result.outputPath).toBeDefined()
expect(result.outputPath).toContain("tool_") expect(result.outputPath).toContain("tool_")
const written = await Bun.file(result.outputPath).text() const written = await Filesystem.readText(result.outputPath!)
expect(written).toBe(lines) expect(written).toBe(lines)
}) })
@@ -139,21 +140,21 @@ describe("Truncate", () => {
const oldTimestamp = Date.now() - 10 * DAY_MS const oldTimestamp = Date.now() - 10 * DAY_MS
const oldId = Identifier.create("tool", false, oldTimestamp) const oldId = Identifier.create("tool", false, oldTimestamp)
oldFile = path.join(Truncate.DIR, oldId) oldFile = path.join(Truncate.DIR, oldId)
await Bun.write(Bun.file(oldFile), "old content") await Filesystem.write(oldFile, "old content")
// Create a recent file (3 days ago) // Create a recent file (3 days ago)
const recentTimestamp = Date.now() - 3 * DAY_MS const recentTimestamp = Date.now() - 3 * DAY_MS
const recentId = Identifier.create("tool", false, recentTimestamp) const recentId = Identifier.create("tool", false, recentTimestamp)
recentFile = path.join(Truncate.DIR, recentId) recentFile = path.join(Truncate.DIR, recentId)
await Bun.write(Bun.file(recentFile), "recent content") await Filesystem.write(recentFile, "recent content")
await Truncate.cleanup() await Truncate.cleanup()
// Old file should be deleted // Old file should be deleted
expect(await Bun.file(oldFile).exists()).toBe(false) expect(await Filesystem.exists(oldFile)).toBe(false)
// Recent file should still exist // Recent file should still exist
expect(await Bun.file(recentFile).exists()).toBe(true) expect(await Filesystem.exists(recentFile)).toBe(true)
}) })
}) })
}) })

View File

@@ -285,4 +285,125 @@ describe("filesystem", () => {
expect(Filesystem.mimeType("Makefile")).toBe("application/octet-stream") expect(Filesystem.mimeType("Makefile")).toBe("application/octet-stream")
}) })
}) })
describe("writeStream()", () => {
test("writes from Web ReadableStream", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "streamed.txt")
const content = "Hello from stream!"
const encoder = new TextEncoder()
const stream = new ReadableStream({
start(controller) {
controller.enqueue(encoder.encode(content))
controller.close()
},
})
await Filesystem.writeStream(filepath, stream)
expect(await fs.readFile(filepath, "utf-8")).toBe(content)
})
test("writes from Node.js Readable stream", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "node-streamed.txt")
const content = "Hello from Node stream!"
const { Readable } = await import("stream")
const stream = Readable.from([content])
await Filesystem.writeStream(filepath, stream)
expect(await fs.readFile(filepath, "utf-8")).toBe(content)
})
test("writes binary data from Web ReadableStream", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "binary.dat")
const binaryData = new Uint8Array([0x00, 0x01, 0x02, 0x03, 0xff])
const stream = new ReadableStream({
start(controller) {
controller.enqueue(binaryData)
controller.close()
},
})
await Filesystem.writeStream(filepath, stream)
const read = await fs.readFile(filepath)
expect(Buffer.from(read)).toEqual(Buffer.from(binaryData))
})
test("writes large content in chunks", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "large.txt")
const chunks = ["chunk1", "chunk2", "chunk3", "chunk4", "chunk5"]
const stream = new ReadableStream({
start(controller) {
for (const chunk of chunks) {
controller.enqueue(new TextEncoder().encode(chunk))
}
controller.close()
},
})
await Filesystem.writeStream(filepath, stream)
expect(await fs.readFile(filepath, "utf-8")).toBe(chunks.join(""))
})
test("creates parent directories", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "nested", "deep", "streamed.txt")
const content = "nested stream content"
const stream = new ReadableStream({
start(controller) {
controller.enqueue(new TextEncoder().encode(content))
controller.close()
},
})
await Filesystem.writeStream(filepath, stream)
expect(await fs.readFile(filepath, "utf-8")).toBe(content)
})
test("writes with permissions", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "protected-stream.txt")
const content = "secret stream content"
const stream = new ReadableStream({
start(controller) {
controller.enqueue(new TextEncoder().encode(content))
controller.close()
},
})
await Filesystem.writeStream(filepath, stream, 0o600)
const stats = await fs.stat(filepath)
if (process.platform !== "win32") {
expect(stats.mode & 0o777).toBe(0o600)
}
})
test("writes executable with permissions", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "script.sh")
const content = "#!/bin/bash\necho hello"
const stream = new ReadableStream({
start(controller) {
controller.enqueue(new TextEncoder().encode(content))
controller.close()
},
})
await Filesystem.writeStream(filepath, stream, 0o755)
const stats = await fs.stat(filepath)
if (process.platform !== "win32") {
expect(stats.mode & 0o777).toBe(0o755)
}
expect(await fs.readFile(filepath, "utf-8")).toBe(content)
})
})
}) })