Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 29 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -217,6 +217,20 @@ Commands with JSON output support:
- `--output json`, `-o json` - Output JSON with liveViewUrl
- `kernel browsers get <id>` - Get detailed browser session info
- `--output json`, `-o json` - Output raw JSON object
- `kernel browsers curl <id> <url>` - Make HTTP requests through a browser session's Chrome network stack
- `-X, --request <method>` - HTTP method (default: GET; defaults to POST when `--data` is set)
- `-H, --header <header>` - HTTP header, repeatable (`"Key: Value"` format)
- `-d, --data <body>` - Request body
- `--data-file <path>` - Read request body from file
- `--max-time <seconds>` - Maximum time allowed for the request (default: 30)
- `-o, --output <path>` - Write response body to file
- `-I, --head` - Fetch headers only
- `-i, --include` - Include response headers in output
- `-D, --dump-header <path>` - Write received headers to file (use `-` for stdout)
- `-w, --write-out <format>` - Output text after completion; supports `%{http_code}`, `%{response_code}`, `%{time_total}`, and `%{size_download}`
- `-f, --fail` - Fail with no body output on HTTP errors
- `-s, --silent` - Suppress progress output
- _Note: redirects are followed automatically by Chromium._

### Browser Pools

Expand Down Expand Up @@ -593,6 +607,21 @@ kernel browsers delete browser123
# Get live view URL
kernel browsers view browser123

# Make an HTTP request through the browser session
kernel browsers curl browser123 https://example.com

# Include response headers and save the response to a file
kernel browsers curl browser123 -i -o page.html https://example.com

# Send JSON and print curl-style status metrics
kernel browsers curl browser123 https://api.example.com \
-H "Content-Type: application/json" \
-d '{"key":"value"}' \
-w 'status=%{http_code} bytes=%{size_download}\n'

# Fail on HTTP errors without printing the response body
kernel browsers curl browser123 -f https://example.com/missing

# Stream browser logs
kernel browsers logs stream my-browser --source supervisor --follow --supervisor-process chromium

Expand Down
302 changes: 302 additions & 0 deletions cmd/browsers.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import (
"regexp"
"strconv"
"strings"
"time"

"github.com/kernel/cli/pkg/table"
"github.com/kernel/cli/pkg/util"
Expand All @@ -36,6 +37,7 @@ type BrowsersService interface {
Update(ctx context.Context, id string, body kernel.BrowserUpdateParams, opts ...option.RequestOption) (res *kernel.BrowserUpdateResponse, err error)
Delete(ctx context.Context, body kernel.BrowserDeleteParams, opts ...option.RequestOption) (err error)
DeleteByID(ctx context.Context, id string, opts ...option.RequestOption) (err error)
HTTPClient(id string, opts ...option.RequestOption) (*http.Client, error)
LoadExtensions(ctx context.Context, id string, body kernel.BrowserLoadExtensionsParams, opts ...option.RequestOption) (err error)
}

Expand Down Expand Up @@ -2519,6 +2521,31 @@ func init() {
browsersCreateCmd.Flags().String("pool-id", "", "Browser pool ID to acquire from (mutually exclusive with --pool-name)")
browsersCreateCmd.Flags().String("pool-name", "", "Browser pool name to acquire from (mutually exclusive with --pool-id)")

// curl
curlCmd := &cobra.Command{
Use: "curl <session-id> <url>",
Short: "Make HTTP requests through a browser session",
Long: `Execute HTTP requests through Chrome's network stack, inheriting the
browser's TLS fingerprint, cookies, proxy configuration, and headers.
Works like curl but requests go through the browser session. Redirects are
followed automatically by Chromium.`,
Args: cobra.ExactArgs(2),
RunE: runBrowsersCurl,
}
curlCmd.Flags().StringP("request", "X", "", "HTTP method (default: GET)")
curlCmd.Flags().StringArrayP("header", "H", nil, "HTTP header (repeatable, \"Key: Value\" format)")
curlCmd.Flags().StringP("data", "d", "", "Request body")
curlCmd.Flags().String("data-file", "", "Read request body from file")
curlCmd.Flags().Float64("max-time", 30, "Maximum time allowed for the request in seconds")
curlCmd.Flags().StringP("output", "o", "", "Write response body to file")
curlCmd.Flags().BoolP("head", "I", false, "Fetch headers only")
curlCmd.Flags().BoolP("include", "i", false, "Include response headers in output")
curlCmd.Flags().StringP("dump-header", "D", "", "Write received headers to file (use - for stdout)")
curlCmd.Flags().StringP("write-out", "w", "", "Output text after completion; supports %{http_code}, %{response_code}, %{time_total}, %{size_download}")
curlCmd.Flags().BoolP("fail", "f", false, "Fail with no body output on HTTP errors")
curlCmd.Flags().BoolP("silent", "s", false, "Suppress progress output")
browsersCmd.AddCommand(curlCmd)

// no flags for view; it takes a single positional argument
}

Expand Down Expand Up @@ -3256,6 +3283,281 @@ func runBrowsersComputerWriteClipboard(cmd *cobra.Command, args []string) error
return b.ComputerWriteClipboard(cmd.Context(), BrowsersComputerWriteClipboardInput{Identifier: args[0], Text: text})
}

// Curl

// BrowsersCurlInput bundles every option accepted by `kernel browsers curl`.
type BrowsersCurlInput struct {
	Identifier string        // browser session ID
	URL        string        // request URL
	Method     string        // HTTP method; empty means infer (GET / POST / HEAD)
	Headers    []string      // raw "Key: Value" header strings (-H, repeatable)
	Data       string        // inline request body (-d)
	DataFile   string        // path to a file holding the request body (--data-file)
	MaxTime    time.Duration // request timeout; <= 0 keeps the client default
	OutputFile string        // destination for the response body (-o)
	Head       bool          // -I: fetch headers only
	Include    bool          // -i: prepend response headers to the output
	DumpHeader string        // -D: file for received headers ("-" = stdout)
	WriteOut   string        // -w: format string printed after completion
	Fail       bool          // -f: fail with no body output on HTTP errors
	Silent     bool          // -s: suppress error output
}

// silentCurlError wraps an error so the CLI's top-level handler skips
// printing it, mirroring curl's --silent behavior.
type silentCurlError struct {
	err error
}

// Error reports the wrapped error's message.
func (e silentCurlError) Error() string { return e.err.Error() }

// Unwrap exposes the wrapped error to errors.Is / errors.As.
func (e silentCurlError) Unwrap() error { return e.err }

// Silent marks this error as one that should not be echoed to the user.
func (e silentCurlError) Silent() bool { return true }

// curlError applies the --silent policy to err: nil stays nil, and when
// in.Silent is set the error is wrapped so it is not printed.
func curlError(in BrowsersCurlInput, err error) error {
	switch {
	case err == nil:
		return nil
	case in.Silent:
		return silentCurlError{err: err}
	default:
		return err
	}
}

// parseCurlHeaders converts raw "Key: Value" strings into an http.Header.
// Entries without a colon are silently ignored; nil is returned for empty
// input so callers can distinguish "no headers given".
func parseCurlHeaders(raw []string) http.Header {
	if len(raw) == 0 {
		return nil
	}
	parsed := make(http.Header, len(raw))
	for _, entry := range raw {
		name, value, found := strings.Cut(entry, ":")
		if !found {
			continue
		}
		parsed.Add(strings.TrimSpace(name), strings.TrimSpace(value))
	}
	return parsed
}
Comment thread
cursor[bot] marked this conversation as resolved.

// readCurlBody resolves the request body for a curl invocation:
// --data-file (when set) takes precedence over the inline --data value.
func readCurlBody(in BrowsersCurlInput) (string, error) {
	if in.DataFile != "" {
		contents, err := os.ReadFile(in.DataFile)
		if err != nil {
			return "", fmt.Errorf("reading data file: %w", err)
		}
		return string(contents), nil
	}
	return in.Data, nil
}

// hasCurlHeader reports whether headers contains name, comparing header
// names case-insensitively (they are case-insensitive per RFC 9110).
func hasCurlHeader(headers http.Header, name string) bool {
	for candidate := range headers {
		if strings.EqualFold(candidate, name) {
			return true
		}
	}
	return false
}

// curlWriteOutStats carries the metrics available to --write-out templates.
type curlWriteOutStats struct {
	statusCode   int           // HTTP status code of the response
	timeTotal    time.Duration // wall-clock time for the whole request
	sizeDownload int64         // bytes of response body written out
}

// expandCurlWriteOut renders a curl-style --write-out template.
// Supported escapes: \\ \n \r \t and %%; supported variables:
// %{http_code}, %{response_code}, %{time_total}, %{size_download}.
// Pattern order matters: the replacer uses the first pattern matching at
// each position, so escape sequences are listed before the %{…} variables.
func expandCurlWriteOut(format string, stats curlWriteOutStats) string {
	code := fmt.Sprintf("%03d", stats.statusCode)
	pairs := []string{
		`\\`, "\\",
		`\n`, "\n",
		`\r`, "\r",
		`\t`, "\t",
		`%%`, "%",
		"%{http_code}", code,
		"%{response_code}", code,
		"%{time_total}", strconv.FormatFloat(stats.timeTotal.Seconds(), 'f', 6, 64),
		"%{size_download}", strconv.FormatInt(stats.sizeDownload, 10),
	}
	return strings.NewReplacer(pairs...).Replace(format)
}

// openCurlOutputFile resolves a curl-style destination path. "" yields no
// writer (the caller keeps its default), "-" yields stdout with a no-op
// closer, and any other path creates/truncates the named file.
func openCurlOutputFile(path string) (io.Writer, func() error, error) {
	switch path {
	case "":
		return nil, nil, nil
	case "-":
		noop := func() error { return nil }
		return os.Stdout, noop, nil
	}
	file, err := os.Create(path)
	if err != nil {
		return nil, nil, err
	}
	return file, file.Close, nil
}

func (b BrowsersCmd) Curl(ctx context.Context, in BrowsersCurlInput) error {
body, err := readCurlBody(in)
if err != nil {
return curlError(in, err)
}

method := in.Method
if method == "" {
method = "GET"
if body != "" {
method = "POST"
}
if in.Head {
method = "HEAD"
}
}
Comment thread
cursor[bot] marked this conversation as resolved.
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Empty data file doesn't default method to POST

Low Severity

The method inference checks body != "" to decide whether to default to POST, but the PR documents "defaults to POST when --data is set." When --data-file points to an empty file, readCurlBody returns "", so the method stays GET despite the user explicitly requesting a data upload. Checking in.DataFile != "" alongside the body content would fix the --data-file case.

Additional Locations (1)
Fix in Cursor Fix in Web

Reviewed by Cursor Bugbot for commit db97766. Configure here.

include := in.Include || in.Head

var bodyReader io.Reader
if body != "" {
bodyReader = strings.NewReader(body)
}

// Seed the SDK's browser route cache before constructing the raw curl client.
if _, err := b.browsers.Get(ctx, in.Identifier, kernel.BrowserGetParams{}); err != nil {
return curlError(in, util.CleanedUpSdkError{Err: err})
}
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

we might want to consider a local disk cache of this since this is super inefficient


httpClient, err := b.browsers.HTTPClient(in.Identifier)
if err != nil {
return curlError(in, util.CleanedUpSdkError{Err: err})
}
if in.MaxTime > 0 {
httpClient.Timeout = in.MaxTime
}

req, err := http.NewRequestWithContext(ctx, method, in.URL, bodyReader)
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

can we verify that /curl/raw doesn't forward Go's default User-Agent / Accept-Encoding as browser request headers when the user didn't specify them?

The CLI builds a normal Go http.Request and sends it through the SDK HTTPClient; Go's transport can add defaults like User-Agent: Go-http-client/1.1, and the raw endpoint appears to forward non-hop-by-hop headers into the Chromium forward proxy. If the Chromium proxy honors those forwarded headers, this could override the browser's UA/default headers even though the command is documented as inheriting browser headers.

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

addressing upstream in chromium

if err != nil {
return curlError(in, fmt.Errorf("creating request: %w", err))
}
headers := parseCurlHeaders(in.Headers)
for key, values := range headers {
for _, value := range values {
req.Header.Add(key, value)
}
}
if body != "" && !hasCurlHeader(headers, "Content-Type") {
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
}
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Go default headers leak through browser proxy

High Severity

When no User-Agent header is explicitly provided by the user, Go's http.Transport automatically injects User-Agent: Go-http-client/1.1 into the outgoing request. Since /curl/raw appears to forward non-hop-by-hop headers into the Chromium forward proxy, this Go default will override the browser's natural User-Agent, defeating the core purpose of routing requests through Chrome's network stack for TLS fingerprint and header inheritance. The same concern applies to Accept-Encoding: gzip. The request construction here doesn't prevent these Go defaults from being added.

Fix in Cursor Fix in Web

Reviewed by Cursor Bugbot for commit db97766. Configure here.


start := time.Now()
resp, err := httpClient.Do(req)
if err != nil {
return curlError(in, fmt.Errorf("request failed: %w", err))
}
defer resp.Body.Close()

var writer io.Writer = os.Stdout
var outputFile *os.File
if in.OutputFile != "" {
f, err := os.Create(in.OutputFile)
if err != nil {
return curlError(in, fmt.Errorf("creating output file: %w", err))
}
outputFile = f
defer outputFile.Close()
writer = outputFile
}
Comment thread
cursor[bot] marked this conversation as resolved.

if in.DumpHeader != "" {
headerWriter, closeHeaderWriter, err := openCurlOutputFile(in.DumpHeader)
if err != nil {
return curlError(in, fmt.Errorf("creating dump header file: %w", err))
}
defer closeHeaderWriter()
writeCurlResponseHeaders(headerWriter, resp)
}

if in.Fail && resp.StatusCode >= 400 {
if in.WriteOut != "" {
fmt.Fprint(os.Stdout, expandCurlWriteOut(in.WriteOut, curlWriteOutStats{
statusCode: resp.StatusCode,
timeTotal: time.Since(start),
}))
}
return curlError(in, fmt.Errorf("HTTP error: %s", resp.Status))
}

if include {
writeCurlResponseHeaders(writer, resp)
}

var sizeDownload int64
if !in.Head {
sizeDownload, err = io.Copy(writer, resp.Body)
if err != nil {
if in.OutputFile != "" {
return curlError(in, fmt.Errorf("writing output file: %w", err))
}
return curlError(in, err)
}
}

if in.WriteOut != "" {
fmt.Fprint(os.Stdout, expandCurlWriteOut(in.WriteOut, curlWriteOutStats{
statusCode: resp.StatusCode,
timeTotal: time.Since(start),
sizeDownload: sizeDownload,
}))
}
return nil
}

// writeCurlResponseHeaders emits a curl-style status line followed by each
// response header, all CRLF-terminated, ending with a blank line. Header
// iteration follows Go's map order and is therefore unspecified.
func writeCurlResponseHeaders(w io.Writer, resp *http.Response) {
	fmt.Fprintf(w, "%s %s\r\n", resp.Proto, resp.Status)
	for name, values := range resp.Header {
		for _, v := range values {
			fmt.Fprintf(w, "%s: %s\r\n", name, v)
		}
	}
	fmt.Fprint(w, "\r\n")
}
Comment thread
cursor[bot] marked this conversation as resolved.

// runBrowsersCurl adapts cobra flags into a BrowsersCurlInput and dispatches
// to BrowsersCmd.Curl. args[0] is the session ID and args[1] the target URL
// (arity enforced by cobra.ExactArgs(2) on the command).
func runBrowsersCurl(cmd *cobra.Command, args []string) error {
	client := getKernelClient(cmd)
	svc := client.Browsers

	flags := cmd.Flags()
	stringFlag := func(name string) string {
		v, _ := flags.GetString(name)
		return v
	}
	boolFlag := func(name string) bool {
		v, _ := flags.GetBool(name)
		return v
	}
	headers, _ := flags.GetStringArray("header")
	maxTime, _ := flags.GetFloat64("max-time")

	b := BrowsersCmd{browsers: &svc}
	return b.Curl(cmd.Context(), BrowsersCurlInput{
		Identifier: args[0],
		URL:        args[1],
		Method:     stringFlag("request"),
		Headers:    headers,
		Data:       stringFlag("data"),
		DataFile:   stringFlag("data-file"),
		MaxTime:    time.Duration(maxTime * float64(time.Second)),
		OutputFile: stringFlag("output"),
		Head:       boolFlag("head"),
		Include:    boolFlag("include"),
		DumpHeader: stringFlag("dump-header"),
		WriteOut:   stringFlag("write-out"),
		Fail:       boolFlag("fail"),
		Silent:     boolFlag("silent"),
	})
}

func truncateURL(url string, maxLen int) string {
if !table.IsStdoutTTY() {
return url
Expand Down
Loading
Loading