diff --git a/README.md b/README.md index 398bb45..50d9080 100644 --- a/README.md +++ b/README.md @@ -217,6 +217,20 @@ Commands with JSON output support: - `--output json`, `-o json` - Output JSON with liveViewUrl - `kernel browsers get <id>` - Get detailed browser session info - `--output json`, `-o json` - Output raw JSON object +- `kernel browsers curl <id> <url>` - Make HTTP requests through a browser session's Chrome network stack + - `-X, --request <method>` - HTTP method (default: GET; defaults to POST when `--data` is set) + - `-H, --header <header>
` - HTTP header, repeatable (`"Key: Value"` format) + - `-d, --data <data>` - Request body + - `--data-file <file>` - Read request body from file + - `--max-time <seconds>` - Maximum time allowed for the request (default: 30) + - `-o, --output <file>` - Write response body to file + - `-I, --head` - Fetch headers only + - `-i, --include` - Include response headers in output + - `-D, --dump-header <file>` - Write received headers to file (use `-` for stdout) + - `-w, --write-out <format>` - Output text after completion; supports `%{http_code}`, `%{response_code}`, `%{time_total}`, and `%{size_download}` + - `-f, --fail` - Fail with no body output on HTTP errors + - `-s, --silent` - Suppress progress output + - _Note: redirects are followed automatically by Chromium._ ### Browser Pools @@ -593,6 +607,21 @@ kernel browsers delete browser123 # Get live view URL kernel browsers view browser123 +# Make an HTTP request through the browser session +kernel browsers curl browser123 https://example.com + +# Include response headers and save the response to a file +kernel browsers curl browser123 -i -o page.html https://example.com + +# Send JSON and print curl-style status metrics +kernel browsers curl browser123 https://api.example.com \ + -H "Content-Type: application/json" \ + -d '{"key":"value"}' \ + -w 'status=%{http_code} bytes=%{size_download}\n' + +# Fail on HTTP errors without printing the response body +kernel browsers curl browser123 -f https://example.com/missing + # Stream browser logs kernel browsers logs stream my-browser --source supervisor --follow --supervisor-process chromium diff --git a/cmd/browsers.go b/cmd/browsers.go index b53f043..6c0ac5d 100644 --- a/cmd/browsers.go +++ b/cmd/browsers.go @@ -14,6 +14,7 @@ import ( "regexp" "strconv" "strings" + "time" "github.com/kernel/cli/pkg/table" "github.com/kernel/cli/pkg/util" @@ -36,6 +37,7 @@ type BrowsersService interface { Update(ctx context.Context, id string, body kernel.BrowserUpdateParams, opts ...option.RequestOption) (res
*kernel.BrowserUpdateResponse, err error) Delete(ctx context.Context, body kernel.BrowserDeleteParams, opts ...option.RequestOption) (err error) DeleteByID(ctx context.Context, id string, opts ...option.RequestOption) (err error) + HTTPClient(id string, opts ...option.RequestOption) (*http.Client, error) LoadExtensions(ctx context.Context, id string, body kernel.BrowserLoadExtensionsParams, opts ...option.RequestOption) (err error) } @@ -2519,6 +2521,31 @@ func init() { browsersCreateCmd.Flags().String("pool-id", "", "Browser pool ID to acquire from (mutually exclusive with --pool-name)") browsersCreateCmd.Flags().String("pool-name", "", "Browser pool name to acquire from (mutually exclusive with --pool-id)") + // curl + curlCmd := &cobra.Command{ + Use: "curl <id> <url>", + Short: "Make HTTP requests through a browser session", + Long: `Execute HTTP requests through Chrome's network stack, inheriting the +browser's TLS fingerprint, cookies, proxy configuration, and headers. +Works like curl but requests go through the browser session.
Redirects are +followed automatically by Chromium.`, + Args: cobra.ExactArgs(2), + RunE: runBrowsersCurl, + } + curlCmd.Flags().StringP("request", "X", "", "HTTP method (default: GET)") + curlCmd.Flags().StringArrayP("header", "H", nil, "HTTP header (repeatable, \"Key: Value\" format)") + curlCmd.Flags().StringP("data", "d", "", "Request body") + curlCmd.Flags().String("data-file", "", "Read request body from file") + curlCmd.Flags().Float64("max-time", 30, "Maximum time allowed for the request in seconds") + curlCmd.Flags().StringP("output", "o", "", "Write response body to file") + curlCmd.Flags().BoolP("head", "I", false, "Fetch headers only") + curlCmd.Flags().BoolP("include", "i", false, "Include response headers in output") + curlCmd.Flags().StringP("dump-header", "D", "", "Write received headers to file (use - for stdout)") + curlCmd.Flags().StringP("write-out", "w", "", "Output text after completion; supports %{http_code}, %{response_code}, %{time_total}, %{size_download}") + curlCmd.Flags().BoolP("fail", "f", false, "Fail with no body output on HTTP errors") + curlCmd.Flags().BoolP("silent", "s", false, "Suppress progress output") + browsersCmd.AddCommand(curlCmd) + // no flags for view; it takes a single positional argument } @@ -3256,6 +3283,281 @@ func runBrowsersComputerWriteClipboard(cmd *cobra.Command, args []string) error return b.ComputerWriteClipboard(cmd.Context(), BrowsersComputerWriteClipboardInput{Identifier: args[0], Text: text}) } +// Curl + +type BrowsersCurlInput struct { + Identifier string + URL string + Method string + Headers []string + Data string + DataFile string + MaxTime time.Duration + OutputFile string + Head bool + Include bool + DumpHeader string + WriteOut string + Fail bool + Silent bool +} + +type silentCurlError struct { + err error +} + +func (e silentCurlError) Error() string { + return e.err.Error() +} + +func (e silentCurlError) Unwrap() error { + return e.err +} + +func (e silentCurlError) Silent() bool { + return 
true +} + +func curlError(in BrowsersCurlInput, err error) error { + if err == nil { + return nil + } + if in.Silent { + return silentCurlError{err: err} + } + return err +} + +func parseCurlHeaders(raw []string) http.Header { + if len(raw) == 0 { + return nil + } + headers := make(http.Header) + for _, h := range raw { + k, v, ok := strings.Cut(h, ":") + if !ok { + continue + } + headers.Add(strings.TrimSpace(k), strings.TrimSpace(v)) + } + return headers +} + +func readCurlBody(in BrowsersCurlInput) (string, error) { + if in.DataFile == "" { + return in.Data, nil + } + + data, err := os.ReadFile(in.DataFile) + if err != nil { + return "", fmt.Errorf("reading data file: %w", err) + } + return string(data), nil +} + +func hasCurlHeader(headers http.Header, name string) bool { + for key := range headers { + if strings.EqualFold(key, name) { + return true + } + } + return false +} + +type curlWriteOutStats struct { + statusCode int + timeTotal time.Duration + sizeDownload int64 +} + +func expandCurlWriteOut(format string, stats curlWriteOutStats) string { + replacer := strings.NewReplacer( + `\\`, "\\", + `\n`, "\n", + `\r`, "\r", + `\t`, "\t", + `%%`, "%", + "%{http_code}", fmt.Sprintf("%03d", stats.statusCode), + "%{response_code}", fmt.Sprintf("%03d", stats.statusCode), + "%{time_total}", fmt.Sprintf("%.6f", stats.timeTotal.Seconds()), + "%{size_download}", fmt.Sprintf("%d", stats.sizeDownload), + ) + return replacer.Replace(format) +} + +func openCurlOutputFile(path string) (io.Writer, func() error, error) { + if path == "" { + return nil, nil, nil + } + if path == "-" { + return os.Stdout, func() error { return nil }, nil + } + f, err := os.Create(path) + if err != nil { + return nil, nil, err + } + return f, f.Close, nil +} + +func (b BrowsersCmd) Curl(ctx context.Context, in BrowsersCurlInput) error { + body, err := readCurlBody(in) + if err != nil { + return curlError(in, err) + } + + method := in.Method + if method == "" { + method = "GET" + if body != "" { 
+ method = "POST" + } + if in.Head { + method = "HEAD" + } + } + include := in.Include || in.Head + + var bodyReader io.Reader + if body != "" { + bodyReader = strings.NewReader(body) + } + + // Seed the SDK's browser route cache before constructing the raw curl client. + if _, err := b.browsers.Get(ctx, in.Identifier, kernel.BrowserGetParams{}); err != nil { + return curlError(in, util.CleanedUpSdkError{Err: err}) + } + + httpClient, err := b.browsers.HTTPClient(in.Identifier) + if err != nil { + return curlError(in, util.CleanedUpSdkError{Err: err}) + } + if in.MaxTime > 0 { + httpClient.Timeout = in.MaxTime + } + + req, err := http.NewRequestWithContext(ctx, method, in.URL, bodyReader) + if err != nil { + return curlError(in, fmt.Errorf("creating request: %w", err)) + } + headers := parseCurlHeaders(in.Headers) + for key, values := range headers { + for _, value := range values { + req.Header.Add(key, value) + } + } + if body != "" && !hasCurlHeader(headers, "Content-Type") { + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + } + + start := time.Now() + resp, err := httpClient.Do(req) + if err != nil { + return curlError(in, fmt.Errorf("request failed: %w", err)) + } + defer resp.Body.Close() + + var writer io.Writer = os.Stdout + var outputFile *os.File + if in.OutputFile != "" { + f, err := os.Create(in.OutputFile) + if err != nil { + return curlError(in, fmt.Errorf("creating output file: %w", err)) + } + outputFile = f + defer outputFile.Close() + writer = outputFile + } + + if in.DumpHeader != "" { + headerWriter, closeHeaderWriter, err := openCurlOutputFile(in.DumpHeader) + if err != nil { + return curlError(in, fmt.Errorf("creating dump header file: %w", err)) + } + defer closeHeaderWriter() + writeCurlResponseHeaders(headerWriter, resp) + } + + if in.Fail && resp.StatusCode >= 400 { + if in.WriteOut != "" { + fmt.Fprint(os.Stdout, expandCurlWriteOut(in.WriteOut, curlWriteOutStats{ + statusCode: resp.StatusCode, + timeTotal: 
time.Since(start), + })) + } + return curlError(in, fmt.Errorf("HTTP error: %s", resp.Status)) + } + + if include { + writeCurlResponseHeaders(writer, resp) + } + + var sizeDownload int64 + if !in.Head { + sizeDownload, err = io.Copy(writer, resp.Body) + if err != nil { + if in.OutputFile != "" { + return curlError(in, fmt.Errorf("writing output file: %w", err)) + } + return curlError(in, err) + } + } + + if in.WriteOut != "" { + fmt.Fprint(os.Stdout, expandCurlWriteOut(in.WriteOut, curlWriteOutStats{ + statusCode: resp.StatusCode, + timeTotal: time.Since(start), + sizeDownload: sizeDownload, + })) + } + return nil +} + +func writeCurlResponseHeaders(w io.Writer, resp *http.Response) { + fmt.Fprintf(w, "%s %s\r\n", resp.Proto, resp.Status) + for key, vals := range resp.Header { + for _, value := range vals { + fmt.Fprintf(w, "%s: %s\r\n", key, value) + } + } + fmt.Fprint(w, "\r\n") +} + +func runBrowsersCurl(cmd *cobra.Command, args []string) error { + client := getKernelClient(cmd) + svc := client.Browsers + + method, _ := cmd.Flags().GetString("request") + headers, _ := cmd.Flags().GetStringArray("header") + data, _ := cmd.Flags().GetString("data") + dataFile, _ := cmd.Flags().GetString("data-file") + maxTime, _ := cmd.Flags().GetFloat64("max-time") + outputFile, _ := cmd.Flags().GetString("output") + head, _ := cmd.Flags().GetBool("head") + include, _ := cmd.Flags().GetBool("include") + dumpHeader, _ := cmd.Flags().GetString("dump-header") + writeOut, _ := cmd.Flags().GetString("write-out") + fail, _ := cmd.Flags().GetBool("fail") + silent, _ := cmd.Flags().GetBool("silent") + + b := BrowsersCmd{browsers: &svc} + return b.Curl(cmd.Context(), BrowsersCurlInput{ + Identifier: args[0], + URL: args[1], + Method: method, + Headers: headers, + Data: data, + DataFile: dataFile, + MaxTime: time.Duration(maxTime * float64(time.Second)), + OutputFile: outputFile, + Head: head, + Include: include, + DumpHeader: dumpHeader, + WriteOut: writeOut, + Fail: fail, + Silent: 
silent, + }) +} + func truncateURL(url string, maxLen int) string { if !table.IsStdoutTTY() { return url diff --git a/cmd/browsers_test.go b/cmd/browsers_test.go index 2bb2c71..816c2ea 100644 --- a/cmd/browsers_test.go +++ b/cmd/browsers_test.go @@ -7,6 +7,7 @@ import ( "errors" "io" "net/http" + "net/http/httptest" "os" "path/filepath" "strings" @@ -20,6 +21,7 @@ import ( "github.com/kernel/kernel-go-sdk/shared" "github.com/pterm/pterm" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) // outBuf captures pterm output during tests. @@ -60,6 +62,7 @@ type FakeBrowsersService struct { UpdateFunc func(ctx context.Context, id string, body kernel.BrowserUpdateParams, opts ...option.RequestOption) (*kernel.BrowserUpdateResponse, error) DeleteFunc func(ctx context.Context, body kernel.BrowserDeleteParams, opts ...option.RequestOption) error DeleteByIDFunc func(ctx context.Context, id string, opts ...option.RequestOption) error + HTTPClientFunc func(id string, opts ...option.RequestOption) (*http.Client, error) LoadExtensionsFunc func(ctx context.Context, id string, body kernel.BrowserLoadExtensionsParams, opts ...option.RequestOption) error } @@ -105,6 +108,13 @@ func (f *FakeBrowsersService) DeleteByID(ctx context.Context, id string, opts .. return nil } +func (f *FakeBrowsersService) HTTPClient(id string, opts ...option.RequestOption) (*http.Client, error) { + if f.HTTPClientFunc != nil { + return f.HTTPClientFunc(id, opts...) + } + return http.DefaultClient, nil +} + func (f *FakeBrowsersService) LoadExtensions(ctx context.Context, id string, body kernel.BrowserLoadExtensionsParams, opts ...option.RequestOption) error { if f.LoadExtensionsFunc != nil { return f.LoadExtensionsFunc(ctx, id, body, opts...) 
@@ -112,6 +122,243 @@ func (f *FakeBrowsersService) LoadExtensions(ctx context.Context, id string, bod return nil } +func TestBrowsersCurlRawUsesBrowserHTTPClient(t *testing.T) { + var ( + gotMethod string + gotHeaders []string + gotContentType string + gotBody string + ) + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + var err error + body, err := io.ReadAll(r.Body) + require.NoError(t, err) + + gotMethod = r.Method + gotHeaders = r.Header.Values("X-Test") + gotContentType = r.Header.Get("Content-Type") + gotBody = string(body) + + w.WriteHeader(http.StatusAccepted) + _, err = w.Write([]byte("proxied")) + require.NoError(t, err) + })) + defer srv.Close() + + getCalled := false + fake := &FakeBrowsersService{ + GetFunc: func(ctx context.Context, id string, query kernel.BrowserGetParams, opts ...option.RequestOption) (*kernel.BrowserGetResponse, error) { + getCalled = true + assert.Equal(t, "brw_123", id) + return &kernel.BrowserGetResponse{}, nil + }, + HTTPClientFunc: func(id string, opts ...option.RequestOption) (*http.Client, error) { + assert.Equal(t, "brw_123", id) + return srv.Client(), nil + }, + } + + outputFile := filepath.Join(t.TempDir(), "response.txt") + b := BrowsersCmd{browsers: fake} + err := b.Curl(context.Background(), BrowsersCurlInput{ + Identifier: "brw_123", + URL: srv.URL + "/target", + Headers: []string{"X-Test: yes", "X-Test: also-yes"}, + Data: "hello", + OutputFile: outputFile, + Silent: true, + }) + require.NoError(t, err) + + data, err := os.ReadFile(outputFile) + require.NoError(t, err) + assert.True(t, getCalled) + assert.Equal(t, http.MethodPost, gotMethod) + assert.Equal(t, []string{"yes", "also-yes"}, gotHeaders) + assert.Equal(t, "application/x-www-form-urlencoded", gotContentType) + assert.Equal(t, "hello", gotBody) + assert.Equal(t, "proxied", string(data)) +} + +func TestBrowsersCurlIncludeWritesHeadersToOutputFile(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w 
http.ResponseWriter, r *http.Request) { + w.Header().Set("X-Test", "yes") + w.WriteHeader(http.StatusCreated) + _, err := w.Write([]byte("proxied")) + require.NoError(t, err) + })) + defer srv.Close() + + fake := &FakeBrowsersService{ + GetFunc: func(ctx context.Context, id string, query kernel.BrowserGetParams, opts ...option.RequestOption) (*kernel.BrowserGetResponse, error) { + return &kernel.BrowserGetResponse{}, nil + }, + HTTPClientFunc: func(id string, opts ...option.RequestOption) (*http.Client, error) { + return srv.Client(), nil + }, + } + + outputFile := filepath.Join(t.TempDir(), "response.txt") + b := BrowsersCmd{browsers: fake} + err := b.Curl(context.Background(), BrowsersCurlInput{ + Identifier: "brw_123", + URL: srv.URL + "/target", + OutputFile: outputFile, + Include: true, + }) + require.NoError(t, err) + + data, err := os.ReadFile(outputFile) + require.NoError(t, err) + assert.Contains(t, string(data), "HTTP/1.1 201 Created\r\n") + assert.Contains(t, string(data), "X-Test: yes\r\n") + assert.Contains(t, string(data), "\r\nproxied") +} + +func TestBrowsersCurlHeadWritesHeadersOnly(t *testing.T) { + gotMethod := "" + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + gotMethod = r.Method + w.Header().Set("X-Test", "yes") + _, err := w.Write([]byte("proxied")) + require.NoError(t, err) + })) + defer srv.Close() + + fake := &FakeBrowsersService{ + GetFunc: func(ctx context.Context, id string, query kernel.BrowserGetParams, opts ...option.RequestOption) (*kernel.BrowserGetResponse, error) { + return &kernel.BrowserGetResponse{}, nil + }, + HTTPClientFunc: func(id string, opts ...option.RequestOption) (*http.Client, error) { + return srv.Client(), nil + }, + } + + outputFile := filepath.Join(t.TempDir(), "response.txt") + b := BrowsersCmd{browsers: fake} + err := b.Curl(context.Background(), BrowsersCurlInput{ + Identifier: "brw_123", + URL: srv.URL + "/target", + Data: "hello", + OutputFile: outputFile, + 
Head: true, + }) + require.NoError(t, err) + + data, err := os.ReadFile(outputFile) + require.NoError(t, err) + assert.Equal(t, http.MethodHead, gotMethod) + assert.Contains(t, string(data), "HTTP/1.1 200 OK\r\n") + assert.Contains(t, string(data), "X-Test: yes\r\n") + assert.NotContains(t, string(data), "proxied") +} + +func TestBrowsersCurlSilentWrapsErrors(t *testing.T) { + fake := &FakeBrowsersService{ + GetFunc: func(ctx context.Context, id string, query kernel.BrowserGetParams, opts ...option.RequestOption) (*kernel.BrowserGetResponse, error) { + return &kernel.BrowserGetResponse{}, nil + }, + HTTPClientFunc: func(id string, opts ...option.RequestOption) (*http.Client, error) { + return http.DefaultClient, nil + }, + } + + b := BrowsersCmd{browsers: fake} + err := b.Curl(context.Background(), BrowsersCurlInput{ + Identifier: "brw_123", + URL: "://not-a-url", + Silent: true, + }) + require.Error(t, err) + + var silent interface{ Silent() bool } + require.ErrorAs(t, err, &silent) + assert.True(t, silent.Silent()) +} + +func TestBrowsersCurlDumpHeaderAndWriteOut(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("X-Test", "yes") + _, err := w.Write([]byte("proxied")) + require.NoError(t, err) + })) + defer srv.Close() + + fake := &FakeBrowsersService{ + GetFunc: func(ctx context.Context, id string, query kernel.BrowserGetParams, opts ...option.RequestOption) (*kernel.BrowserGetResponse, error) { + return &kernel.BrowserGetResponse{}, nil + }, + HTTPClientFunc: func(id string, opts ...option.RequestOption) (*http.Client, error) { + return srv.Client(), nil + }, + } + + tmp := t.TempDir() + outputFile := filepath.Join(tmp, "body.txt") + headerFile := filepath.Join(tmp, "headers.txt") + + oldStdout := os.Stdout + r, w, err := os.Pipe() + require.NoError(t, err) + os.Stdout = w + defer func() { + os.Stdout = oldStdout + }() + + b := BrowsersCmd{browsers: fake} + err = 
b.Curl(context.Background(), BrowsersCurlInput{ + Identifier: "brw_123", + URL: srv.URL + "/target", + OutputFile: outputFile, + DumpHeader: headerFile, + WriteOut: " code=%{http_code} bytes=%{size_download}\\n", + }) + require.NoError(t, err) + require.NoError(t, w.Close()) + out, err := io.ReadAll(r) + require.NoError(t, err) + + body, err := os.ReadFile(outputFile) + require.NoError(t, err) + headers, err := os.ReadFile(headerFile) + require.NoError(t, err) + assert.Equal(t, "proxied", string(body)) + assert.Contains(t, string(headers), "HTTP/1.1 200 OK\r\n") + assert.Contains(t, string(headers), "X-Test: yes\r\n") + assert.Equal(t, " code=200 bytes=7\n", string(out)) +} + +func TestBrowsersCurlFailSuppressesHTTPErrorBody(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.Error(w, "not found", http.StatusNotFound) + })) + defer srv.Close() + + fake := &FakeBrowsersService{ + GetFunc: func(ctx context.Context, id string, query kernel.BrowserGetParams, opts ...option.RequestOption) (*kernel.BrowserGetResponse, error) { + return &kernel.BrowserGetResponse{}, nil + }, + HTTPClientFunc: func(id string, opts ...option.RequestOption) (*http.Client, error) { + return srv.Client(), nil + }, + } + + outputFile := filepath.Join(t.TempDir(), "body.txt") + b := BrowsersCmd{browsers: fake} + err := b.Curl(context.Background(), BrowsersCurlInput{ + Identifier: "brw_123", + URL: srv.URL + "/target", + OutputFile: outputFile, + Fail: true, + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "HTTP error: 404 Not Found") + + data, err := os.ReadFile(outputFile) + require.NoError(t, err) + assert.Empty(t, data) +} + func TestBrowsersList_PrintsEmptyMessage(t *testing.T) { setupStdoutCapture(t) diff --git a/cmd/root.go b/cmd/root.go index 375e9bb..88d6d87 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -2,6 +2,7 @@ package cmd import ( "context" + "errors" "fmt" "io" "os" @@ -204,13 +205,30 @@ func Execute(m 
Metadata) { fang.WithCommit(metadata.Commit), fang.WithErrorHandler(func(w io.Writer, styles fang.Styles, err error) { err = util.CleanedUpSdkError{Err: err} + + // Some subcommands intentionally suppress diagnostics for curl-like + // quiet modes while still returning a non-zero exit status. + var silent interface{ Silent() bool } + if errors.As(err, &silent) && silent.Silent() { + return + } + // remove margins so that it matches other pterm.error "style" // we should add them back later as it looks cleaner errorTextStyle := styles.ErrorText.UnsetMargins() + + // Keep command errors on fang's error stream, normally stderr. This + // gives curl-like commands a quiet stdout for response bodies and + // scripts while preserving the existing pterm error styling. + oldErrorWriter := pterm.Error.Writer + pterm.Error.Writer = w + defer func() { + pterm.Error.Writer = oldErrorWriter + }() pterm.Error.Println(errorTextStyle.Render(strings.TrimSpace(err.Error()))) if isUsageError(err) { - pterm.Println() - pterm.Println(lipgloss.JoinHorizontal( + fmt.Fprintln(w) + fmt.Fprintln(w, lipgloss.JoinHorizontal( lipgloss.Left, errorTextStyle.UnsetWidth().Render("Try"), styles.Program.Flag.Render("--help"),