Documentation Index
Fetch the complete documentation index at: https://firecrawl-mog-search-exclude-include-domains.mintlify.app/llms.txt
Use this file to discover all available pages before exploring further.
官方 Go SDK 维护在 Firecrawl 的 monorepo 仓库 apps/go-sdk 中。
要安装 Firecrawl Go SDK,请运行:
go get github.com/firecrawl/firecrawl/apps/go-sdk
- 前往 firecrawl.dev 获取 API 密钥
- 将 API 密钥设置为名为
FIRECRAWL_API_KEY 的环境变量,或通过 option.WithAPIKey(...) 传入
下面是一个基于当前 SDK API 的快速示例:
package main
import (
"context"
"fmt"
"log"
firecrawl "github.com/firecrawl/firecrawl/apps/go-sdk"
"github.com/firecrawl/firecrawl/apps/go-sdk/option"
)
func main() {
// Create a client (reads FIRECRAWL_API_KEY from the environment)
client, err := firecrawl.NewClient()
if err != nil {
log.Fatal(err)
}
// Or pass the API key directly
client, err = firecrawl.NewClient(
option.WithAPIKey("fc-your-api-key"),
)
if err != nil {
log.Fatal(err)
}
ctx := context.Background()
// Scrape a single page
doc, err := client.Scrape(ctx, "https://firecrawl.dev", &firecrawl.ScrapeOptions{
Formats: []string{"markdown"},
})
if err != nil {
log.Fatal(err)
}
fmt.Println(doc.Markdown)
// Crawl a website
job, err := client.Crawl(ctx, "https://firecrawl.dev", &firecrawl.CrawlOptions{
Limit: firecrawl.Int(5),
})
if err != nil {
log.Fatal(err)
}
fmt.Printf("Crawled pages: %d\n", len(job.Data))
}
如需抓取单个 URL,可使用 Scrape 方法。
// Scrape a single URL, requesting both markdown and HTML output.
doc, err := client.Scrape(ctx, "https://firecrawl.dev", &firecrawl.ScrapeOptions{
Formats: []string{"markdown", "html"},
OnlyMainContent: firecrawl.Bool(true),
WaitFor: firecrawl.Int(5000),
})
if err != nil {
log.Fatal(err)
}
fmt.Println(doc.Markdown)
// Page metadata is exposed as a map; "title" is read here.
fmt.Println(doc.Metadata["title"])
使用 JsonOptions 通过 Scrape 端点提取结构化 JSON:
// Extract structured JSON via the "json" format, guided by a prompt and a JSON schema.
doc, err := client.Scrape(ctx, "https://example.com/product", &firecrawl.ScrapeOptions{
Formats: []string{"json"},
JsonOptions: &firecrawl.JsonOptions{
Prompt: "Extract the product name and price",
Schema: map[string]interface{}{
"type": "object",
"properties": map[string]interface{}{
"name": map[string]interface{}{"type": "string"},
"price": map[string]interface{}{"type": "number"},
},
},
},
})
if err != nil {
log.Fatal(err)
}
// The extracted object is available on doc.JSON.
fmt.Println(doc.JSON)
要爬取网站并等待完成,请使用 Crawl。
// Crawl a site and wait for completion, then iterate the scraped pages.
job, err := client.Crawl(ctx, "https://firecrawl.dev", &firecrawl.CrawlOptions{
Limit: firecrawl.Int(50),
MaxDiscoveryDepth: firecrawl.Int(3),
ScrapeOptions: &firecrawl.ScrapeOptions{
Formats: []string{"markdown"},
},
})
if err != nil {
log.Fatal(err)
}
fmt.Printf("Status: %s\n", job.Status)
fmt.Printf("Progress: %d/%d\n", job.Completed, job.Total)
for _, page := range job.Data {
fmt.Println(page.Metadata["sourceURL"])
}
使用 StartCrawl 启动任务,无需等待其完成。
// Start a crawl without waiting for it to finish; the returned job ID is used for polling.
resp, err := client.StartCrawl(ctx, "https://firecrawl.dev", &firecrawl.CrawlOptions{
Limit: firecrawl.Int(100),
})
if err != nil {
log.Fatal(err)
}
fmt.Printf("Job ID: %s\n", resp.ID)
使用 GetCrawlStatus 查看爬取进度。
// Poll crawl progress by job ID.
status, err := client.GetCrawlStatus(ctx, resp.ID)
if err != nil {
log.Fatal(err)
}
fmt.Printf("Status: %s\n", status.Status)
fmt.Printf("Progress: %d/%d\n", status.Completed, status.Total)
使用 CancelCrawl 取消正在进行的爬取。
// Cancel an in-progress crawl by job ID.
result, err := client.CancelCrawl(ctx, resp.ID)
if err != nil {
log.Fatal(err)
}
fmt.Println(result)
使用 Map 发现网站中的链接。
// Discover links on a site; each link exposes at least "url" and "title" keys.
mapData, err := client.Map(ctx, "https://firecrawl.dev", &firecrawl.MapOptions{
Limit: firecrawl.Int(100),
Search: firecrawl.String("blog"),
IncludeSubdomains: firecrawl.Bool(true),
})
if err != nil {
log.Fatal(err)
}
for _, link := range mapData.Links {
fmt.Println(link["url"], "-", link["title"])
}
使用 Search 并按需配置搜索选项进行搜索。
// Search the web and optionally scrape each result as markdown.
results, err := client.Search(ctx, "firecrawl web scraping", &firecrawl.SearchOptions{
Limit: firecrawl.Int(10),
ScrapeOptions: &firecrawl.ScrapeOptions{
Formats: []string{"markdown"},
},
})
if err != nil {
log.Fatal(err)
}
// Web results carry "title" and "url" entries.
for _, result := range results.Web {
fmt.Println(result["title"], "-", result["url"])
}
使用 BatchScrape 可并行抓取多个 URL。
// Scrape multiple URLs in a single batch job.
urls := []string{
"https://firecrawl.dev",
"https://firecrawl.dev/blog",
}
job, err := client.BatchScrape(ctx, urls, &firecrawl.BatchScrapeOptions{
ScrapeOptions: &firecrawl.ScrapeOptions{
Formats: []string{"markdown"},
},
})
if err != nil {
log.Fatal(err)
}
for _, doc := range job.Data {
fmt.Println(doc.Markdown)
}
使用 Agent 运行一个 AI 代理。
// Run an AI agent with a natural-language prompt; results are on status.Data.
status, err := client.Agent(ctx, &firecrawl.AgentOptions{
Prompt: "Find the pricing plans for Firecrawl and compare them",
})
if err != nil {
log.Fatal(err)
}
fmt.Println(status.Data)
使用结构化输出的 JSON schema:
// Run the agent with a JSON schema so status.Data is structured output.
status, err := client.Agent(ctx, &firecrawl.AgentOptions{
Prompt: "Extract pricing plan details",
URLs: []string{"https://firecrawl.dev"},
Schema: map[string]interface{}{
"type": "object",
"properties": map[string]interface{}{
"plans": map[string]interface{}{
"type": "array",
"items": map[string]interface{}{
"type": "object",
"properties": map[string]interface{}{
"name": map[string]interface{}{"type": "string"},
"price": map[string]interface{}{"type": "string"},
},
},
},
},
},
})
if err != nil {
log.Fatal(err)
}
fmt.Println(status.Data)
查看并发数和剩余额度:
// Inspect current vs. maximum concurrency for the account.
concurrency, err := client.GetConcurrency(ctx)
if err != nil {
log.Fatal(err)
}
fmt.Printf("Concurrency: %d/%d\n", concurrency.Concurrency, concurrency.MaxConcurrency)
// Check the remaining credit balance.
credits, err := client.GetCreditUsage(ctx)
if err != nil {
log.Fatal(err)
}
fmt.Printf("Remaining credits: %d\n", credits.RemainingCredits)
Go SDK 提供了 Browser Sandbox 辅助函数。
// Start a browser sandbox session.
session, err := client.Browser(ctx, &firecrawl.BrowserOptions{
TTL: firecrawl.Int(300),
StreamWebView: firecrawl.Bool(true),
})
if err != nil {
log.Fatal(err)
}
// The session exposes an ID, a CDP endpoint URL, and a live-view URL.
fmt.Println(session.ID)
fmt.Println(session.CDPUrl)
fmt.Println(session.LiveViewURL)
// Execute code inside the session; stdout and the exit code are returned.
result, err := client.BrowserExecute(ctx, session.ID,
`await page.goto("https://example.com"); console.log(await page.title());`,
&firecrawl.BrowserExecuteParams{
Language: "node",
Timeout: firecrawl.Int(60),
},
)
if err != nil {
log.Fatal(err)
}
fmt.Println(result.Stdout)
fmt.Println(*result.ExitCode)
使用抓取任务 ID,在同一重放上下文中运行后续浏览器代码:
Interact(...) 会在与抓取任务绑定的浏览器会话中运行代码(首次使用时会初始化该会话)。
StopInteractiveBrowser(...) 会在使用完成后显式停止该交互会话。
// Run follow-up browser code in the session tied to an existing scrape job.
scrapeJobID := "550e8400-e29b-41d4-a716-446655440000"
execResp, err := client.Interact(ctx, scrapeJobID, "console.log(page.url())", &firecrawl.InteractParams{
Language: "node",
Timeout: firecrawl.Int(60),
})
if err != nil {
log.Fatal(err)
}
fmt.Println(execResp.Stdout)
// Explicitly stop the interactive session when done.
deleteResp, err := client.StopInteractiveBrowser(ctx, scrapeJobID)
if err != nil {
log.Fatal(err)
}
fmt.Printf("Deleted: %v\n", deleteResp.Success)
// List browser sessions filtered by status ("active" here).
active, err := client.ListBrowsers(ctx, "active")
if err != nil {
log.Fatal(err)
}
for _, s := range active.Sessions {
fmt.Printf("%s - %s\n", s.ID, s.Status)
}
// Close a session explicitly by ID.
closed, err := client.DeleteBrowser(ctx, session.ID)
if err != nil {
log.Fatal(err)
}
fmt.Printf("Closed: %v\n", closed.Success)
firecrawl.NewClient() 支持以下函数式选项:
| 选项 | 类型 | 默认值 | 描述 |
|---|---|---|---|
| option.WithAPIKey | string | FIRECRAWL_API_KEY 环境变量 | 你的 Firecrawl API 密钥 |
| option.WithAPIURL | string | https://api.firecrawl.dev (或 FIRECRAWL_API_URL) | API 基础 URL |
| option.WithTimeout | time.Duration | 5 * time.Minute | HTTP 客户端超时时间 |
| option.WithMaxRetries | int | 3 | 发生瞬时故障时自动重试 |
| option.WithBackoffFactor | float64 | 0.5 | 以秒为单位的指数退避系数 |
| option.WithHTTPClient | *http.Client | 基于超时配置构建 | 预先配置的 HTTP 客户端实例 |
| option.WithHeader | string, string | — | 为所有请求添加额外的请求头 |
import (
"net/http"
"time"
firecrawl "github.com/firecrawl/firecrawl/apps/go-sdk"
"github.com/firecrawl/firecrawl/apps/go-sdk/option"
)
// Configure the client with functional options.
client, err := firecrawl.NewClient(
option.WithAPIKey("fc-your-api-key"),
option.WithAPIURL("https://api.firecrawl.dev"),
option.WithTimeout(5 * time.Minute),
option.WithMaxRetries(3),
option.WithBackoffFactor(0.5),
)
你可以传入一个预先配置的 *http.Client,以控制传输设置、代理配置、TLS 设置等。提供该客户端后,WithTimeout 设置会被忽略,改为使用客户端自身的配置。
import (
"crypto/tls"
"net"
"net/http"
"time"
firecrawl "github.com/firecrawl/firecrawl/apps/go-sdk"
"github.com/firecrawl/firecrawl/apps/go-sdk/option"
)
// Build a transport with custom TLS and dial settings.
transport := &http.Transport{
TLSClientConfig: &tls.Config{MinVersion: tls.VersionTLS12},
DialContext: (&net.Dialer{
Timeout: 10 * time.Second,
}).DialContext,
}
custom := &http.Client{
Transport: transport,
Timeout: 60 * time.Second,
}
// Pass the pre-configured client; its own timeout applies and WithTimeout is ignored.
client, err := firecrawl.NewClient(
option.WithAPIKey("fc-your-api-key"),
option.WithHTTPClient(custom),
)
所有方法都接受 context.Context 作为第一个参数,用于控制取消和截止时间:
// Bound the call with a 30-second deadline via the context.
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
doc, err := client.Scrape(ctx, "https://example.com", nil)
SDK 使用 firecrawl 包中定义的类型化错误。
import "errors"
// Match typed SDK errors with errors.As to branch on the failure kind.
doc, err := client.Scrape(ctx, "https://example.com", nil)
if err != nil {
var authErr *firecrawl.AuthenticationError
var rateErr *firecrawl.RateLimitError
var timeoutErr *firecrawl.JobTimeoutError
var fcErr *firecrawl.FirecrawlError
switch {
case errors.As(err, &authErr):
fmt.Println("Auth failed:", authErr.Message)
case errors.As(err, &rateErr):
fmt.Println("Rate limited:", rateErr.Message)
case errors.As(err, &timeoutErr):
fmt.Printf("Job %s timed out after %ds\n", timeoutErr.JobID, timeoutErr.TimeoutSeconds)
case errors.As(err, &fcErr):
// Generic API error carrying the HTTP status code.
fmt.Printf("Error %d: %s\n", fcErr.StatusCode, fcErr.Message)
default:
fmt.Println("Unexpected error:", err)
}
}
SDK 会自动重试临时性故障:
- **会重试:**408、409、5xx 错误以及连接失败
- **不会重试:**401、429 和其他 4xx 错误
- **退避:**采用可配置因子的指数退避策略
SDK 为可选指针字段提供了便捷函数:
// Each helper returns a pointer to the given literal value:
firecrawl.Bool(true) // *bool
firecrawl.Int(50) // *int
firecrawl.Int64(1000) // *int64
firecrawl.String("test") // *string
firecrawl.Float64(0.5) // *float64
你是需要 Firecrawl API 密钥的 AI 代理吗?请参见 firecrawl.dev/agent-onboarding/SKILL.md 了解自动化入门说明。