Go SDK for the Scrapfly.io web scraping API.
This SDK allows you to easily:
- Scrape the web without being blocked.
- Use headless browsers to access Javascript-powered page data.
- Take screenshots of websites.
- Extract structured data using AI.
For web scraping guides on how to scrape specific targets, see our blog and the #scrapeguide tag.
Install the SDK:

```
go get github.com/scrapfly/go-scrapfly
```

- Register a Scrapfly account for free
- Get your API Key on scrapfly.io/dashboard
- Start scraping: 🚀

```go
package main

import (
	"fmt"
	"log"

	"github.com/scrapfly/go-scrapfly"
)

func main() {
	key := "YOUR_SCRAPFLY_KEY"
	client, err := scrapfly.New(key)
	if err != nil {
		log.Fatalf("failed to create client: %v", err)
	}

	// Create a scrape configuration
	scrapeConfig := &scrapfly.ScrapeConfig{
		URL:       "https://web-scraping.dev/product/1",
		RenderJS:  true,                           // render the page with a headless browser
		Country:   "us",                           // route the request through a US proxy
		ASP:       true,                           // enable Anti Scraping Protection bypass
		ProxyPool: scrapfly.PublicResidentialPool, // use the public residential proxy pool
	}

	// Perform the scrape
	apiResponse, err := client.Scrape(scrapeConfig)
	if err != nil {
		log.Fatalf("scrape failed: %v", err)
	}

	// HTML content is in apiResponse.Result.Content
	// fmt.Println(apiResponse.Result.Content)

	// Use the built-in HTML parser (goquery)
	selector, err := apiResponse.Selector()
	if err != nil {
		log.Fatalf("failed to get selector: %v", err)
	}
	fmt.Println("Product Title:", selector.Find("h3").First().Text())
}
```

For full documentation and more examples, please refer to the Scrapfly API documentation.
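As an extra illustration of the built-in goquery selector, here is a minimal sketch that iterates over several matched elements instead of reading a single one. It assumes `Selector()` exposes a regular goquery document, as the quickstart's `Find`/`First`/`Text` chain suggests; the listing URL (`https://web-scraping.dev/products`) and the `h3 a` CSS selector are illustrative assumptions, so adjust them to the actual markup of the page you scrape.

```go
package main

import (
	"fmt"
	"log"

	"github.com/PuerkitoBio/goquery"
	"github.com/scrapfly/go-scrapfly"
)

func main() {
	client, err := scrapfly.New("YOUR_SCRAPFLY_KEY")
	if err != nil {
		log.Fatalf("failed to create client: %v", err)
	}

	// Assumed listing page; the URL and selectors here are illustrative, not from the docs.
	apiResponse, err := client.Scrape(&scrapfly.ScrapeConfig{
		URL: "https://web-scraping.dev/products",
		ASP: true,
	})
	if err != nil {
		log.Fatalf("scrape failed: %v", err)
	}

	// Assumes Selector() returns a goquery document, as in the quickstart above.
	selector, err := apiResponse.Selector()
	if err != nil {
		log.Fatalf("failed to get selector: %v", err)
	}

	// Iterate over every matched element rather than just the first one.
	selector.Find("h3 a").Each(func(i int, s *goquery.Selection) {
		href, _ := s.Attr("href")
		fmt.Printf("%d. %s -> %s\n", i+1, s.Text(), href)
	})
}
```

The usual goquery combinators (`Find`, `Each`, `Attr`, `Text`) compose on the already-fetched document, so walking the result makes no additional API calls.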