Category: module package
github.com/blacheinc/twitter-scraper
Version: 0.0.4
Repository: https://github.com/blacheinc/twitter-scraper.git
Documentation: pkg.go.dev

# README

Twitter Scraper

Go Reference

This is a fork of n0madic/twitter-scraper

Installation

go get -u github.com/blacheinc/twitter-scraper

Usage

Get user followers

package main

import (
    "log"
    twitterscraper "github.com/blacheinc/twitter-scraper"
)

func main() {
 scraper := twitterscraper.New()

	if err = scraper.Login("username", "password"); err != nil {
		return false, err
	}

    // get the logged in user cookie
	cookie := scraper.GetCookies()

    // set cookie for subsequent
	scraper.SetCookies(cookie)

    // Get the user profile to extract the followers count
    profile, err := scraper.GetProfile(twitterUsername)
	if err != nil {
		log.Fatal(err)
	}

	followers := scraper.GetFollowers(context.Background(), twitterUserID, profile.FollowersCount)

	for follower := range followers {
        // you will get the userIds in this format "user-947425510262562817"
        // when checking if a userid is among the return Ids use this
        // `formattedUserId := "user-" + user ` then compare.
	    fmt.Println(follower.UserID)
	}
}

Get favorite tweets

package main

import (
    "log"
    twitterscraper "github.com/blacheinc/twitter-scraper"
)

func main() {
    scraper := twitterscraper.New()

	if err = scraper.Login("username", "password"); err != nil {
		return false, err
	}

	// get the logged in user cookie
	cookie := scraper.GetCookies()

	// set cookie for subsequent
	scraper.SetCookies(cookie)

	tweets := scraper.FavoriteTweets(context.Background(), twitterUsername, 10)

    for tweet := range tweets {
        log.Println(tweet.Text)
    }
}

Get user tweets

package main

import (
    "context"
    "fmt"
    twitterscraper "github.com/Funmi4194/twitter-scraper"
)

func main() {
    scraper := twitterscraper.New()

    for tweet := range scraper.GetTweets(context.Background(), "Twitter", 50) {
        if tweet.Error != nil {
            panic(tweet.Error)
        }
        fmt.Println(tweet.Text)
    }
}

You can request up to 50 tweets per call; Twitter limits a user timeline to roughly the most recent 3200 tweets.

Get single tweet

package main

import (
    "fmt"

    twitterscraper "github.com/Funmi4194/twitter-scraper"
)

func main() {
    scraper := twitterscraper.New()
    tweet, err := scraper.GetTweet("1328684389388185600")
    if err != nil {
        panic(err)
    }
    fmt.Println(tweet.Text)
}

Search tweets by query standard operators

Now the search only works for authenticated users!

Tweets containing "twitter" and "scraper" and "data", filtering out retweets:

package main

import (
    "context"
    "fmt"
    twitterscraper "github.com/Funmi4194/twitter-scraper"
)

func main() {
    scraper := twitterscraper.New()
    err := scraper.LoginOpenAccount()
    if err != nil {
        panic(err)
    }
    for tweet := range scraper.SearchTweets(context.Background(),
        "twitter scraper data -filter:retweets", 50) {
        if tweet.Error != nil {
            panic(tweet.Error)
        }
        fmt.Println(tweet.Text)
    }
}

The search ends if we have 50 tweets.

See Rules and filtering for building standard queries.

Set search mode

scraper.SetSearchMode(twitterscraper.SearchLatest)

Options:

  • twitterscraper.SearchTop - default mode
  • twitterscraper.SearchLatest - live mode
  • twitterscraper.SearchPhotos - image mode
  • twitterscraper.SearchVideos - video mode
  • twitterscraper.SearchUsers - user mode

Get profile

package main

import (
    "fmt"
    twitterscraper "github.com/Funmi4194/twitter-scraper"
)

func main() {
    scraper := twitterscraper.New()
    profile, err := scraper.GetProfile("Twitter")
    if err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", profile)
}

Search profiles by query

package main

import (
    "context"
    "fmt"
    twitterscraper "github.com/Funmi4194/twitter-scraper"
)

func main() {
    scraper := twitterscraper.New().SetSearchMode(twitterscraper.SearchUsers)
    err := scraper.Login(username, password)
    if err !== nil {
        panic(err)
    }
    for profile := range scraper.SearchProfiles(context.Background(), "Twitter", 50) {
        if profile.Error != nil {
            panic(profile.Error)
        }
        fmt.Println(profile.Name)
    }
}

Get trends

package main

import (
    "fmt"
    twitterscraper "github.com/Funmi4194/twitter-scraper"
)

func main() {
    scraper := twitterscraper.New()
    trends, err := scraper.GetTrends()
    if err != nil {
        panic(err)
    }
    fmt.Println(trends)
}

Use authentication

Some users' tweets are protected; to read them you must log in and follow that user. Authentication is also required for search.

Login

err := scraper.Login("username", "password")

Use username to login, not email! But if you have email confirmation, use email address in addition:

err := scraper.Login("username", "password", "email")

If you have two-factor authentication, use code:

err := scraper.Login("username", "password", "code")

Status of login can be checked with:

scraper.IsLoggedIn()

Logout (clear session):

scraper.Logout()

If you want save session between restarts, you can save cookies with scraper.GetCookies() and restore with scraper.SetCookies().

For example, save cookies:

cookies := scraper.GetCookies()
// serialize to JSON
js, _ := json.Marshal(cookies)
// save to file (note := — f must be declared here)
f, _ := os.Create("cookies.json")
f.Write(js)
f.Close()

and load cookies:

f, _ := os.Open("cookies.json")
// deserialize from JSON
var cookies []*http.Cookie
json.NewDecoder(f).Decode(&cookies)
f.Close()
// load cookies
scraper.SetCookies(cookies)
// check login status
scraper.IsLoggedIn()

Open account

If you don't want to use your account, you can login as a Twitter app:

err := scraper.LoginOpenAccount()

Use Proxy

Support HTTP(s) and SOCKS5 proxy

with HTTP

err := scraper.SetProxy("http://localhost:3128")
if err != nil {
    panic(err)
}

with SOCKS5

err := scraper.SetProxy("socks5://localhost:1080")
if err != nil {
    panic(err)
}

Delay requests

Add delay between API requests (in seconds)

scraper.WithDelay(5)

Load timeline with tweet replies

scraper.WithReplies(true)

# Functions

New creates a Scraper object.

# Constants

default http client timeout.
No description provided by the author
SearchLatest - live mode.
SearchPhotos - image mode.
SearchTop - default mode.
SearchUsers - user mode.
SearchVideos - video mode.

# Structs

Use auth_token cookie as Token and ct0 cookie as CSRFToken.
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
Profile of twitter user.
No description provided by the author
Scraper object.
No description provided by the author
No description provided by the author
No description provided by the author

# Type aliases

SearchMode type.