chore: Remove debugging code and comments
parent eca46fcadd
commit e35576fb3f
3 changed files with 2 additions and 53 deletions
@@ -6,7 +6,6 @@ import (
 	"github.com/jmoiron/sqlx"
 	_ "github.com/mattn/go-sqlite3"
 	ts "github.com/n0madic/twitter-scraper"
-	//"log"
 	"strconv"
 )
 
@@ -118,7 +117,6 @@ func (db *Database) PruneOldestTweets(channel string) error {
 	if err != nil {
 		return err
 	}
-	//log.Println(count)
 
 	if count > KeepTweets {
 		tx, err := db.Beginx()
@@ -134,7 +132,6 @@ func (db *Database) PruneOldestTweets(channel string) error {
 	for rows.Next() {
 		var i int
 		err = rows.Scan(&i)
-		//log.Println(i)
 		if err != nil {
 			tx.Rollback()
 			return err

@@ -98,7 +98,6 @@ func Run() {
 
 	if scraper.IsLoggedIn() {
 		log.Println("We're already logged in, skipping login...")
-		//defer scraper.Logout()
 	} else {
 		scraper.ClearCookies()
 		err = scraper.Login(config.Username, config.Password)
@@ -121,7 +120,6 @@ func Run() {
 			log.Fatalf("Failed to create cookie file at %s with the following error: %s\n", cookiePath, fErr.Error())
 		}
 		f.Write(js)
-		//defer scraper.Logout()
 	}
 }
 
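Note: the cookie file written above (js into cookiePath) is what lets the IsLoggedIn branch skip credential login on later runs. A minimal sketch of that save/restore round-trip, assuming twitter-scraper's New/GetCookies/SetCookies API; saveCookies, loadCookies, and the x_cookies.json path are hypothetical names for illustration, not this codebase's:

package main

import (
	"encoding/json"
	"net/http"
	"os"

	ts "github.com/n0madic/twitter-scraper"
)

// saveCookies persists the scraper's session cookies as JSON so a later
// run can log in without username/password.
func saveCookies(scraper *ts.Scraper, path string) error {
	js, err := json.Marshal(scraper.GetCookies())
	if err != nil {
		return err
	}
	return os.WriteFile(path, js, 0600)
}

// loadCookies restores a previously saved session into the scraper.
func loadCookies(scraper *ts.Scraper, path string) error {
	data, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	var cookies []*http.Cookie
	if err := json.Unmarshal(data, &cookies); err != nil {
		return err
	}
	scraper.SetCookies(cookies)
	return nil
}

func main() {
	scraper := ts.New()
	// Try the saved session first; fall back to credential login.
	if err := loadCookies(scraper, "x_cookies.json"); err != nil || !scraper.IsLoggedIn() {
		if err := scraper.Login("username", "password"); err != nil {
			panic(err)
		}
		if err := saveCookies(scraper, "x_cookies.json"); err != nil {
			panic(err)
		}
	}
}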
@@ -153,25 +151,7 @@ func (app *App) queryX(id int, channel string) {
 	time.Sleep(time.Duration(id) * time.Minute)
 
 	db := app.db
-
-	/*
-		db, dberr := NewDatabase("sqlite3", dbPath)
-		if dberr != nil {
-			log.Fatalf("An error occurred while creating database connection: %s\n", dberr.Error())
-		}
-		defer db.Close()
-	*/
-
 	filter := app.config.Filter[id]
-
-	/*
-		filterQuoted := filter & 1
-		filterPin := filter & 2
-		filterReply := filter & 4
-		filterRetweet := filter & 8
-		filterSelfThread := filter & 16
-	*/
-
 	init := true
 
 ScrapeLoop:
@@ -182,13 +162,6 @@ ScrapeLoop:
 		}
 		init = false
 
-		/*
-			newestTweet, err := db.GetNewestTweet(channel)
-			if err != nil {
-				log.Printf("No tweets in database yet for channel %s", channel)
-			}
-		*/
-
 		step := ScrapeStep
 		tweets := []*ts.Tweet{}
 		tweetsToParse := []*ts.Tweet{}
@@ -240,8 +213,6 @@ ScrapeLoop:
 			time.Sleep(time.Duration(3) * time.Second) // Wait a few seconds for next api request
 		}
 
-		//slices.Reverse(tweetsToParse)
-
 	ParseTweets:
 		// We want to parse old to new
 		for i := len(tweetsToParse) - 1; i >= 0; i-- {
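Note: the removed //slices.Reverse call and the backwards loop that remains are two equivalent ways to process tweets oldest-to-newest, since the scraper returns them newest first. A minimal standalone sketch of the equivalence, illustrative only, using strings instead of *ts.Tweet:

package main

import (
	"fmt"
	"slices"
)

func main() {
	tweetsToParse := []string{"newest", "middle", "oldest"}

	// Variant kept in the code: index backwards, oldest first.
	for i := len(tweetsToParse) - 1; i >= 0; i-- {
		fmt.Println(tweetsToParse[i])
	}

	// Variant behind the removed commented-out call (Go 1.21+):
	// reverse in place, then range forwards.
	slices.Reverse(tweetsToParse)
	for _, t := range tweetsToParse {
		fmt.Println(t)
	}
}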
@@ -257,25 +228,6 @@ ScrapeLoop:
 				continue ParseTweets
 			}
 			tweetsToPost = append(tweetsToPost, tweet)
-
-			/*
-				contains, dberr := db.ContainsTweet(channel, tweet)
-				if dberr != nil {
-					log.Printf("Error while checking tweet for channel %s: %s", channel, dberr.Error())
-					continue ParseTweets
-				}
-				if contains {
-					// Since we posted this tweet already, let's break the loop and post the tweets to Discord (if there are any)
-					break ParseTweets
-				} else {
-					// Tweet not yet in database so we store this one
-					err := db.InsertTweet(channel, tweet)
-					if err != nil {
-						log.Printf("Error while inserting tweet for channel %s into the database: %s", channel, err.Error())
-					}
-					tweetsToPost = append(tweetsToPost, tweet)
-				}
-			*/
 		}
 
 		sendToWebhook(app.config.Webhook, tweetsToPost)

@@ -4,12 +4,12 @@ password = "asd123"
 channels = [
     "NinEverything",
     "NintendoEurope",
-    "NintendoAmerica"
+    "NintendoAmerica",
 ]
 # Binary representation for efficient filtering
 # Bit from right to left: IsQuoted, IsPin, IsReply, IsRetweet, IsSelfThread
 filter = [
     0b11111,
     0b11111,
-    0b11111
+    0b11111,
 ]
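Note: the filter bitmask documented in the config comments (and in the debug comment removed above, where bit 0 = IsQuoted, bit 1 = IsPin, bit 2 = IsReply, bit 3 = IsRetweet, bit 4 = IsSelfThread) can be decoded with plain bitwise AND checks. A minimal sketch, assuming those bool fields exist on ts.Tweet in n0madic/twitter-scraper; matchesFilter is a hypothetical helper, and whether a set bit means "drop" or "keep" is not shown in this diff:

package main

import (
	"fmt"

	ts "github.com/n0madic/twitter-scraper"
)

// matchesFilter reports whether tweet has any property whose bit is set
// in filter (1 = IsQuoted, 2 = IsPin, 4 = IsReply, 8 = IsRetweet,
// 16 = IsSelfThread). With the config value 0b11111 every property is
// checked.
func matchesFilter(filter int, tweet *ts.Tweet) bool {
	return (filter&1 != 0 && tweet.IsQuoted) ||
		(filter&2 != 0 && tweet.IsPin) ||
		(filter&4 != 0 && tweet.IsReply) ||
		(filter&8 != 0 && tweet.IsRetweet) ||
		(filter&16 != 0 && tweet.IsSelfThread)
}

func main() {
	t := &ts.Tweet{IsRetweet: true}
	fmt.Println(matchesFilter(0b11111, t)) // true: the IsRetweet bit (8) is set
	fmt.Println(matchesFilter(0b00111, t)) // false: only quote/pin/reply bits are set
}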