chore: Remove debugging code and comments

Author: Manuel, 2023-08-29 18:30:00 +02:00
parent eca46fcadd
commit e35576fb3f
Signed by: Manuel (GPG key ID: 4085037435E1F07A)
3 changed files with 2 additions and 53 deletions


@@ -6,7 +6,6 @@ import (
 	"github.com/jmoiron/sqlx"
 	_ "github.com/mattn/go-sqlite3"
 	ts "github.com/n0madic/twitter-scraper"
-	//"log"
 	"strconv"
 )
@@ -118,7 +117,6 @@ func (db *Database) PruneOldestTweets(channel string) error {
 	if err != nil {
 		return err
 	}
-	//log.Println(count)
 	if count > KeepTweets {
 		tx, err := db.Beginx()
@@ -134,7 +132,6 @@ func (db *Database) PruneOldestTweets(channel string) error {
 	for rows.Next() {
 		var i int
 		err = rows.Scan(&i)
-		//log.Println(i)
 		if err != nil {
 			tx.Rollback()
 			return err
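For context, the pruning these hunks belong to counts a channel's stored tweets and, inside a transaction, deletes the oldest rows beyond a retention limit. Below is a minimal sketch of that pattern with sqlx, assuming a `tweets(id, channel, ...)` table and a `KeepTweets` value; only `PruneOldestTweets`, the `count > KeepTweets` check, and `db.Beginx()` appear in the diff itself.

```go
package storage

import (
	"github.com/jmoiron/sqlx"
	_ "github.com/mattn/go-sqlite3"
)

// KeepTweets is the per-channel retention limit (assumed value).
const KeepTweets = 50

// pruneOldest deletes all but the newest KeepTweets rows for a channel.
// The tweets(id, channel) layout is an assumption for this sketch.
func pruneOldest(db *sqlx.DB, channel string) error {
	var count int
	if err := db.Get(&count, `SELECT COUNT(*) FROM tweets WHERE channel = ?`, channel); err != nil {
		return err
	}
	if count <= KeepTweets {
		return nil // nothing to prune
	}
	tx, err := db.Beginx()
	if err != nil {
		return err
	}
	// Delete the oldest surplus rows; the newest KeepTweets rows survive.
	_, err = tx.Exec(
		`DELETE FROM tweets WHERE channel = ? AND id IN (
		   SELECT id FROM tweets WHERE channel = ? ORDER BY id ASC LIMIT ?)`,
		channel, channel, count-KeepTweets,
	)
	if err != nil {
		tx.Rollback()
		return err
	}
	return tx.Commit()
}
```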


@@ -98,7 +98,6 @@ func Run() {
 	if scraper.IsLoggedIn() {
 		log.Println("We're already logged in, skipping login...")
-		//defer scraper.Logout()
 	} else {
 		scraper.ClearCookies()
 		err = scraper.Login(config.Username, config.Password)
@@ -121,7 +120,6 @@ func Run() {
 			log.Fatalf("Failed to create cookie file at %s with the following error: %s\n", cookiePath, fErr.Error())
 		}
 		f.Write(js)
-		//defer scraper.Logout()
 	}
 }
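These two hunks sit in the login flow: a still-valid session skips the login, otherwise the scraper logs in and its cookies are serialized (`js`) and written to `cookiePath`. A sketch of the matching restore step, assuming the library's `SetCookies` helper and a JSON array of `http.Cookie` on disk; the exact file format is not shown in this diff.

```go
package session

import (
	"encoding/json"
	"net/http"
	"os"

	ts "github.com/n0madic/twitter-scraper"
)

// restoreSession loads cookies saved by a previous run so the scraper
// can skip the username/password login. It assumes the file holds the
// JSON array that the Run() hunk above writes with f.Write(js).
func restoreSession(scraper *ts.Scraper, cookiePath string) error {
	data, err := os.ReadFile(cookiePath)
	if err != nil {
		return err // no saved session yet
	}
	var cookies []*http.Cookie
	if err := json.Unmarshal(data, &cookies); err != nil {
		return err
	}
	scraper.SetCookies(cookies)
	return nil
}
```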
@@ -153,25 +151,7 @@ func (app *App) queryX(id int, channel string) {
 	time.Sleep(time.Duration(id) * time.Minute)
 	db := app.db
-	/*
-		db, dberr := NewDatabase("sqlite3", dbPath)
-		if dberr != nil {
-			log.Fatalf("An error occurred while creating database connection: %s\n", dberr.Error())
-		}
-		defer db.Close()
-	*/
 	filter := app.config.Filter[id]
-	/*
-		filterQuoted := filter & 1
-		filterPin := filter & 2
-		filterReply := filter & 4
-		filterRetweet := filter & 8
-		filterSelfThread := filter & 16
-	*/
 	init := true
 ScrapeLoop:
@@ -182,13 +162,6 @@ ScrapeLoop:
 		}
 		init = false
-		/*
-			newestTweet, err := db.GetNewestTweet(channel)
-			if err != nil {
-				log.Printf("No tweets in database yet for channel %s", channel)
-			}
-		*/
 		step := ScrapeStep
 		tweets := []*ts.Tweet{}
 		tweetsToParse := []*ts.Tweet{}
@@ -240,8 +213,6 @@ ScrapeLoop:
 			time.Sleep(time.Duration(3) * time.Second) // Wait a few seconds for next api request
 		}
-		//slices.Reverse(tweetsToParse)
 	ParseTweets:
 		// We want to parse old to new
 		for i := len(tweetsToParse) - 1; i >= 0; i-- {
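The kept backwards index loop and the deleted `//slices.Reverse(tweetsToParse)` comment are two ways to get the same oldest-first order, since the scraper returns tweets newest-first. A self-contained comparison with stand-in data (the stdlib `slices` package needs Go 1.21+):

```go
package main

import (
	"fmt"
	"slices"
)

func main() {
	tweetsToParse := []string{"newest", "middle", "oldest"} // stand-in data

	// The commit keeps this form: walk the slice backwards, oldest first.
	for i := len(tweetsToParse) - 1; i >= 0; i-- {
		fmt.Println(tweetsToParse[i])
	}

	// The dropped slices.Reverse variant reaches the same order by
	// mutating the slice up front, then ranging normally.
	slices.Reverse(tweetsToParse)
	for _, tweet := range tweetsToParse {
		fmt.Println(tweet)
	}
}
```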
@@ -257,25 +228,6 @@ ScrapeLoop:
 				continue ParseTweets
 			}
 			tweetsToPost = append(tweetsToPost, tweet)
-			/*
-				contains, dberr := db.ContainsTweet(channel, tweet)
-				if dberr != nil {
-					log.Printf("Error while checking tweet for channel %s: %s", channel, dberr.Error())
-					continue ParseTweets
-				}
-				if contains {
-					// Since we posted this tweet already, let's break the loop and post the tweets to Discord (if there are any)
-					break ParseTweets
-				} else {
-					// Tweet not yet in database so we store this one
-					err := db.InsertTweet(channel, tweet)
-					if err != nil {
-						log.Printf("Error while inserting tweet for channel %s into the database: %s", channel, err.Error())
-					}
-					tweetsToPost = append(tweetsToPost, tweet)
-				}
-			*/
 		}
 		sendToWebhook(app.config.Webhook, tweetsToPost)
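`sendToWebhook` itself is untouched by this commit and its body is not shown. For context, a hedged sketch of the usual shape of such a helper, assuming the standard Discord webhook contract (POST a JSON object with a `content` field) and the library's `PermanentURL` tweet field:

```go
package webhook

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"

	ts "github.com/n0madic/twitter-scraper"
)

// sendToWebhook posts one message per tweet to a Discord webhook URL.
// Posting the tweet's PermanentURL lets Discord unfurl it; the real
// helper's payload shape is an assumption here.
func sendToWebhook(url string, tweets []*ts.Tweet) error {
	for _, tweet := range tweets {
		payload, err := json.Marshal(map[string]string{
			"content": tweet.PermanentURL,
		})
		if err != nil {
			return err
		}
		resp, err := http.Post(url, "application/json", bytes.NewReader(payload))
		if err != nil {
			return err
		}
		resp.Body.Close()
		if resp.StatusCode >= 300 {
			return fmt.Errorf("webhook returned %s", resp.Status)
		}
	}
	return nil
}
```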


@@ -4,12 +4,12 @@ password = "asd123"
 channels = [
 	"NinEverything",
 	"NintendoEurope",
-	"NintendoAmerica"
+	"NintendoAmerica",
 ]
 # Binary representation for efficient filtering
 # Bit from right to left: IsQuoted, IsPin, IsReply, IsRetweet, IsSelfThread
 filter = [
 	0b11111,
 	0b11111,
-	0b11111
+	0b11111,
 ]
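The config comment defines each filter entry as a bitmask, bits 0 through 4 from the right: IsQuoted, IsPin, IsReply, IsRetweet, IsSelfThread; the deleted `filterQuoted := filter & 1` block above decomposed it the same way. A sketch of how `0b11111` might be applied to a scraped tweet; the bit meanings come from the comment and the removed lines, while the combination logic is an assumption:

```go
package filter

import ts "github.com/n0madic/twitter-scraper"

// Bit positions, right to left, as documented in the config comment.
const (
	FilterQuoted     = 1 << iota // 0b00001 (bit 0)
	FilterPin                    // 0b00010 (bit 1)
	FilterReply                  // 0b00100 (bit 2)
	FilterRetweet                // 0b01000 (bit 3)
	FilterSelfThread             // 0b10000 (bit 4)
)

// shouldSkip reports whether the tweet has any property whose filter bit
// is set, so 0b11111 drops quotes, pins, replies, retweets and
// self-threads. How the program combines the bits is an assumption.
func shouldSkip(filter int, t *ts.Tweet) bool {
	return (filter&FilterQuoted != 0 && t.IsQuoted) ||
		(filter&FilterPin != 0 && t.IsPin) ||
		(filter&FilterReply != 0 && t.IsReply) ||
		(filter&FilterRetweet != 0 && t.IsRetweet) ||
		(filter&FilterSelfThread != 0 && t.IsSelfThread)
}
```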