From e35576fb3f6678ea1c853237a697db097e69b6d5 Mon Sep 17 00:00:00 2001
From: Manuel
Date: Tue, 29 Aug 2023 18:30:00 +0200
Subject: [PATCH] chore: Remove debugging code and comments

---
 cmd/database.go     |  3 ---
 cmd/tweeter.go      | 48 ---------------------------------------------
 config.example.toml |  4 ++--
 3 files changed, 2 insertions(+), 53 deletions(-)

diff --git a/cmd/database.go b/cmd/database.go
index f834d5e..d90188d 100644
--- a/cmd/database.go
+++ b/cmd/database.go
@@ -6,7 +6,6 @@ import (
     "github.com/jmoiron/sqlx"
     _ "github.com/mattn/go-sqlite3"
     ts "github.com/n0madic/twitter-scraper"
-    //"log"
     "strconv"
 )
 
@@ -118,7 +117,6 @@ func (db *Database) PruneOldestTweets(channel string) error {
     if err != nil {
         return err
     }
-    //log.Println(count)
 
     if count > KeepTweets {
         tx, err := db.Beginx()
@@ -134,7 +132,6 @@ func (db *Database) PruneOldestTweets(channel string) error {
         for rows.Next() {
             var i int
             err = rows.Scan(&i)
-            //log.Println(i)
             if err != nil {
                 tx.Rollback()
                 return err
diff --git a/cmd/tweeter.go b/cmd/tweeter.go
index e38b80a..1bbd295 100644
--- a/cmd/tweeter.go
+++ b/cmd/tweeter.go
@@ -98,7 +98,6 @@ func Run() {
 
     if scraper.IsLoggedIn() {
         log.Println("We're already logged in, skipping login...")
-        //defer scraper.Logout()
     } else {
         scraper.ClearCookies()
         err = scraper.Login(config.Username, config.Password)
@@ -121,7 +120,6 @@ func Run() {
             log.Fatalf("Failed to create cookie file at %s with the following error: %s\n", cookiePath, fErr.Error())
         }
         f.Write(js)
-        //defer scraper.Logout()
     }
 }
 
@@ -153,25 +151,7 @@ func (app *App) queryX(id int, channel string) {
     time.Sleep(time.Duration(id) * time.Minute)
 
     db := app.db
-
-    /*
-        db, dberr := NewDatabase("sqlite3", dbPath)
-        if dberr != nil {
-            log.Fatalf("An error occurred while creating database connection: %s\n", dberr.Error())
-        }
-        defer db.Close()
-    */
-
     filter := app.config.Filter[id]
-
-    /*
-        filterQuoted := filter & 1
-        filterPin := filter & 2
-        filterReply := filter & 4
-        filterRetweet := filter & 8
-        filterSelfThread := filter & 16
-    */
-
     init := true
 
 ScrapeLoop:
@@ -182,13 +162,6 @@ ScrapeLoop:
         }
         init = false
 
-        /*
-            newestTweet, err := db.GetNewestTweet(channel)
-            if err != nil {
-                log.Printf("No tweets in database yet for channel %s", channel)
-            }
-        */
-
         step := ScrapeStep
         tweets := []*ts.Tweet{}
         tweetsToParse := []*ts.Tweet{}
@@ -240,8 +213,6 @@ ScrapeLoop:
             time.Sleep(time.Duration(3) * time.Second) // Wait a few seconds for next api request
         }
 
-        //slices.Reverse(tweetsToParse)
-
     ParseTweets:
         // We want to parse old to new
         for i := len(tweetsToParse) - 1; i >= 0; i-- {
@@ -257,25 +228,6 @@ ScrapeLoop:
                 continue ParseTweets
             }
             tweetsToPost = append(tweetsToPost, tweet)
-
-            /*
-                contains, dberr := db.ContainsTweet(channel, tweet)
-                if dberr != nil {
-                    log.Printf("Error while checking tweet for channel %s: %s", channel, dberr.Error())
-                    continue ParseTweets
-                }
-                if contains {
-                    // Since we posted this tweet already, let's break the loop and post the tweets to Discord (if there are any)
-                    break ParseTweets
-                } else {
-                    // Tweet not yet in database so we store this one
-                    err := db.InsertTweet(channel, tweet)
-                    if err != nil {
-                        log.Printf("Error while inserting tweet for channel %s into the database: %s", channel, err.Error())
-                    }
-                    tweetsToPost = append(tweetsToPost, tweet)
-                }
-            */
         }
 
         sendToWebhook(app.config.Webhook, tweetsToPost)
diff --git a/config.example.toml b/config.example.toml
index e9c83b5..bf3549b 100644
--- a/config.example.toml
+++ b/config.example.toml
@@ -4,12 +4,12 @@ password = "asd123"
 channels = [
     "NinEverything",
     "NintendoEurope",
-    "NintendoAmerica"
+    "NintendoAmerica",
 ]
 # Binary representation for efficient filtering
 # Bit from right to left: IsQuoted, IsPin, IsReply, IsRetweet, IsSelfThread
 filter = [
     0b11111,
     0b11111,
-    0b11111
+    0b11111,
 ]