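// Command blurp is a GoToSocial status management tool. It can log in to an
// account, archive statuses (including media attachments) to disk, and delete
// statuses older than a configurable number of weeks.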
package main

import (
	"encoding/json"
	"fmt"
	"io"
	"log/slog"
	"net/http"
	"net/url"
	"os"
	"path/filepath"
	"time"

	auth "git.coopcloud.tech/decentral1se/gtslib-auth-keyring"
	"git.coopcloud.tech/decentral1se/gtslib/client/accounts"
	"git.coopcloud.tech/decentral1se/gtslib/client/statuses"
	"git.coopcloud.tech/decentral1se/gtslib/models"
	"github.com/peterhellberg/link"
	"github.com/pkg/errors"
	"github.com/spf13/cobra"
)

var (
	user           string // account username
	weeks          int    // number of weeks of statuses to retain
	rate           int    // requests per second
	dry            bool   // whether or not to run in dry mode
	skipFavourites bool   // whether or not to skip deleting favourites
	skipPinned     bool   // whether or not to skip deleting pinned statuses
)

func init() {
	loginCmd.Flags().StringVarP(&user, "user", "u", "", "username@domain of account")
	rootCmd.AddCommand(loginCmd)

	archiveCmd.Flags().IntVarP(&rate, "rate", "r", 1, "send a request every 'r' seconds")
	rootCmd.AddCommand(archiveCmd)

	deleteCmd.Flags().IntVarP(&weeks, "weeks", "w", 2, "keep statuses NEWER than no. of weeks")
	deleteCmd.Flags().IntVarP(&rate, "rate", "r", 1, "send a request every 'r' seconds")
	deleteCmd.Flags().BoolVarP(&dry, "dry", "d", false, "dry run mode (NO DELETION)")
	deleteCmd.Flags().BoolVarP(&skipFavourites, "skip-favourites", "f", false, "skip deletion of favourites")
	deleteCmd.Flags().BoolVarP(&skipPinned, "skip-pinned", "p", false, "skip deletion of pinned statuses")
	rootCmd.AddCommand(deleteCmd)
}

// main is the command-line entrypoint.
func main() {
	if err := rootCmd.Execute(); err != nil {
		os.Exit(1) // NOTE(d1): Execute takes care of error output
	}
}

// rootCmd is the root command.
var rootCmd = &cobra.Command{
	Use:   "blurp",
	Short: "A GoToSocial status management tool",
}

// loginCmd is the login command.
var loginCmd = &cobra.Command{
	Use:   "login",
	Short: "Log in",
	RunE: func(cmd *cobra.Command, args []string) error {
		return auth.Login(user, auth.WithName("blurp"))
	},
}

// archiveCmd is the archive command.
var archiveCmd = &cobra.Command{
	Use:   "archive",
	Short: "Archive all account statuses (favourites, boosted, etc.)",
	RunE: func(cmd *cobra.Command, args []string) error {
		authClient, err := auth.NewAuthClient(user)
		if err != nil {
			slog.Error("unable to create auth client", "error", err)
			os.Exit(1)
		}

		acc, err := getAccount(authClient)
		if err != nil {
			slog.Error("unable to retrieve account", "error", err)
			os.Exit(1)
		}

		allStatuses, err := readAllPaged(authClient, acc.ID)
		if err != nil {
			slog.Error("unable to download paged response", "error", err)
			os.Exit(1)
		}

		basePath := filepath.Join(".", "archive")
		if err := os.MkdirAll(basePath, 0755); err != nil {
			slog.Error("unable to create archive directory", "error", err)
			os.Exit(1)
		}

		for _, status := range allStatuses {
			// Statuses without media are written directly into the archive
			// directory; statuses with media get their own sub-directory.
			statusPath := basePath

			if len(status.MediaAttachments) > 0 {
				statusPath = filepath.Join(basePath, status.ID)

				if err := os.MkdirAll(statusPath, 0755); err != nil {
					slog.Error("unable to create status directory", "error", err)
					os.Exit(1)
				}

				for _, media := range status.MediaAttachments {
					parsed, err := url.Parse(media.URL)
					if err != nil {
						slog.Error("unable to parse media URL", "error", err)
						os.Exit(1)
					}

					imagePath := filepath.Join(statusPath, filepath.Base(parsed.Path))
					if _, err := os.Stat(imagePath); errors.Is(err, os.ErrNotExist) {
						if err := httpGetFile(imagePath, media.URL); err != nil {
							slog.Error("unable to download file", "error", err)
							os.Exit(1)
						}
						slog.Info(fmt.Sprintf("archived %s", imagePath))
					}
				}
			}

			payload, err := json.MarshalIndent(status, "", " ")
			if err != nil {
				slog.Error("unable to marshal", "error", err)
				os.Exit(1)
			}

			jsonPath := filepath.Join(statusPath, fmt.Sprintf("%s.json", status.ID))
			if _, err := os.Stat(jsonPath); errors.Is(err, os.ErrNotExist) {
				if err := os.WriteFile(jsonPath, payload, 0644); err != nil {
					slog.Error("unable to write JSON file", "error", err)
					os.Exit(1)
				}
				slog.Info(fmt.Sprintf("archived %s", jsonPath))
			}
		}

		return nil
	},
}

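// deleteCmd is the delete command.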
var deleteCmd = &cobra.Command{
	Use:   "delete",
	Short: "Delete statuses like tears in the rain",
	Example: `
You can use "--weeks/-w 0" to signal "all posts". In combination with
"--dry/-d", you can get a clear idea of what blurp is going to do with each
status.

blurp delete --dry --weeks 0`,
	RunE: func(cmd *cobra.Command, args []string) error {
		authClient, err := auth.NewAuthClient(user)
		if err != nil {
			slog.Error("unable to create auth client", "error", err)
			os.Exit(1)
		}

		slog.Info(fmt.Sprintf("keeping statuses NEWER than %d weeks", weeks))

		if dry {
			slog.Info("DRY RUN MODE ENABLED - STATUS DELETION DISABLED")
		}

		acc, err := getAccount(authClient)
		if err != nil {
			slog.Error("unable to retrieve account", "error", err)
			os.Exit(1)
		}

		slog.Info("retrieving statuses... this may take a moment...")

		allStatuses, err := readAllPaged(authClient, acc.ID)
		if err != nil {
			slog.Error("unable to download paged response", "error", err)
			os.Exit(1)
		}

		ISO8601 := "2006-01-02T15:04:05.000Z"
		for _, status := range allStatuses {
			t, err := time.Parse(ISO8601, status.CreatedAt)
			if err != nil {
				slog.Error("unable to parse status 'CreatedAt' value", "error", err)
				os.Exit(1)
			}

			if status.Favourited && skipFavourites {
				slog.Info(fmt.Sprintf("skipping %s (skip favourites: %v)", status.ID, skipFavourites))
				continue
			}

			if status.Pinned && skipPinned {
				slog.Info(fmt.Sprintf("skipping %s (skip pinned: %v)", status.ID, skipPinned))
				continue
			}

			// Keep anything newer than the cutoff; delete the rest.
			cutoff := time.Now().Add(-time.Duration(weeks) * 7 * 24 * time.Hour)
			if t.Before(cutoff) {
				if !dry {
					_, err := authClient.Client.Statuses.StatusDelete(&statuses.StatusDeleteParams{
						ID: status.ID,
					}, authClient.Auth)
					if err != nil {
						slog.Error("unable to delete status", "error", err)
						os.Exit(1)
					}
				}

				msg := fmt.Sprintf("deleted %s (created: %s)", status.ID, t.Format(time.DateOnly))
				if dry {
					msg = fmt.Sprintf("DRY RUN: %s", msg)
				}
				slog.Info(msg)

				time.Sleep(time.Duration(rate) * time.Second)
			} else {
				slog.Info(fmt.Sprintf("keeping %s (created: %s)", status.ID, t.Format(time.DateOnly)))
			}
		}

		return nil
	},
}

// getAccount returns the currently authenticated account.
func getAccount(authClient *auth.Client) (*models.Account, error) {
	err := authClient.Wait()
	if err != nil {
		return nil, err
	}

	resp, err := authClient.Client.Accounts.AccountVerify(nil, authClient.Auth)
	if err != nil {
		return nil, errors.WithStack(err)
	}

	return resp.GetPayload(), nil
}

// httpGetFile downloads a file from the internet.
func httpGetFile(filepath, url string) error {
	resp, err := http.Get(url)
	if err != nil {
		return fmt.Errorf("httpGetFile: unable to HTTP GET '%s': %s", url, err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("httpGetFile: HTTP GET response code %v for '%s'", resp.StatusCode, url)
	}

	// Only create the file once the request has succeeded, so a failed
	// download does not leave an empty file behind.
	out, err := os.Create(filepath)
	if err != nil {
		return fmt.Errorf("httpGetFile: unable to create '%s': %s", filepath, err)
	}
	defer out.Close()

	_, err = io.Copy(out, resp.Body)
	if err != nil {
		return fmt.Errorf("httpGetFile: unable to copy HTTP GET response to disk: %s", err)
	}

	return nil
}

// parseLinkMaxID extracts the `max_id` from the `next` link for paging to older items.
func parseLinkMaxID(linkHeader string) (*string, error) {
	next := link.Parse(linkHeader)["next"]
	if next == nil {
		// No link header in that direction means end of results.
		return nil, nil
	}

	nextUrl, err := url.Parse(next.URI)
	if err != nil {
		return nil, errors.Wrap(err, "couldn't parse next page URL")
	}

	nextMaxID := nextUrl.Query().Get("max_id")
	if nextMaxID == "" {
		return nil, errors.New("couldn't find next page max ID")
	}

	return &nextMaxID, nil
}

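// readAllPaged fetches every status for the given account ID, following the
// Link header's `max_id` cursor until no further pages remain.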
func readAllPaged(authClient *auth.Client, accID string) ([]*models.Status, error) {
	var all []*models.Status
	var maxID *string

	for {
		err := authClient.Wait()
		if err != nil {
			return all, errors.WithStack(err)
		}

		params := &accounts.AccountStatusesParams{ID: accID, MaxID: maxID}
		resp, err := authClient.Client.Accounts.AccountStatuses(params, authClient.Auth)
		if err != nil {
			slog.Error("error fetching page", "error", err)
			return all, errors.WithStack(err)
		}

		// Collect this page before checking for a next page so the final
		// page of results is not dropped.
		all = append(all, resp.GetPayload()...)

		maxID, err = parseLinkMaxID(resp.Link)
		if err != nil {
			slog.Error("error parsing Link header", "error", err)
			return all, errors.WithStack(err)
		}
		if maxID == nil {
			// End of pages.
			break
		}

		time.Sleep(time.Duration(rate) * time.Second)
	}

	return all, nil
}