fixes some lint errors

This commit is contained in:
stefanodvx 2025-04-16 17:03:16 +02:00
parent 5ccf4e6168
commit 12c12e53f7
16 changed files with 108 additions and 93 deletions

View file

@ -44,21 +44,34 @@ func downloadMediaItem(
}
}()
if format.Type != enums.MediaTypePhoto {
if format.Type == enums.MediaTypePhoto {
file, err := util.DownloadFileInMemory(ctx, format.URL, config)
if err != nil {
return nil, fmt.Errorf("failed to download image: %w", err)
}
path := filepath.Join(config.DownloadDir, fileName)
if err := util.ImgToJPEG(file, path); err != nil {
return nil, fmt.Errorf("failed to convert image: %w", err)
}
filePath = path
cleanup = false
return &models.DownloadedMedia{
FilePath: filePath,
ThumbnailFilePath: thumbnailFilePath,
Media: media,
Index: idx,
}, nil
}
// handle non-photo (video/audio/other)
if len(format.Segments) == 0 {
path, err := util.DownloadFile(
ctx, format.URL,
fileName, config,
)
path, err := util.DownloadFile(ctx, format.URL, fileName, config)
if err != nil {
return nil, fmt.Errorf("failed to download file: %w", err)
}
filePath = path
} else {
path, err := util.DownloadFileWithSegments(
ctx, format.Segments,
fileName, config,
)
path, err := util.DownloadFileWithSegments(ctx, format.Segments, fileName, config)
if err != nil {
return nil, fmt.Errorf("failed to download segments: %w", err)
}
@ -73,24 +86,10 @@ func downloadMediaItem(
thumbnailFilePath = path
}
if format.Type == enums.MediaTypeVideo {
if format.Width == 0 || format.Height == 0 || format.Duration == 0 {
if format.Type == enums.MediaTypeVideo && (format.Width == 0 || format.Height == 0 || format.Duration == 0) {
insertVideoInfo(format, filePath)
}
}
} else {
file, err := util.DownloadFileInMemory(ctx, format.URL, config)
if err != nil {
return nil, fmt.Errorf("failed to download image: %w", err)
}
path := filepath.Join(config.DownloadDir, fileName)
if err := util.ImgToJPEG(file, path); err != nil {
return nil, fmt.Errorf("failed to convert image: %w", err)
}
filePath = path
}
// all good, no need to delete files
cleanup = false
return &models.DownloadedMedia{
FilePath: filePath,

View file

@ -107,9 +107,11 @@ func StoreMedias(
medias []*models.DownloadedMedia,
) error {
var storedMedias []*models.Media
if len(medias) == 0 {
return fmt.Errorf("no media to store")
}
for idx, msg := range msgs {
fileID := GetMessageFileID(&msg)
if len(fileID) == 0 {
@ -211,27 +213,22 @@ func HandleErrorMessage(
for currentError != nil {
var botError *util.Error
if errors.As(currentError, &botError) {
SendErrorMessage(bot, ctx, fmt.Sprintf(
"error occurred when downloading: %s",
currentError.Error(),
))
SendErrorMessage(bot, ctx,
"error occurred when downloading: "+currentError.Error(),
)
return
}
currentError = errors.Unwrap(currentError)
}
lastError := util.GetLastError(err)
errorMessage := fmt.Sprintf(
"error occurred when downloading: %s",
lastError.Error(),
)
errorMessage := "error occurred when downloading: " + lastError.Error()
if strings.Contains(errorMessage, bot.Token) {
errorMessage = "telegram related error, probably connection issue"
}
SendErrorMessage(bot, ctx, errorMessage)
}
func SendErrorMessage(

View file

@ -17,7 +17,7 @@ import (
"github.com/PaulSonOfLars/gotgbot/v2/ext/handlers/filters/inlinequery"
)
var AllowedUpdates = []string{
var allowedUpdates = []string{
"message",
"callback_query",
"inline_query",
@ -69,7 +69,7 @@ func Start() {
RequestOpts: &gotgbot.RequestOpts{
Timeout: time.Second * 10,
},
AllowedUpdates: AllowedUpdates,
AllowedUpdates: allowedUpdates,
},
})
if err != nil {

View file

@ -41,9 +41,9 @@ func Start() {
if err != nil {
log.Fatalf("failed to get database connection: %v", err)
}
sqlDB.SetMaxIdleConns(10)
sqlDB.SetMaxOpenConns(100)
sqlDB.SetConnMaxLifetime(time.Hour)
sqlDB.SetMaxIdleConns(20)
sqlDB.SetMaxOpenConns(50)
sqlDB.SetConnMaxLifetime(30 * time.Minute)
err = sqlDB.Ping()
if err != nil {
log.Fatalf("failed to ping database: %v", err)

View file

@ -185,7 +185,7 @@ func GetVideoAPI(contentURL string) (*IGramResponse, error) {
if err != nil {
return nil, fmt.Errorf("failed to build signed payload: %w", err)
}
req, err := http.NewRequest("POST", apiURL, payload)
req, err := http.NewRequest(http.MethodPost, apiURL, payload)
if err != nil {
return nil, fmt.Errorf("failed to create request: %w", err)
}

View file

@ -2,6 +2,7 @@ package instagram
import (
"crypto/sha256"
"encoding/hex"
"encoding/json"
"fmt"
"govd/util"
@ -10,6 +11,7 @@ import (
"net/http"
"net/url"
"regexp"
"strconv"
"strings"
"time"
)
@ -19,7 +21,7 @@ var captionPattern = regexp.MustCompile(
)
func BuildSignedPayload(contentURL string) (io.Reader, error) {
timestamp := fmt.Sprintf("%d", time.Now().UnixMilli())
timestamp := strconv.FormatInt(time.Now().UnixMilli(), 10)
hash := sha256.New()
_, err := io.WriteString(
hash,
@ -29,7 +31,7 @@ func BuildSignedPayload(contentURL string) (io.Reader, error) {
return nil, fmt.Errorf("error writing to SHA256 hash: %w", err)
}
secretBytes := hash.Sum(nil)
secretString := fmt.Sprintf("%x", secretBytes)
secretString := hex.EncodeToString(secretBytes)
secretString = strings.ToLower(secretString)
payload := map[string]string{
"url": contentURL,
@ -76,13 +78,13 @@ func ParseIGramResponse(body []byte) (*IGramResponse, error) {
}
func GetCDNURL(contentURL string) (string, error) {
parsedUrl, err := url.Parse(contentURL)
parsedURL, err := url.Parse(contentURL)
if err != nil {
return "", fmt.Errorf("can't parse igram URL: %v", err)
return "", fmt.Errorf("can't parse igram URL: %w", err)
}
queryParams, err := url.ParseQuery(parsedUrl.RawQuery)
queryParams, err := url.ParseQuery(parsedURL.RawQuery)
if err != nil {
return "", fmt.Errorf("can't unescape igram URL: %v", err)
return "", fmt.Errorf("can't unescape igram URL: %w", err)
}
cdnURL := queryParams.Get("uri")
return cdnURL, nil
@ -133,7 +135,6 @@ func GetPostCaption(
if len(matches) < 2 {
// post has no caption most likely
return "", nil
} else {
}
return html.UnescapeString(matches[1]), nil
}
}

View file

@ -27,8 +27,8 @@ var (
"co\\.in", "co\\.nz", "id", "com\\.ec", "com\\.py", "tw", "be", "uk", "com\\.bo", "com\\.pe",
}
validHostRegex = strings.Join(validHost, "|")
validUrlPattern = `https?://(?:[^/]+\.)?pinterest\.(` + validHostRegex + `)/pin/(?:[\w-]+--)?(?P<id>\d+)`
pinValidUrlPattern = `https?://(www\.)?pin\.(` + validHostRegex + `)/(?P<id>\w+)`
validURLPattern = `https?://(?:[^/]+\.)?pinterest\.(` + validHostRegex + `)/pin/(?:[\w-]+--)?(?P<id>\d+)`
pinValidURLPattern = `https?://(www\.)?pin\.(` + validHostRegex + `)/(?P<id>\w+)`
)
var ShortExtractor = &models.Extractor{
@ -36,7 +36,7 @@ var ShortExtractor = &models.Extractor{
CodeName: "pinterest:short",
Type: enums.ExtractorTypeSingle,
Category: enums.ExtractorCategorySocial,
URLPattern: regexp.MustCompile(pinValidUrlPattern),
URLPattern: regexp.MustCompile(pinValidURLPattern),
Host: func() []string {
var domains []string
for _, domain := range validHost {
@ -63,7 +63,7 @@ var Extractor = &models.Extractor{
CodeName: "pinterest",
Type: enums.ExtractorTypeSingle,
Category: enums.ExtractorCategorySocial,
URLPattern: regexp.MustCompile(validUrlPattern),
URLPattern: regexp.MustCompile(validURLPattern),
Host: func() []string {
var domains []string
for _, domain := range validHost {
@ -161,7 +161,7 @@ func ExtractPinMedia(ctx *models.DownloadContext) ([]*models.Media, error) {
func GetPinData(pinID string) (*PinData, error) {
params := BuildPinRequestParams(pinID)
req, err := http.NewRequest("GET", pinResourceEndpoint, nil)
req, err := http.NewRequest(http.MethodGet, pinResourceEndpoint, nil)
if err != nil {
return nil, fmt.Errorf("failed to create request: %w", err)
}

View file

@ -32,7 +32,7 @@ var ShortExtractor = &models.Extractor{
IsRedirect: true,
Run: func(ctx *models.DownloadContext) (*models.ExtractorResponse, error) {
req, err := http.NewRequest("GET", ctx.MatchedContentURL, nil)
req, err := http.NewRequest(http.MethodGet, ctx.MatchedContentURL, nil)
if err != nil {
return nil, fmt.Errorf("failed to create request: %w", err)
}
@ -225,7 +225,7 @@ func MediaListFromAPI(ctx *models.DownloadContext) ([]*models.Media, error) {
func GetRedditData(host string, slug string) (RedditResponse, error) {
url := fmt.Sprintf("https://%s/%s/.json", host, slug)
req, err := http.NewRequest("GET", url, nil)
req, err := http.NewRequest(http.MethodGet, url, nil)
if err != nil {
return nil, fmt.Errorf("failed to create request: %w", err)
}

View file

@ -29,7 +29,7 @@ var ShortExtractor = &models.Extractor{
IsRedirect: true,
Run: func(ctx *models.DownloadContext) (*models.ExtractorResponse, error) {
req, err := http.NewRequest("GET", ctx.MatchedContentURL, nil)
req, err := http.NewRequest(http.MethodGet, ctx.MatchedContentURL, nil)
if err != nil {
return nil, fmt.Errorf("failed to create req: %w", err)
}
@ -139,7 +139,7 @@ func GetTweetAPI(tweetID string) (*Tweet, error) {
}
query := BuildAPIQuery(tweetID)
req, err := http.NewRequest("GET", apiEndpoint, nil)
req, err := http.NewRequest(http.MethodGet, apiEndpoint, nil)
if err != nil {
return nil, fmt.Errorf("failed to create req: %w", err)
}

View file

@ -28,7 +28,7 @@ func BuildAPIHeaders(cookies []*http.Cookie) map[string]string {
return nil
}
headers := map[string]string{
"authorization": fmt.Sprintf("Bearer %s", authToken),
"authorization": "Bearer " + authToken,
"user-agent": util.ChromeUA,
"x-twitter-auth-type": "OAuth2Session",
"x-twitter-client-language": "en",
@ -140,7 +140,7 @@ func FindTweetData(resp *APIResponse, tweetID string) (*Tweet, error) {
}
entries := instructions[0].Entries
entryID := fmt.Sprintf("tweet-%s", tweetID)
entryID := "tweet-" + tweetID
for _, entry := range entries {
if entry.EntryID == entryID {

View file

@ -20,6 +20,7 @@ func main() {
if err != nil {
log.Fatal("error loading .env file")
}
profilerPort, err := strconv.Atoi(os.Getenv("PROFILER_PORT"))
if err == nil && profilerPort > 0 {
go func() {
@ -27,13 +28,17 @@ func main() {
http.ListenAndServe(fmt.Sprintf(":%d", profilerPort), nil)
}()
}
util.CleanupDownloadsDir()
util.StartDownloadsCleanup()
ok := util.CheckFFmpeg()
if !ok {
log.Fatal("ffmpeg executable not found. please install it or add it to your PATH")
}
database.Start()
go bot.Start()
select {} // keep the main goroutine alive

View file

@ -14,7 +14,7 @@ func RemuxFile(
outputFile := inputFile
err := os.Rename(inputFile, tempFileName)
if err != nil {
return fmt.Errorf("failed to rename file: %v", err)
return fmt.Errorf("failed to rename file: %w", err)
}
defer os.Remove(tempFileName)
err = ffmpeg.
@ -26,7 +26,7 @@ func RemuxFile(
OverWriteOutput().
Run()
if err != nil {
return fmt.Errorf("failed to remux file: %v", err)
return fmt.Errorf("failed to remux file: %w", err)
}
return nil
}

View file

@ -184,12 +184,21 @@ func downloadInMemory(
return nil, fmt.Errorf("file too large for in-memory download: %d bytes", resp.ContentLength)
}
var buf bytes.Buffer
var bufPool = sync.Pool{
New: func() any {
return bytes.NewBuffer(make([]byte, 0, 1024*1024))
},
}
buf := bufPool.Get().(*bytes.Buffer)
buf.Reset()
defer bufPool.Put(buf)
if resp.ContentLength > 0 {
buf.Grow(int(resp.ContentLength))
}
_, err = io.Copy(&buf, resp.Body)
_, err = io.Copy(buf, resp.Body)
if err != nil {
return nil, fmt.Errorf("failed to read response body: %w", err)
}
@ -218,8 +227,11 @@ func runChunkedDownload(
) error {
// reduce concurrency if it's greater
// than the number of available CPUs
if runtime.NumCPU() < config.Concurrency && runtime.GOMAXPROCS(0) < config.Concurrency {
config.Concurrency = runtime.NumCPU()
maxProcs := runtime.GOMAXPROCS(0)
optimalConcurrency := int(math.Max(1, float64(maxProcs-1)))
if config.Concurrency > optimalConcurrency {
config.Concurrency = optimalConcurrency
}
fileSize, err := getFileSize(ctx, fileURL, config.Timeout)
@ -446,7 +458,7 @@ func createChunks(fileSize int, chunkSize int) [][2]int {
numChunks := int(math.Ceil(float64(fileSize) / float64(chunkSize)))
chunks := make([][2]int, numChunks)
for i := 0; i < numChunks; i++ {
for i := range chunks {
start := i * chunkSize
end := start + chunkSize - 1
if end >= fileSize {

View file

@ -21,13 +21,18 @@ func GetHTTPSession() *http.Client {
KeepAlive: 30 * time.Second,
}).DialContext,
ForceAttemptHTTP2: true,
MaxIdleConns: 100,
IdleConnTimeout: 90 * time.Second,
TLSHandshakeTimeout: 10 * time.Second,
TLSHandshakeTimeout: 5 * time.Second,
ExpectContinueTimeout: 1 * time.Second,
MaxIdleConnsPerHost: 20,
MaxConnsPerHost: 20,
ResponseHeaderTimeout: 30 * time.Second,
MaxIdleConnsPerHost: 100,
MaxConnsPerHost: 100,
ResponseHeaderTimeout: 10 * time.Second,
DisableCompression: false,
}

View file

@ -7,6 +7,7 @@ import (
"image/jpeg"
"io"
"os"
"slices"
_ "image/gif"
_ "image/png"
@ -117,10 +118,5 @@ func isHEIF(header []byte) bool {
heifBrands := []string{"heic", "heix", "mif1", "msf1"}
brand := string(header[8:12])
for _, b := range heifBrands {
if brand == b {
return true
}
}
return false
return slices.Contains(heifBrands, brand)
}

View file

@ -19,14 +19,14 @@ func GetLocationURL(
url string,
userAgent string,
) (string, error) {
req, err := http.NewRequest("GET", url, nil)
req, err := http.NewRequest(http.MethodGet, url, nil)
if err != nil {
return "", fmt.Errorf("failed to create request: %w", err)
}
if userAgent == "" {
userAgent = ChromeUA
}
req.Header.Set("User-Agent", ChromeUA)
req.Header.Set("User-Agent", userAgent)
session := GetHTTPSession()
resp, err := session.Do(req)
if err != nil {