fixes some lint errors

parent 5ccf4e6168
commit 12c12e53f7
16 changed files with 108 additions and 93 deletions
@@ -44,21 +44,34 @@ func downloadMediaItem(
         }
     }()
 
-    if format.Type != enums.MediaTypePhoto {
+    if format.Type == enums.MediaTypePhoto {
+        file, err := util.DownloadFileInMemory(ctx, format.URL, config)
+        if err != nil {
+            return nil, fmt.Errorf("failed to download image: %w", err)
+        }
+        path := filepath.Join(config.DownloadDir, fileName)
+        if err := util.ImgToJPEG(file, path); err != nil {
+            return nil, fmt.Errorf("failed to convert image: %w", err)
+        }
+        filePath = path
+        cleanup = false
+        return &models.DownloadedMedia{
+            FilePath:          filePath,
+            ThumbnailFilePath: thumbnailFilePath,
+            Media:             media,
+            Index:             idx,
+        }, nil
+    }
+
+    // handle non-photo (video/audio/other)
     if len(format.Segments) == 0 {
-        path, err := util.DownloadFile(
-            ctx, format.URL,
-            fileName, config,
-        )
+        path, err := util.DownloadFile(ctx, format.URL, fileName, config)
         if err != nil {
             return nil, fmt.Errorf("failed to download file: %w", err)
         }
         filePath = path
     } else {
-        path, err := util.DownloadFileWithSegments(
-            ctx, format.Segments,
-            fileName, config,
-        )
+        path, err := util.DownloadFileWithSegments(ctx, format.Segments, fileName, config)
         if err != nil {
             return nil, fmt.Errorf("failed to download segments: %w", err)
         }
@@ -73,24 +86,10 @@ func downloadMediaItem(
         thumbnailFilePath = path
     }
 
-    if format.Type == enums.MediaTypeVideo {
-        if format.Width == 0 || format.Height == 0 || format.Duration == 0 {
+    if format.Type == enums.MediaTypeVideo && (format.Width == 0 || format.Height == 0 || format.Duration == 0) {
         insertVideoInfo(format, filePath)
     }
-    }
-    } else {
-        file, err := util.DownloadFileInMemory(ctx, format.URL, config)
-        if err != nil {
-            return nil, fmt.Errorf("failed to download image: %w", err)
-        }
-        path := filepath.Join(config.DownloadDir, fileName)
-        if err := util.ImgToJPEG(file, path); err != nil {
-            return nil, fmt.Errorf("failed to convert image: %w", err)
-        }
-        filePath = path
-    }
 
-    // all good, no need to delete files
     cleanup = false
     return &models.DownloadedMedia{
         FilePath: filePath,
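The two hunks above turn an if/else into a guard clause: the photo case now returns early, so the video/audio path no longer sits inside an else block. A minimal sketch of that refactoring pattern (hypothetical names, not code from this repository):

```go
package main

import "fmt"

// Before: the non-photo path lives inside an else branch.
func describeBefore(isPhoto bool) string {
	if !isPhoto {
		return "download to disk"
	} else {
		return "download in memory and convert"
	}
}

// After: the special case returns early, so the longer path
// stays at the top indentation level.
func describeAfter(isPhoto bool) string {
	if isPhoto {
		return "download in memory and convert"
	}
	return "download to disk"
}

func main() {
	fmt.Println(describeBefore(true) == describeAfter(true)) // true
}
```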
@@ -107,9 +107,11 @@ func StoreMedias(
     medias []*models.DownloadedMedia,
 ) error {
     var storedMedias []*models.Media
+
     if len(medias) == 0 {
         return fmt.Errorf("no media to store")
     }
+
     for idx, msg := range msgs {
         fileID := GetMessageFileID(&msg)
         if len(fileID) == 0 {
@@ -211,27 +213,22 @@ func HandleErrorMessage(
     for currentError != nil {
         var botError *util.Error
         if errors.As(currentError, &botError) {
-            SendErrorMessage(bot, ctx, fmt.Sprintf(
-                "error occurred when downloading: %s",
-                currentError.Error(),
-            ))
+            SendErrorMessage(bot, ctx,
+                "error occurred when downloading: "+currentError.Error(),
+            )
             return
         }
         currentError = errors.Unwrap(currentError)
     }
 
     lastError := util.GetLastError(err)
-    errorMessage := fmt.Sprintf(
-        "error occurred when downloading: %s",
-        lastError.Error(),
-    )
+    errorMessage := "error occurred when downloading: " + lastError.Error()
 
     if strings.Contains(errorMessage, bot.Token) {
         errorMessage = "telegram related error, probably connection issue"
     }
 
     SendErrorMessage(bot, ctx, errorMessage)
 
 }
 
 func SendErrorMessage(
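The hunk above walks a wrapped error chain with errors.As and errors.Unwrap before falling back to a generic message. A self-contained sketch of that traversal pattern, assuming a custom error type (the names here are illustrative, not the repository's):

```go
package main

import (
	"errors"
	"fmt"
)

// AppError stands in for a custom error type such as util.Error.
type AppError struct{ Msg string }

func (e *AppError) Error() string { return e.Msg }

func main() {
	// Wrap a typed error twice with %w so it stays discoverable.
	err := fmt.Errorf("outer: %w", fmt.Errorf("inner: %w", &AppError{Msg: "boom"}))

	// Walk the chain manually, stopping at the first *AppError.
	// (errors.As already unwraps internally; the explicit loop simply
	// mirrors the structure of the code in the diff.)
	for current := err; current != nil; current = errors.Unwrap(current) {
		var appErr *AppError
		if errors.As(current, &appErr) {
			fmt.Println("found:", appErr.Msg)
			return
		}
	}
	fmt.Println("no AppError in chain")
}
```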
@@ -17,7 +17,7 @@ import (
     "github.com/PaulSonOfLars/gotgbot/v2/ext/handlers/filters/inlinequery"
 )
 
-var AllowedUpdates = []string{
+var allowedUpdates = []string{
     "message",
     "callback_query",
     "inline_query",
@@ -69,7 +69,7 @@ func Start() {
         RequestOpts: &gotgbot.RequestOpts{
             Timeout: time.Second * 10,
         },
-        AllowedUpdates: AllowedUpdates,
+        AllowedUpdates: allowedUpdates,
     },
 })
 if err != nil {
@@ -41,9 +41,9 @@ func Start() {
     if err != nil {
         log.Fatalf("failed to get database connection: %v", err)
     }
-    sqlDB.SetMaxIdleConns(10)
-    sqlDB.SetMaxOpenConns(100)
-    sqlDB.SetConnMaxLifetime(time.Hour)
+    sqlDB.SetMaxIdleConns(20)
+    sqlDB.SetMaxOpenConns(50)
+    sqlDB.SetConnMaxLifetime(30 * time.Minute)
     err = sqlDB.Ping()
     if err != nil {
         log.Fatalf("failed to ping database: %v", err)
@@ -185,7 +185,7 @@ func GetVideoAPI(contentURL string) (*IGramResponse, error) {
     if err != nil {
         return nil, fmt.Errorf("failed to build signed payload: %w", err)
     }
-    req, err := http.NewRequest("POST", apiURL, payload)
+    req, err := http.NewRequest(http.MethodPost, apiURL, payload)
     if err != nil {
         return nil, fmt.Errorf("failed to create request: %w", err)
     }
@@ -2,6 +2,7 @@ package instagram
 
 import (
     "crypto/sha256"
+    "encoding/hex"
     "encoding/json"
     "fmt"
     "govd/util"
@@ -10,6 +11,7 @@ import (
     "net/http"
     "net/url"
     "regexp"
+    "strconv"
     "strings"
     "time"
 )
@@ -19,7 +21,7 @@ var captionPattern = regexp.MustCompile(
 )
 
 func BuildSignedPayload(contentURL string) (io.Reader, error) {
-    timestamp := fmt.Sprintf("%d", time.Now().UnixMilli())
+    timestamp := strconv.FormatInt(time.Now().UnixMilli(), 10)
     hash := sha256.New()
     _, err := io.WriteString(
         hash,
@@ -29,7 +31,7 @@ func BuildSignedPayload(contentURL string) (io.Reader, error) {
         return nil, fmt.Errorf("error writing to SHA256 hash: %w", err)
     }
     secretBytes := hash.Sum(nil)
-    secretString := fmt.Sprintf("%x", secretBytes)
+    secretString := hex.EncodeToString(secretBytes)
     secretString = strings.ToLower(secretString)
     payload := map[string]string{
         "url": contentURL,
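The two instagram hunks above replace single-value fmt.Sprintf calls with purpose-built encoders, the kind of substitution linters such as perfsprint suggest (an assumption; the commit message only says "lint errors"). A small sketch showing that the pairs are equivalent:

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"strconv"
	"time"
)

func main() {
	// Integer formatting: strconv.FormatInt avoids the reflection and
	// interface boxing that fmt.Sprintf("%d", ...) goes through.
	ms := time.Now().UnixMilli()
	a := fmt.Sprintf("%d", ms)
	b := strconv.FormatInt(ms, 10)
	fmt.Println(a == b) // true

	// Hex encoding: hex.EncodeToString is the direct equivalent of
	// fmt.Sprintf("%x", ...) for a byte slice, and is already lowercase.
	sum := sha256.Sum256([]byte("payload"))
	c := fmt.Sprintf("%x", sum[:])
	d := hex.EncodeToString(sum[:])
	fmt.Println(c == d) // true
}
```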
@@ -76,13 +78,13 @@ func ParseIGramResponse(body []byte) (*IGramResponse, error) {
 }
 
 func GetCDNURL(contentURL string) (string, error) {
-    parsedUrl, err := url.Parse(contentURL)
+    parsedURL, err := url.Parse(contentURL)
     if err != nil {
-        return "", fmt.Errorf("can't parse igram URL: %v", err)
+        return "", fmt.Errorf("can't parse igram URL: %w", err)
     }
-    queryParams, err := url.ParseQuery(parsedUrl.RawQuery)
+    queryParams, err := url.ParseQuery(parsedURL.RawQuery)
     if err != nil {
-        return "", fmt.Errorf("can't unescape igram URL: %v", err)
+        return "", fmt.Errorf("can't unescape igram URL: %w", err)
     }
     cdnURL := queryParams.Get("uri")
     return cdnURL, nil
@@ -133,7 +135,6 @@ func GetPostCaption(
     if len(matches) < 2 {
         // post has no caption most likely
         return "", nil
-    } else {
-        return html.UnescapeString(matches[1]), nil
     }
+    return html.UnescapeString(matches[1]), nil
 }
@@ -27,8 +27,8 @@ var (
     "co\\.in", "co\\.nz", "id", "com\\.ec", "com\\.py", "tw", "be", "uk", "com\\.bo", "com\\.pe",
     }
     validHostRegex = strings.Join(validHost, "|")
-    validUrlPattern    = `https?://(?:[^/]+\.)?pinterest\.(` + validHostRegex + `)/pin/(?:[\w-]+--)?(?P<id>\d+)`
-    pinValidUrlPattern = `https?://(www\.)?pin\.(` + validHostRegex + `)/(?P<id>\w+)`
+    validURLPattern    = `https?://(?:[^/]+\.)?pinterest\.(` + validHostRegex + `)/pin/(?:[\w-]+--)?(?P<id>\d+)`
+    pinValidURLPattern = `https?://(www\.)?pin\.(` + validHostRegex + `)/(?P<id>\w+)`
 )
 
 var ShortExtractor = &models.Extractor{
@@ -36,7 +36,7 @@ var ShortExtractor = &models.Extractor{
     CodeName: "pinterest:short",
     Type:     enums.ExtractorTypeSingle,
     Category: enums.ExtractorCategorySocial,
-    URLPattern: regexp.MustCompile(pinValidUrlPattern),
+    URLPattern: regexp.MustCompile(pinValidURLPattern),
     Host: func() []string {
         var domains []string
         for _, domain := range validHost {
@@ -63,7 +63,7 @@ var Extractor = &models.Extractor{
     CodeName: "pinterest",
     Type:     enums.ExtractorTypeSingle,
     Category: enums.ExtractorCategorySocial,
-    URLPattern: regexp.MustCompile(validUrlPattern),
+    URLPattern: regexp.MustCompile(validURLPattern),
     Host: func() []string {
         var domains []string
         for _, domain := range validHost {
@@ -161,7 +161,7 @@ func ExtractPinMedia(ctx *models.DownloadContext) ([]*models.Media, error) {
 func GetPinData(pinID string) (*PinData, error) {
     params := BuildPinRequestParams(pinID)
 
-    req, err := http.NewRequest("GET", pinResourceEndpoint, nil)
+    req, err := http.NewRequest(http.MethodGet, pinResourceEndpoint, nil)
     if err != nil {
         return nil, fmt.Errorf("failed to create request: %w", err)
     }
@@ -32,7 +32,7 @@ var ShortExtractor = &models.Extractor{
     IsRedirect: true,
 
     Run: func(ctx *models.DownloadContext) (*models.ExtractorResponse, error) {
-        req, err := http.NewRequest("GET", ctx.MatchedContentURL, nil)
+        req, err := http.NewRequest(http.MethodGet, ctx.MatchedContentURL, nil)
         if err != nil {
             return nil, fmt.Errorf("failed to create request: %w", err)
         }
@@ -225,7 +225,7 @@ func MediaListFromAPI(ctx *models.DownloadContext) ([]*models.Media, error) {
 func GetRedditData(host string, slug string) (RedditResponse, error) {
     url := fmt.Sprintf("https://%s/%s/.json", host, slug)
 
-    req, err := http.NewRequest("GET", url, nil)
+    req, err := http.NewRequest(http.MethodGet, url, nil)
     if err != nil {
         return nil, fmt.Errorf("failed to create request: %w", err)
     }
@@ -29,7 +29,7 @@ var ShortExtractor = &models.Extractor{
     IsRedirect: true,
 
     Run: func(ctx *models.DownloadContext) (*models.ExtractorResponse, error) {
-        req, err := http.NewRequest("GET", ctx.MatchedContentURL, nil)
+        req, err := http.NewRequest(http.MethodGet, ctx.MatchedContentURL, nil)
         if err != nil {
             return nil, fmt.Errorf("failed to create req: %w", err)
         }
@@ -139,7 +139,7 @@ func GetTweetAPI(tweetID string) (*Tweet, error) {
     }
     query := BuildAPIQuery(tweetID)
 
-    req, err := http.NewRequest("GET", apiEndpoint, nil)
+    req, err := http.NewRequest(http.MethodGet, apiEndpoint, nil)
     if err != nil {
         return nil, fmt.Errorf("failed to create req: %w", err)
     }
@@ -28,7 +28,7 @@ func BuildAPIHeaders(cookies []*http.Cookie) map[string]string {
         return nil
     }
     headers := map[string]string{
-        "authorization":             fmt.Sprintf("Bearer %s", authToken),
+        "authorization":             "Bearer " + authToken,
         "user-agent":                util.ChromeUA,
         "x-twitter-auth-type":       "OAuth2Session",
         "x-twitter-client-language": "en",
@@ -140,7 +140,7 @@ func FindTweetData(resp *APIResponse, tweetID string) (*Tweet, error) {
     }
 
     entries := instructions[0].Entries
-    entryID := fmt.Sprintf("tweet-%s", tweetID)
+    entryID := "tweet-" + tweetID
 
     for _, entry := range entries {
         if entry.EntryID == entryID {
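Several hunks above make the same two substitutions: the string literals "GET"/"POST" become the http.MethodGet/http.MethodPost constants, and single-value fmt.Sprintf calls become plain concatenation. A compact sketch of both (the URL and token are placeholders):

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	// http.MethodGet is the typo-proof constant for the literal "GET".
	req, err := http.NewRequest(http.MethodGet, "https://example.com", nil)
	if err != nil {
		panic(err)
	}

	// "Bearer " + token does the same work as fmt.Sprintf("Bearer %s", token)
	// without the formatting machinery.
	token := "placeholder-token"
	req.Header.Set("Authorization", "Bearer "+token)

	fmt.Println(req.Method, req.Header.Get("Authorization"))
}
```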
main.go (5)
@@ -20,6 +20,7 @@ func main() {
     if err != nil {
         log.Fatal("error loading .env file")
     }
+
     profilerPort, err := strconv.Atoi(os.Getenv("PROFILER_PORT"))
     if err == nil && profilerPort > 0 {
         go func() {
@@ -27,13 +28,17 @@ func main() {
             http.ListenAndServe(fmt.Sprintf(":%d", profilerPort), nil)
         }()
     }
 
     util.CleanupDownloadsDir()
     util.StartDownloadsCleanup()
+
     ok := util.CheckFFmpeg()
     if !ok {
         log.Fatal("ffmpeg executable not found. please install it or add it to your PATH")
     }
+
     database.Start()
+
     go bot.Start()
+
     select {} // keep the main goroutine alive
@@ -14,7 +14,7 @@ func RemuxFile(
     outputFile := inputFile
     err := os.Rename(inputFile, tempFileName)
     if err != nil {
-        return fmt.Errorf("failed to rename file: %v", err)
+        return fmt.Errorf("failed to rename file: %w", err)
     }
     defer os.Remove(tempFileName)
     err = ffmpeg.
@@ -26,7 +26,7 @@ func RemuxFile(
         OverWriteOutput().
         Run()
     if err != nil {
-        return fmt.Errorf("failed to remux file: %v", err)
+        return fmt.Errorf("failed to remux file: %w", err)
     }
     return nil
 }
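The RemuxFile hunks switch fmt.Errorf from %v to %w. The difference only shows up when a caller inspects the chain: %w keeps the original error reachable through errors.Is and errors.As, while %v flattens it to text. A minimal illustration (the sentinel and messages are made up):

```go
package main

import (
	"errors"
	"fmt"
	"os"
)

func remux() error {
	// Pretend the rename failed with a well-known sentinel error.
	cause := os.ErrNotExist
	return fmt.Errorf("failed to rename file: %w", cause)
}

func main() {
	err := remux()

	// Works because %w preserved the wrapped error.
	fmt.Println(errors.Is(err, os.ErrNotExist)) // true

	// With %v instead of %w the original error is lost.
	flattened := fmt.Errorf("failed to rename file: %v", os.ErrNotExist)
	fmt.Println(errors.Is(flattened, os.ErrNotExist)) // false
}
```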
@@ -184,12 +184,21 @@ func downloadInMemory(
         return nil, fmt.Errorf("file too large for in-memory download: %d bytes", resp.ContentLength)
     }
 
-    var buf bytes.Buffer
+    var bufPool = sync.Pool{
+        New: func() any {
+            return bytes.NewBuffer(make([]byte, 0, 1024*1024))
+        },
+    }
+
+    buf := bufPool.Get().(*bytes.Buffer)
+    buf.Reset()
+    defer bufPool.Put(buf)
+
     if resp.ContentLength > 0 {
         buf.Grow(int(resp.ContentLength))
     }
 
-    _, err = io.Copy(&buf, resp.Body)
+    _, err = io.Copy(buf, resp.Body)
     if err != nil {
         return nil, fmt.Errorf("failed to read response body: %w", err)
     }
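The downloadInMemory hunk swaps a per-call bytes.Buffer for a sync.Pool of preallocated buffers. A standalone sketch of the pooling pattern; note that a pool is usually declared at package scope so buffers are actually reused across calls, whereas the declaration in the diff above is function-local, so this is an illustration of the general technique rather than a copy of the commit:

```go
package main

import (
	"bytes"
	"fmt"
	"strings"
	"sync"
)

// bufPool hands out reusable 1 MiB buffers; Get falls back to New
// only when the pool has nothing to reuse.
var bufPool = sync.Pool{
	New: func() any {
		return bytes.NewBuffer(make([]byte, 0, 1024*1024))
	},
}

func readAll(r *strings.Reader) string {
	buf := bufPool.Get().(*bytes.Buffer)
	buf.Reset()            // clear leftover content from a previous user
	defer bufPool.Put(buf) // return the buffer once we are done

	_, _ = buf.ReadFrom(r)
	return buf.String()
}

func main() {
	fmt.Println(readAll(strings.NewReader("hello pool")))
}
```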
@@ -218,8 +227,11 @@ func runChunkedDownload(
 ) error {
     // reduce concurrency if it's greater
     // than the number of available CPUs
-    if runtime.NumCPU() < config.Concurrency && runtime.GOMAXPROCS(0) < config.Concurrency {
-        config.Concurrency = runtime.NumCPU()
+    maxProcs := runtime.GOMAXPROCS(0)
+    optimalConcurrency := int(math.Max(1, float64(maxProcs-1)))
+
+    if config.Concurrency > optimalConcurrency {
+        config.Concurrency = optimalConcurrency
     }
 
     fileSize, err := getFileSize(ctx, fileURL, config.Timeout)
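The runChunkedDownload hunk caps the configured concurrency at GOMAXPROCS-1, with a floor of one worker. A small sketch of that clamping logic in isolation (Config here is a stand-in type, not the repository's):

```go
package main

import (
	"fmt"
	"math"
	"runtime"
)

type Config struct{ Concurrency int }

// clampConcurrency mirrors the pattern above: leave one scheduler
// thread free and never drop below a single worker.
func clampConcurrency(cfg *Config) {
	maxProcs := runtime.GOMAXPROCS(0) // passing 0 just reads the current value
	optimal := int(math.Max(1, float64(maxProcs-1)))
	if cfg.Concurrency > optimal {
		cfg.Concurrency = optimal
	}
}

func main() {
	cfg := &Config{Concurrency: 64}
	clampConcurrency(cfg)
	fmt.Println(cfg.Concurrency)
}
```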
@@ -446,7 +458,7 @@ func createChunks(fileSize int, chunkSize int) [][2]int {
     numChunks := int(math.Ceil(float64(fileSize) / float64(chunkSize)))
     chunks := make([][2]int, numChunks)
 
-    for i := 0; i < numChunks; i++ {
+    for i := range chunks {
         start := i * chunkSize
         end := start + chunkSize - 1
         if end >= fileSize {
util/http.go (13)
@@ -21,13 +21,18 @@ func GetHTTPSession() *http.Client {
             KeepAlive: 30 * time.Second,
         }).DialContext,
         ForceAttemptHTTP2: true,
 
         MaxIdleConns:    100,
         IdleConnTimeout: 90 * time.Second,
-        TLSHandshakeTimeout:   10 * time.Second,
+        TLSHandshakeTimeout:   5 * time.Second,
         ExpectContinueTimeout: 1 * time.Second,
-        MaxIdleConnsPerHost:   20,
-        MaxConnsPerHost:       20,
-        ResponseHeaderTimeout: 30 * time.Second,
+        MaxIdleConnsPerHost: 100,
+        MaxConnsPerHost:     100,
+
+        ResponseHeaderTimeout: 10 * time.Second,
+
         DisableCompression: false,
     }
@@ -7,6 +7,7 @@ import (
     "image/jpeg"
     "io"
     "os"
+    "slices"
 
     _ "image/gif"
     _ "image/png"
@@ -117,10 +118,5 @@ func isHEIF(header []byte) bool {
     heifBrands := []string{"heic", "heix", "mif1", "msf1"}
     brand := string(header[8:12])
 
-    for _, b := range heifBrands {
-        if brand == b {
-            return true
-        }
-    }
-    return false
+    return slices.Contains(heifBrands, brand)
 }
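The isHEIF hunk collapses a hand-written membership loop into slices.Contains from the standard library (available since Go 1.21). For reference, the two forms are equivalent:

```go
package main

import (
	"fmt"
	"slices"
)

func main() {
	heifBrands := []string{"heic", "heix", "mif1", "msf1"}
	brand := "heix"

	// Hand-rolled membership test.
	found := false
	for _, b := range heifBrands {
		if b == brand {
			found = true
			break
		}
	}

	// Standard-library equivalent.
	fmt.Println(found, slices.Contains(heifBrands, brand)) // true true
}
```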
@@ -19,14 +19,14 @@ func GetLocationURL(
     url string,
     userAgent string,
 ) (string, error) {
-    req, err := http.NewRequest("GET", url, nil)
+    req, err := http.NewRequest(http.MethodGet, url, nil)
     if err != nil {
         return "", fmt.Errorf("failed to create request: %w", err)
     }
     if userAgent == "" {
         userAgent = ChromeUA
     }
-    req.Header.Set("User-Agent", ChromeUA)
+    req.Header.Set("User-Agent", userAgent)
     session := GetHTTPSession()
     resp, err := session.Do(req)
     if err != nil {