fix: resolve memory leaks in download utility
1. writing chunks directly to disk instead of buffering them in memory
2. using fixed-size buffers (32KB) for all I/O operations
3. optimizing the buffer allocation strategy in downloadInMemory
4. implementing proper file synchronization with mutex locks
5. calculating chunk boundaries on-the-fly instead of pre-allocating them

Memory profiling showed excessive allocations in bytes.growSlice; this has been addressed by minimizing intermediate buffers and eliminating unnecessary memory copies. These changes should fix the observed OOM issues when downloading large files while maintaining the same functionality.
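To make the approach concrete, here is a minimal Go sketch of points 1, 2, 4 and 5 above: each chunk is copied straight to its offset in the destination file through a single fixed 32KB buffer, writes to the shared file are serialized with a mutex, and chunk boundaries are computed on demand. The names chunkWriter, writeChunk and chunkBounds are illustrative assumptions and are not taken from this repository's code.

// Hypothetical sketch of the download strategy described in the commit
// message, not the repository's actual implementation: stream each chunk
// to disk through one fixed 32KB buffer, serialize writes with a mutex,
// and compute chunk boundaries on the fly.
package main

import (
	"fmt"
	"io"
	"os"
	"strings"
	"sync"
)

// chunkWriter owns the destination file and the lock that serializes writes.
type chunkWriter struct {
	mu   sync.Mutex
	file *os.File
}

// writeChunk copies body into the file starting at offset, reusing a single
// fixed-size buffer instead of accumulating the whole chunk in memory.
func (w *chunkWriter) writeChunk(body io.Reader, offset int64) error {
	buf := make([]byte, 32*1024) // fixed 32KB buffer for all I/O
	for {
		n, rerr := body.Read(buf)
		if n > 0 {
			w.mu.Lock() // mutex per point 4 of the commit message
			_, werr := w.file.WriteAt(buf[:n], offset)
			w.mu.Unlock()
			if werr != nil {
				return werr
			}
			offset += int64(n)
		}
		if rerr == io.EOF {
			return nil
		}
		if rerr != nil {
			return rerr
		}
	}
}

// chunkBounds computes the byte range of chunk i on demand for a file of
// total bytes split into fixed-size chunks, instead of pre-allocating ranges.
func chunkBounds(i, size, total int64) (start, end int64) {
	start = i * size
	end = start + size
	if end > total {
		end = total
	}
	return start, end
}

func main() {
	f, err := os.CreateTemp("", "chunks-*")
	if err != nil {
		panic(err)
	}
	defer os.Remove(f.Name())
	defer f.Close()

	w := &chunkWriter{file: f}
	data := "hello, chunked world"
	size := int64(8)

	// Write the chunks concurrently, each to its own offset.
	var wg sync.WaitGroup
	for i := int64(0); i*size < int64(len(data)); i++ {
		start, end := chunkBounds(i, size, int64(len(data)))
		wg.Add(1)
		go func(start, end int64) {
			defer wg.Done()
			// error handling elided in this sketch
			_ = w.writeChunk(strings.NewReader(data[start:end]), start)
		}(start, end)
	}
	wg.Wait()

	out, _ := os.ReadFile(f.Name())
	fmt.Println(string(out)) // hello, chunked world
}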
parent 37c2fbf215
commit 0d986d4573
2 changed files with 136 additions and 113 deletions
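Point 3 and the bytes.growSlice finding concern the in-memory path. As a hedged illustration only (fetchInMemory is an assumed name, not the repository's downloadInMemory), the sketch below shows how pre-growing the destination buffer to the advertised Content-Length removes the incremental reallocations that bytes.Buffer would otherwise perform while the body streams in.

// Hypothetical illustration, not code from this repository: when the final
// size is known, growing the buffer once up front avoids the repeated growth
// that shows up as bytes.growSlice in memory profiles.
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

// fetchInMemory reads an HTTP response body into a buffer that is grown once
// up front instead of repeatedly as the body streams in.
func fetchInMemory(url string) ([]byte, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	var buf bytes.Buffer
	if resp.ContentLength > 0 {
		buf.Grow(int(resp.ContentLength)) // single allocation up front
	}
	if _, err := io.Copy(&buf, resp.Body); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}

func main() {
	data, err := fetchInMemory("https://example.com/")
	if err != nil {
		panic(err)
	}
	fmt.Println(len(data), "bytes downloaded")
}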
@@ -173,17 +173,22 @@ func GetTweetAPI(
 	if resp.StatusCode != http.StatusOK {
 		return nil, fmt.Errorf("invalid response code: %s", resp.Status)
 	}
+	body, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return nil, fmt.Errorf("failed to read body: %w", err)
+	}
 
 	var apiResponse APIResponse
-	decoder := sonic.ConfigFastest.NewDecoder(resp.Body)
-	err = decoder.Decode(&apiResponse)
+	err = sonic.ConfigFastest.Unmarshal(body, &apiResponse)
 	if err != nil {
 		return nil, fmt.Errorf("failed to parse response: %w", err)
 	}
 
 	result := apiResponse.Data.TweetResult.Result
 	if result == nil {
 		return nil, errors.New("failed to get tweet result")
 	}
 
 	var tweet *Tweet
 	if result.Tweet != nil {
 		tweet = result.Tweet