Webclient: Limit max requests to 8 concurrently
commit dbb0664f9f
parent 6e4bf4d42f
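The idea behind the change, in isolation: on the web client, requests no longer start the moment they are added. They go into a queue, and a dispatcher promotes them into an "in flight" set capped at 8, refilling a slot whenever a download finishes. Below is a minimal, self-contained C sketch of that pattern only; every name in it (Request, enqueue, start_next, on_finished, MAX_CONCURRENCY, QUEUE_CAPACITY) is illustrative and not the ClassiCube API — the actual change is in the diff that follows.

/* Minimal sketch of a bounded-concurrency request queue (illustrative only) */
#include <stdio.h>

#define MAX_CONCURRENCY 8
#define QUEUE_CAPACITY  64

typedef struct Request { int id; } Request;

static Request queued[QUEUE_CAPACITY];
static int queuedCount;  /* requests waiting for a free slot */
static int workingCount; /* requests currently in flight */

/* Start the next queued request, but only if a concurrency slot is free */
static void start_next(void) {
	int i;
	if (workingCount >= MAX_CONCURRENCY) return;
	if (!queuedCount) return;

	printf("starting request %d\n", queued[0].id);
	workingCount++;

	/* Remove the promoted request from the front of the queue */
	for (i = 1; i < queuedCount; i++) queued[i - 1] = queued[i];
	queuedCount--;
}

/* Adding a request never starts it directly; it only queues it,
   then tries to fill a free slot */
static void enqueue(int id) {
	if (queuedCount == QUEUE_CAPACITY) return;
	queued[queuedCount].id = id;
	queuedCount++;
	start_next();
}

/* When a request completes, a slot frees up, so try to start another one */
static void on_finished(int id) {
	printf("finished request %d\n", id);
	workingCount--;
	start_next();
}

int main(void) {
	int i;
	for (i = 1; i <= 12; i++) enqueue(i); /* only the first 8 start immediately */
	on_finished(1);                       /* completing one starts request 9 */
	return 0;
}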
@@ -5,7 +5,7 @@
 #include "Errors.h"
 extern void interop_DownloadAsync(const char* url, int method, int reqID);
 extern int interop_IsHttpsOnly(void);
-static struct RequestList pendingReqs;
+static struct RequestList workingReqs, queuedReqs;
 
 
 /*########################################################################################################################*
@@ -26,18 +26,20 @@ cc_bool Http_GetCurrent(int* reqID, int* progress) {
 }
 
 int Http_CheckProgress(int reqID) {
-	int idx = RequestList_Find(&pendingReqs, reqID);
+	int idx = RequestList_Find(&workingReqs, reqID);
 	if (idx == -1) return HTTP_PROGRESS_NOT_WORKING_ON;
 
-	return pendingReqs.entries[idx].progress;
+	return workingReqs.entries[idx].progress;
 }
 
 void Http_ClearPending(void) {
-	RequestList_Free(&pendingReqs);
+	RequestList_Free(&queuedReqs);
+	RequestList_Free(&workingReqs);
 }
 
 void Http_TryCancel(int reqID) {
-	RequestList_TryFree(&pendingReqs, reqID);
+	RequestList_TryFree(&queuedReqs, reqID);
+	RequestList_TryFree(&workingReqs, reqID);
 	RequestList_TryFree(&processedReqs, reqID);
 }
 
@@ -47,48 +49,63 @@ void Http_TryCancel(int reqID) {
 *#########################################################################################################################*/
 cc_bool Http_DescribeError(cc_result res, cc_string* dst) { return false; }
 
+#define HTTP_MAX_CONCURRENCY 8
+static void Http_StartNextDownload(void) {
+	char urlBuffer[URL_MAX_SIZE]; cc_string url;
+	char urlStr[NATIVE_STR_LEN];
+	struct HttpRequest* req;
+
+	/* Avoid making too many requests at once */
+	if (workingReqs.count >= HTTP_MAX_CONCURRENCY) return;
+	if (!queuedReqs.count) return;
+	String_InitArray(url, urlBuffer);
+
+	req = &queuedReqs.entries[0];
+	Http_GetUrl(req, &url);
+	Platform_Log1("Fetching %s", &url);
+
+	Platform_EncodeUtf8(urlStr, &url);
+	interop_DownloadAsync(urlStr, req->requestType, req->id);
+	RequestList_Append(&workingReqs, req, false);
+	RequestList_RemoveAt(&queuedReqs, 0);
+}
+
 EMSCRIPTEN_KEEPALIVE void Http_OnUpdateProgress(int reqID, int read, int total) {
-	int idx = RequestList_Find(&pendingReqs, reqID);
+	int idx = RequestList_Find(&workingReqs, reqID);
 	if (idx == -1 || !total) return;
 
-	pendingReqs.entries[idx].progress = (int)(100.0f * read / total);
+	workingReqs.entries[idx].progress = (int)(100.0f * read / total);
 }
 
 EMSCRIPTEN_KEEPALIVE void Http_OnFinishedAsync(int reqID, void* data, int len, int status) {
 	struct HttpRequest* req;
-	int idx = RequestList_Find(&pendingReqs, reqID);
+	int idx = RequestList_Find(&workingReqs, reqID);
 
-	/* Shouldn't ever happen, but log a warning anyways */
 	if (idx == -1) {
-		Mem_Free(data); Platform_Log1("Ignoring invalid request (%i)", &reqID); return;
+		/* Shouldn't ever happen, but log a warning anyways */
+		Mem_Free(data);
+		Platform_Log1("Ignoring invalid request (%i)", &reqID);
+	} else {
+		req = &workingReqs.entries[idx];
+		req->data = data;
+		req->size = len;
+		req->statusCode = status;
+		req->contentLength = len;
+
+		/* Usually this happens when denied by CORS */
+		if (!status && !data) req->result = ERR_DOWNLOAD_INVALID;
+
+		if (req->data) Platform_Log1("HTTP returned data: %i bytes", &req->size);
+		Http_FinishRequest(req);
+		RequestList_RemoveAt(&workingReqs, idx);
 	}
-
-	req = &pendingReqs.entries[idx];
-	req->data = data;
-	req->size = len;
-	req->statusCode = status;
-	req->contentLength = len;
-
-	/* Usually this happens when denied by CORS */
-	if (!status && !data) req->result = ERR_DOWNLOAD_INVALID;
-
-	if (req->data) Platform_Log1("HTTP returned data: %i bytes", &req->size);
-	Http_FinishRequest(req);
-	RequestList_RemoveAt(&pendingReqs, idx);
+	Http_StartNextDownload();
 }
 
 /* Adds a req to the list of pending requests, waking up worker thread if needed */
 static void Http_BackendAdd(struct HttpRequest* req, cc_bool priority) {
-	char urlBuffer[URL_MAX_SIZE]; cc_string url;
-	char urlStr[NATIVE_STR_LEN];
-	String_InitArray(url, urlBuffer);
-
-	RequestList_Append(&pendingReqs, req);
-	Http_GetUrl(req, &url);
-	Platform_Log2("Fetching %s (type %b)", &url, &req->requestType);
-
-	Platform_EncodeUtf8(urlStr, &url);
-	interop_DownloadAsync(urlStr, req->requestType, req->id);
+	RequestList_Append(&queuedReqs, req, priority);
+	Http_StartNextDownload();
 }
 
 
@@ -96,20 +113,19 @@ static void Http_BackendAdd(struct HttpRequest* req, cc_bool priority) {
 *-----------------------------------------------------Http component------------------------------------------------------*
 *#########################################################################################################################*/
 static void OnInit(void) {
-	http_terminate = false;
 	ScheduledTask_Add(30, Http_CleanCacheTask);
 	/* If this webpage is https://, browsers deny any http:// downloading */
 	httpsOnly = interop_IsHttpsOnly();
 
-	RequestList_Init(&pendingReqs);
+	RequestList_Init(&queuedReqs);
+	RequestList_Init(&workingReqs);
 	RequestList_Init(&processedReqs);
 }
 
 static void OnFree(void) {
-	http_terminate = true;
 	Http_ClearPending();
-	RequestList_Free(&pendingReqs);
+	RequestList_Free(&queuedReqs);
+	RequestList_Free(&workingReqs);
 	RequestList_Free(&processedReqs);
 }
 
@@ -123,11 +123,7 @@ static void Http_SignalWorker(void) { Waitable_Signal(workerWaitable); }
 static void Http_BackendAdd(struct HttpRequest* req, cc_bool priority) {
 	Mutex_Lock(pendingMutex);
 	{
-		if (priority) {
-			RequestList_Prepend(&pendingReqs, req);
-		} else {
-			RequestList_Append(&pendingReqs, req);
-		}
+		RequestList_Append(&pendingReqs, req, priority);
 	}
 	Mutex_Unlock(pendingMutex);
 	Http_SignalWorker();
@@ -33,21 +33,24 @@ static void RequestList_EnsureSpace(struct RequestList* list) {
 		sizeof(struct HttpRequest), HTTP_DEF_ELEMS, 10);
 }
 
-/* Adds another request to end (for normal priority request) */
-static void RequestList_Append(struct RequestList* list, struct HttpRequest* item) {
-	RequestList_EnsureSpace(list);
-	list->entries[list->count++] = *item;
-}
-
-/* Inserts a request at start (for high priority request) */
-static void RequestList_Prepend(struct RequestList* list, struct HttpRequest* item) {
+/* Adds a request to the list */
+static void RequestList_Append(struct RequestList* list, struct HttpRequest* item, cc_bool atFront) {
 	int i;
 	RequestList_EnsureSpace(list);
 
-	for (i = list->count; i > 0; i--) {
-		list->entries[i] = list->entries[i - 1];
+	if (atFront) {
+		/* Shift all requests right one place */
+		for (i = list->count; i > 0; i--) {
+			list->entries[i] = list->entries[i - 1];
+		}
+		/* Insert new request at start */
+		i = 0;
+	} else {
+		/* Insert new request at end */
+		i = list->count;
 	}
-	list->entries[0] = *item;
+	list->entries[i] = *item;
 	list->count++;
 }
 
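For reference, a small standalone sketch of how the reworked RequestList_Append behaves with its new atFront flag. The struct definitions here are simplified stand-ins rather than the real ClassiCube types, and the flag is a plain int so the example compiles without the project's headers.

/* Simplified stand-ins; only enough to show the atFront behaviour */
#include <stdio.h>

struct HttpRequest { int id; };
struct RequestList { struct HttpRequest entries[16]; int count; };

static void RequestList_Append(struct RequestList* list, struct HttpRequest* item, int atFront) {
	int i;
	if (atFront) {
		/* Shift all requests right one place, then insert at the start */
		for (i = list->count; i > 0; i--) list->entries[i] = list->entries[i - 1];
		i = 0;
	} else {
		/* Insert at the end */
		i = list->count;
	}
	list->entries[i] = *item;
	list->count++;
}

int main(void) {
	struct RequestList queued;
	struct HttpRequest a = { 1 }, b = { 2 }, c = { 3 };
	int i;
	queued.count = 0;

	RequestList_Append(&queued, &a, 0); /* normal priority: appended      */
	RequestList_Append(&queued, &b, 0); /* normal priority: appended      */
	RequestList_Append(&queued, &c, 1); /* high priority: jumps the queue */

	for (i = 0; i < queued.count; i++) printf("%d ", queued.entries[i].id);
	printf("\n"); /* prints: 3 1 2 */
	return 0;
}

With append and prepend merged into one function, a high-priority request simply jumps to the front of whichever list it is added to, which is how both Http_BackendAdd variants in the hunks above forward their priority flag.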
@@ -174,7 +177,7 @@ static void Http_FinishRequest(struct HttpRequest* req) {
 	Mutex_Lock(processedMutex);
 	{
 		req->timeDownloaded = DateTime_CurrentUTC_MS();
-		RequestList_Append(&processedReqs, req);
+		RequestList_Append(&processedReqs, req, false);
 	}
 	Mutex_Unlock(processedMutex);
 }