Change /client gpu to put its information into one cc_string separated by '\n', instead of using an array of cc_strings

This commit is contained in:
UnknownShadow200 2020-11-01 13:10:24 +11:00
parent ef6ed0f764
commit 37dd0c0524
3 changed files with 35 additions and 35 deletions

View File

@ -344,18 +344,16 @@ static struct ChatCommand HelpCommand = {
};
/* Handles the /client gpu chat command. */
/* Fills one buffer with '\n' separated backend info via Gfx_GetApiInfo, */
/* then echoes each non-empty line to chat in green (&a colour code). */
static void GpuInfoCommand_Execute(const cc_string* args, int argsCount) {
	/* NOTE(review): 7 * STRING_SIZE assumes no backend emits more than */
	/* seven STRING_SIZE lines of info - confirm against all Gfx backends */
	char buffer[7 * STRING_SIZE];
	cc_string str, line;
	String_InitArray(str, buffer);
	Gfx_GetApiInfo(&str);

	while (str.length) {
		/* Consumes text up to and including the next '\n' from str */
		String_UNSAFE_SplitBy(&str, '\n', &line);
		if (line.length) Chat_Add1("&a%s", &line);
	}
}

View File

@ -1002,7 +1002,7 @@ static const int D3D9_DepthBufferBts(D3DFORMAT format) {
return 0;
}
/* Appends a '\n' separated summary of the Direct3D9 backend state to info: */
/* pointer width, adapter name, processing mode, VRAM, max texture size, depth bits. */
void Gfx_GetApiInfo(cc_string* info) {
	D3DADAPTER_IDENTIFIER9 adapter = { 0 };
	int pointerSize = sizeof(void*) * 8; /* bits, not bytes */
	int depthBits   = D3D9_DepthBufferBts(depthFormat);
	float curMem;
	/* NOTE(review): totalMem is declared/computed in lines elided from this */
	/* diff hunk - confirm its definition in the full source */

	IDirect3D9_GetAdapterIdentifier(d3d, D3DADAPTER_DEFAULT, 0, &adapter);
	/* GetAvailableTextureMem returns bytes; convert to MB */
	curMem = IDirect3DDevice9_GetAvailableTextureMem(device) / (1024.0f * 1024.0f);

	String_Format1(info, "-- Using Direct3D9 (%i bit) --\n", &pointerSize);
	String_Format1(info, "Adapter: %c\n",         adapter.Description);
	String_Format1(info, "Processing mode: %c\n", D3D9_StrFlags());
	String_Format2(info, "Video memory: %f2 MB total, %f2 free\n", &totalMem, &curMem);
	String_Format2(info, "Max texture size: (%i x %i)\n", &Gfx.MaxTexWidth, &Gfx.MaxTexHeight);
	/* Last line deliberately has no trailing '\n' */
	String_Format1(info, "Depth buffer bits: %i", &depthBits);
}
/* Treats a window resize as a lost graphics context, with the reason recorded. */
void Gfx_OnWindowResize(void) {
	Gfx_LoseContext(" (resizing window)");
}
@ -1454,32 +1454,35 @@ cc_result Gfx_TakeScreenshot(struct Stream* output) {
return res;
}
/* Appends a "Video memory: ..." line to info, if the driver exposes the */
/* GL_NVX_gpu_memory_info extension. Silently does nothing otherwise. */
static void AppendVRAMStats(cc_string* info) {
	static const cc_string memExt = String_FromConst("GL_NVX_gpu_memory_info");
	GLint totalKb, curKb;
	float total, cur;
	cc_string exts; /* declared up-front to stay C89 compatible */

	/* NOTE: glGetString returns UTF8, but I just treat it as code page 437 */
	exts = String_FromReadonly((const char*)glGetString(GL_EXTENSIONS));
	if (!String_CaselessContains(&exts, &memExt)) return;

	/* Tokens from GL_NVX_gpu_memory_info (values in kilobytes): */
	/*   0x9048 = GPU_MEMORY_INFO_TOTAL_AVAILABLE_MEMORY_NVX */
	/*   0x9049 = GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX */
	glGetIntegerv(0x9048, &totalKb);
	glGetIntegerv(0x9049, &curKb);
	if (totalKb <= 0 || curKb <= 0) return;

	total = totalKb / 1024.0f; cur = curKb / 1024.0f;
	String_Format2(info, "Video memory: %f2 MB total, %f2 free\n", &total, &cur);
}

/* Appends a '\n' separated summary of the OpenGL backend state to info: */
/* pointer width, vendor/renderer/version strings, VRAM (when available), */
/* max texture size and depth buffer bits. */
void Gfx_GetApiInfo(cc_string* info) {
	GLint depthBits;
	int pointerSize = sizeof(void*) * 8; /* bits, not bytes */

	glGetIntegerv(GL_DEPTH_BITS, &depthBits);
	String_Format1(info, "-- Using OpenGL (%i bit) --\n", &pointerSize);
	String_Format1(info, "Vendor: %c\n",     glGetString(GL_VENDOR));
	String_Format1(info, "Renderer: %c\n",   glGetString(GL_RENDERER));
	String_Format1(info, "GL version: %c\n", glGetString(GL_VERSION));
	AppendVRAMStats(info);
	String_Format2(info, "Max texture size: (%i, %i)\n", &Gfx.MaxTexWidth, &Gfx.MaxTexHeight);
	/* Last line deliberately has no trailing '\n' */
	String_Format1(info, "Depth buffer bits: %i", &depthBits);
}
void Gfx_SetFpsLimit(cc_bool vsync, float minFrameMs) {

View File

@ -48,7 +48,6 @@ CC_VAR extern struct _GfxData {
struct Matrix View, Projection;
} Gfx;
#define GFX_APIINFO_LINES 7
extern GfxResourceID Gfx_defaultIb;
extern GfxResourceID Gfx_quadVb, Gfx_texVb;
@ -197,8 +196,8 @@ void Gfx_SetFpsLimit(cc_bool vsync, float minFrameMillis);
void Gfx_OnWindowResize(void);
/* Gets information about the user's GPU and current backend state. */
/* Backend state may include depth buffer bits, free memory, etc. */
/* NOTE: lines must be an array of at least GFX_APIINFO_LINES */
void Gfx_GetApiInfo(cc_string* lines);
/* NOTE: Lines in the output are separated by '\n'. */
void Gfx_GetApiInfo(cc_string* info);
/* Raises ContextLost event and updates state for lost contexts. */
void Gfx_LoseContext(const char* reason);