path: root/indra/newview/llviewertexturelist.cpp
author    Dave Parks <davep@lindenlab.com>  2014-02-27 17:49:50 -0600
committer Dave Parks <davep@lindenlab.com>  2014-02-27 17:49:50 -0600
commit    a8e22e11c5d26b3cdfa2d67919fdde8272d52ea4 (patch)
tree      d37551dbfb1ae24ae73409848faa6416e70272d5 /indra/newview/llviewertexturelist.cpp
parent    bfe520387ed3061ba55ea2e58c565016e7a3159e (diff)
MAINT-2980 Rename "Texture Memory" to "Video Memory" in hardware floater and increase limit.
The limit should be however much VRAM is installed, but under the hood, fudge how much memory is actually used for textures to avoid swapping. Also, catch exceptions when attempting to build a GL context on Windows and display an error dialog instead of crashing.
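The exception-handling half of this change is not visible in the diff below, which touches only the texture-memory limits. As a rough illustration of the idea described above, here is a minimal standalone sketch using plain Win32/WGL calls; createGLContextOrWarn() and the dialog text are hypothetical and are not taken from the viewer's code:

#include <windows.h>
#include <stdexcept>

// Hypothetical helper sketching the "report instead of crash" behavior the
// commit message describes; the viewer's real window code is more involved.
HGLRC createGLContextOrWarn(HDC hdc)
{
    try
    {
        HGLRC ctx = wglCreateContext(hdc);   // returns NULL on failure
        if (!ctx)
        {
            throw std::runtime_error("wglCreateContext failed");
        }
        return ctx;
    }
    catch (const std::exception& e)
    {
        // Tell the user what went wrong and bail out gracefully.
        MessageBoxA(NULL, e.what(), "Unable to create an OpenGL context",
                    MB_OK | MB_ICONERROR);
        return NULL;
    }
}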
Diffstat (limited to 'indra/newview/llviewertexturelist.cpp')
-rwxr-xr-x  indra/newview/llviewertexturelist.cpp | 15
1 file changed, 6 insertions(+), 9 deletions(-)
diff --git a/indra/newview/llviewertexturelist.cpp b/indra/newview/llviewertexturelist.cpp
index 783d1f2202..26f32941bf 100755
--- a/indra/newview/llviewertexturelist.cpp
+++ b/indra/newview/llviewertexturelist.cpp
@@ -1270,7 +1270,7 @@ S32 LLViewerTextureList::getMaxVideoRamSetting(bool get_recommended, float mem_m
// - it's going to be swapping constantly regardless
S32 max_vram = gGLManager.mVRAM;
- if(gGLManager.mIsATI)
+ if(!get_recommended && gGLManager.mIsATI)
{
//shrink the available vram for ATI cards because some of them do not handle texture swapping well.
max_vram = (S32)(max_vram * 0.75f);
@@ -1285,15 +1285,15 @@ S32 LLViewerTextureList::getMaxVideoRamSetting(bool get_recommended, float mem_m
{
if (!get_recommended)
{
- max_texmem = 512;
+ max_texmem = 2048;
}
else if (gSavedSettings.getBOOL("NoHardwareProbe")) //did not do hardware detection at startup
{
- max_texmem = 512;
+ max_texmem = 2048;
}
else
{
- max_texmem = 128;
+ max_texmem = 512;
}
llwarns << "VRAM amount not detected, defaulting to " << max_texmem << " MB" << llendl;
@@ -1301,10 +1301,7 @@ S32 LLViewerTextureList::getMaxVideoRamSetting(bool get_recommended, float mem_m
S32 system_ram = (S32)BYTES_TO_MEGA_BYTES(gSysMemory.getPhysicalMemoryClamped()); // In MB
//llinfos << "*** DETECTED " << system_ram << " MB of system memory." << llendl;
- if (get_recommended)
- max_texmem = llmin(max_texmem, (S32)(system_ram/2));
- else
- max_texmem = llmin(max_texmem, (S32)(system_ram));
+ max_texmem = llmin(max_texmem, (S32)(system_ram));
// limit the texture memory to a multiple of the default if we've found some cards to behave poorly otherwise
max_texmem = llmin(max_texmem, (S32) (mem_multiplier * (F32) max_texmem));
@@ -1334,7 +1331,7 @@ void LLViewerTextureList::updateMaxResidentTexMem(S32 mem)
mem = llclamp(mem, getMinVideoRamSetting(), getMaxVideoRamSetting(false, mem_multiplier));
if (mem != cur_mem)
{
- gSavedSettings.setS32("TextureMemory", mem);
+ gSavedSettings.setS32("TextureMemory", mem/3);
return; //listener will re-enter this function
}
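Taken together, the hunks above raise the ceiling to the installed VRAM while scaling down the value actually persisted for texture use. Below is a minimal standalone sketch of that behavior, assuming plain C++ with parameters standing in for the viewer's globals (gGLManager.mVRAM, gGLManager.mIsATI, gSavedSettings) and with the code paths elided from the diff omitted; it is an illustration of these hunks, not the viewer's implementation:

#include <algorithm>

// Simplified stand-in for the patched getMaxVideoRamSetting() logic shown in
// the hunks above; branches of the real function that the diff elides are omitted.
int maxVideoRamSettingMB(bool get_recommended, float mem_multiplier,
                         int detected_vram_mb, bool is_ati,
                         bool no_hardware_probe, int system_ram_mb)
{
    int max_vram = detected_vram_mb;

    // The 75% ATI reduction now applies only when asking for the hard cap,
    // not for the recommended value.
    if (!get_recommended && is_ati)
    {
        max_vram = (int)(max_vram * 0.75f);
    }

    int max_texmem = max_vram;
    if (max_vram <= 0)
    {
        // VRAM not detected: fall back to the new, larger defaults.
        if (!get_recommended)       max_texmem = 2048;
        else if (no_hardware_probe) max_texmem = 2048;
        else                        max_texmem = 512;
    }

    // A single cap at total system RAM replaces the old half-of-RAM cap on
    // the recommended value.
    max_texmem = std::min(max_texmem, system_ram_mb);
    max_texmem = std::min(max_texmem, (int)(mem_multiplier * (float)max_texmem));
    return max_texmem;
}

// updateMaxResidentTexMem() now persists only a third of the clamped value in
// the "TextureMemory" setting, the "fudge" mentioned in the commit message.
int savedTextureMemoryMB(int clamped_mem_mb)
{
    return clamped_mem_mb / 3;
}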