Utilizes a thread-safe cache.
*/
-std::string getTexturePath(const std::string &filename)
+std::string getTexturePath(const std::string &filename, bool *is_base_pack)
{
std::string fullpath;
+
+ // This can set a wrong value for cached textures, but that is irrelevant
+ // because is_base_pack is only passed when the textures are first initialized
+ if (is_base_pack)
+ *is_base_pack = false;
/*
Check from cache
*/
std::string testpath = base_path + DIR_DELIM + filename;
// Check all filename extensions. Returns "" if not found.
fullpath = getImagePath(testpath);
+ if (is_base_pack && !fullpath.empty())
+ *is_base_pack = true;
}
// Add to cache (also an empty result is cached)
bool need_to_grab = true;
// Try to use local texture instead if asked to
- if (prefer_local){
- std::string path = getTexturePath(name);
- if (!path.empty()) {
+ if (prefer_local) {
+ bool is_base_pack;
+ std::string path = getTexturePath(name, &is_base_pack);
+ // Ignore base pack
+ if (!path.empty() && !is_base_pack) {
video::IImage *img2 = RenderingEngine::get_video_driver()->
createImageFromFile(path.c_str());
if (img2){
driver->removeTexture(t);
}
- infostream << "~TextureSource() "<< textures_before << "/"
- << driver->getTextureCount() << std::endl;
+ infostream << "~TextureSource() before cleanup: "<< textures_before
+ << " after: " << driver->getTextureCount() << std::endl;
}
u32 TextureSource::getTextureId(const std::string &name)
video::ITexture* TextureSource::getTextureForMesh(const std::string &name, u32 *id)
{
- return getTexture(name + "^[applyfiltersformesh", id);
+ static thread_local bool filter_needed =
+ g_settings->getBool("texture_clean_transparent") ||
+ ((m_setting_trilinear_filter || m_setting_bilinear_filter) &&
+ g_settings->getS32("texture_min_size") > 1);
+ // Avoid duplicating texture if it won't actually change
+ if (filter_needed)
+ return getTexture(name + "^[applyfiltersformesh", id);
+ return getTexture(name, id);
}
Palette* TextureSource::getPalette(const std::string &name)
video::IVideoDriver *driver = RenderingEngine::get_video_driver();
sanity_check(driver);
+ infostream << "TextureSource: recreating " << m_textureinfo_cache.size()
+ << " textures" << std::endl;
+
// Recreate textures
for (TextureInfo &ti : m_textureinfo_cache) {
video::IImage *img = generateImage(ti.name);
}
#if ENABLE_GLES
-/**
- * Check and align image to npot2 if required by hardware
- * @param image image to check for npot2 alignment
- * @param driver driver to use for image operations
- * @return image or copy of image aligned to npot2
- */
+
static inline u16 get_GL_major_version()
{
return (u16) (gl_version[0] - '0');
}
-video::IImage * Align2Npot2(video::IImage * image,
- video::IVideoDriver* driver)
-{
- if (image == NULL) {
- return image;
- }
-
- core::dimension2d<u32> dim = image->getDimension();
+/**
+ * Check whether the hardware supports non-power-of-two (npot2) textures
+ * @return true if npot2 alignment is NOT required, false otherwise
+ */
+bool hasNPotSupport()
+{
// Only GLES2 is trusted to correctly report npot support
- // Note: we cache the boolean result. GL context will never change on Android.
- static const bool hasNPotSupport = get_GL_major_version() > 1 &&
+	// Note: we cache the boolean result; the GL context will never change.
+ static const bool supported = get_GL_major_version() > 1 &&
glGetString(GL_EXTENSIONS) &&
strstr((char *)glGetString(GL_EXTENSIONS), "GL_OES_texture_npot");
+ return supported;
+}
+
+/**
+ * Check and align image to npot2 if required by hardware
+ * @param image image to check for npot2 alignment
+ * @param driver driver to use for image operations
+ * @return image or copy of image aligned to npot2
+ */
+
+video::IImage * Align2Npot2(video::IImage * image,
+ video::IVideoDriver* driver)
+{
+ if (image == NULL)
+ return image;
- if (hasNPotSupport)
+ if (hasNPotSupport())
return image;
+ core::dimension2d<u32> dim = image->getDimension();
unsigned int height = npot2(dim.Height);
unsigned int width = npot2(dim.Width);
- if ((dim.Height == height) &&
- (dim.Width == width)) {
+ if (dim.Height == height && dim.Width == width)
return image;
- }
- if (dim.Height > height) {
+ if (dim.Height > height)
height *= 2;
- }
-
- if (dim.Width > width) {
+ if (dim.Width > width)
width *= 2;
- }
video::IImage *targetimage =
driver->createImage(video::ECF_A8R8G8B8,
core::dimension2d<u32>(width, height));
- if (targetimage != NULL) {
+ if (targetimage != NULL)
image->copyToScaling(targetimage);
- }
image->drop();
return targetimage;
}
video::IImage *img = generateImage(filename);
if (img) {
core::dimension2d<u32> dim = img->getDimension();
- infostream<<"Size "<<dim.Width
- <<"x"<<dim.Height<<std::endl;
core::position2d<s32> pos_base(x, y);
video::IImage *img2 =
driver->createImage(video::ECF_A8R8G8B8, dim);
*/
else if (str_starts_with(part_of_name, "[applyfiltersformesh"))
{
+ /* IMPORTANT: When changing this, getTextureForMesh() needs to be
+ * updated too. */
+
// Apply the "clean transparent" filter, if configured.
if (g_settings->getBool("texture_clean_transparent"))
imageCleanTransparent(baseimg, 127);
return true;
}
+/*
+ Calculate the color of a single pixel drawn on top of another pixel.
+
+ This is a little more complicated than just video::SColor::getInterpolated
+	because getInterpolated does not handle alpha correctly. For example, a
+	pixel with alpha=64 drawn atop a pixel with alpha=128 should yield a
+	pixel with alpha=160, while getInterpolated would yield only alpha=112.
+*/
+static inline video::SColor blitPixel(const video::SColor &src_c, const video::SColor &dst_c, u32 ratio)
+{
+ if (dst_c.getAlpha() == 0)
+ return src_c;
+ video::SColor out_c = src_c.getInterpolated(dst_c, (float)ratio / 255.0f);
+ out_c.setAlpha(dst_c.getAlpha() + (255 - dst_c.getAlpha()) *
+ src_c.getAlpha() * ratio / (255 * 255));
+ return out_c;
+}
+
/*
Draw an image on top of an another one, using the alpha channel of the
source image
s32 dst_y = dst_pos.Y + y0;
video::SColor src_c = src->getPixel(src_x, src_y);
video::SColor dst_c = dst->getPixel(dst_x, dst_y);
- dst_c = src_c.getInterpolated(dst_c, (float)src_c.getAlpha()/255.0f);
+ dst_c = blitPixel(src_c, dst_c, src_c.getAlpha());
dst->setPixel(dst_x, dst_y, dst_c);
}
}
video::SColor dst_c = dst->getPixel(dst_x, dst_y);
if (dst_c.getAlpha() == 255 && src_c.getAlpha() != 0)
{
- dst_c = src_c.getInterpolated(dst_c, (float)src_c.getAlpha()/255.0f);
+ dst_c = blitPixel(src_c, dst_c, src_c.getAlpha());
dst->setPixel(dst_x, dst_y, dst_c);
}
}