FreeType2 and OpenGL: using Unicode


I'm trying to render a string with special characters, but I can't get my program to work correctly: the special characters aren't displayed. My program runs like this:

- Initialization
- New face
- Set character size
- Select character map (here Unicode, via the ft_encoding_unicode macro)

then, for every character from the space character up to 0xFF:
    - Load glyph
    - Get glyph
    - Convert to bitmap if necessary
    - Upload the pixels and store the glyph metrics

with the following code:

ENG_Font* ENG_OpenFont(const char* path, uint size, ENG_Color* color)
{
    ENG_Font* font = (ENG_Font*) malloc(sizeof(ENG_Font));
    ENG_If_AddError_Goto(!font, ENGER_ALLOC, END);
    ENG_If_AddError_Goto(FT_New_Face(ENG_GetConfigFTLib(ENG_GetGlobConfig()), path, 0, &font->face), ENGER_FONT, END_ERR_0);
    ENG_If_AddError_Goto(FT_Set_Char_Size(font->face, size << 6, size << 6, 96, 96), ENGER_FONT, END_ERR_0);
    FT_Select_Charmap(font->face, ft_encoding_unicode);
    void* end = &&END; /* computed-goto target (GCC extension) */

    /* load every code point from 0x20 (space) through 0xFF (Latin-1) */
    for(uint i = ' '; i <= 0xFF; i++)
    {
        FT_Glyph glyph;
        u32 index = FT_Get_Char_Index(font->face, i); /* 0 means the face has no glyph for i */

        if(FT_Load_Glyph(font->face, index, FT_LOAD_RENDER))
            continue;

        if(FT_Get_Glyph(font->face->glyph, &glyph))
            continue;

        /* FT_LOAD_RENDER already rendered the slot, so this conversion rarely runs */
        if(font->face->glyph->format != FT_GLYPH_FORMAT_BITMAP && FT_Glyph_To_Bitmap(&glyph, FT_RENDER_MODE_NORMAL, NULL, 1))
            continue;

        u32 w = font->face->glyph->bitmap.width, h = font->face->glyph->bitmap.rows;
        u8* px = (u8*) malloc(w * h * 4 * sizeof(u8));

        if(!px)
        {
            ENG_AddError(ENGER_ALLOC);
            end = &&END_ERR_0;
            break;
        }

        /* expand the 8-bit coverage bitmap to RGBA; source rows are
           'pitch' bytes wide, which may differ from 'width' */
        for(uint y = 0; y < h; y++)
        {
            for(uint x = 0; x < w; x++)
            {
                u8* ptr = &font->face->glyph->bitmap.buffer[x + y * font->face->glyph->bitmap.pitch];
                px[(x + y * w) * 4] = (*ptr) ? color->r : 0;
                px[(x + y * w) * 4 + 1] = (*ptr) ? color->g : 0;
                px[(x + y * w) * 4 + 2] = (*ptr) ? color->b : 0;
                px[(x + y * w) * 4 + 3] = (*ptr) ? color->a : 0;
            }
        }

        u32 tex;
        glGenTextures(1, &tex);
        glBindTexture(GL_TEXTURE_2D, tex);
        glPixelStorei(GL_UNPACK_ALIGNMENT, 1); /* must be set before the upload to take effect */
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGBA, GL_UNSIGNED_BYTE, px);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glBindTexture(GL_TEXTURE_2D, 0);
        FT_Done_Glyph(glyph);
        free(px);
        font->glyph_tex[i] = tex;
        font->coord[i].x = font->face->glyph->advance.x >> 6; /* 26.6 fixed point -> pixels */
        font->coord[i].y = h - (font->face->glyph->metrics.horiBearingY >> 6); // to move down the glyphs with a long bottom
        font->coord[i].w = w;
        font->coord[i].h = h;
    }

    goto *end; /* jumps to END or END_ERR_0 */

    END_ERR_0:
        free(font);
        font = NULL;

    END:
        return font;
}
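For completeness, here is a minimal usage sketch; the font path is hypothetical, and the ENG_Color field names are taken from the pixel loop above:

ENG_Color white = { 255, 255, 255, 255 };
ENG_Font* font = ENG_OpenFont("DejaVuSans.ttf", 16, &white); /* hypothetical path */

if(!font)
    return; /* an ENGER_* code was already pushed by ENG_OpenFont */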

All ASCII characters are loaded correctly in any case. If you know how I can load the extended ASCII characters, that's enough for me.
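Note that Unicode code points 0x20 through 0xFF coincide with Latin-1, which is what "extended ASCII" usually means, so the loop above already requests all of them. What can still go wrong is the face itself lacking a glyph, in which case FT_Get_Char_Index returns 0. A quick sanity check, assuming an open face (the printf is only illustrative and needs <stdio.h>):

FT_UInt idx = FT_Get_Char_Index(font->face, 0x00E9); /* U+00E9, 'é' */
if(idx == 0)
    printf("this face has no glyph for U+00E9\n");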

Solution:

Just change the string literal encoding like this:

"string" => U"string"
