Memory leak while drawing — occurs only on macOS Ventura; the same code works fine on Windows.

void Scene::drawScene(glm::vec3 lightpos)
{
        // Draw the textured geometry of all quadrants currently in view.
        // @param lightpos  world-space light position uploaded to the shader.
        // Assumes gfxprocessingUnit, doProzessing, physic, textureNr and the
        // element buffer bound to doProzessing->VAO were initialised elsewhere.
        gfxprocessingUnit->shade->use();
        gfxprocessingUnit->shade->setInt("material.diffuse", 0);
       // gfxprocessingUnit->shade->setInt("material.specular",0);

        // Light parameters: position from the caller, fixed colour terms.
        gfxprocessingUnit->shade->setVec3("light.position", lightpos);  // lightPos physic.Position
     //   gfxprocessingUnit->shade->setVec3("viewPos",        lightpos);  // smae
        gfxprocessingUnit->shade->setVec3("light.ambient",  0.8f, 0.8f, 0.8f);
        gfxprocessingUnit->shade->setVec3("light.diffuse",  0.8f, 0.8f, 0.8f);
        gfxprocessingUnit->shade->setVec3("light.specular", 0.4f, 0.4f, 0.4f);

        // View/projection transformations.
        glm::mat4 projection = glm::perspective(glm::radians(physic.Zoom),
                                                (float)SCR_WIDTH / (float)SCR_HEIGHT,
                                                0.1f, 220.0f);
        glm::mat4 view = physic.GetViewMatrix(physic.Position);
        gfxprocessingUnit->shade->setMat4("projection", projection);
        gfxprocessingUnit->shade->setMat4("view", view);

        // World transformation: identity, geometry is pre-positioned.
        gfxprocessingUnit->shade->setMat4("model", glm::mat4(1.0f));

        glBindVertexArray(doProzessing->VAO);
        checkQuadrant();

        // FIX: select the texture unit ONCE, before any glBindTexture call.
        // The original code bound the texture first and activated GL_TEXTURE0
        // afterwards, so each bind landed on whatever unit happened to be
        // active — stale texture references kept alive by the driver are the
        // most plausible source of the apparent "leak" seen on macOS.
        glActiveTexture(GL_TEXTURE0);

        for (size_t t = 0; t < quadrantsList.size(); ++t)
        {
            const int quadNr = quadrantsList.at(t);
            if ((quadNr < 0) || (quadNr >= quadMax)) continue;  // out-of-range quadrant: skip

            // Resolve the quadrant once instead of on every access below.
            auto* quad = sceneOfQuadrants.at(quadNr);
            int offsetIndices = quad->offsetTexturedIndices;

            for (auto it = quad->facemapTextured.begin(); it != quad->facemapTextured.end(); ++it)
            {
                const GLuint listTexNr = it->first;
                // Use the iterator's value directly; the original re-looked the
                // key up with operator[], which performs an extra search (and
                // would insert an empty entry on a missing key).
                const std::vector<face>& faces = it->second;
                const int numIndices = (int)faces.size();
                if (numIndices == 0) continue;

                if (faces.at(0).textured == 1)
                {
                    glBindTexture(GL_TEXTURE_2D, textureNr[listTexNr]);

                    // FIX: the indices are GL_UNSIGNED_INT, so the byte offset
                    // must be scaled by sizeof(GLuint). The original wrote
                    // sizeof(GL_INT) — GL_INT is an enum VALUE (0x1404), not a
                    // type, so sizeof() of it matched only by coincidence.
                    glDrawElements(GL_TRIANGLES, numIndices * 3, GL_UNSIGNED_INT,
                                   (void*)(offsetIndices * 3 * sizeof(GLuint)));
                }
                offsetIndices += numIndices;
            }
        }

Hardware: Apple M1 Max — Language: C++

I can’t see any GLFW code in your code snippet, and from the names of the functions you are calling I’m guessing there are no GLFW functions being called in those. So this may not be the best place to be asking for help.

Note that sometimes graphics code will appear to leak because graphics drivers are allocating memory and then not freeing it until they need to. For example OpenGL calls result in data being added to a command buffer for later submission to the GPU at minimum.

Okay, thank you for the information — that is reassuring.