{"id":226,"date":"2021-01-24T11:03:12","date_gmt":"2021-01-24T03:03:12","guid":{"rendered":"http:\/\/liyanliang.net\/?p=226"},"modified":"2021-01-26T11:28:46","modified_gmt":"2021-01-26T03:28:46","slug":"opengl%e6%a8%a1%e5%9e%8b%e5%8a%a0%e8%bd%bd-%e9%99%84%e6%ba%90%e7%a0%81","status":"publish","type":"post","link":"http:\/\/liyanliang.net\/index.php\/2021\/01\/24\/opengl%e6%a8%a1%e5%9e%8b%e5%8a%a0%e8%bd%bd-%e9%99%84%e6%ba%90%e7%a0%81\/","title":{"rendered":"OpenGL\u6a21\u578b\u52a0\u8f7d-\u9644\u6e90\u7801"},"content":{"rendered":"\n<p class=\"has-cyan-bluish-gray-background-color has-background\">\u6548\u679c\uff1a<\/p>\n\n\n\n<figure class=\"wp-block-image is-style-default\"><img decoding=\"async\" src=\"https:\/\/liyanliangpublic.oss-cn-hongkong.aliyuncs.com\/img\/OpenGL%20Model.gif\" alt=\"\"\/><\/figure>\n\n\n\n<p class=\"has-cyan-bluish-gray-background-color has-background\">\u6458\u8981\uff1a<\/p>\n\n\n\n<p>\u4f7f\u7528Assimp\u5e76\u521b\u5efa\u5b9e\u9645\u7684\u52a0\u8f7d\u548c\u8f6c\u6362\u3002\u76ee\u6807\u662f\u521b\u5efa\u53e6\u4e00\u4e2a\u7c7b\u6765\u5b8c\u6574\u5730\u8868\u793a\u4e00\u4e2a\u6a21\u578b\uff0c\u6216\u8005\u8bf4\u662f\u5305\u542b\u591a\u4e2a\u7f51\u683c\uff0c\u751a\u81f3\u662f\u591a\u4e2a\u7269\u4f53\u7684\u6a21\u578b\u3002\u4e00\u4e2a\u5305\u542b\u6728\u5236\u9633\u53f0\u3001\u5854\u697c\u3001\u751a\u81f3\u6e38\u6cf3\u6c60\u7684\u623f\u5b50\u53ef\u80fd\u4ecd\u4f1a\u88ab\u52a0\u8f7d\u4e3a\u4e00\u4e2a\u6a21\u578b\u3002\u6211\u4eec\u4f1a\u4f7f\u7528Assimp\u6765\u52a0\u8f7d\u6a21\u578b\uff0c\u5e76\u5c06\u5b83\u8f6c\u6362(Translate)\u81f3\u591a\u4e2aMesh\u5bf9\u8c61\u3002<\/p>\n\n\n\n<p>\u9996\u5148\u9700\u8981\u8c03\u7528\u7684\u51fd\u6570\u662floadModel\uff0c\u5b83\u4f1a\u4ece\u6784\u9020\u5668\u4e2d\u76f4\u63a5\u8c03\u7528\u3002\u5728loadModel\u4e2d\uff0c\u6211\u4eec\u4f7f\u7528Assimp\u6765\u52a0\u8f7d\u6a21\u578b\u81f3Assimp\u7684\u4e00\u4e2a\u53eb\u505ascene\u7684\u6570\u636e\u7ed3\u6784\u4e2d\u3002\u4e00\u65e6\u6211\u4eec\u6709\u4e86\u8fd
9\u4e2a\u573a\u666f\u5bf9\u8c61\uff0c\u6211\u4eec\u5c31\u80fd\u8bbf\u95ee\u5230\u52a0\u8f7d\u540e\u7684\u6a21\u578b\u4e2d\u6240\u6709\u6240\u9700\u7684\u6570\u636e\u4e86\u3002<\/p>\n\n\n\n<p class=\"has-cyan-bluish-gray-background-color has-background\">\u4e3b\u8981\u4ee3\u7801\uff1a<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"cpp\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">\u200b\n\/\/#define STB_IMAGE_IMPLEMENTATION\n#include \"glad.c\"\n#include &lt;glad\/glad.h>\n#include &lt;GLFW\/glfw3.h>\n\u200b\n#include &lt;glm\/glm.hpp>\n#include &lt;glm\/gtc\/matrix_transform.hpp>\n#include &lt;glm\/gtc\/type_ptr.hpp>\n\u200b\n#include &lt;learnopengl\/shader_m.h>\n#include &lt;learnopengl\/camera.h>\n#include &lt;learnopengl\/model.h>\n\u200b\n#include &lt;iostream>\n\u200b\nvoid framebuffer_size_callback(GLFWwindow* window, int width, int height);\nvoid mouse_callback(GLFWwindow* window, double xpos, double ypos);\nvoid scroll_callback(GLFWwindow* window, double xoffset, double yoffset);\nvoid processInput(GLFWwindow *window);\n\u200b\n\/\/ settings\nconst unsigned int SCR_WIDTH = 800;\nconst unsigned int SCR_HEIGHT = 600;\n\u200b\n\/\/ camera\nCamera camera(glm::vec3(0.0f, 4.0f, 6.0f));\nfloat lastX = SCR_WIDTH \/ 2.0f;\nfloat lastY = SCR_HEIGHT \/ 2.0f;\nbool firstMouse = true;\n\u200b\n\/\/ timing\nfloat deltaTime = 0.0f;\nfloat lastFrame = 0.0f;\n\u200b\nint main()\n{\n    \/\/ glfw: initialize and configure\n    \/\/ ------------------------------\n    glfwInit();\n    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);\n    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);\n    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);\n\u200b\n#ifdef __APPLE__\n    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);\n#endif\n\u200b\n    \/\/ glfw window creation\n    \/\/ --------------------\n    GLFWwindow* 
window = glfwCreateWindow(SCR_WIDTH, SCR_HEIGHT, \"LearnOpenGL\", NULL, NULL);\n    if (window == NULL)\n    {\n        std::cout &lt;&lt; \"Failed to create GLFW window\" &lt;&lt; std::endl;\n        glfwTerminate();\n        return -1;\n    }\n    glfwMakeContextCurrent(window);\n    glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);\n    glfwSetCursorPosCallback(window, mouse_callback);\n    glfwSetScrollCallback(window, scroll_callback);\n\u200b\n    \/\/ tell GLFW to capture our mouse\n    glfwSetInputMode(window, GLFW_CURSOR, GLFW_CURSOR_DISABLED);\n\u200b\n    \/\/ glad: load all OpenGL function pointers\n    \/\/ ---------------------------------------\n    if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))\n    {\n        std::cout &lt;&lt; \"Failed to initialize GLAD\" &lt;&lt; std::endl;\n        return -1;\n    }\n\u200b\n    \/\/ tell stb_image.h to flip loaded texture's on the y-axis (before loading model).\n    stbi_set_flip_vertically_on_load(true);\n\u200b\n    \/\/ configure global opengl state\n    \/\/ -----------------------------\n    glEnable(GL_DEPTH_TEST);\n\u200b\n    \/\/ build and compile shaders\n    \/\/ -------------------------\n    Shader lightingShader(\"5.1.light_casters.vs\", \"5.1.light_casters.fs\");\n\u200b\n    \/\/ build and compile shaders\n    \/\/ -------------------------\n    \/\/Shader ourShader(\"1.model_loading.vs\", \"1.model_loading.fs\");\n\u200b\n    \/\/ load models\n    \/\/ -----------\n    Model ourModel(\"resources\/objects\/cyborg\/cyborg.obj\");\n\u200b\n\u200b\n    \/\/ draw in wireframe\n    \/\/glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);\n\u200b\n    \/\/ --------------------\n    lightingShader.use();\n    lightingShader.setInt(\"material.diffuse\", 0);\n    lightingShader.setInt(\"material.specular\", 1);\n\u200b\n    \/\/ render loop\n    \/\/ -----------\n    while (!glfwWindowShouldClose(window))\n    {\n        \/\/ per-frame time logic\n        \/\/ --------------------\n     
   float currentFrame = glfwGetTime();\n        deltaTime = currentFrame - lastFrame;\n        lastFrame = currentFrame;\n\u200b\n        \/\/ input\n        \/\/ -----\n        processInput(window);\n\u200b\n        \/\/ render\n        \/\/ ------\n        glClearColor(0.2f, 0.5f, 0.5f, 1.0f);\n        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n\u200b\n        \/\/ be sure to activate shader when setting uniforms\/drawing objects\n        lightingShader.use();\n        lightingShader.setVec3(\"light.direction\", -0.2f, -1.0f, -0.3f);\n        lightingShader.setVec3(\"viewPos\", camera.Position);\n\u200b\n        \/\/ light properties\n        lightingShader.setVec3(\"light.ambient\", 0.9f, 0.9f, 0.9f);\n        lightingShader.setVec3(\"light.diffuse\", 0.5f, 0.5f, 0.5f);\n        lightingShader.setVec3(\"light.specular\", 1.0f, 1.0f, 1.0f);\n\u200b\n        \/\/ material properties\n        lightingShader.setFloat(\"material.shininess\", 32.0f);\n\u200b\n        \/\/ don't forget to enable shader before setting uniforms\n        \/\/lightingShader.use();\n\u200b\n        \/\/ view\/projection transformations\n        glm::mat4 projection = glm::perspective(glm::radians(camera.Zoom), (float)SCR_WIDTH \/ (float)SCR_HEIGHT, 0.1f, 100.0f);\n        glm::mat4 view = camera.GetViewMatrix();\n        lightingShader.setMat4(\"projection\", projection);\n        lightingShader.setMat4(\"view\", view);\n\u200b\n        \/\/ render the loaded model\n        glm::mat4 model = glm::mat4(1.0f);\n        model = glm::translate(model, glm::vec3(0.0f, 0.0f, 0.0f)); \/\/ translate it down so it's at the center of the scene\n        model = glm::scale(model, glm::vec3(1.0f, 1.0f, 1.0f)); \/\/ it's a bit too big for our scene, so scale it down\n        lightingShader.setMat4(\"model\", model);\n        ourModel.Draw(lightingShader);\n\u200b\n\u200b\n        \/\/ glfw: swap buffers and poll IO events (keys pressed\/released, mouse moved etc.)\n        \/\/ 
-------------------------------------------------------------------------------\n        glfwSwapBuffers(window);\n        glfwPollEvents();\n    }\n\u200b\n    \/\/ glfw: terminate, clearing all previously allocated GLFW resources.\n    \/\/ ------------------------------------------------------------------\n    glfwTerminate();\n    return 0;\n}\n\u200b\n\/\/ process all input: query GLFW whether relevant keys are pressed\/released this frame and react accordingly\n\/\/ ---------------------------------------------------------------------------------------------------------\nvoid processInput(GLFWwindow *window)\n{\n    if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS)\n        glfwSetWindowShouldClose(window, true);\n\u200b\n    if (glfwGetKey(window, GLFW_KEY_W) == GLFW_PRESS)\n        camera.ProcessKeyboard(FORWARD, deltaTime);\n    if (glfwGetKey(window, GLFW_KEY_S) == GLFW_PRESS)\n        camera.ProcessKeyboard(BACKWARD, deltaTime);\n    if (glfwGetKey(window, GLFW_KEY_A) == GLFW_PRESS)\n        camera.ProcessKeyboard(LEFT, deltaTime);\n    if (glfwGetKey(window, GLFW_KEY_D) == GLFW_PRESS)\n        camera.ProcessKeyboard(RIGHT, deltaTime);\n}\n\u200b\n\/\/ glfw: whenever the window size changed (by OS or user resize) this callback function executes\n\/\/ ---------------------------------------------------------------------------------------------\nvoid framebuffer_size_callback(GLFWwindow* window, int width, int height)\n{\n    \/\/ make sure the viewport matches the new window dimensions; note that width and \n    \/\/ height will be significantly larger than specified on retina displays.\n    glViewport(0, 0, width, height);\n}\n\u200b\n\/\/ glfw: whenever the mouse moves, this callback is called\n\/\/ -------------------------------------------------------\nvoid mouse_callback(GLFWwindow* window, double xpos, double ypos)\n{\n    if (firstMouse)\n    {\n        lastX = xpos;\n        lastY = ypos;\n        firstMouse = false;\n    }\n\u200b\n    
float xoffset = xpos - lastX;\n    float yoffset = lastY - ypos; \/\/ reversed since y-coordinates go from bottom to top\n\u200b\n    lastX = xpos;\n    lastY = ypos;\n\u200b\n    camera.ProcessMouseMovement(xoffset, yoffset);\n}\n\u200b\n\/\/ glfw: whenever the mouse scroll wheel scrolls, this callback is called\n\/\/ ----------------------------------------------------------------------\nvoid scroll_callback(GLFWwindow* window, double xoffset, double yoffset)\n{\n    camera.ProcessMouseScroll(yoffset);\n}\n\u200b\n\u200b<\/pre>\n\n\n\n<p class=\"has-cyan-bluish-gray-background-color has-background\">\u53c2\u8003\u8d44\u6599\uff1a<\/p>\n\n\n\n<p><a href=\"https:\/\/learnopengl-cn.github.io\/03%20Model%20Loading\/03%20Model\/\">https:\/\/learnopengl-cn.github.io\/03%20Model%20Loading\/03%20Model\/<\/a><\/p>\n\n\n\n<p class=\"has-cyan-bluish-gray-background-color has-background\">\u5b8c\u6574\u7684\u9879\u76ee\u4ee3\u7801\uff1a<\/p>\n\n\n\n<p>\u94fe\u63a5\uff1a<a href=\"https:\/\/pan.baidu.com\/s\/1IZbmCldx_4N4bAeeWhT2dA\" target=\"_blank\" rel=\"noreferrer noopener\">https:\/\/pan.baidu.com\/s\/1IZbmCldx_4N4bAeeWhT2dA<\/a><br>\u63d0\u53d6\u7801\uff1avtbh<\/p>\n","protected":false},"excerpt":{"rendered":"<p>\u6548\u679c\uff1a \u6458\u8981\uff1a \u4f7f\u7528Assimp\u5e76\u521b\u5efa\u5b9e\u9645\u7684\u52a0\u8f7d\u548c\u8f6c\u6362\u3002\u76ee\u6807\u662f\u521b\u5efa\u53e6\u4e00\u4e2a\u7c7b\u6765\u5b8c\u6574\u5730\u8868\u793a\u4e00\u4e2a\u6a21\u578b\uff0c\u6216\u8005\u8bf4\u662f\u5305\u542b\u591a\u4e2a\u7f51\u683c\uff0c\u751a\u81f3\u662f\u591a\u4e2a\u7269\u4f53\u7684\u6a21\u578b\u3002\u4e00\u4e2a\u5305\u542b\u6728\u5236\u9633\u53f0\u3001\u5854\u697c\u3001\u751a\u81f3\u6e38\u6cf3\u6c60\u7684\u623f\u5b50\u53ef\u80fd\u4ecd\u4f1a\u88ab\u52a0\u8f7d\u4e3a\u4e00\u4e2a\u6a21\u578b\u3002\u6211\u4eec\u4f1a\u4f7f\u7528Assimp\u6765\u52a0\u8f7d\u6a21\u578b\uff0c\u5e76\u5c06\u5b83\u8f6c\u6362(Translate)\u81f3\u591a\u4e2aMesh\u5bf9\u8c61\u3002 
\u9996\u5148\u9700\u8981\u8c03\u7528\u7684\u51fd\u6570\u662floadModel\uff0c\u5b83\u4f1a\u4ece\u6784\u9020\u5668\u4e2d\u76f4\u63a5\u8c03\u7528\u3002\u5728loadModel\u4e2d\uff0c\u6211\u4eec\u4f7f\u7528Assimp\u6765\u52a0\u8f7d\u6a21\u578b\u81f3Assimp\u7684\u4e00\u4e2a\u53eb\u505ascene\u7684\u6570\u636e\u7ed3\u6784\u4e2d\u3002\u4e00\u65e6\u6211\u4eec\u6709\u4e86\u8fd9\u4e2a\u573a\u666f\u5bf9\u8c61\uff0c\u6211\u4eec\u5c31\u80fd\u8bbf\u95ee\u5230\u52a0\u8f7d\u540e\u7684\u6a21\u578b\u4e2d\u6240\u6709\u6240\u9700\u7684\u6570\u636e\u4e86\u3002 \u4e3b\u8981\u4ee3\u7801\uff1a \u53c2\u8003\u8d44\u6599\uff1a https:\/\/learnopengl-cn.github.io\/03%20Model%20Loading\/03%20Model\/ \u5b8c\u6574\u7684\u9879\u76ee\u4ee3\u7801\uff1a \u94fe\u63a5\uff1ahttps:\/\/pan.baidu.com\/s\/1IZbmCldx_4N4bAeeWhT2dA\u63d0\u53d6\u7801\uff1avtbh<\/p>\n","protected":false},"author":1,"featured_media":230,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":{"footnotes":""},"categories":[14],"tags":[15],"class_list":["post-226","post","type-post","status-publish","format-standard","has-post-thumbnail","hentry","category-opengl","tag-opengl"],"_links":{"self":[{"href":"http:\/\/liyanliang.net\/index.php\/wp-json\/wp\/v2\/posts\/226","targetHints":{"allow":["GET"]}}],"collection":[{"href":"http:\/\/liyanliang.net\/index.php\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"http:\/\/liyanliang.net\/index.php\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"http:\/\/liyanliang.net\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"http:\/\/liyanliang.net\/index.php\/wp-json\/wp\/v2\/comments?post=226"}],"version-history":[{"count":4,"href":"http:\/\/liyanliang.net\/index.php\/wp-json\/wp\/v2\/posts\/226\/revisions"}],"predecessor-version":[{"id":234,"href":"http:\/\/liyanliang.net\/index.php\/wp-json\/wp\/v2\/posts\/226\/revisions\/234"}],"wp:featuredmedia":[{"embeddable":true,"href":"http:\/\/liyanliang.net\/index.php
\/wp-json\/wp\/v2\/media\/230"}],"wp:attachment":[{"href":"http:\/\/liyanliang.net\/index.php\/wp-json\/wp\/v2\/media?parent=226"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"http:\/\/liyanliang.net\/index.php\/wp-json\/wp\/v2\/categories?post=226"},{"taxonomy":"post_tag","embeddable":true,"href":"http:\/\/liyanliang.net\/index.php\/wp-json\/wp\/v2\/tags?post=226"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}