SRV from UAV on the same texture in DirectX

  • Question (asked by Notabene):

    I'm programming GPGPU raymarching (volumetric ray tracing) in DirectX 11. I successfully run a compute shader and write the raymarched volume data to a texture. Then I want to use the same texture as an SRV in the normal graphics pipeline, but it doesn't work: the texture is not visible.

    The texture itself is fine: when I save it to a file, it contains what I expect. Texture rendering is fine too: when I render a different SRV, it shows up correctly. So the problem is only in the UAV -> SRV handoff. I also triple-checked that the pointers are valid. Please help, this is driving me mad.

    Here is some code:

    //before dispatch
    D3D11_TEXTURE2D_DESC textureDesc;
    ZeroMemory( &textureDesc, sizeof( textureDesc ) );
    textureDesc.Width = xr;
    textureDesc.Height = yr;
    textureDesc.MipLevels = 1;
    textureDesc.ArraySize = 1;
    textureDesc.SampleDesc.Count = 1;
    textureDesc.SampleDesc.Quality = 0;
    textureDesc.Usage = D3D11_USAGE_DEFAULT;
    textureDesc.BindFlags = D3D11_BIND_UNORDERED_ACCESS | D3D11_BIND_SHADER_RESOURCE; // writable by the compute shader, readable by the graphics pipeline
    textureDesc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
    D3D->CreateTexture2D( &textureDesc, NULL, &pTexture );
    
    D3D11_UNORDERED_ACCESS_VIEW_DESC viewDescUAV;
    ZeroMemory( &viewDescUAV, sizeof( viewDescUAV ) );
    viewDescUAV.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
    viewDescUAV.ViewDimension = D3D11_UAV_DIMENSION_TEXTURE2D;
    viewDescUAV.Texture2D.MipSlice = 0;
    D3DD->CreateUnorderedAccessView( pTexture, &viewDescUAV, &pTextureUAV );
    
    //the getSRV function after dispatch.
    D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc ;
    ZeroMemory( &srvDesc, sizeof( srvDesc ) );
    srvDesc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
    srvDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
    srvDesc.Texture2D.MipLevels = 1;
    D3DD->CreateShaderResourceView( pTexture, &srvDesc, &pTextureSRV);
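
    (The rendering side is not shown here. It is just a normal SRV bind; the sketch below is only an assumption of what it looks like, with D3DC as the immediate context and the texture sampled by the pixel shader through slot t0.)

    //sketch only (assumed names: D3DC = immediate context, slot t0)
    ID3D11ShaderResourceView* srvs[1] = { pTextureSRV };
    D3DC->PSSetShaderResources( 0, 1, srvs );   // feed the raymarched texture to the pixel shader
    //...set shaders and geometry, then draw the full-screen quad that samples it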
    

  • I solved this. The problem was in releasing: you can't access a compute shader's resources from another pipeline stage until the UAV has been fully unbound and released.

    Edit: it turns out you don't have to release it at all. You only have to "unplug" the UAV from the GPU.

    //code is shortened
    //create the texture and its UAV
    D3D->CreateTexture2D( &textureDesc, NULL, &pTexture );
    D3DD->CreateUnorderedAccessView( pTexture, &viewDescUAV, &pTextureUAV );
    
    D3DC->Dispatch(...);
    
    //pTextureUAV->Release(); // not required (keep the UAV if you want to use it again)
    outputTexH->SetUnorderedAccessView(NULL); // this is what matters: clear the effect's UAV variable
    
    effpass->Apply(0, D3DC); // re-apply the pass so the UAV is actually unbound from the compute stage
    
    //create the SRV on the texture, use it.
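
    If you are not using the Effects framework, the same "unplug" can be done directly on the device context. This is only a sketch, assuming D3DC is the immediate context and the UAV sits in slot u0:

    //sketch only - raw D3D11 equivalent of the unbind step (assumes D3DC = immediate context, slot u0)
    ID3D11UnorderedAccessView* nullUAV[1] = { NULL };
    D3DC->CSSetUnorderedAccessViews( 0, 1, nullUAV, NULL ); // detach the UAV from the compute stage
    D3DC->CSSetShader( NULL, NULL, 0 );                     // optionally unbind the compute shader too
    //now create the SRV on pTexture and bind it (e.g. PSSetShaderResources) as usual

    The reason this matters: D3D11 treats simultaneous read (SRV) and write (UAV) bindings of the same resource as a hazard and forces one of the conflicting slots to NULL, which the debug layer reports as a warning; that is why the texture looked empty. The same applies in the other direction: unbind the SRV from the graphics stage before the next Dispatch.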
    

Tags
c++ directx directx11