SDL_CreateTexture()
w/SDL_TEXTUREACCESS_STREAMING
+ SDL_UpdateTexture()
seems to work well enough with the right pixel format.
On my system using the default renderer:
Renderer name: direct3d
Texture formats:
SDL_PIXELFORMAT_ARGB8888
SDL_PIXELFORMAT_YV12
SDL_PIXELFORMAT_IYUV
(though the opengl
info is the same:)
Renderer name: opengl
Texture formats:
SDL_PIXELFORMAT_ARGB8888
SDL_PIXELFORMAT_YV12
SDL_PIXELFORMAT_IYUV
SDL_PIXELFORMAT_ARGB8888
gives me ~1ms/frame:
// g++ main.cpp `pkg-config --cflags --libs sdl2`
#include <SDL.h>
#include <iostream>
#include <iomanip>
#include <vector>
#include <algorithm>
#include <chrono>
// Counts frames and, once `period` seconds have elapsed, reports the
// frame count, average FPS and average ms/frame to `os`, then resets.
// State is kept in function-local statics, so this is meant to be
// called exactly once per rendered frame from a single thread.
void UpdateFrameTiming( std::ostream& os = std::cout, float period = 2.0f )
{
    static unsigned int frameCount = 0;
    static auto periodStart = std::chrono::steady_clock::now();

    ++frameCount;
    const auto now = std::chrono::steady_clock::now();
    const float elapsed = std::chrono::duration< float >( now - periodStart ).count();
    if( elapsed < period )
        return;

    os
        << frameCount << " frames in "
        << std::setprecision( 1 ) << std::fixed << elapsed << " seconds = "
        << std::setprecision( 1 ) << std::fixed << frameCount / elapsed << " FPS ("
        << std::setprecision( 3 ) << std::fixed << elapsed / frameCount * 1000.0 << " ms/frame)\n";

    // Start a fresh measurement window.
    frameCount = 0;
    periodStart = now;
}
int main( int, char** )
{
SDL_Init( SDL_INIT_EVERYTHING );
SDL_Window* window = SDL_CreateWindow( "SDL", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, 600, 600, SDL_WINDOW_SHOWN );
SDL_Renderer* renderer = SDL_CreateRenderer( window, -1, SDL_RENDERER_ACCELERATED );
SDL_SetHint( SDL_HINT_RENDER_SCALE_QUALITY, "1" );
// dump renderer info
SDL_RendererInfo info;
SDL_GetRendererInfo( renderer, &info );
std::cout << "Renderer name: " << info.name << '\n';
std::cout << "Texture formats: " << '\n';
for( Uint32 i = 0; i < info.num_texture_formats; i++ )
{
std::cout << SDL_GetPixelFormatName( info.texture_formats[i] ) << '\n';
}
// create texture
const unsigned int texWidth = 1024;
const unsigned int texHeight = 1024;
SDL_Texture* texture = SDL_CreateTexture( renderer, SDL_PIXELFORMAT_ARGB8888, SDL_TEXTUREACCESS_STREAMING, texWidth, texHeight );
std::vector< unsigned char > pixels( texWidth * texHeight * 4, 0 );
bool useLocktexture = false;
// main loop
bool running = true;
while( running )
{
SDL_SetRenderDrawColor( renderer, 0, 0, 0, SDL_ALPHA_OPAQUE );
SDL_RenderClear( renderer );
// handle events
SDL_Event ev;
while( SDL_PollEvent( &ev ) )
{
if( ( SDL_QUIT == ev.type ) ||
( SDL_KEYDOWN == ev.type && SDL_SCANCODE_ESCAPE == ev.key.keysym.scancode ) )
{
running = false;
break;
}
if( SDL_KEYDOWN == ev.type && SDL_SCANCODE_L == ev.key.keysym.scancode )
{
useLocktexture = !useLocktexture;
std::cout << "Using " << ( useLocktexture ? "SDL_LockTexture() + std::copy_n()" : "SDL_UpdateTexture()" ) << '\n';
}
}
// splat down some random pixels
for( unsigned int i = 0; i < 1000; i++ )
{
const unsigned int x = rand() % texWidth;
const unsigned int y = rand() % texHeight;
const unsigned int offset = ( texWidth * y * 4 ) + x * 4;
pixels[ offset + 0 ] = rand() % 256; // b
pixels[ offset + 1 ] = rand() % 256; // g
pixels[ offset + 2 ] = rand() % 256; // r
pixels[ offset + 3 ] = SDL_ALPHA_OPAQUE; // a
}
// update texture
if( useLocktexture )
{
unsigned char* lockedPixels = nullptr;
int pitch = 0;
SDL_LockTexture( texture, nullptr, reinterpret_cast< void** >( &lockedPixels ), &pitch );
std::copy_n( pixels.data(), pixels.size(), lockedPixels );
SDL_UnlockTexture( texture );
}
else
{
SDL_UpdateTexture( texture, nullptr, pixels.data(), texWidth * 4 );
}
SDL_RenderCopy( renderer, texture, nullptr, nullptr );
SDL_RenderPresent( renderer );
UpdateFrameTiming();
}
SDL_DestroyRenderer( renderer );
SDL_DestroyWindow( window );
SDL_Quit();
return 0;
}
Make sure you don’t have vsync enabled (forced in the driver, running a compositor, etc.) or else all your frame times will be ~16ms (or whatever your display refresh is set to).