rendering shapes and text on the desktop (click through) - visual-c++

For Windows 7, what is the simplest way to render arbitrary shapes and text straight onto the desktop?
It must have the following properties:
1) Visible and always on top
2) Semi-transparent
3) Click-through and type-through, as if the objects were not there
Notable examples range from the simple Fraps, which renders a framerate counter, to the complex Rainmeter, which has tons of functionality.
EDIT0: I've looked at the Rainmeter source code but I still have no idea how it renders objects...
EDIT1: Window Hud Behavior (Pass through clicks, can't be minimized) (solutions such as this seem extremely restrictive; there must be a way to render things with as much freedom as Rainmeter?)

I am still working on it, but here is part of it:
#include <Windows.h>
#include <cstdlib> // exit()
#pragma comment(linker, "/subsystem:windows /ENTRY:mainCRTStartup")
HINSTANCE hInstance;
POINT Mouse;
HWND hwnd;
RECT rect;
HDC dc;
float Size = 100;
float angle = 0;
bool Dirty = false;
char TEX;
int posX = 0;
int posY = 0;
int storedDC;
void GetDesktopResolution(int& w, int& h){
RECT desktop;
const HWND hDesktop = GetDesktopWindow();
GetWindowRect(hDesktop, &desktop);
w = desktop.right;
h = desktop.bottom;
}
void EX(){
// HINSTANCE and HWND are OS handles, not heap allocations - do not delete them
exit(0);
}
void Keys(){
if (GetAsyncKeyState(VK_ESCAPE)){
exit(0);
}
if (GetAsyncKeyState(VK_LBUTTON) && GetAsyncKeyState(VK_CONTROL)){
Dirty = true; // assumption: Ctrl+click should reposition the text at the cursor
}
}
void Draw(){
int h;
int w;
// Get the desktop size
GetDesktopResolution(w, h);
angle += 0.1f;
if (angle >= 2 * 3.141592f){
angle -= 2 * 3.141592f;
}
GetCursorPos(&Mouse);
if (Dirty == true){
rect = { 0, 0, w, h };
RedrawWindow(hwnd, &rect, NULL, RDW_INVALIDATE | RDW_ERASE | RDW_UPDATENOW);
posX = Mouse.x;
posY = Mouse.y;
RedrawWindow(hwnd, &rect, NULL, RDW_INVALIDATE | RDW_ERASE | RDW_UPDATENOW);
Dirty = false;
}
dc = GetDC(hwnd);
storedDC = SaveDC(dc);
//DEFAULT_CHARSET - ANSI_CHARSET
HFONT FMain = CreateFont(36, 20, -300, 0, FW_DONTCARE, FALSE, TRUE, FALSE, DEFAULT_CHARSET, OUT_OUTLINE_PRECIS,
CLIP_DEFAULT_PRECIS, CLEARTYPE_QUALITY, VARIABLE_PITCH, TEXT("Times New Roman"));
SetTextColor(dc, RGB(255, 255, 255));
SetBkColor(dc, RGB(0, 255, 0));
SelectObject(dc, FMain);
TextOut(dc, 15, 15, L"This is what the program does!", 30);
RedrawWindow(hwnd, &rect, NULL, RDW_NOERASE | RDW_INVALIDATE | RDW_UPDATENOW);
RestoreDC(dc, storedDC);
ReleaseDC(hwnd, dc);
DeleteObject(FMain); // the font is recreated every frame; delete it to avoid leaking GDI handles
}
int main(int argc, char **argv){
int h;
int w;
// Get the desktop size
GetDesktopResolution(w, h);
// find Program Manager
hwnd = FindWindowEx(GetDesktopWindow(), 0, L"Progman", L"Program Manager");
// find SHELLDLL_DefView
hwnd = FindWindowEx(hwnd, 0, L"SHELLDLL_DefView", 0);
// find Desktop Folder
hwnd = FindWindowEx(hwnd, 0, L"SysListView32", L"FolderView");
if (hwnd == NULL){
MessageBox(NULL, L"Could not initiate window!", L"ERROR!", MB_OK);
EX();
}
while (1){
Keys();
Draw();
Sleep(16); // avoid spinning the CPU at 100%
}
// Remove the drawing (note: not reached while the loop above runs forever)
rect = { Mouse.x - 50, Mouse.y - 50, Mouse.x + 50, Mouse.y + 50 };
InvalidateRect(hwnd, &rect, TRUE);
return 0;
}
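A more conventional way to satisfy all three requirements is not to draw on the desktop window at all, but to create your own topmost layered window and mark it click-through. A minimal sketch, assuming plain GDI text is enough (the class name and text are placeholders, and the black colour key / ~60% opacity are arbitrary choices):
#include <Windows.h>

LRESULT CALLBACK OverlayProc(HWND hWnd, UINT msg, WPARAM wp, LPARAM lp)
{
    if (msg == WM_PAINT) {
        PAINTSTRUCT ps;
        HDC hdc = BeginPaint(hWnd, &ps);
        SetBkMode(hdc, TRANSPARENT);
        SetTextColor(hdc, RGB(255, 255, 255));
        TextOut(hdc, 15, 15, L"Overlay text", 12);
        EndPaint(hWnd, &ps);
        return 0;
    }
    if (msg == WM_DESTROY) { PostQuitMessage(0); return 0; }
    return DefWindowProc(hWnd, msg, wp, lp);
}

int WINAPI wWinMain(HINSTANCE hInst, HINSTANCE, PWSTR, int)
{
    WNDCLASS wc = {};
    wc.lpfnWndProc   = OverlayProc;
    wc.hInstance     = hInst;
    wc.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); // black = colour key below
    wc.lpszClassName = L"OverlaySketch";                    // placeholder class name
    RegisterClass(&wc);

    // WS_EX_LAYERED + WS_EX_TRANSPARENT lets clicks and keystrokes fall through;
    // WS_EX_TOPMOST keeps it above other windows; WS_EX_TOOLWINDOW hides it from the taskbar.
    HWND hwnd = CreateWindowEx(
        WS_EX_LAYERED | WS_EX_TRANSPARENT | WS_EX_TOPMOST | WS_EX_TOOLWINDOW,
        wc.lpszClassName, L"", WS_POPUP,
        0, 0, GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN),
        NULL, NULL, hInst, NULL);

    // Black pixels become fully transparent; everything else is drawn at ~60% opacity.
    SetLayeredWindowAttributes(hwnd, RGB(0, 0, 0), 153, LWA_COLORKEY | LWA_ALPHA);
    ShowWindow(hwnd, SW_SHOW);

    MSG msg;
    while (GetMessage(&msg, NULL, 0, 0)) { TranslateMessage(&msg); DispatchMessage(&msg); }
    return 0;
}
With LWA_COLORKEY the black background is invisible and click-through; for per-pixel alpha (what Rainmeter-style skins typically use) you would draw into a 32-bit DIB and call UpdateLayeredWindow instead, but the extended window styles stay the same.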

Related

Framebuffer clone or mirror to another framebuffer

I want to clone framebuffer content from /dev/fb0 to /dev/fb2.
I am using an i.MX6Q running Debian 8, and my application runs on Qt5. I am not using X11 or Wayland.
The application runs on an LVDS display with 1280x800 pixels, addressed as framebuffer /dev/fb0.
The HDMI output, with 1920x1080 pixels, is addressed as framebuffer /dev/fb2.
I wrote C code that clones the 1280x800 framebuffer to the 1920x1080 one, but it is not efficient.
1. How can I make this more efficient?
2. How can I upscale 1280x800 to 1920x1080?
3. How can I rotate the framebuffer by 180° in C?
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <unistd.h>
#include <fcntl.h>
#include <syslog.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/fb.h>
inline uint32_t pixel_color(uint8_t r, uint8_t g, uint8_t b, struct fb_var_screeninfo *vinfo)
{
return (r<<vinfo->red.offset) | (g<<vinfo->green.offset) | (b<<vinfo->blue.offset);
}
int process() {
int fbfd = 0;
int fbfd2 = 0;
uint8_t *fbp=0;
uint8_t *fbp0=0;
int x=0, y=0;
printf("Display Clone Frame Buffer fb0 to fb2\n");
struct fb_var_screeninfo vinfo;
struct fb_fix_screeninfo finfo;
struct fb_var_screeninfo vinfo2;
struct fb_fix_screeninfo finfo2;
fbfd = open("/dev/fb0", O_RDWR);
if (fbfd == -1) {
printf("Unable to open first display");
return -1;
}
if (ioctl(fbfd, FBIOGET_FSCREENINFO, &finfo)) {
printf("Unable to get first display information");
return -1;
}
if (ioctl(fbfd, FBIOGET_VSCREENINFO, &vinfo)) {
printf("Unable to get first display information");
return -1;
}
printf("First display is %d x %d %dbps\n", vinfo.xres, vinfo.yres, vinfo.bits_per_pixel);
fbfd2 = open("/dev/fb2", O_RDWR);
if (fbfd2 == -1) {
printf("Unable to open secondary display");
return -1;
}
if (ioctl(fbfd2, FBIOGET_FSCREENINFO, &finfo2)) {
printf("Unable to get secondary display information");
return -1;
}
if (ioctl(fbfd2, FBIOGET_VSCREENINFO, &vinfo2)) {
printf("Unable to get secondary display information");
return -1;
}
printf("Second display is %d x %d %dbps\n", vinfo2.xres, vinfo2.yres, vinfo2.bits_per_pixel);
fbp = mmap(0, vinfo2.yres_virtual * finfo2.line_length, PROT_READ | PROT_WRITE, MAP_SHARED, fbfd2,(off_t)0);
if (fbp == MAP_FAILED)
{
printf("Unable to create memory mapping");
close(fbfd2);
return -1;
}
fbp0 = mmap(0, vinfo.yres_virtual * finfo.line_length, PROT_READ | PROT_WRITE, MAP_SHARED, fbfd,(off_t)0);
if (fbp0 == MAP_FAILED)
{
printf("Unable to create memory mapping");
close(fbfd);
return -1;
}
long int screensize = finfo.smem_len;
long pix_offset;
long pix_offset_LVDS;
for (x = 0; x < vinfo2.xres; x++)
{
for (y = 0; y < vinfo2.yres;y++)
{
pix_offset = (x+vinfo2.xoffset) * (vinfo2.bits_per_pixel/8) + (y+vinfo2.yoffset) * finfo2.line_length;
*((uint32_t*)(fbp + pix_offset)) = pixel_color(0x00,0x00,0xFF, &vinfo2);
}
}
while (1)
{
//memcpy(fbp,fbp0, screensize);
for (x = 0; x < vinfo.xres; x++)
{
for (y = 0; y < vinfo.yres;y++)
{
pix_offset = (x+vinfo2.xoffset) * (vinfo2.bits_per_pixel/8) + (y+vinfo2.yoffset) * finfo2.line_length;
pix_offset_LVDS=(x+vinfo.xoffset) * (vinfo.bits_per_pixel/8) + (y+vinfo.yoffset) * finfo.line_length;
*((uint32_t*)(fbp + pix_offset)) =*((uint32_t*) (fbp0 + pix_offset_LVDS));
}
}
usleep(25 * 1000);
}
/* not reached while the copy loop above runs forever */
munmap(fbp, vinfo2.yres_virtual * finfo2.line_length);
munmap(fbp0, vinfo.yres_virtual * finfo.line_length);
close(fbfd2);
close(fbfd);
return 0;
}
int main(int argc, char **argv) {
setlogmask(LOG_UPTO(LOG_DEBUG));
openlog("fbcp", LOG_NDELAY | LOG_PID, LOG_USER);
return process();
}
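Regarding questions 2 and 3: a nearest-neighbour loop can scale and rotate in one pass by computing, for every destination pixel, the source pixel it maps to. A sketch under the same assumptions as the code above (both framebuffers 32 bpp, zero x/y offsets; fbp0/vinfo/finfo describe /dev/fb0 and fbp/vinfo2/finfo2 describe /dev/fb2):
#include <stdint.h>
#include <linux/fb.h>

/* Nearest-neighbour upscale of the source framebuffer into the destination,
 * rotated by 180 degrees. Assumes 32 bpp and xoffset/yoffset of 0. */
static void scale_rotate_180(uint8_t *dst, const struct fb_var_screeninfo *dv,
                             const struct fb_fix_screeninfo *df,
                             const uint8_t *src, const struct fb_var_screeninfo *sv,
                             const struct fb_fix_screeninfo *sf)
{
    for (unsigned dy = 0; dy < dv->yres; dy++) {
        unsigned sy = dy * sv->yres / dv->yres;   /* source row for this destination row */
        const uint32_t *srow = (const uint32_t *)(src + (sv->yres - 1 - sy) * sf->line_length);
        uint32_t *drow = (uint32_t *)(dst + dy * df->line_length);
        for (unsigned dx = 0; dx < dv->xres; dx++) {
            unsigned sx = dx * sv->xres / dv->xres;  /* source column */
            drow[dx] = srow[sv->xres - 1 - sx];      /* mirror both axes = 180 degree turn */
        }
    }
}
Inside the while loop this would replace the per-pixel copy: scale_rotate_180(fbp, &vinfo2, &finfo2, fbp0, &vinfo, &finfo);. As for question 1, iterating row by row over the destination (as above) already avoids the cache-unfriendly column-major order of the original loop; doing the scale/rotate on the i.MX6's 2D hardware (IPU/G2D) instead of the CPU would be faster still, but that is outside plain /dev/fb access.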

OpenGL Without GUI

Let's say I run a Linux and I have no desktop environment installed. I boot up my system and all I have is my shell.
Is it possible to compile a program that uses the OpenGL libraries or directly uses the GPU driver to draw to the screen?
As far as I could understand I would always need some kind of desktop environment that would provide me a window that I can draw on. To keep it
simple let's say I just want to draw a simple 2d shape like a triangle in the middle of the screen for example.
And if that's possible how can I do it and where can I read more about the topic? If I am able to draw directly over my terminal does this mean that I would be able to run my app on a system that has a desktop environment and still be able to see my triangle?
Is it possible to compile a program that uses the OpenGL libraries or directly uses the GPU driver to draw to the screen?
Yes. With the EGL API this has been formalized; it works best with NVIDIA GPUs and their proprietary drivers. NVIDIA describes it on their dev blog: https://devblogs.nvidia.com/egl-eye-opengl-visualization-without-x-server/
Essentially the steps are:
Create an OpenGL context for a PBuffer:
#include <EGL/egl.h>
static const EGLint configAttribs[] = {
EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_DEPTH_SIZE, 8,
EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
EGL_NONE
};
static const int pbufferWidth = 9;
static const int pbufferHeight = 9;
static const EGLint pbufferAttribs[] = {
EGL_WIDTH, pbufferWidth,
EGL_HEIGHT, pbufferHeight,
EGL_NONE,
};
int main(int argc, char *argv[])
{
// 1. Initialize EGL
EGLDisplay eglDpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
EGLint major, minor;
eglInitialize(eglDpy, &major, &minor);
// 2. Select an appropriate configuration
EGLint numConfigs;
EGLConfig eglCfg;
eglChooseConfig(eglDpy, configAttribs, &eglCfg, 1, &numConfigs);
// 3. Create a surface
EGLSurface eglSurf = eglCreatePbufferSurface(eglDpy, eglCfg,
pbufferAttribs);
// 4. Bind the API
eglBindAPI(EGL_OPENGL_API);
// 5. Create a context and make it current
EGLContext eglCtx = eglCreateContext(eglDpy, eglCfg, EGL_NO_CONTEXT,
NULL);
eglMakeCurrent(eglDpy, eglSurf, eglSurf, eglCtx);
// from now on use your OpenGL context
// 6. Terminate EGL when finished
eglTerminate(eglDpy);
return 0;
}
and then go about the rest as per usual. Or you can ditch the PBuffer completely and just use OpenGL-managed resources, i.e. render to framebuffer objects. To that end you can omit creating the surface and just make the context current with no surface bound.
Here's an example that uses EGL without a display and without an EGL surface, rendering into an OpenGL-managed framebuffer object.
#include <GL/glew.h>
#include <GL/glut.h>
#include <EGL/egl.h>
#include <unistd.h>
#include <stdlib.h>
#include <assert.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <math.h>
#include <stdio.h>
using namespace std;
namespace render
{
int width, height;
float aspect;
void init();
void display();
int const fbo_width = 512;
int const fbo_height = 512;
GLuint fb, color, depth;
void *dumpbuf;
int dumpbuf_fd;
};
static const EGLint configAttribs[] = {
EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_DEPTH_SIZE, 8,
EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
EGL_NONE
};
int main(int argc, char *argv[])
{
// 1. Initialize EGL
EGLDisplay eglDpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
EGLint major, minor;
eglInitialize(eglDpy, &major, &minor);
// 2. Select an appropriate configuration
EGLint numConfigs;
EGLConfig eglCfg;
eglChooseConfig(eglDpy, configAttribs, &eglCfg, 1, &numConfigs);
// 3. Bind the API
eglBindAPI(EGL_OPENGL_API);
// 3. Create a context and make it current
EGLContext eglCtx = eglCreateContext(eglDpy, eglCfg, EGL_NO_CONTEXT,
NULL);
eglMakeCurrent(eglDpy, EGL_NO_SURFACE, EGL_NO_SURFACE, eglCtx);
glewInit();
// from now on use your OpenGL context
render::init();
render::display();
// 4. Terminate EGL when finished
eglTerminate(eglDpy);
return 0;
}
void CHECK_FRAMEBUFFER_STATUS()
{
GLenum status;
status = glCheckFramebufferStatus(GL_DRAW_FRAMEBUFFER);
switch(status) {
case GL_FRAMEBUFFER_COMPLETE:
break;
case GL_FRAMEBUFFER_UNSUPPORTED:
/* choose different formats */
break;
default:
/* programming error; will fail on all hardware */
throw "Framebuffer Error";
}
}
namespace render
{
float const light_dir[]={1,1,1,0};
float const light_color[]={1,0.95,0.9,1};
void init()
{
glGenFramebuffers(1, &fb);
glGenTextures(1, &color);
glGenRenderbuffers(1, &depth);
glBindFramebuffer(GL_FRAMEBUFFER, fb);
glBindTexture(GL_TEXTURE_2D, color);
glTexImage2D( GL_TEXTURE_2D,
0,
GL_RGB8,
fbo_width, fbo_height,
0,
GL_RGBA,
GL_UNSIGNED_BYTE,
NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, color, 0);
glBindRenderbuffer(GL_RENDERBUFFER, depth);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, fbo_width, fbo_height);
glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depth);
GLint red_bits, green_bits, blue_bits, alpha_bits;
glGetIntegerv(GL_RED_BITS, &red_bits);
glGetIntegerv(GL_GREEN_BITS, &green_bits);
glGetIntegerv(GL_BLUE_BITS, &blue_bits);
glGetIntegerv(GL_ALPHA_BITS, &alpha_bits);
fprintf(stderr, "FBO format R%dG%dB%dA%d\n",
(int)red_bits,
(int)green_bits,
(int)blue_bits,
(int)alpha_bits );
CHECK_FRAMEBUFFER_STATUS();
dumpbuf_fd = open("/tmp/fbodump.rgb", O_CREAT|O_SYNC|O_RDWR, S_IRUSR|S_IWUSR);
assert(-1 != dumpbuf_fd);
dumpbuf = malloc(fbo_width*fbo_height*3);
assert(dumpbuf);
}
void display()
{
static float a=0, b=0, c=0;
glBindTexture(GL_TEXTURE_2D, 0);
glEnable(GL_TEXTURE_2D);
glBindFramebuffer(GL_FRAMEBUFFER, fb);
glViewport(0,0,fbo_width, fbo_height);
glClearColor(0,0,0,0);
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(-1, 1, -1, 1, -1, 1);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glBegin(GL_TRIANGLES);
glColor3f(1,0,0);
glVertex3f(1,0,0);
glColor3f(0,1,0);
glVertex3f(0,1,0);
glColor3f(0,0,1);
glVertex3f(0,0,1);
glEnd();
glReadBuffer(GL_COLOR_ATTACHMENT0);
glReadPixels(0,0,fbo_width,fbo_height,GL_RGB,GL_UNSIGNED_BYTE,dumpbuf);
lseek(dumpbuf_fd, 0, SEEK_SET);
write(dumpbuf_fd, dumpbuf, fbo_width*fbo_height*3);
}
}

Why can't OpenGL print a string?

#include "stdafx.h"
#include <gl/glut.h>
void resizeEvent(int w, int h)
{
glViewport(0, 0, w, h);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluOrtho2D(0, w, h, 0);
glMatrixMode(GL_MODELVIEW);
glutPostRedisplay();
}
void displayEvent()
{
glClearColor(1, 1, 1, 0);
glClear(GL_COLOR_BUFFER_BIT);
glutSwapBuffers();
char text[] = "Hello World!";
glRasterPos2d(110, 110);
glColor3f(1, 0, 0);
for(int i=0; text[i] != '\0'; i++)
{
glutBitmapCharacter(GLUT_BITMAP_8_BY_13, text[i]);
}
}
int main(int argc, char *argv[])
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH);
glutInitWindowSize(400, 300);
glutInitWindowPosition(0, 0);
glutCreateWindow("Hello");
glutDisplayFunc(displayEvent);
glutReshapeFunc(resizeEvent);
glutMainLoop();
return 0;
}
I learned this from a slide deck and thought it would print a string, but it does not work. I searched Google but found no answer. I don't know why it can't print the string.
Is glutBitmapCharacter wrong?
In OpenGL, commands only reach the framebuffer when a flush or finish happens, so all of your drawing commands are still queued and have not been displayed yet. You need to call glFlush/glFinish at the end; for a double-buffered GLUT window, GLUT provides glutSwapBuffers, so call glutSwapBuffers at the end of your displayEvent function (after drawing, not before) to see its effect.
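A sketch of displayEvent reordered along those lines; the colour is also set before glRasterPos2d, because the current raster colour is latched at the moment the raster position is set:
void displayEvent()
{
    glClearColor(1, 1, 1, 0);
    glClear(GL_COLOR_BUFFER_BIT);

    char text[] = "Hello World!";
    glColor3f(1, 0, 0);                 // set the colour first - glRasterPos latches it
    glRasterPos2d(110, 110);
    for (int i = 0; text[i] != '\0'; i++)
        glutBitmapCharacter(GLUT_BITMAP_8_BY_13, text[i]);

    glutSwapBuffers();                  // swap last, after all drawing commands
}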

Converting audio formats with libav in c on linux

I am trying to write an app, part of which converts an audio file to a different format. But the output is garbage (well, that's what it looks like in Audacity).
After trying many different things, I ended up with this: a raw audio file that I try to convert and write out as an MP3 file. I know the raw audio is OK (via audacity). But the MP3 again is garbage.
Here is the code I am using (I pieced this together from the libav examples):
/*
* flac2mp3.cpp
*
* Created on: Aug 20, 2014
* Author: ken
*/
#include <stddef.h>
#include <unistd.h>
#include <stdlib.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <string.h>
#include <pthread.h>
#include <signal.h>
#include <exception> // std::exception
#include <cerrno>
#include <assert.h>
#include <time.h>
#include <fcntl.h>
#include <linux/types.h>
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS
#include <inttypes.h>
#endif
#ifndef INT64_C
#define INT64_C(c) (c ## LL)
#define UINT64_C(c) (c ## ULL)
#endif
extern "C"
{
#include <libavutil/avassert.h>
#include <libavutil/channel_layout.h>
#include <libavutil/opt.h>
#include <libavutil/mathematics.h>
#include <libavutil/timestamp.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
#ifdef CPP_W_LITERAL_SUFFIX
#undef CPP_W_LITERAL_SUFFIX
#include <libavutil/timestamp.h>
#define CPP_W_LITERAL_SUFFIX
#endif
#ifdef av_err2str
#undef av_err2str
#endif
#define av_err2str(errnum) \
av_make_error_string((char*)(__builtin_alloca(AV_ERROR_MAX_STRING_SIZE)),\
AV_ERROR_MAX_STRING_SIZE, errnum)
#ifdef av_ts2timestr
#undef av_ts2timestr
#endif
#define av_ts2timestr(ts, tb) \
av_ts_make_time_string((char*)(__builtin_alloca(AV_TS_MAX_STRING_SIZE)), ts, tb)
}
typedef unsigned char BYTE;
#include <algorithm>
#include <fcntl.h>
static AVFormatContext *encfmt_ctx = NULL;
static AVCodecContext *audio_enc_ctx;
static AVStream *audio_encstream = NULL;
static int audio_frame_deccount = 0, audio_frame_enccount = 0;
static SwrContext *swr;
static BYTE **outpb;
static const BYTE **inpb;
#define MP3_SAMPLE_FMT AV_SAMPLE_FMT_S16P
#define MP3_BITRATE 256000
#define MP3_SAMPLE_RATE 44100
#define FLAC_SAMPLE_FMT AV_SAMPLE_FMT_S16
#define FLAC_BITRATE 256000
#define FLAC_SAMPLE_RATE 44100
#define SAMPLES_TO_CB(stc, cs) (av_get_bytes_per_sample(stc->codec->sample_fmt) * stc->codec->channels * cs)
#define SAMPLES_FROM_CB(stc, cb) cb/(av_get_bytes_per_sample(stc->codec->sample_fmt) * stc->codec->channels)
#define BYTES_TO_READ 2048
bool encode_buffer(BYTE* buff, int cbdec, bool flush)
{
int ret;
int got_frame;
int line_size;
AVPacket epkt = {0};
AVFrame *eframe = avcodec_alloc_frame();
av_init_packet(&epkt);
/*
* convert audio
*/
long int c_insamples = cbdec/(4);
// long int delay = swr_get_delay(swr,(long int) 44100);
long int delay = 0;
int c_outsamples = av_rescale_rnd(delay + c_insamples,
(long int) MP3_SAMPLE_RATE,
(long int) FLAC_SAMPLE_RATE, AV_ROUND_UP);
ret = av_samples_alloc(outpb, &line_size, 2,
c_outsamples,MP3_SAMPLE_FMT, 0);
av_assert0(ret >= 0);
inpb[0] = buff;
ret = swr_convert(swr, outpb, c_outsamples, inpb, c_insamples);
av_assert0(ret >= 0);
/*
* encode
*/
audio_frame_deccount +=(cbdec/4);
printf("cbdec=%d, deccount=%d\n", cbdec, audio_frame_deccount);
eframe->nb_samples = cbdec/(4);
eframe->pts = (audio_frame_deccount * ( av_rescale_q(1, audio_encstream->codec->time_base, audio_encstream->time_base)));
avcodec_fill_audio_frame(eframe, 2, MP3_SAMPLE_FMT, outpb[0], cbdec, 1);
epkt.stream_index = audio_encstream->index;
eframe->nb_samples = audio_enc_ctx->frame_size;
ret = avcodec_encode_audio2(audio_enc_ctx, &epkt, eframe, &got_frame);
av_assert0(ret >= 0);
if (!got_frame)
{
// av_free(outpb[0]);
return false;
}
epkt.stream_index = audio_encstream->index;
audio_frame_enccount ++ ;
/* Write the compressed frame to the media file. */
ret = av_interleaved_write_frame(encfmt_ctx, &epkt);
av_free(outpb[0]);
avcodec_free_frame(&eframe);
return true;
}
int open_encodec_context(AVFormatContext *fmt_ctx, enum AVMediaType type)
{
int ret;
AVStream *st;
AVCodec *enc = NULL;
enc = avcodec_find_encoder(fmt_ctx->oformat->audio_codec);
av_assert0(enc != NULL);
st = avformat_new_stream(fmt_ctx, enc);
/* find encoder for the stream */
st->codec->sample_fmt = MP3_SAMPLE_FMT;
st->codec->bit_rate = MP3_BITRATE;
st->codec->sample_rate = MP3_SAMPLE_RATE;
st->codec->channels = 2;
ret = avcodec_open2(st->codec, enc, NULL);
av_assert0(ret >= 0);
return 1;
}
int main(int argc, char **argv)
{
const char *fn_out = argv[1];
int ret;
av_register_all();
ret = avformat_alloc_output_context2(&encfmt_ctx, NULL, NULL, fn_out);
av_assert0(ret >= 0);
if (open_encodec_context(encfmt_ctx, AVMEDIA_TYPE_AUDIO) >= 0)
{
audio_encstream = encfmt_ctx->streams[0];
audio_enc_ctx = audio_encstream->codec;
av_dump_format(encfmt_ctx, 0, fn_out, 1);
}
BYTE pbo[sizeof(BYTE*) * av_sample_fmt_is_planar(MP3_SAMPLE_FMT) ? 2 : 1];
outpb = (BYTE**) pbo;
outpb[0] = NULL;
BYTE pbi[sizeof(BYTE*) * 2];
inpb = (const BYTE**) pbi;
inpb[0] = NULL;
swr = swr_alloc();
av_assert0(swr);
/* set options */
av_opt_set_int(swr, "in_channel_layout", 2, 0);
av_opt_set_int(swr, "in_channel_count", 2, 0);
av_opt_set_int(swr, "in_sample_rate", FLAC_SAMPLE_RATE,0);
av_opt_set_sample_fmt(swr, "in_sample_fmt", FLAC_SAMPLE_FMT, 0);
av_opt_set_int(swr, "out_channel_layout",2, 0);
av_opt_set_int(swr, "out_sample_rate", MP3_SAMPLE_RATE, 0);
av_opt_set_sample_fmt(swr, "out_sample_fmt",MP3_SAMPLE_FMT, 0);
ret = swr_init(swr);
av_assert0(ret >= 0);
if (!(encfmt_ctx->oformat->flags & AVFMT_NOFILE))
{
ret = avio_open(&encfmt_ctx->pb, fn_out, AVIO_FLAG_WRITE);
if (ret < 0)
{
fprintf(stderr, "Could not open '%s': %s\n", fn_out,
av_err2str(ret));
return 1;
}
}
ret = avformat_write_header(encfmt_ctx, NULL);
av_assert0(ret >= 0);
int cbread = 0;
int fd = open("/nas/temp/flac.raw", O_RDONLY);
assert(fd > 0);
int cb2read = BYTES_TO_READ;
BYTE b[cb2read];
int c_reads = 0;
while ((cbread = read(fd, b, cb2read)) > 0)
{
c_reads++;
encode_buffer(b, cbread, false);
}
close(fd);
ret = av_interleaved_write_frame(encfmt_ctx, NULL);
av_assert0(ret >= 0);
av_write_trailer(encfmt_ctx);
avio_close(encfmt_ctx->pb);
avcodec_close(audio_enc_ctx);
avformat_free_context(encfmt_ctx);
exit(1);
}
The input data is from a decoded FLAC file (i.e., the raw audio data from it). According to libav/ffmpeg it is: format=s16, sample rate=44100, channels=2.
Any ideas?
TIA
ken
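For reference, libswresample expects channel-layout masks (AV_CH_LAYOUT_*), not channel counts, for the in_channel_layout/out_channel_layout options, so passing 2 there does not describe stereo. A sketch of the stereo interleaved s16 to planar s16 resampler set up with the convenience helper instead (wrap the headers in extern "C" when compiling as C++, as the question already does; this is not necessarily the only issue in the code above):
#include <libswresample/swresample.h>
#include <libavutil/channel_layout.h>

/* Sketch: interleaved stereo s16 at 44.1 kHz -> planar s16 at 44.1 kHz,
 * matching MP3_SAMPLE_FMT in the code above. */
static SwrContext *make_resampler(void)
{
    SwrContext *swr = swr_alloc_set_opts(NULL,
        AV_CH_LAYOUT_STEREO, AV_SAMPLE_FMT_S16P, 44100,   /* output */
        AV_CH_LAYOUT_STEREO, AV_SAMPLE_FMT_S16,  44100,   /* input  */
        0, NULL);
    if (swr == NULL || swr_init(swr) < 0)
        return NULL;
    return swr;
}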

Qt with XComposite problem

I'm trying to write a simple program which redirects all the windows to the backbuffer (as a composite manager does), then writes them to pixmaps and saves them to disk. But I get these errors:
(.text.startup+0x5e):-1: error: undefined reference to `XCompositeRedirectSubwindows'
(.text.startup+0x171):-1: error: undefined reference to `XCompositeNameWindowPixmap'
:-1: error: collect2: ld returned 1 exit status
Here is the code:
#include <QApplication>
#include <QDebug>
#include <X11/Xlib.h>
#include <QPaintDevice>
#include <QX11Info>
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <X11/Xatom.h>
#include <X11/extensions/Xcomposite.h>
#include <X11/extensions/Xrender.h>
#include <X11/extensions/Xdamage.h>
#include <QPixmap>
#include <QWidget>
int main( int argc, char *argv[] )
{
QApplication app( argc, argv );
app.setGraphicsSystem("native");
Picture frontBuffer;
XRenderPictFormat *format;
Window rootWindow;
int depth;
Display *dpy = XOpenDisplay( getenv("DISPLAY") );
rootWindow = XRootWindow( dpy, XDefaultScreen( dpy ) );
depth = DefaultDepth( dpy, DefaultScreen(dpy) );
// Redirect all the windows
XCompositeRedirectSubwindows( dpy, rootWindow, CompositeRedirectManual );
// Get the format
format = XRenderFindVisualFormat( dpy, DefaultVisual( dpy, DefaultScreen(dpy) ) );
XRenderPictureAttributes pa;
pa.subwindow_mode = IncludeInferiors;
// Creating front buffer
frontBuffer = XRenderCreatePicture( dpy, rootWindow, format, CPSubwindowMode, &pa );
uint nwindows;
Window root_return, parent_return, *windows;
XQueryTree( dpy, rootWindow, &root_return,
&parent_return, &windows, &nwindows );
for ( uint i = 0; i < nwindows; i++ ) {
XWindowAttributes attr;
if ( !XGetWindowAttributes( dpy, windows[i], &attr ) )
continue;
Pixmap pix = XCompositeNameWindowPixmap( dpy, windows[i] );
Picture pic = XRenderCreatePicture( dpy, pix, format, 0, 0 );
QPixmap pixmap(540, 900);
XRenderComposite( dpy, PictOpSrc, pic, None, pixmap.x11PictureHandle(),
0, 0, 0, 0, 0 , 0, 540, 900 );
pixmap.save( QString::number( i )+".png", "PNG" );
}
XFree( windows );
return app.exec();
}
Did you link your program with libXcomposite? That's the library which defines those functions.
Link with -lXcomposite (e.g. add the output of pkg-config --libs xcomposite to your link line).
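If this is a qmake project, that means adding the libraries to the .pro file; a sketch (the code also calls XRender functions, so that library is listed as well):
# qmake .pro sketch: X extension libraries the program links against
LIBS += -lXcomposite -lXrender -lX11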
