// Build:
// arm-openwrt-linux-muslgnueabi-gcc -o 17_safe_lcd_camera_test ./jpeg_show.c -L./lib/ -ljpeg -I./include/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <setjmp.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/fb.h>
#include <jpeglib.h>

// Custom libjpeg error handler state
struct my_error_mgr {
    struct jpeg_error_mgr pub;    // "public" fields
    jmp_buf setjmp_buffer;        // for returning to the caller on error
};

// Error callback: jump back to the setjmp() point instead of exiting
void my_error_exit(j_common_ptr cinfo)
{
    struct my_error_mgr *myerr = (struct my_error_mgr *)cinfo->err;
    longjmp(myerr->setjmp_buffer, 1);
}

// Decode an in-memory JPEG and convert it to BGRA.
// width/height are the screen width and height; bgra_data points to the
// mapped framebuffer (assumed 32 bpp). Output is clipped to the screen size.
// Assumes a 3-component (RGB) JPEG.
int decode_jpeg_to_bgra(uint8_t *jpeg_data, size_t jpeg_size,
                        int width, int height, unsigned char *bgra_data)
{
    struct jpeg_decompress_struct cinfo;
    struct my_error_mgr jerr;
    JSAMPARRAY buffer;
    int row_stride;

    // Install the error handler
    cinfo.err = jpeg_std_error(&jerr.pub);
    jerr.pub.error_exit = my_error_exit;
    if (setjmp(jerr.setjmp_buffer)) {
        jpeg_destroy_decompress(&cinfo);
        return -1;
    }

    // Initialize the decompression object
    jpeg_create_decompress(&cinfo);

    // Feed it the in-memory JPEG data
    jpeg_mem_src(&cinfo, jpeg_data, jpeg_size);

    // Read the JPEG header
    jpeg_read_header(&cinfo, TRUE);

    // Start decompression
    jpeg_start_decompress(&cinfo);
    row_stride = cinfo.output_width * cinfo.output_components;

    // Read one scanline at a time and write it to the framebuffer
    buffer = (*cinfo.mem->alloc_sarray)((j_common_ptr)&cinfo, JPOOL_IMAGE, row_stride, 1);
    for (unsigned int row = 0; row < cinfo.output_height; row++) {
        jpeg_read_scanlines(&cinfo, buffer, 1);
        if (row >= (unsigned int)height)
            continue;   // row does not fit on the screen: skip it, but keep reading
        for (unsigned int col = 0; col < cinfo.output_width && col < (unsigned int)width; col++) {
            uint8_t r = buffer[0][col * cinfo.output_components + 0];
            uint8_t g = buffer[0][col * cinfo.output_components + 1];
            uint8_t b = buffer[0][col * cinfo.output_components + 2];
            uint8_t a = 255; // alpha channel, fully opaque
            int offset = (row * width + col) * 4;
            bgra_data[offset + 0] = b;
            bgra_data[offset + 1] = g;
            bgra_data[offset + 2] = r;
            bgra_data[offset + 3] = a;
        }
    }

    // Finish decompression and release resources
    jpeg_finish_decompress(&cinfo);
    jpeg_destroy_decompress(&cinfo);
    return 0;
}

// Open /dev/fb0, query the screen geometry and map the framebuffer
// into the process address space. Returns NULL on failure.
unsigned char *lcd_init(int *w, int *h, int *bits_per_pixel)
{
    static unsigned char *lcd_mem_p = NULL; // start of the mapped framebuffer
    struct fb_var_screeninfo vinfo;
    struct fb_fix_screeninfo finfo;

    /* 1. Open the framebuffer device */
    int lcd_fd = open("/dev/fb0", O_RDWR);
    if (lcd_fd < 0) {
        perror("open /dev/fb0");
        return NULL;
    }

    /* 2. Get the variable screen parameters */
    if (ioctl(lcd_fd, FBIOGET_VSCREENINFO, &vinfo)) {
        return NULL;
    }
    printf("screen X:%d screen Y:%d bits per pixel:%d\n",
           vinfo.xres, vinfo.yres, vinfo.bits_per_pixel);
    *w = vinfo.xres;
    *h = vinfo.yres;
    *bits_per_pixel = vinfo.bits_per_pixel;

    /* 3. Get the fixed screen parameters */
    if (ioctl(lcd_fd, FBIOGET_FSCREENINFO, &finfo)) {
        return NULL;
    }
    printf("smem_len=%u Byte, line_length=%u Byte\n", finfo.smem_len, finfo.line_length);

    /* 4. Map the framebuffer into the process address space */
    lcd_mem_p = (unsigned char *)mmap(0, finfo.smem_len, PROT_READ | PROT_WRITE,
                                      MAP_SHARED, lcd_fd, 0);
    if (lcd_mem_p == MAP_FAILED) {
        perror("mmap");
        return NULL;
    }
    memset(lcd_mem_p, 0xFF, finfo.smem_len); // clear the screen to white
    printf("Mapped the LCD framebuffer into the process address space\n");
    return lcd_mem_p;
}

int main(int argc, char **argv)
{
    printf("usage:\n\t%s xxx.jpg\n\n", argv[0]);

    FILE *jpeg_file;
    if (argc == 2) {
        jpeg_file = fopen(argv[1], "rb");
        printf("%s\n", argv[1]);
    } else {
        jpeg_file = fopen("/mnt/UDISK/mjpeg_4.jpg", "rb");
    }
    if (!jpeg_file) {
        perror("Failed to open file");
        return 1;
    }

    int screen_w = 0;    // screen width in pixels
    int screen_h = 0;    // screen height in pixels
    int screen_bits = 0; // bits per pixel
    unsigned char *bgra_data = lcd_init(&screen_w, &screen_h, &screen_bits);
    if (!bgra_data) {
        fprintf(stderr, "lcd_init failed\n");
        fclose(jpeg_file);
        return 1;
    }

    // Get the file size
    fseek(jpeg_file, 0, SEEK_END);
    long file_size = ftell(jpeg_file);
    fseek(jpeg_file, 0, SEEK_SET);

    // Read the JPEG data into memory
    uint8_t *jpeg_data = (uint8_t *)malloc(file_size);
    if (!jpeg_data) {
        perror("Failed to allocate memory");
        fclose(jpeg_file);
        return 1;
    }
    fread(jpeg_data, 1, file_size, jpeg_file);
    fclose(jpeg_file);

    // Decode and write straight into the framebuffer
    decode_jpeg_to_bgra(jpeg_data, file_size, screen_w, screen_h, bgra_data);

    free(jpeg_data);
    return 0;
}
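Note that the program writes 4 bytes per pixel, so it assumes a 32 bpp framebuffer even though lcd_init also reports bits_per_pixel (in screen_bits, which is never checked). If the panel turned out to be 16 bpp, the decoded R/G/B values would have to be packed as RGB565 instead. Below is a minimal sketch of that packing, assuming a little-endian RGB565 layout; the helper name is hypothetical and is not part of the original code.

#include <stdint.h>

/* Hypothetical helper: pack 8-bit R/G/B into one RGB565 pixel.
 * Only needed if the framebuffer reports bits_per_pixel == 16. */
static inline uint16_t rgb888_to_rgb565(uint8_t r, uint8_t g, uint8_t b)
{
    return (uint16_t)(((r & 0xF8) << 8) |   /* top 5 bits of R -> bits 15..11 */
                      ((g & 0xFC) << 3) |   /* top 6 bits of G -> bits 10..5  */
                      (b >> 3));            /* top 5 bits of B -> bits 4..0   */
}

/* Example use inside the per-pixel loop of decode_jpeg_to_bgra():
 *
 *   uint16_t *fb16 = (uint16_t *)bgra_data;
 *   fb16[row * width + col] = rgb888_to_rgb565(r, g, b);
 */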