// convert_jpeg.cc — MJPEG (Motion JPEG) to I420 / ARGB conversion helpers.
  1. /*
  2. * Copyright 2011 The LibYuv Project Authors. All rights reserved.
  3. *
  4. * Use of this source code is governed by a BSD-style license
  5. * that can be found in the LICENSE file in the root of the source
  6. * tree. An additional intellectual property rights grant can be found
  7. * in the file PATENTS. All contributing project authors may
  8. * be found in the AUTHORS file in the root of the source tree.
  9. */
  10. #include "libyuv/convert.h"
  11. #include "libyuv/convert_argb.h"
  12. #ifdef HAVE_JPEG
  13. #include "libyuv/mjpeg_decoder.h"
  14. #endif
  15. #ifdef __cplusplus
  16. namespace libyuv {
  17. extern "C" {
  18. #endif
  19. #ifdef HAVE_JPEG
// Cursor over the destination I420 planes for band-by-band decoding.
// The Jpeg*ToI420 row callbacks below advance y/u/v past each band of
// decoded rows and decrement h by the number of rows consumed.
struct I420Buffers {
  uint8_t* y;    // next row to write in the Y plane
  int y_stride;
  uint8_t* u;    // next row to write in the U plane
  int u_stride;
  uint8_t* v;    // next row to write in the V plane
  int v_stride;
  int w;         // destination width in pixels
  int h;         // rows remaining to be written
};
  30. static void JpegCopyI420(void* opaque,
  31. const uint8_t* const* data,
  32. const int* strides,
  33. int rows) {
  34. I420Buffers* dest = (I420Buffers*)(opaque);
  35. I420Copy(data[0], strides[0], data[1], strides[1], data[2], strides[2],
  36. dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
  37. dest->v_stride, dest->w, rows);
  38. dest->y += rows * dest->y_stride;
  39. dest->u += ((rows + 1) >> 1) * dest->u_stride;
  40. dest->v += ((rows + 1) >> 1) * dest->v_stride;
  41. dest->h -= rows;
  42. }
  43. static void JpegI422ToI420(void* opaque,
  44. const uint8_t* const* data,
  45. const int* strides,
  46. int rows) {
  47. I420Buffers* dest = (I420Buffers*)(opaque);
  48. I422ToI420(data[0], strides[0], data[1], strides[1], data[2], strides[2],
  49. dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
  50. dest->v_stride, dest->w, rows);
  51. dest->y += rows * dest->y_stride;
  52. dest->u += ((rows + 1) >> 1) * dest->u_stride;
  53. dest->v += ((rows + 1) >> 1) * dest->v_stride;
  54. dest->h -= rows;
  55. }
  56. static void JpegI444ToI420(void* opaque,
  57. const uint8_t* const* data,
  58. const int* strides,
  59. int rows) {
  60. I420Buffers* dest = (I420Buffers*)(opaque);
  61. I444ToI420(data[0], strides[0], data[1], strides[1], data[2], strides[2],
  62. dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
  63. dest->v_stride, dest->w, rows);
  64. dest->y += rows * dest->y_stride;
  65. dest->u += ((rows + 1) >> 1) * dest->u_stride;
  66. dest->v += ((rows + 1) >> 1) * dest->v_stride;
  67. dest->h -= rows;
  68. }
  69. static void JpegI400ToI420(void* opaque,
  70. const uint8_t* const* data,
  71. const int* strides,
  72. int rows) {
  73. I420Buffers* dest = (I420Buffers*)(opaque);
  74. I400ToI420(data[0], strides[0], dest->y, dest->y_stride, dest->u,
  75. dest->u_stride, dest->v, dest->v_stride, dest->w, rows);
  76. dest->y += rows * dest->y_stride;
  77. dest->u += ((rows + 1) >> 1) * dest->u_stride;
  78. dest->v += ((rows + 1) >> 1) * dest->v_stride;
  79. dest->h -= rows;
  80. }
  81. // Query size of MJPG in pixels.
  82. LIBYUV_API
  83. int MJPGSize(const uint8_t* sample,
  84. size_t sample_size,
  85. int* width,
  86. int* height) {
  87. MJpegDecoder mjpeg_decoder;
  88. LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  89. if (ret) {
  90. *width = mjpeg_decoder.GetWidth();
  91. *height = mjpeg_decoder.GetHeight();
  92. }
  93. mjpeg_decoder.UnloadFrame();
  94. return ret ? 0 : -1; // -1 for runtime failure.
  95. }
// MJPG (Motion JPeg) to I420
// TODO(fbarchard): review src_width and src_height requirement. dst_width and
// dst_height may be enough.
//
// Decodes one MJPEG frame into caller-provided I420 planes. The frame's
// encoded dimensions must match src_width/src_height or decoding is refused.
// Supported inputs: YCbCr 4:2:0, 4:2:2, 4:4:4 and grayscale; each branch
// below checks the exact per-component sampling factors.
// Returns 0 on success, -1 if sample_size is kUnknownDataSize, and 1 for
// runtime failures (dimension mismatch, unsupported format, decode error).
LIBYUV_API
int MJPGToI420(const uint8_t* sample,
               size_t sample_size,
               uint8_t* dst_y,
               int dst_stride_y,
               uint8_t* dst_u,
               int dst_stride_u,
               uint8_t* dst_v,
               int dst_stride_v,
               int src_width,
               int src_height,
               int dst_width,
               int dst_height) {
  if (sample_size == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }
  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret && (mjpeg_decoder.GetWidth() != src_width ||
              mjpeg_decoder.GetHeight() != src_height)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    // Cursor state consumed and advanced by the Jpeg* row callbacks.
    I420Buffers bufs = {dst_y, dst_stride_y, dst_u, dst_stride_u,
                        dst_v, dst_stride_v, dst_width, dst_height};
    // YUV420: Y sampled 2x2 relative to half-resolution chroma.
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegCopyI420, &bufs, dst_width,
                                           dst_height);
      // YUV422: chroma halved horizontally only.
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToI420, &bufs, dst_width,
                                           dst_height);
      // YUV444: full-resolution chroma.
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToI420, &bufs, dst_width,
                                           dst_height);
      // YUV400: grayscale, single component.
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToI420, &bufs, dst_width,
                                           dst_height);
    } else {
      // TODO(fbarchard): Implement conversion for any other colorspace/sample
      // factors that occur in practice.
      // ERROR: Unable to convert MJPEG frame because format is not supported
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}
  181. #ifdef HAVE_JPEG
// Cursor over the destination ARGB buffer for band-by-band decoding.
// The Jpeg*ToARGB row callbacks below advance argb past each band of
// decoded rows and decrement h by the number of rows consumed.
struct ARGBBuffers {
  uint8_t* argb;     // next ARGB row to write
  int argb_stride;
  int w;             // destination width in pixels
  int h;             // rows remaining to be written
};
  188. static void JpegI420ToARGB(void* opaque,
  189. const uint8_t* const* data,
  190. const int* strides,
  191. int rows) {
  192. ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  193. I420ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
  194. dest->argb, dest->argb_stride, dest->w, rows);
  195. dest->argb += rows * dest->argb_stride;
  196. dest->h -= rows;
  197. }
  198. static void JpegI422ToARGB(void* opaque,
  199. const uint8_t* const* data,
  200. const int* strides,
  201. int rows) {
  202. ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  203. I422ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
  204. dest->argb, dest->argb_stride, dest->w, rows);
  205. dest->argb += rows * dest->argb_stride;
  206. dest->h -= rows;
  207. }
  208. static void JpegI444ToARGB(void* opaque,
  209. const uint8_t* const* data,
  210. const int* strides,
  211. int rows) {
  212. ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  213. I444ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
  214. dest->argb, dest->argb_stride, dest->w, rows);
  215. dest->argb += rows * dest->argb_stride;
  216. dest->h -= rows;
  217. }
  218. static void JpegI400ToARGB(void* opaque,
  219. const uint8_t* const* data,
  220. const int* strides,
  221. int rows) {
  222. ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  223. I400ToARGB(data[0], strides[0], dest->argb, dest->argb_stride, dest->w, rows);
  224. dest->argb += rows * dest->argb_stride;
  225. dest->h -= rows;
  226. }
// MJPG (Motion JPeg) to ARGB
// TODO(fbarchard): review src_width and src_height requirement. dst_width and
// dst_height may be enough.
//
// Decodes one MJPEG frame into a caller-provided ARGB buffer. The frame's
// encoded dimensions must match src_width/src_height or decoding is refused.
// Supported inputs: YCbCr 4:2:0, 4:2:2, 4:4:4 and grayscale; each branch
// below checks the exact per-component sampling factors.
// Returns 0 on success, -1 if sample_size is kUnknownDataSize, and 1 for
// runtime failures (dimension mismatch, unsupported format, decode error).
LIBYUV_API
int MJPGToARGB(const uint8_t* sample,
               size_t sample_size,
               uint8_t* dst_argb,
               int dst_stride_argb,
               int src_width,
               int src_height,
               int dst_width,
               int dst_height) {
  if (sample_size == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }
  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret && (mjpeg_decoder.GetWidth() != src_width ||
              mjpeg_decoder.GetHeight() != src_height)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    // Cursor state consumed and advanced by the Jpeg*ToARGB row callbacks.
    ARGBBuffers bufs = {dst_argb, dst_stride_argb, dst_width, dst_height};
    // YUV420: Y sampled 2x2 relative to half-resolution chroma.
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI420ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV422: chroma halved horizontally only.
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV444: full-resolution chroma.
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV400: grayscale, single component.
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToARGB, &bufs, dst_width,
                                           dst_height);
    } else {
      // TODO(fbarchard): Implement conversion for any other colorspace/sample
      // factors that occur in practice.
      // ERROR: Unable to convert MJPEG frame because format is not supported
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}
  307. #endif
  308. #endif
  309. #ifdef __cplusplus
  310. } // extern "C"
  311. } // namespace libyuv
  312. #endif