38 #define JP2_SIG_TYPE 0x6A502020
39 #define JP2_SIG_VALUE 0x0D0A870A
43 #define RGB_PIXEL_FORMATS AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA, \
46 #define GRAY_PIXEL_FORMATS AV_PIX_FMT_GRAY8, AV_PIX_FMT_Y400A, \
49 #define YUV_PIXEL_FORMATS AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV411P, \
50 AV_PIX_FMT_YUVA420P, \
51 AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, \
52 AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV444P, \
53 AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV422P9, \
54 AV_PIX_FMT_YUV444P9, \
55 AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, \
56 AV_PIX_FMT_YUV444P10, \
57 AV_PIX_FMT_YUV420P16, AV_PIX_FMT_YUV422P16, \
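The macro lists above only become useful once they are expanded into candidate tables that the pixel-format guessing code below can scan. A minimal sketch of that expansion, assuming the table names rgb_pix_fmts, gray_pix_fmts, yuv_pix_fmts and any_pix_fmts used elsewhere in this file (the elided macro continuation lines are not reproduced):

/* Sketch: expanding the candidate-format macros into scannable tables. */
static const enum AVPixelFormat rgb_pix_fmts[]  = { RGB_PIXEL_FORMATS };
static const enum AVPixelFormat gray_pix_fmts[] = { GRAY_PIXEL_FORMATS };
static const enum AVPixelFormat yuv_pix_fmts[]  = { YUV_PIXEL_FORMATS };
static const enum AVPixelFormat any_pix_fmts[]  = { RGB_PIXEL_FORMATS,
                                                    GRAY_PIXEL_FORMATS,
                                                    YUV_PIXEL_FORMATS };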
89 1 == img->comps[3].dx &&
90 1 == img->comps[3].dy;
104 1 == img->comps[0].dx &&
105 1 == img->comps[0].dy;
117 int possible_fmts_nb = 0;
119 switch (image->color_space) {
138 for (index = 0; index < possible_fmts_nb; ++index) {
140 return possible_fmts[index];
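Only the skeleton of libopenjpeg_guess_pix_fmt() appears in the listing above; the elided switch body selects one of the candidate tables by OpenJPEG colorspace before the loop probes them with libopenjpeg_matches_pix_fmt(). A hedged sketch of the overall shape, under the assumption that the case labels follow the OpenJPEG 1.x colorspace enum (guess_pix_fmt_sketch is an illustrative name, not the function in the file):

/* Sketch of the selection logic around lines 117-140: pick a candidate table
 * by colorspace, then return the first format whose layout matches the image. */
static enum AVPixelFormat guess_pix_fmt_sketch(const opj_image_t *image)
{
    const enum AVPixelFormat *possible_fmts = NULL;
    int possible_fmts_nb = 0;
    int index;

    switch (image->color_space) {
    case CLRSPC_SRGB:                       /* packed/planar RGB candidates */
        possible_fmts    = rgb_pix_fmts;
        possible_fmts_nb = FF_ARRAY_ELEMS(rgb_pix_fmts);
        break;
    case CLRSPC_GRAY:                       /* grayscale candidates */
        possible_fmts    = gray_pix_fmts;
        possible_fmts_nb = FF_ARRAY_ELEMS(gray_pix_fmts);
        break;
    case CLRSPC_SYCC:                       /* YUV candidates */
        possible_fmts    = yuv_pix_fmts;
        possible_fmts_nb = FF_ARRAY_ELEMS(yuv_pix_fmts);
        break;
    default:                                /* unknown colorspace: try everything */
        possible_fmts    = any_pix_fmts;
        possible_fmts_nb = FF_ARRAY_ELEMS(any_pix_fmts);
        break;
    }

    for (index = 0; index < possible_fmts_nb; ++index)
        if (libopenjpeg_matches_pix_fmt(image, possible_fmts[index]))
            return possible_fmts[index];

    return AV_PIX_FMT_NONE;
}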
150 int i, component_plane;
157 if (component_plane != desc->comp[i].plane)
168 for (y = 0; y < picture->height; y++) {
169 index = y*picture->width;
171 for (x = 0; x < picture->width; x++, index++) {
172 for (c = 0; c < image->numcomps; c++) {
173 *img_ptr++ = image->comps[c].data[index];
185 for (x = 0; x < image->numcomps; x++)
186 adjust[x] = FFMAX(FFMIN(16 - image->comps[x].prec, 8), 0);
188 for (y = 0; y < picture->height; y++) {
189 index = y*picture->width;
190 img_ptr = (uint16_t*) (picture->data[0] + y*picture->linesize[0]);
191 for (x = 0; x < picture->width; x++, index++) {
192 for (c = 0; c < image->numcomps; c++) {
193 *img_ptr++ = image->comps[c].data[index] << adjust[c];
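The adjust[] table left-aligns each component's samples in the 16-bit packed output: a component stored with prec significant bits is shifted up by 16 - prec, and the shift is clamped to the range 0..8 so that over-deep or very shallow components cannot produce a negative or excessive shift. A short worked example with illustrative precisions (not values taken from the file):

/* Worked example of the adjust[] rule:
 *   prec = 16 -> FFMAX(FFMIN(16 - 16, 8), 0) = 0 : samples copied unchanged
 *   prec = 12 -> FFMAX(FFMIN(16 - 12, 8), 0) = 4 : 0x0FFF << 4 = 0xFFF0
 *   prec =  8 -> FFMAX(FFMIN(16 -  8, 8), 0) = 8 : 0x00FF << 8 = 0xFF00 */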
205 for (index = 0; index < image->numcomps; index++) {
206 comp_data = image->comps[index].data;
207 for (y = 0; y < image->comps[index].h; y++) {
209 for (x = 0; x < image->comps[index].w; x++) {
210 *img_ptr = (uint8_t) *comp_data;
224 for (index = 0; index < image->numcomps; index++) {
225 comp_data = image->comps[index].data;
226 for (y = 0; y < image->comps[index].h; y++) {
227 img_ptr = (uint16_t*) (p->data[index] + y * p->linesize[index]);
228 for (x = 0; x < image->comps[index].w; x++) {
229 *img_ptr = *comp_data;
241 opj_set_default_decoder_parameters(&ctx->dec_params);
256 void *data, int *got_frame,
260 int buf_size = avpkt->size;
278 dec = opj_create_decompress(CODEC_JP2);
284 dec = opj_create_decompress(CODEC_J2K);
291 opj_set_event_mgr((opj_common_ptr)dec, NULL, NULL);
293 ctx->dec_params.cp_limit_decoding = LIMIT_TO_MAIN_HEADER;
298 stream = opj_cio_open((opj_common_ptr)dec, buf, buf_size);
302 "Codestream could not be opened for reading.\n");
303 opj_destroy_decompress(dec);
308 image = opj_decode_with_info(dec, stream, NULL);
309 opj_cio_close(stream);
313 opj_destroy_decompress(dec);
317 width = image->x1 - image->x0;
318 height = image->y1 - image->y0;
321 width = (width + (1 << ctx->lowres) - 1) >> ctx->lowres;
322 height = (height + (1 << ctx->lowres) - 1) >> ctx->lowres;
327 "%dx%d dimension invalid.\n", width, height);
346 for (i = 0; i < image->numcomps; i++)
350 if (picture->data[0])
358 ctx->dec_params.cp_limit_decoding = NO_LIMITATION;
361 stream = opj_cio_open((opj_common_ptr)dec, buf, buf_size);
364 "Codestream could not be opened for reading.\n");
368 opj_image_destroy(image);
370 image = opj_decode_with_info(dec, stream, NULL);
371 opj_cio_close(stream);
382 switch (pixel_size) {
414 *output = ctx->image;
419 opj_image_destroy(image);
420 opj_destroy_decompress(dec);
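The decode path above is a two-pass use of the legacy OpenJPEG 1.x API: a first pass with cp_limit_decoding = LIMIT_TO_MAIN_HEADER parses only the main header so the wrapper can learn the image dimensions (image->x1 - image->x0, scaled down by lowres) and allocate its output frame, then a second pass with NO_LIMITATION decodes the full codestream. A minimal, hedged sketch of that call sequence against the OpenJPEG 1.x API alone, with error handling and the FFmpeg-side frame allocation trimmed (decode_j2k_twopass is a hypothetical helper, not part of this file):

#include <openjpeg.h>

static opj_image_t *decode_j2k_twopass(unsigned char *buf, int buf_size,
                                       OPJ_CODEC_FORMAT format, int lowres)
{
    opj_dparameters_t params;
    opj_dinfo_t *dec;
    opj_cio_t *stream;
    opj_image_t *image;

    opj_set_default_decoder_parameters(&params);
    params.cp_reduce = lowres;                 /* discard resolution levels */

    dec = opj_create_decompress(format);       /* CODEC_JP2 or CODEC_J2K */
    opj_set_event_mgr((opj_common_ptr)dec, NULL, NULL);

    /* Pass 1: parse only the main header to learn dimensions and components. */
    params.cp_limit_decoding = LIMIT_TO_MAIN_HEADER;
    opj_setup_decoder(dec, &params);
    stream = opj_cio_open((opj_common_ptr)dec, buf, buf_size);
    image  = opj_decode_with_info(dec, stream, NULL);
    opj_cio_close(stream);
    if (!image) {
        opj_destroy_decompress(dec);
        return NULL;
    }

    /* ...the caller would size and allocate its output buffers here... */

    /* Pass 2: decode the full codestream; the header-only image is released
     * before the decoder produces the real one. */
    params.cp_limit_decoding = NO_LIMITATION;
    opj_setup_decoder(dec, &params);
    stream = opj_cio_open((opj_common_ptr)dec, buf, buf_size);
    opj_image_destroy(image);
    image = opj_decode_with_info(dec, stream, NULL);
    opj_cio_close(stream);

    opj_destroy_decompress(dec);
    return image;
}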
433 #define OFFSET(x) offsetof(LibOpenJPEGContext, x)
434 #define VD AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM
437 { "lowqual", "Limit the number of layers used for decoding", OFFSET(lowqual), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, VD },
438 { "lowres", "Lower the decoding resolution by a power of two", OFFSET(lowres), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, VD },
450 .name = "libopenjpeg",
459 .priv_class = &class,
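The two private options declared above map onto OpenJPEG's cp_layer and cp_reduce decoding parameters. A hedged usage sketch for opening this wrapper with both options set through the public libavcodec API; the option names come from the table above, while the surrounding boilerplate uses current libavcodec helpers rather than the API of the era this file was written for (open_libopenjpeg_decoder is an illustrative helper name):

/* Sketch: opening the libopenjpeg wrapper with its private options set.
 * Error handling is reduced to early returns for brevity. */
#include <libavcodec/avcodec.h>
#include <libavutil/dict.h>

static AVCodecContext *open_libopenjpeg_decoder(void)
{
    const AVCodec *codec = avcodec_find_decoder_by_name("libopenjpeg");
    AVCodecContext *avctx;
    AVDictionary *opts = NULL;

    if (!codec)
        return NULL;
    avctx = avcodec_alloc_context3(codec);
    if (!avctx)
        return NULL;

    av_dict_set(&opts, "lowres",  "1", 0);  /* halve the decoded resolution      */
    av_dict_set(&opts, "lowqual", "2", 0);  /* limit the quality layers decoded  */

    if (avcodec_open2(avctx, codec, &opts) < 0) {
        av_dict_free(&opts);
        avcodec_free_context(&avctx);
        return NULL;
    }
    av_dict_free(&opts);
    return avctx;
}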