/*
 * Microsoft Screen 2 (aka Windows Media Video V9 Screen) decoder
 */
#include "libavutil/avassert.h"
#include "msmpeg4data.h"
#include "vc1.h"
#include "mss12.h"
#include "mss2dsp.h"

typedef struct MSS2Context {
    VC1Context     v;
    int            split_position;
    AVFrame        pic;
    AVFrame        last_pic;
    MSS12Context   c;
    MSS2DSPContext dsp;
    SliceContext   sc[2];
} MSS2Context;

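/* Renormalise the 24-bit range coder: while the interval has shrunk below
 * 2^15, resolve a pending underflow around the midpoint (low and high
 * straddling bit 16) by flipping bit 15, then shift in another input byte. */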
static void arith2_normalise(ArithCoder *c)
{
    while ((c->high >> 15) - (c->low >> 15) < 2) {
        if ((c->low ^ c->high) & 0x10000) {
            c->high  ^= 0x8000;
            c->value ^= 0x8000;
            c->low   ^= 0x8000;
        }
        c->high  = c->high  << 8 & 0xFFFFFF | 0xFF;
        c->value = c->value << 8 & 0xFFFFFF | bytestream2_get_byte(c->gbc.gB);
        c->low   = c->low   << 8 & 0xFFFFFF;
    }
}

ARITH_GET_BIT(2)

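/* Piecewise mapping between the coder's current interval of width "range"
 * and a power-of-two-scaled alphabet of n symbols: the first 2 * n - range
 * values map one-to-one, the remaining values map in pairs. */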
static int arith2_get_scaled_value(int value, int n, int range)
{
    int split = (n << 1) - range;

    if (value > split)
        return split + (value - split >> 1);
    else
        return value;
}

static void arith2_rescale_interval(ArithCoder *c, int range,
                                    int low, int high, int n)
{
    int split = (n << 1) - range;

    if (high > split)
        c->high = split + (high - split << 1);
    else
        c->high = high;

    c->high += c->low - 1;

    if (low > split)
        c->low += split + (low - split << 1);
    else
        c->low += low;
}

static int arith2_get_number(ArithCoder *c, int n)
{
    int range = c->high - c->low + 1;
    int scale = av_log2(range) - av_log2(n);
    int val;

    if (n << scale > range)
        scale--;

    n <<= scale;

    val = arith2_get_scaled_value(c->value - c->low, n, range) >> scale;

    arith2_rescale_interval(c, range, val << scale, (val + 1) << scale, n);

    arith2_normalise(c);

    return val;
}

static int arith2_get_prob(ArithCoder *c, int16_t *probs)
{
    int range = c->high - c->low + 1, n = *probs;
    int scale = av_log2(range) - av_log2(n);
    int i = 0, val;

    if (n << scale > range)
        scale--;

    n <<= scale;

    val = arith2_get_scaled_value(c->value - c->low, n, range) >> scale;
    while (probs[++i] > val) ;

    arith2_rescale_interval(c, range,
                            probs[i] << scale, probs[i - 1] << scale, n);

    return i;
}

ARITH_GET_MODEL_SYM(2)

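/* Report how many input bytes the coder has actually committed to, so the
 * caller can resume byte-aligned parsing right after the coded data. */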
static int arith2_get_consumed_bytes(ArithCoder *c)
{
    int diff = (c->high >> 16) - (c->low >> 16);
    int bp   = bytestream2_tell(c->gbc.gB) - 3 << 3;
    int bits = 1;

    while (!(diff & 0x80)) {
        bits++;
        diff <<= 1;
    }

    return (bits + bp + 7 >> 3) + ((c->low >> 16) + 1 == c->high >> 16);
}

static void arith2_init(ArithCoder *c, GetByteContext *gB)
{
    c->low           = 0;
    c->high          = 0xFFFFFF;
    c->value         = bytestream2_get_be24(gB);
    c->gbc.gB        = gB;
    c->get_model_sym = arith2_get_model_sym;
    c->get_number    = arith2_get_number;
}

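/* Read an update for the "free" part of the palette from the buffer.
 * Returns the number of bytes consumed, or a negative value on error. */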
static int decode_pal_v2(MSS12Context *ctx, const uint8_t *buf, int buf_size)
{
    int i, ncol;
    uint32_t *pal = ctx->pal + 256 - ctx->free_colours;

    if (!ctx->free_colours)
        return 0;

    ncol = *buf++;
    if (ncol > ctx->free_colours || buf_size < 2 + ncol * 3)
        return -1;
    for (i = 0; i < ncol; i++)
        *pal++ = AV_RB24(buf + 3 * i);

    return 1 + ncol * 3;
}

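/* RLE decoder for 15-bit RGB (RGB555) frames.  Inter frames start with a
 * bounding box of the updated region coded as 12-bit components.  Escape
 * bytes select either a new literal pixel, a run length, a "copy the pixel
 * above" mode (last_symbol == -1) or a "skip" mode (last_symbol == -2). */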
static int decode_555(GetByteContext *gB, uint16_t *dst, int stride,
                      int keyframe, int w, int h)
{
    int last_symbol = 0, repeat = 0, prev_avail = 0;

    if (!keyframe) {
        int x, y, endx, endy, t;

#define READ_PAIR(a, b) \
    a  = bytestream2_get_byte(gB) << 4; \
    t  = bytestream2_get_byte(gB); \
    a |= t >> 4; \
    b  = (t & 0xF) << 8; \
    b |= bytestream2_get_byte(gB); \

        READ_PAIR(x, endx)
        READ_PAIR(y, endy)

        if (endx >= w || endy >= h || x > endx || y > endy)
            return -1;
        dst += x + stride * y;
        w    = endx - x + 1;
        h    = endy - y + 1;
        if (y)
            prev_avail = 1;
    }

    do {
        uint16_t *p = dst;
        do {
            if (repeat-- < 1) {
                int b = bytestream2_get_byte(gB);
                if (b < 128)
                    last_symbol = b << 8 | bytestream2_get_byte(gB);
                else if (b > 129) {
                    repeat = 0;
                    while (b-- > 130)
                        repeat = (repeat << 8) + bytestream2_get_byte(gB) + 1;
                    if (last_symbol == -2) {
                        int skip = FFMIN((unsigned)repeat, dst + w - p);
                        repeat -= skip;
                        p      += skip;
                    }
                } else
                    last_symbol = 127 - b;
            }
            if (last_symbol >= 0)
                *p = last_symbol;
            else if (last_symbol == -1 && prev_avail)
                *p = *(p - stride);
        } while (++p < dst + w);
        dst       += stride;
        prev_avail = 1;
    } while (--h);

    return 0;
}

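/* RLE decoder for the paletted layer.  A canonical prefix code over up to
 * 270 symbols (256 palette indices plus run-length and special codes) is
 * read from the bitstream first, then used to decode the pixels into both
 * the palette-index plane and the packed RGB output. */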
static int decode_rle(GetBitContext *gb, uint8_t *pal_dst, int pal_stride,
                      uint8_t *rgb_dst, int rgb_stride, uint32_t *pal,
                      int keyframe, int kf_slipt, int slice, int w, int h)
{
    uint8_t bits[270] = { 0 };
    uint32_t codes[270];
    VLC vlc;

    int current_length = 0, read_codes = 0, next_code = 0, current_codes = 0;
    int remaining_codes, surplus_codes, i;

    const int alphabet_size = 270 - keyframe;

    int last_symbol = 0, repeat = 0, prev_avail = 0;

    if (!keyframe) {
        int x, y, clipw, cliph;

        x     = get_bits(gb, 12);
        y     = get_bits(gb, 12);
        clipw = get_bits(gb, 12) + 1;
        cliph = get_bits(gb, 12) + 1;

        if (x + clipw > w || y + cliph > h)
            return AVERROR_INVALIDDATA;
        pal_dst += pal_stride * y + x;
        rgb_dst += rgb_stride * y + x * 3;
        w        = clipw;
        h        = cliph;
        if (y)
            prev_avail = 1;
    } else {
        if (slice > 0) {
            pal_dst   += pal_stride * kf_slipt;
            rgb_dst   += rgb_stride * kf_slipt;
            prev_avail = 1;
            h         -= kf_slipt;
        } else
            h = kf_slipt;
    }

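    /* read the explicitly coded symbols, one code length at a time,
     * building a canonical prefix code */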
    do {
        while (current_codes--) {
            int symbol = get_bits(gb, 8);
            if (symbol >= 204 - keyframe)
                symbol += 14 - keyframe;
            else if (symbol > 189)
                symbol = get_bits1(gb) + (symbol << 1) - 190;
            if (bits[symbol])
                return AVERROR_INVALIDDATA;
            bits[symbol]  = current_length;
            codes[symbol] = next_code++;
            read_codes++;
        }
        current_length++;
        next_code <<= 1;
        remaining_codes = (1 << current_length) - next_code;
        current_codes   = get_bits(gb, av_ceil_log2(remaining_codes + 1));
        if (current_length > 22 || current_codes > remaining_codes)
            return AVERROR_INVALIDDATA;
    } while (current_codes != remaining_codes);

    remaining_codes = alphabet_size - read_codes;

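    /* grow the code length until the remaining (implicitly coded) symbols
     * fit into the unused code space */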
    while ((surplus_codes = (2 << current_length) -
                            (next_code << 1) - remaining_codes) < 0) {
        current_length++;
        next_code <<= 1;
    }

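    /* assign codes to the remaining symbols in index order, switching to
     * the longer length once the shorter codes run out */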
    for (i = 0; i < alphabet_size; i++)
        if (!bits[i]) {
            if (surplus_codes-- == 0) {
                current_length++;
                next_code <<= 1;
            }
            bits[i]  = current_length;
            codes[i] = next_code++;
        }

    if (next_code != 1 << current_length)
        return AVERROR_INVALIDDATA;

    if (i = init_vlc(&vlc, 9, alphabet_size, bits, 1, 1, codes, 4, 4, 0))
        return i;

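    /* decode the pixel data with the constructed VLC; negative last_symbol
     * values select "copy from the row above" (-1) or "skip" (-2) */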
    do {
        uint8_t *pp = pal_dst;
        uint8_t *rp = rgb_dst;
        do {
            if (repeat-- < 1) {
                int b = get_vlc2(gb, vlc.table, 9, 3);
                if (b < 256)
                    last_symbol = b;
                else if (b < 268) {
                    b -= 256;
                    if (b == 11)
                        b = get_bits(gb, 4) + 10;

                    if (!b)
                        repeat = 0;
                    else
                        repeat = get_bits(gb, b);

                    repeat += (1 << b) - 1;

                    if (last_symbol == -2) {
                        int skip = FFMIN(repeat, pal_dst + w - pp);
                        repeat -= skip;
                        pp     += skip;
                        rp     += skip * 3;
                    }
                } else
                    last_symbol = 267 - b;
            }
            if (last_symbol >= 0) {
                *pp = last_symbol;
                AV_WB24(rp, pal[last_symbol]);
            } else if (last_symbol == -1 && prev_avail) {
                *pp = *(pp - pal_stride);
                memcpy(rp, rp - rgb_stride, 3);
            }
            rp += 3;
        } while (++pp < pal_dst + w);
        pal_dst   += pal_stride;
        rgb_dst   += rgb_stride;
        prev_avail = 1;
    } while (--h);

    ff_free_vlc(&vlc);
    return 0;
}

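/* Decode an embedded WMV9 (VC-1 main profile) intra frame covering the given
 * rectangle and blit it, optionally masked by the palette plane, into the
 * RGB output picture. */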
static int decode_wmv9(AVCodecContext *avctx, const uint8_t *buf, int buf_size,
                       int x, int y, int w, int h, int wmv9_mask)
{
    MSS2Context *ctx  = avctx->priv_data;
    MSS12Context *c   = &ctx->c;
    VC1Context *v     = avctx->priv_data;
    MpegEncContext *s = &v->s;
    AVFrame *f;

    ff_mpeg_flush(avctx);

    if (s->current_picture_ptr == NULL || s->current_picture_ptr->f.data[0]) {
        int i = ff_find_unused_picture(s, 0);
        if (i < 0)
            return -1;
        s->current_picture_ptr = &s->picture[i];
    }

    init_get_bits(&s->gb, buf, buf_size * 8);

    s->loop_filter = avctx->skip_loop_filter < AVDISCARD_ALL;

    if (ff_vc1_parse_frame_header(v, &s->gb) == -1) {
        av_log(v->s.avctx, AV_LOG_ERROR, "header error\n");
        return AVERROR_INVALIDDATA;
    }

    if (s->pict_type != AV_PICTURE_TYPE_I) {
        av_log(v->s.avctx, AV_LOG_ERROR, "expected I-frame\n");
        return AVERROR_INVALIDDATA;
    }

    avctx->pix_fmt = PIX_FMT_YUV420P;

    if (ff_MPV_frame_start(s, avctx) < 0) {
        av_log(v->s.avctx, AV_LOG_ERROR, "ff_MPV_frame_start error\n");
        avctx->pix_fmt = PIX_FMT_RGB24;
        return -1;
    }

    ff_er_frame_start(s);

    v->bits = buf_size * 8;

    v->end_mb_x = (w + 15) >> 4;
    s->end_mb_y = (h + 15) >> 4;
    if (v->respic & 1)
        v->end_mb_x = v->end_mb_x + 1 >> 1;
    if (v->respic & 2)
        s->end_mb_y = s->end_mb_y + 1 >> 1;

    ff_vc1_decode_blocks(v);

    ff_er_frame_end(s);

    ff_MPV_frame_end(s);

    f = &s->current_picture.f;

    if (v->respic == 3) {
        ctx->dsp.upsample_plane(f->data[0], f->linesize[0], w,      h);
        ctx->dsp.upsample_plane(f->data[1], f->linesize[1], w >> 1, h >> 1);
        ctx->dsp.upsample_plane(f->data[2], f->linesize[2], w >> 1, h >> 1);
    } else if (v->respic)
        av_log_ask_for_sample(v->s.avctx,
                              "Asymmetric WMV9 rectangle subsampling\n");

    av_assert0(f->linesize[1] == f->linesize[2]);

    if (wmv9_mask != -1)
        ctx->dsp.mss2_blit_wmv9_masked(c->rgb_pic + y * c->rgb_stride + x * 3,
                                       c->rgb_stride, wmv9_mask,
                                       c->pal_pic + y * c->pal_stride + x,
                                       c->pal_stride,
                                       f->data[0], f->linesize[0],
                                       f->data[1], f->data[2], f->linesize[1],
                                       w, h);
    else
        ctx->dsp.mss2_blit_wmv9(c->rgb_pic + y * c->rgb_stride + x * 3,
                                c->rgb_stride,
                                f->data[0], f->linesize[0],
                                f->data[1], f->data[2], f->linesize[1],
                                w, h);

    avctx->pix_fmt = PIX_FMT_RGB24;

    return 0;
}

typedef struct Rectangle {
    int coded, x, y, w, h;
} Rectangle;

#define MAX_WMV9_RECTANGLES 20
#define ARITH2_PADDING 2

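/* Top-level frame decoding: parse the header bits, the optional list of WMV9
 * rectangles (arithmetic coded), the palette update or motion vector, then
 * the RGB555, RLE or arithmetic-coded image layers, and finally the embedded
 * WMV9 rectangle payloads. */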
static int mss2_decode_frame(AVCodecContext *avctx, void *data, int *data_size,
                             AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size       = avpkt->size;
    MSS2Context *ctx   = avctx->priv_data;
    MSS12Context *c    = &ctx->c;
    GetBitContext gb;
    GetByteContext gB;
    ArithCoder acoder;

    int keyframe, has_wmv9, has_mv, is_rle, is_555, ret;

    Rectangle wmv9rects[MAX_WMV9_RECTANGLES], *r;
    int used_rects = 0, i, implicit_rect, av_uninit(wmv9_mask);

    av_assert0(FF_INPUT_BUFFER_PADDING_SIZE >=
               ARITH2_PADDING + (MIN_CACHE_BITS + 7) / 8);

    init_get_bits(&gb, buf, buf_size * 8);

    if (keyframe = get_bits1(&gb))
        skip_bits(&gb, 7);
    has_wmv9 = get_bits1(&gb);
    has_mv   = keyframe ? 0 : get_bits1(&gb);
    is_rle   = get_bits1(&gb);
    is_555   = is_rle && get_bits1(&gb);
    if (c->slice_split > 0)
        ctx->split_position = c->slice_split;
    else if (c->slice_split < 0) {
        if (get_bits1(&gb)) {
            if (get_bits1(&gb)) {
                if (get_bits1(&gb))
                    ctx->split_position = get_bits(&gb, 16);
                else
                    ctx->split_position = get_bits(&gb, 12);
            } else
                ctx->split_position = get_bits(&gb, 8) << 4;
        } else {
            if (keyframe)
                ctx->split_position = avctx->height / 2;
        }
    } else
        ctx->split_position = avctx->height;

    if (c->slice_split && (ctx->split_position < 1 - is_555 ||
                           ctx->split_position > avctx->height - 1))
        return AVERROR_INVALIDDATA;

    align_get_bits(&gb);
    buf      += get_bits_count(&gb) >> 3;
    buf_size -= get_bits_count(&gb) >> 3;

    if (buf_size < 1)
        return AVERROR_INVALIDDATA;

    if (is_555 && (has_wmv9 || has_mv || c->slice_split && ctx->split_position))
        return AVERROR_INVALIDDATA;

    avctx->pix_fmt = is_555 ? PIX_FMT_RGB555 : PIX_FMT_RGB24;
    if (ctx->pic.data[0] && ctx->pic.format != avctx->pix_fmt)
        avctx->release_buffer(avctx, &ctx->pic);

    if (has_wmv9) {
        bytestream2_init(&gB, buf, buf_size + ARITH2_PADDING);
        arith2_init(&acoder, &gB);

        implicit_rect = !arith2_get_bit(&acoder);

        while (arith2_get_bit(&acoder)) {
            if (used_rects == MAX_WMV9_RECTANGLES)
                return AVERROR_INVALIDDATA;
            r = &wmv9rects[used_rects];
            if (!used_rects)
                r->x = arith2_get_number(&acoder, avctx->width);
            else
                r->x = arith2_get_number(&acoder, avctx->width -
                                         wmv9rects[used_rects - 1].x) +
                       wmv9rects[used_rects - 1].x;
            r->y = arith2_get_number(&acoder, avctx->height);
            r->w = arith2_get_number(&acoder, avctx->width  - r->x) + 1;
            r->h = arith2_get_number(&acoder, avctx->height - r->y) + 1;
            used_rects++;
        }

        if (implicit_rect && used_rects) {
            av_log(avctx, AV_LOG_ERROR, "implicit_rect && used_rects > 0\n");
            return AVERROR_INVALIDDATA;
        }

        if (implicit_rect) {
            wmv9rects[0].x = 0;
            wmv9rects[0].y = 0;
            wmv9rects[0].w = avctx->width;
            wmv9rects[0].h = avctx->height;

            used_rects = 1;
        }
        for (i = 0; i < used_rects; i++) {
            if (!implicit_rect && arith2_get_bit(&acoder)) {
                av_log(avctx, AV_LOG_ERROR, "Unexpected grandchildren\n");
                return AVERROR_INVALIDDATA;
            }
            if (!i) {
                wmv9_mask = arith2_get_bit(&acoder) - 1;
                if (!wmv9_mask)
                    wmv9_mask = arith2_get_number(&acoder, 256);
            }
            wmv9rects[i].coded = arith2_get_number(&acoder, 2);
        }

        buf      += arith2_get_consumed_bytes(&acoder);
        buf_size -= arith2_get_consumed_bytes(&acoder);
        if (buf_size < 1)
            return AVERROR_INVALIDDATA;
    }

    c->mvX = c->mvY = 0;
    if (keyframe && !is_555) {
        if ((i = decode_pal_v2(c, buf, buf_size)) < 0)
            return AVERROR_INVALIDDATA;
        buf      += i;
        buf_size -= i;
    } else if (has_mv) {
        buf      += 4;
        buf_size -= 4;
        if (buf_size < 1)
            return AVERROR_INVALIDDATA;
        c->mvX = AV_RB16(buf - 4) - avctx->width;
        c->mvY = AV_RB16(buf - 2) - avctx->height;
    }

    if (c->mvX < 0 || c->mvY < 0) {
        FFSWAP(AVFrame, ctx->pic, ctx->last_pic);
        FFSWAP(uint8_t *, c->pal_pic, c->last_pal_pic);

        if (ctx->pic.data[0])
            avctx->release_buffer(avctx, &ctx->pic);

        ctx->pic.reference    = 3;
        ctx->pic.buffer_hints = FF_BUFFER_HINTS_VALID    |
                                FF_BUFFER_HINTS_READABLE |
                                FF_BUFFER_HINTS_PRESERVE |
                                FF_BUFFER_HINTS_REUSABLE;

        if ((ret = avctx->get_buffer(avctx, &ctx->pic)) < 0) {
            av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
            return ret;
        }

        if (ctx->last_pic.data[0]) {
            av_assert0(ctx->pic.linesize[0] == ctx->last_pic.linesize[0]);
            c->last_rgb_pic = ctx->last_pic.data[0] +
                              ctx->last_pic.linesize[0] * (avctx->height - 1);
        } else {
            av_log(avctx, AV_LOG_ERROR, "Missing keyframe\n");
            return -1;
        }
    } else {
        if (ctx->last_pic.data[0])
            avctx->release_buffer(avctx, &ctx->last_pic);

        ctx->pic.reference    = 3;
        ctx->pic.buffer_hints = FF_BUFFER_HINTS_VALID    |
                                FF_BUFFER_HINTS_READABLE |
                                FF_BUFFER_HINTS_PRESERVE |
                                FF_BUFFER_HINTS_REUSABLE;

        if ((ret = avctx->reget_buffer(avctx, &ctx->pic)) < 0) {
            av_log(avctx, AV_LOG_ERROR, "reget_buffer() failed\n");
            return ret;
        }

        c->last_rgb_pic = NULL;
    }
    c->rgb_pic    = ctx->pic.data[0] +
                    ctx->pic.linesize[0] * (avctx->height - 1);
    c->rgb_stride = -ctx->pic.linesize[0];

    ctx->pic.key_frame = keyframe;
    ctx->pic.pict_type = keyframe ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;

    if (is_555) {
        bytestream2_init(&gB, buf, buf_size);

        if (decode_555(&gB, (uint16_t *)c->rgb_pic, c->rgb_stride >> 1,
                       keyframe, avctx->width, avctx->height))
            return AVERROR_INVALIDDATA;

        buf_size -= bytestream2_tell(&gB);
    } else if (is_rle) {
        init_get_bits(&gb, buf, buf_size * 8);
        if (ret = decode_rle(&gb, c->pal_pic, c->pal_stride,
                             c->rgb_pic, c->rgb_stride, c->pal, keyframe,
                             ctx->split_position, 0,
                             avctx->width, avctx->height))
            return ret;
        align_get_bits(&gb);

        if (c->slice_split)
            if (ret = decode_rle(&gb, c->pal_pic, c->pal_stride,
                                 c->rgb_pic, c->rgb_stride, c->pal, keyframe,
                                 ctx->split_position, 1,
                                 avctx->width, avctx->height))
                return ret;

        align_get_bits(&gb);
        buf      += get_bits_count(&gb) >> 3;
        buf_size -= get_bits_count(&gb) >> 3;
    } else {
        if (keyframe) {
            c->corrupted = 0;
            ff_mss12_slicecontext_reset(&ctx->sc[0]);
            if (c->slice_split)
                ff_mss12_slicecontext_reset(&ctx->sc[1]);
        } else if (c->corrupted)
            return AVERROR_INVALIDDATA;
        bytestream2_init(&gB, buf, buf_size + ARITH2_PADDING);
        arith2_init(&acoder, &gB);
        c->keyframe = keyframe;
        if (c->corrupted = ff_mss12_decode_rect(&ctx->sc[0], &acoder, 0, 0,
                                                avctx->width,
                                                ctx->split_position))
            return AVERROR_INVALIDDATA;

        buf      += arith2_get_consumed_bytes(&acoder);
        buf_size -= arith2_get_consumed_bytes(&acoder);
        if (c->slice_split) {
            if (buf_size < 1)
                return AVERROR_INVALIDDATA;
            bytestream2_init(&gB, buf, buf_size + ARITH2_PADDING);
            arith2_init(&acoder, &gB);
            if (c->corrupted = ff_mss12_decode_rect(&ctx->sc[1], &acoder, 0,
                                                    ctx->split_position,
                                                    avctx->width,
                                                    avctx->height - ctx->split_position))
                return AVERROR_INVALIDDATA;

            buf      += arith2_get_consumed_bytes(&acoder);
            buf_size -= arith2_get_consumed_bytes(&acoder);
        }
    }

    if (has_wmv9) {
        for (i = 0; i < used_rects; i++) {
            int x = wmv9rects[i].x;
            int y = wmv9rects[i].y;
            int w = wmv9rects[i].w;
            int h = wmv9rects[i].h;
            if (wmv9rects[i].coded) {
                int WMV9codedFrameSize;
                if (buf_size < 4 || !(WMV9codedFrameSize = AV_RL24(buf)))
                    return AVERROR_INVALIDDATA;
                if (ret = decode_wmv9(avctx, buf + 3, buf_size - 3,
                                      x, y, w, h, wmv9_mask))
                    return ret;
                buf      += WMV9codedFrameSize + 3;
                buf_size -= WMV9codedFrameSize + 3;
            } else {
                uint8_t *dst = c->rgb_pic + y * c->rgb_stride + x * 3;
                if (wmv9_mask != -1) {
                    ctx->dsp.mss2_gray_fill_masked(dst, c->rgb_stride,
                                                   wmv9_mask,
                                                   c->pal_pic + y * c->pal_stride + x,
                                                   c->pal_stride,
                                                   w, h);
                } else {
                    do {
                        memset(dst, 0x80, w * 3);
                        dst += c->rgb_stride;
                    } while (--h);
                }
            }
        }
    }

    if (buf_size)
        av_log(avctx, AV_LOG_WARNING, "buffer not fully consumed\n");

    *data_size       = sizeof(AVFrame);
    *(AVFrame *)data = ctx->pic;

    return avpkt->size;
}

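/* Set up the embedded VC-1 decoder.  MSS2 streams carry no VC-1 sequence
 * header, so the relevant fields are hard-coded here. */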
static av_cold int wmv9_init(AVCodecContext *avctx)
{
    VC1Context *v = avctx->priv_data;

    v->s.avctx    = avctx;
    avctx->flags |= CODEC_FLAG_EMU_EDGE;
    v->s.flags   |= CODEC_FLAG_EMU_EDGE;

    if (avctx->idct_algo == FF_IDCT_AUTO)
        avctx->idct_algo = FF_IDCT_WMV2;

    if (ff_vc1_init_common(v) < 0)
        return -1;
    ff_vc1dsp_init(&v->vc1dsp);

    v->profile = PROFILE_MAIN;

    v->zz_8x4     = ff_wmv2_scantableA;
    v->zz_4x8     = ff_wmv2_scantableB;
    v->res_y411   = 0;
    v->res_sprite = 0;

    v->frmrtq_postproc = 7;
    v->bitrtq_postproc = 31;

    v->res_x8     = 0;
    v->multires   = 0;
    v->res_fasttx = 1;

    v->fastuvmc = 0;

    v->extended_mv = 0;

    v->dquant      = 1;
    v->vstransform = 1;

    v->res_transtab = 0;

    v->overlap = 0;

    v->s.resync_marker = 0;
    v->rangered        = 0;

    v->s.max_b_frames = avctx->max_b_frames = 0;
    v->quantizer_mode = 0;

    v->finterpflag = 0;

    v->res_rtm_flag = 1;

    ff_vc1_init_transposed_scantables(v);

    if (ff_msmpeg4_decode_init(avctx) < 0 ||
        ff_vc1_decode_init_alloc_tables(v) < 0)
        return -1;

    v->s.me.qpel_put = v->s.dsp.put_qpel_pixels_tab;
    v->s.me.qpel_avg = v->s.dsp.avg_qpel_pixels_tab;

    return 0;
}

static av_cold int mss2_decode_end(AVCodecContext *avctx)
{
    MSS2Context *const ctx = avctx->priv_data;

    if (ctx->pic.data[0])
        avctx->release_buffer(avctx, &ctx->pic);
    if (ctx->last_pic.data[0])
        avctx->release_buffer(avctx, &ctx->last_pic);

    ff_mss12_decode_end(&ctx->c);
    av_freep(&ctx->c.pal_pic);
    av_freep(&ctx->c.last_pal_pic);
    ff_vc1_decode_end(avctx);

    return 0;
}

static av_cold int mss2_decode_init(AVCodecContext *avctx)
{
    MSS2Context * const ctx = avctx->priv_data;
    MSS12Context *c = &ctx->c;
    int ret;
    c->avctx = avctx;
    avctx->coded_frame = &ctx->pic;
    if (ret = ff_mss12_decode_init(c, 1, &ctx->sc[0], &ctx->sc[1]))
        return ret;
    c->pal_stride   = c->mask_stride;
    c->pal_pic      = av_malloc(c->pal_stride * avctx->height);
    c->last_pal_pic = av_malloc(c->pal_stride * avctx->height);
    if (!c->pal_pic || !c->last_pal_pic) {
        mss2_decode_end(avctx);
        return AVERROR(ENOMEM);
    }
    if (ret = wmv9_init(avctx)) {
        mss2_decode_end(avctx);
        return ret;
    }
    ff_mss2dsp_init(&ctx->dsp);

    avctx->pix_fmt = c->free_colours == 127 ? PIX_FMT_RGB555
                                            : PIX_FMT_RGB24;

    return 0;
}

AVCodec ff_mss2_decoder = {
    .name           = "mss2",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_MSS2,
    .priv_data_size = sizeof(MSS2Context),
    .init           = mss2_decode_init,
    .close          = mss2_decode_end,
    .decode         = mss2_decode_frame,
    .capabilities   = CODEC_CAP_DR1,
    .long_name      = NULL_IF_CONFIG_SMALL("MS Windows Media Video V9 Screen"),
};