project (stringclasses, 2 values) | commit_id (stringlengths, 40–40) | target (int64, 0–1) | func (stringlengths, 26–142k) | idx (int64, 0–27.3k)
---|---|---|---|---|
qemu | a8170e5e97ad17ca169c64ba87ae2f53850dab4c | 0 | static void m5206_mbar_writeb(void *opaque, target_phys_addr_t offset,
uint32_t value)
{
m5206_mbar_state *s = (m5206_mbar_state *)opaque;
int width;
offset &= 0x3ff;
if (offset >= 0x200) {
hw_error("Bad MBAR write offset 0x%x", (int)offset);
}
width = m5206_mbar_width[offset >> 2];
if (width > 1) {
uint32_t tmp;
tmp = m5206_mbar_readw(opaque, offset & ~1);
if (offset & 1) {
tmp = (tmp & 0xff00) | value;
} else {
tmp = (tmp & 0x00ff) | (value << 8);
}
m5206_mbar_writew(opaque, offset & ~1, tmp);
return;
}
m5206_mbar_write(s, offset, value, 1);
}
| 20,991 |
qemu | c88d6bded69804617f412a60c7375cc93f8687a5 | 0 | static void mv88w8618_pit_write(void *opaque, target_phys_addr_t offset,
uint32_t value)
{
mv88w8618_pit_state *s = opaque;
mv88w8618_timer_state *t;
int i;
switch (offset) {
case MP_PIT_TIMER1_LENGTH ... MP_PIT_TIMER4_LENGTH:
t = &s->timer[offset >> 2];
t->limit = value;
ptimer_set_limit(t->ptimer, t->limit, 1);
break;
case MP_PIT_CONTROL:
for (i = 0; i < 4; i++) {
if (value & 0xf) {
t = &s->timer[i];
ptimer_set_limit(t->ptimer, t->limit, 0);
ptimer_set_freq(t->ptimer, t->freq);
ptimer_run(t->ptimer, 0);
}
value >>= 4;
}
break;
case MP_BOARD_RESET:
if (value == MP_BOARD_RESET_MAGIC) {
qemu_system_reset_request();
}
break;
}
}
| 20,993 |
qemu | 72cf2d4f0e181d0d3a3122e04129c58a95da713e | 0 | static void audio_detach_capture (HWVoiceOut *hw)
{
SWVoiceCap *sc = hw->cap_head.lh_first;
while (sc) {
SWVoiceCap *sc1 = sc->entries.le_next;
SWVoiceOut *sw = &sc->sw;
CaptureVoiceOut *cap = sc->cap;
int was_active = sw->active;
if (sw->rate) {
st_rate_stop (sw->rate);
sw->rate = NULL;
}
LIST_REMOVE (sw, entries);
LIST_REMOVE (sc, entries);
qemu_free (sc);
if (was_active) {
/* We have removed soft voice from the capture:
this might have changed the overall status of the capture
since this might have been the only active voice */
audio_recalc_and_notify_capture (cap);
}
sc = sc1;
}
}
| 20,995 |
qemu | 75973bfe415774babe7c1e18fa682c050fdce73b | 0 | unsigned s390_del_running_cpu(S390CPU *cpu)
{
CPUState *cs = CPU(cpu);
if (cs->halted == 0) {
assert(s390_running_cpus >= 1);
s390_running_cpus--;
cs->halted = 1;
cs->exception_index = EXCP_HLT;
}
return s390_running_cpus;
}
| 20,998 |
qemu | 3c529d935923a70519557d420db1d5a09a65086a | 0 | static int raw_fd_pool_get(BDRVRawState *s)
{
int i;
for (i = 0; i < RAW_FD_POOL_SIZE; i++) {
/* already in use */
if (s->fd_pool[i] != -1)
continue;
/* try to dup file descriptor */
s->fd_pool[i] = dup(s->fd);
if (s->fd_pool[i] != -1)
return s->fd_pool[i];
}
/* we couldn't dup the file descriptor so just use the main one */
return s->fd;
}
| 20,999 |
qemu | db39fcf1f690b02d612e2bfc00980700887abe03 | 0 | static CharDriverState *qemu_chr_open_pipe(ChardevHostdev *opts)
{
const char *filename = opts->device;
CharDriverState *chr;
WinCharState *s;
chr = g_malloc0(sizeof(CharDriverState));
s = g_malloc0(sizeof(WinCharState));
chr->opaque = s;
chr->chr_write = win_chr_write;
chr->chr_close = win_chr_close;
if (win_chr_pipe_init(chr, filename) < 0) {
g_free(s);
g_free(chr);
return NULL;
}
return chr;
}
| 21,000 |
qemu | a8170e5e97ad17ca169c64ba87ae2f53850dab4c | 0 | static void cirrus_linear_write(void *opaque, target_phys_addr_t addr,
uint64_t val, unsigned size)
{
CirrusVGAState *s = opaque;
unsigned mode;
addr &= s->cirrus_addr_mask;
if (((s->vga.sr[0x17] & 0x44) == 0x44) &&
((addr & s->linear_mmio_mask) == s->linear_mmio_mask)) {
/* memory-mapped I/O */
cirrus_mmio_blt_write(s, addr & 0xff, val);
} else if (s->cirrus_srcptr != s->cirrus_srcptr_end) {
/* bitblt */
*s->cirrus_srcptr++ = (uint8_t) val;
if (s->cirrus_srcptr >= s->cirrus_srcptr_end) {
cirrus_bitblt_cputovideo_next(s);
}
} else {
/* video memory */
if ((s->vga.gr[0x0B] & 0x14) == 0x14) {
addr <<= 4;
} else if (s->vga.gr[0x0B] & 0x02) {
addr <<= 3;
}
addr &= s->cirrus_addr_mask;
mode = s->vga.gr[0x05] & 0x7;
if (mode < 4 || mode > 5 || ((s->vga.gr[0x0B] & 0x4) == 0)) {
*(s->vga.vram_ptr + addr) = (uint8_t) val;
memory_region_set_dirty(&s->vga.vram, addr, 1);
} else {
if ((s->vga.gr[0x0B] & 0x14) != 0x14) {
cirrus_mem_writeb_mode4and5_8bpp(s, mode, addr, val);
} else {
cirrus_mem_writeb_mode4and5_16bpp(s, mode, addr, val);
}
}
}
}
| 21,001 |
qemu | 880a7578381d1c7ed4d41c7599ae3cc06567a824 | 0 | static void gdb_chr_receive(void *opaque, const uint8_t *buf, int size)
{
GDBState *s = opaque;
int i;
for (i = 0; i < size; i++) {
gdb_read_byte(s, buf[i]);
}
}
| 21,002 |
FFmpeg | ca4544409e32d692504b44e4ae804c3538993904 | 0 | get_pointer_coordinates(int *x, int *y, Display *dpy, AVFormatContext *s1)
{
Window mrootwindow, childwindow;
int dummy;
mrootwindow = DefaultRootWindow(dpy);
if (XQueryPointer(dpy, mrootwindow, &mrootwindow, &childwindow,
x, y, &dummy, &dummy, (unsigned int*)&dummy)) {
} else {
av_log(s1, AV_LOG_INFO, "couldn't find mouse pointer\n");
*x = -1;
*y = -1;
}
}
| 21,003 |
qemu | 2374e73edafff0586cbfb67c333c5a7588f81fd5 | 0 | uint64_t helper_stq_c_raw(uint64_t t0, uint64_t t1)
{
uint64_t ret;
if (t1 == env->lock) {
stq_raw(t1, t0);
ret = 0;
} else
ret = 1;
env->lock = 1;
return ret;
}
| 21,004 |
FFmpeg | bc488ec28aec4bc91ba47283c49c9f7f25696eaa | 1 | av_cold void ff_pixblockdsp_init(PixblockDSPContext *c, AVCodecContext *avctx)
{
const unsigned high_bit_depth = avctx->bits_per_raw_sample > 8;
c->diff_pixels = diff_pixels_c;
switch (avctx->bits_per_raw_sample) {
case 9:
case 10:
case 12:
case 14:
c->get_pixels = get_pixels_16_c;
break;
default:
if (avctx->bits_per_raw_sample<=8 || avctx->codec_type != AVMEDIA_TYPE_VIDEO) {
c->get_pixels = get_pixels_8_c;
}
break;
}
if (ARCH_ALPHA)
ff_pixblockdsp_init_alpha(c, avctx, high_bit_depth);
if (ARCH_ARM)
ff_pixblockdsp_init_arm(c, avctx, high_bit_depth);
if (ARCH_PPC)
ff_pixblockdsp_init_ppc(c, avctx, high_bit_depth);
if (ARCH_X86)
ff_pixblockdsp_init_x86(c, avctx, high_bit_depth);
if (ARCH_MIPS)
ff_pixblockdsp_init_mips(c, avctx, high_bit_depth);
} | 21,005 |
qemu | 712b4243c761cb6ab6a4367a160fd2a42e2d4b76 | 1 | static void gen_pusha(DisasContext *s)
{
int i;
gen_op_movl_A0_reg(R_ESP);
gen_op_addl_A0_im(-8 << s->dflag);
if (!s->ss32)
tcg_gen_ext16u_tl(cpu_A0, cpu_A0);
tcg_gen_mov_tl(cpu_T[1], cpu_A0);
if (s->addseg)
gen_op_addl_A0_seg(s, R_SS);
for(i = 0;i < 8; i++) {
gen_op_mov_v_reg(MO_32, cpu_T[0], 7 - i);
gen_op_st_v(s, s->dflag, cpu_T[0], cpu_A0);
gen_op_addl_A0_im(1 << s->dflag);
}
gen_op_mov_reg_v(MO_16 + s->ss32, R_ESP, cpu_T[1]);
}
| 21,010 |
qemu | e8ce12d9eaeedeb7f8d9debcd4c9b993903f1abb | 1 | static void usbredir_buffered_bulk_packet(void *priv, uint64_t id,
struct usb_redir_buffered_bulk_packet_header *buffered_bulk_packet,
uint8_t *data, int data_len)
{
USBRedirDevice *dev = priv;
uint8_t status, ep = buffered_bulk_packet->endpoint;
void *free_on_destroy;
int i, len;
DPRINTF("buffered-bulk-in status %d ep %02X len %d id %"PRIu64"\n",
buffered_bulk_packet->status, ep, data_len, id);
if (dev->endpoint[EP2I(ep)].type != USB_ENDPOINT_XFER_BULK) {
ERROR("received buffered-bulk packet for non bulk ep %02X\n", ep);
free(data);
return;
}
if (dev->endpoint[EP2I(ep)].bulk_receiving_started == 0) {
DPRINTF("received buffered-bulk packet on not started ep %02X\n", ep);
free(data);
return;
}
/* Data must be in maxp chunks for buffered_bulk_add_*_data_to_packet */
len = dev->endpoint[EP2I(ep)].max_packet_size;
status = usb_redir_success;
free_on_destroy = NULL;
for (i = 0; i < data_len; i += len) {
if (len >= (data_len - i)) {
len = data_len - i;
status = buffered_bulk_packet->status;
free_on_destroy = data;
}
/* bufp_alloc also adds the packet to the ep queue */
bufp_alloc(dev, data + i, len, status, ep, free_on_destroy);
}
if (dev->endpoint[EP2I(ep)].pending_async_packet) {
USBPacket *p = dev->endpoint[EP2I(ep)].pending_async_packet;
dev->endpoint[EP2I(ep)].pending_async_packet = NULL;
usbredir_buffered_bulk_in_complete(dev, p, ep);
usb_packet_complete(&dev->dev, p);
}
}
| 21,011 |
FFmpeg | 06bf6d3bc04979bd39ecdc7311d0daf8aee7e10f | 1 | static int poll_frame(AVFilterLink *link)
{
AVFilterContext *s = link->src;
OverlayContext *over = s->priv;
int ret = avfilter_poll_frame(s->inputs[OVERLAY]);
if (ret == AVERROR_EOF)
ret = !!over->overpicref;
return ret && avfilter_poll_frame(s->inputs[MAIN]);
}
| 21,013 |
FFmpeg | 944f5b2779e4aa63f7624df6cd4de832a53db81b | 1 | static void sbr_qmf_synthesis(DSPContext *dsp, FFTContext *mdct,
float *out, float X[2][38][64],
float mdct_buf[2][64],
float *v0, int *v_off, const unsigned int div)
{
int i, n;
const float *sbr_qmf_window = div ? sbr_qmf_window_ds : sbr_qmf_window_us;
float *v;
for (i = 0; i < 32; i++) {
if (*v_off == 0) {
int saved_samples = (1280 - 128) >> div;
memcpy(&v0[SBR_SYNTHESIS_BUF_SIZE - saved_samples], v0, saved_samples * sizeof(float));
*v_off = SBR_SYNTHESIS_BUF_SIZE - saved_samples - (128 >> div);
} else {
*v_off -= 128 >> div;
}
v = v0 + *v_off;
if (div) {
for (n = 0; n < 32; n++) {
X[0][i][ n] = -X[0][i][n];
X[0][i][32+n] = X[1][i][31-n];
}
mdct->imdct_half(mdct, mdct_buf[0], X[0][i]);
for (n = 0; n < 32; n++) {
v[ n] = mdct_buf[0][63 - 2*n];
v[63 - n] = -mdct_buf[0][62 - 2*n];
}
} else {
for (n = 1; n < 64; n+=2) {
X[1][i][n] = -X[1][i][n];
}
mdct->imdct_half(mdct, mdct_buf[0], X[0][i]);
mdct->imdct_half(mdct, mdct_buf[1], X[1][i]);
for (n = 0; n < 64; n++) {
v[ n] = -mdct_buf[0][63 - n] + mdct_buf[1][ n ];
v[127 - n] = mdct_buf[0][63 - n] + mdct_buf[1][ n ];
}
}
dsp->vector_fmul_add(out, v , sbr_qmf_window , zero64, 64 >> div);
dsp->vector_fmul_add(out, v + ( 192 >> div), sbr_qmf_window + ( 64 >> div), out , 64 >> div);
dsp->vector_fmul_add(out, v + ( 256 >> div), sbr_qmf_window + (128 >> div), out , 64 >> div);
dsp->vector_fmul_add(out, v + ( 448 >> div), sbr_qmf_window + (192 >> div), out , 64 >> div);
dsp->vector_fmul_add(out, v + ( 512 >> div), sbr_qmf_window + (256 >> div), out , 64 >> div);
dsp->vector_fmul_add(out, v + ( 704 >> div), sbr_qmf_window + (320 >> div), out , 64 >> div);
dsp->vector_fmul_add(out, v + ( 768 >> div), sbr_qmf_window + (384 >> div), out , 64 >> div);
dsp->vector_fmul_add(out, v + ( 960 >> div), sbr_qmf_window + (448 >> div), out , 64 >> div);
dsp->vector_fmul_add(out, v + (1024 >> div), sbr_qmf_window + (512 >> div), out , 64 >> div);
dsp->vector_fmul_add(out, v + (1216 >> div), sbr_qmf_window + (576 >> div), out , 64 >> div);
out += 64 >> div;
}
}
| 21,014 |
qemu | 5f5a1318653c08e435cfa52f60b6a712815b659d | 1 | void virtio_config_writeb(VirtIODevice *vdev, uint32_t addr, uint32_t data)
{
VirtioDeviceClass *k = VIRTIO_DEVICE_GET_CLASS(vdev);
uint8_t val = data;
if (addr > (vdev->config_len - sizeof(val)))
return;
stb_p(vdev->config + addr, val);
if (k->set_config) {
k->set_config(vdev, vdev->config);
}
}
| 21,015 |
FFmpeg | 282bb02839b1ce73963c8e3ee46804f1ade8b12a | 1 | static int get_qcx(J2kDecoderContext *s, int n, J2kQuantStyle *q)
{
int i, x;
if (s->buf_end - s->buf < 1)
return AVERROR(EINVAL);
x = bytestream_get_byte(&s->buf); // Sqcd
q->nguardbits = x >> 5;
q->quantsty = x & 0x1f;
if (q->quantsty == J2K_QSTY_NONE){
n -= 3;
if (s->buf_end - s->buf < n)
return AVERROR(EINVAL);
for (i = 0; i < n; i++)
q->expn[i] = bytestream_get_byte(&s->buf) >> 3;
} else if (q->quantsty == J2K_QSTY_SI){
if (s->buf_end - s->buf < 2)
return AVERROR(EINVAL);
x = bytestream_get_be16(&s->buf);
q->expn[0] = x >> 11;
q->mant[0] = x & 0x7ff;
for (i = 1; i < 32 * 3; i++){
int curexpn = FFMAX(0, q->expn[0] - (i-1)/3);
q->expn[i] = curexpn;
q->mant[i] = q->mant[0];
}
} else{
n = (n - 3) >> 1;
if (s->buf_end - s->buf < n)
return AVERROR(EINVAL);
for (i = 0; i < n; i++){
x = bytestream_get_be16(&s->buf);
q->expn[i] = x >> 11;
q->mant[i] = x & 0x7ff;
}
}
return 0;
}
| 21,016 |
FFmpeg | 25cf9062babc92b1657bf024f872df4b0ffa66a9 | 1 | static int mov_write_video_tag(ByteIOContext *pb, MOVTrack* track)
{
int pos = url_ftell(pb);
char compressor_name[32];
int tag;
put_be32(pb, 0); /* size */
tag = track->enc->codec_tag;
if (!tag)
tag = codec_get_tag(codec_movvideo_tags, track->enc->codec_id);
// if no mac fcc found, try with Microsoft tags
if (!tag)
tag = codec_get_tag(codec_bmp_tags, track->enc->codec_id);
put_le32(pb, tag); // store it byteswapped
put_be32(pb, 0); /* Reserved */
put_be16(pb, 0); /* Reserved */
put_be16(pb, 1); /* Data-reference index */
put_be16(pb, 0); /* Codec stream version */
put_be16(pb, 0); /* Codec stream revision (=0) */
put_tag(pb, "FFMP"); /* Vendor */
if(track->enc->codec_id == CODEC_ID_RAWVIDEO) {
put_be32(pb, 0); /* Temporal Quality */
put_be32(pb, 0x400); /* Spatial Quality = lossless*/
} else {
put_be32(pb, 0x200); /* Temporal Quality = normal */
put_be32(pb, 0x200); /* Spatial Quality = normal */
}
put_be16(pb, track->enc->width); /* Video width */
put_be16(pb, track->enc->height); /* Video height */
put_be32(pb, 0x00480000); /* Horizontal resolution 72dpi */
put_be32(pb, 0x00480000); /* Vertical resolution 72dpi */
put_be32(pb, 0); /* Data size (= 0) */
put_be16(pb, 1); /* Frame count (= 1) */
memset(compressor_name,0,32);
if (track->enc->codec->name)
strncpy(compressor_name,track->enc->codec->name,31);
put_byte(pb, FFMAX(strlen(compressor_name),32) );
put_buffer(pb, compressor_name, 31);
put_be16(pb, 0x18); /* Reserved */
put_be16(pb, 0xffff); /* Reserved */
if(track->enc->codec_id == CODEC_ID_MPEG4)
mov_write_esds_tag(pb, track);
else if(track->enc->codec_id == CODEC_ID_H263)
mov_write_d263_tag(pb);
else if(track->enc->codec_id == CODEC_ID_SVQ3)
mov_write_svq3_tag(pb);
return updateSize (pb, pos);
}
| 21,017 |
FFmpeg | b1a4b735f9b03840803413cb725216d15d5213a7 | 1 | yuv2plane1_16_c_template(const int32_t *src, uint16_t *dest, int dstW,
int big_endian, int output_bits)
{
int i;
int shift = 19 - output_bits;
for (i = 0; i < dstW; i++) {
int val = src[i] + (1 << (shift - 1));
output_pixel(&dest[i], val, 0, uint);
}
}
| 21,018 |
FFmpeg | f1d8763a02b5fce9a7d9789e049d74a45b15e1e8 | 1 | void ff_update_duplicate_context(MpegEncContext *dst, MpegEncContext *src)
{
MpegEncContext bak;
int i;
// FIXME copy only needed parts
// START_TIMER
backup_duplicate_context(&bak, dst);
memcpy(dst, src, sizeof(MpegEncContext));
backup_duplicate_context(dst, &bak);
for (i = 0; i < 12; i++) {
dst->pblocks[i] = &dst->block[i];
}
// STOP_TIMER("update_duplicate_context")
// about 10k cycles / 0.01 sec for 1000frames on 1ghz with 2 threads
}
| 21,019 |
FFmpeg | 1cb0edb40b8e94e1a50ad40c40d43e34ed8435fe | 1 | static int mpeg_mux_read_header(AVFormatContext *s,
AVFormatParameters *ap)
{
MpegDemuxContext *m;
int size, startcode, c, rate_bound, audio_bound, video_bound, mux_rate, val;
int codec_id, n, i, type;
AVStream *st;
m = av_mallocz(sizeof(MpegDemuxContext));
if (!m)
return -ENOMEM;
s->priv_data = m;
/* search first pack header */
m->header_state = 0xff;
size = MAX_SYNC_SIZE;
for(;;) {
while (size > 0) {
startcode = find_start_code(&s->pb, &size, &m->header_state);
if (startcode == PACK_START_CODE)
goto found;
}
return -ENODATA;
found:
/* search system header just after pack header */
/* parse pack header */
get_byte(&s->pb); /* ts1 */
get_be16(&s->pb); /* ts2 */
get_be16(&s->pb); /* ts3 */
mux_rate = get_byte(&s->pb) << 16;
mux_rate |= get_byte(&s->pb) << 8;
mux_rate |= get_byte(&s->pb);
mux_rate &= (1 << 22) - 1;
m->mux_rate = mux_rate;
startcode = find_start_code(&s->pb, &size, &m->header_state);
if (startcode == SYSTEM_HEADER_START_CODE)
break;
}
size = get_be16(&s->pb);
rate_bound = get_byte(&s->pb) << 16;
rate_bound |= get_byte(&s->pb) << 8;
rate_bound |= get_byte(&s->pb);
rate_bound = (rate_bound >> 1) & ((1 << 22) - 1);
audio_bound = get_byte(&s->pb) >> 2;
video_bound = get_byte(&s->pb) & 0x1f;
get_byte(&s->pb); /* reserved byte */
#if 0
printf("mux_rate=%d kbit/s\n", (m->mux_rate * 50 * 8) / 1000);
printf("rate_bound=%d\n", rate_bound);
printf("audio_bound=%d\n", audio_bound);
printf("video_bound=%d\n", video_bound);
#endif
size -= 6;
s->nb_streams = 0;
while (size > 0) {
c = get_byte(&s->pb);
size--;
if ((c & 0x80) == 0)
break;
val = get_be16(&s->pb);
size -= 2;
if (c >= 0xc0 && c <= 0xdf) {
/* mpeg audio stream */
type = CODEC_TYPE_AUDIO;
codec_id = CODEC_ID_MP2;
n = 1;
c = c | 0x100;
} else if (c >= 0xe0 && c <= 0xef) {
type = CODEC_TYPE_VIDEO;
codec_id = CODEC_ID_MPEG1VIDEO;
n = 1;
c = c | 0x100;
} else if (c == 0xb8) {
/* all audio streams */
/* XXX: hack for DVD: we force AC3, although we do not
know that this codec will be used */
type = CODEC_TYPE_AUDIO;
codec_id = CODEC_ID_AC3;
n = audio_bound;
c = 0x80;
/* c = 0x1c0; */
} else if (c == 0xb9) {
/* all video streams */
type = CODEC_TYPE_VIDEO;
codec_id = CODEC_ID_MPEG1VIDEO;
n = video_bound;
c = 0x1e0;
} else {
type = 0;
codec_id = 0;
n = 0;
}
for(i=0;i<n;i++) {
st = av_mallocz(sizeof(AVStream));
if (!st)
return -ENOMEM;
s->streams[s->nb_streams++] = st;
st->id = c + i;
st->codec.codec_type = type;
st->codec.codec_id = codec_id;
}
}
return 0;
}
| 21,021 |
qemu | b0706b716769494f321a0d2bfd9fa9893992f995 | 1 | static bool victim_tlb_hit(CPUArchState *env, size_t mmu_idx, size_t index,
size_t elt_ofs, target_ulong page)
{
size_t vidx;
for (vidx = 0; vidx < CPU_VTLB_SIZE; ++vidx) {
CPUTLBEntry *vtlb = &env->tlb_v_table[mmu_idx][vidx];
target_ulong cmp = *(target_ulong *)((uintptr_t)vtlb + elt_ofs);
if (cmp == page) {
/* Found entry in victim tlb, swap tlb and iotlb. */
CPUTLBEntry tmptlb, *tlb = &env->tlb_table[mmu_idx][index];
CPUIOTLBEntry tmpio, *io = &env->iotlb[mmu_idx][index];
CPUIOTLBEntry *vio = &env->iotlb_v[mmu_idx][vidx];
tmptlb = *tlb; *tlb = *vtlb; *vtlb = tmptlb;
tmpio = *io; *io = *vio; *vio = tmpio;
return true;
}
}
return false;
}
| 21,022 |
qemu | b4ba67d9a702507793c2724e56f98e9b0f7be02b | 1 | static inline unsigned in_reg(IVState *s, enum Reg reg)
{
const char *name = reg2str(reg);
QTestState *qtest = global_qtest;
unsigned res;
global_qtest = s->qtest;
res = qpci_io_readl(s->dev, s->reg_base + reg);
g_test_message("*%s -> %x\n", name, res);
global_qtest = qtest;
return res;
}
| 21,023 |
qemu | 38f3adc34de83bf75d2023831dc520d32568a2d9 | 1 | static void fw_cfg_init1(DeviceState *dev)
{
FWCfgState *s = FW_CFG(dev);
MachineState *machine = MACHINE(qdev_get_machine());
uint32_t version = FW_CFG_VERSION;
assert(!object_resolve_path(FW_CFG_PATH, NULL));
object_property_add_child(OBJECT(machine), FW_CFG_NAME, OBJECT(s), NULL);
qdev_init_nofail(dev);
fw_cfg_add_bytes(s, FW_CFG_SIGNATURE, (char *)"QEMU", 4);
fw_cfg_add_bytes(s, FW_CFG_UUID, &qemu_uuid, 16);
fw_cfg_add_i16(s, FW_CFG_NOGRAPHIC, (uint16_t)!machine->enable_graphics);
fw_cfg_add_i16(s, FW_CFG_BOOT_MENU, (uint16_t)boot_menu);
fw_cfg_bootsplash(s);
fw_cfg_reboot(s);
if (s->dma_enabled) {
version |= FW_CFG_VERSION_DMA;
}
fw_cfg_add_i32(s, FW_CFG_ID, version);
s->machine_ready.notify = fw_cfg_machine_ready;
qemu_add_machine_init_done_notifier(&s->machine_ready);
}
| 21,024 |
qemu | 90228ee395b71cdd64e6bc844e3d553eb9ef643f | 1 | static void bmdma_map(PCIDevice *pci_dev, int region_num,
pcibus_t addr, pcibus_t size, int type)
{
PCIIDEState *d = DO_UPCAST(PCIIDEState, dev, pci_dev);
int i;
for(i = 0;i < 2; i++) {
BMDMAState *bm = &d->bmdma[i];
d->bus[i].bmdma = bm;
bm->bus = d->bus+i;
qemu_add_vm_change_state_handler(ide_dma_restart_cb, bm);
register_ioport_write(addr, 1, 1, bmdma_cmd_writeb, bm);
register_ioport_write(addr + 1, 3, 1, bmdma_writeb, bm);
register_ioport_read(addr, 4, 1, bmdma_readb, bm);
register_ioport_write(addr + 4, 4, 1, bmdma_addr_writeb, bm);
register_ioport_read(addr + 4, 4, 1, bmdma_addr_readb, bm);
register_ioport_write(addr + 4, 4, 2, bmdma_addr_writew, bm);
register_ioport_read(addr + 4, 4, 2, bmdma_addr_readw, bm);
register_ioport_write(addr + 4, 4, 4, bmdma_addr_writel, bm);
register_ioport_read(addr + 4, 4, 4, bmdma_addr_readl, bm);
addr += 8;
}
} | 21,025 |
FFmpeg | 948b54763b6c851b2a0bec6702cd9c419065fdb6 | 1 | static uint32_t softfloat_mul(uint32_t x, uint64_t mantissa)
{
uint64_t l = x * (mantissa & 0xffffffff);
uint64_t h = x * (mantissa >> 32);
h += l >> 32;
l &= 0xffffffff;
l += 1 << av_log2(h >> 21);
h += l >> 32;
return h >> 20;
}
| 21,027 |
FFmpeg | b6ae0866821df5f3b83f6b7f5c281d9a3e36b7ee | 1 | static void dv_decode_ac(GetBitContext *gb, BlockInfo *mb, DCTELEM *block)
{
int last_index = gb->size_in_bits;
const uint8_t *scan_table = mb->scan_table;
const uint32_t *factor_table = mb->factor_table;
int pos = mb->pos;
int partial_bit_count = mb->partial_bit_count;
int level, run, vlc_len, index;
OPEN_READER(re, gb);
UPDATE_CACHE(re, gb);
/* if we must parse a partial VLC, we do it here */
if (partial_bit_count > 0) {
re_cache = ((unsigned)re_cache >> partial_bit_count) |
(mb->partial_bit_buffer << (sizeof(re_cache) * 8 - partial_bit_count));
re_index -= partial_bit_count;
mb->partial_bit_count = 0;
}
/* get the AC coefficients until last_index is reached */
for (;;) {
av_dlog(NULL, "%2d: bits=%04x index=%d\n", pos, SHOW_UBITS(re, gb, 16),
re_index);
/* our own optimized GET_RL_VLC */
index = NEG_USR32(re_cache, TEX_VLC_BITS);
vlc_len = dv_rl_vlc[index].len;
if (vlc_len < 0) {
index = NEG_USR32((unsigned)re_cache << TEX_VLC_BITS, -vlc_len) + dv_rl_vlc[index].level;
vlc_len = TEX_VLC_BITS - vlc_len;
}
level = dv_rl_vlc[index].level;
run = dv_rl_vlc[index].run;
/* gotta check if we're still within gb boundaries */
if (re_index + vlc_len > last_index) {
/* should be < 16 bits otherwise a codeword could have been parsed */
mb->partial_bit_count = last_index - re_index;
mb->partial_bit_buffer = NEG_USR32(re_cache, mb->partial_bit_count);
re_index = last_index;
break;
}
re_index += vlc_len;
av_dlog(NULL, "run=%d level=%d\n", run, level);
pos += run;
if (pos >= 64)
break;
level = (level * factor_table[pos] + (1 << (dv_iweight_bits - 1))) >> dv_iweight_bits;
block[scan_table[pos]] = level;
UPDATE_CACHE(re, gb);
}
CLOSE_READER(re, gb);
mb->pos = pos;
}
| 21,028 |
qemu | 3c0c47e3464f3c54bd3f1cc6d4da2cbf7465e295 | 1 | void spapr_core_plug(HotplugHandler *hotplug_dev, DeviceState *dev,
Error **errp)
{
sPAPRMachineClass *smc = SPAPR_MACHINE_GET_CLASS(OBJECT(hotplug_dev));
sPAPRMachineState *spapr = SPAPR_MACHINE(OBJECT(hotplug_dev));
sPAPRCPUCore *core = SPAPR_CPU_CORE(OBJECT(dev));
CPUCore *cc = CPU_CORE(dev);
CPUState *cs = CPU(core->threads);
sPAPRDRConnector *drc;
sPAPRDRConnectorClass *drck;
Error *local_err = NULL;
void *fdt = NULL;
int fdt_offset = 0;
int index = cc->core_id / smp_threads;
int smt = kvmppc_smt_threads();
g_assert(smc->dr_cpu_enabled);
drc = spapr_dr_connector_by_id(SPAPR_DR_CONNECTOR_TYPE_CPU, index * smt);
spapr->cores[index] = OBJECT(dev);
g_assert(drc);
/*
* Setup CPU DT entries only for hotplugged CPUs. For boot time or
* coldplugged CPUs DT entries are setup in spapr_finalize_fdt().
*/
if (dev->hotplugged) {
fdt = spapr_populate_hotplug_cpu_dt(cs, &fdt_offset, spapr);
}
drck = SPAPR_DR_CONNECTOR_GET_CLASS(drc);
drck->attach(drc, dev, fdt, fdt_offset, !dev->hotplugged, &local_err);
if (local_err) {
g_free(fdt);
spapr->cores[index] = NULL;
error_propagate(errp, local_err);
return;
}
if (dev->hotplugged) {
/*
* Send hotplug notification interrupt to the guest only in case
* of hotplugged CPUs.
*/
spapr_hotplug_req_add_by_index(drc);
} else {
/*
* Set the right DRC states for cold plugged CPU.
*/
drck->set_allocation_state(drc, SPAPR_DR_ALLOCATION_STATE_USABLE);
drck->set_isolation_state(drc, SPAPR_DR_ISOLATION_STATE_UNISOLATED);
}
}
| 21,029 |
FFmpeg | 13705b69ebe9e375fdb52469760a0fbb5f593cc1 | 1 | static void spatial_compose53i_dy(dwt_compose_t *cs, DWTELEM *buffer, int width, int height, int stride){
int y= cs->y;
DWTELEM *b0= cs->b0;
DWTELEM *b1= cs->b1;
DWTELEM *b2= buffer + mirror(y+1, height-1)*stride;
DWTELEM *b3= buffer + mirror(y+2, height-1)*stride;
{START_TIMER
if(b1 <= b3) vertical_compose53iL0(b1, b2, b3, width);
if(b0 <= b2) vertical_compose53iH0(b0, b1, b2, width);
STOP_TIMER("vertical_compose53i*")}
{START_TIMER
if(y-1 >= 0) horizontal_compose53i(b0, width);
if(b0 <= b2) horizontal_compose53i(b1, width);
STOP_TIMER("horizontal_compose53i")}
cs->b0 = b2;
cs->b1 = b3;
cs->y += 2;
}
| 21,031 |
FFmpeg | 51090133b31bc719ea868db15d3ee38e9dbe90f1 | 1 | static int cng_decode_frame(AVCodecContext *avctx, void *data,
int *got_frame_ptr, AVPacket *avpkt)
{
AVFrame *frame = data;
CNGContext *p = avctx->priv_data;
int buf_size = avpkt->size;
int ret, i;
int16_t *buf_out;
float e = 1.0;
float scaling;
if (avpkt->size) {
int dbov = -avpkt->data[0];
p->target_energy = 1081109975 * ff_exp10(dbov / 10.0) * 0.75;
memset(p->target_refl_coef, 0, p->order * sizeof(*p->target_refl_coef));
for (i = 0; i < FFMIN(avpkt->size - 1, p->order); i++) {
p->target_refl_coef[i] = (avpkt->data[1 + i] - 127) / 128.0;
}
}
if (avctx->internal->skip_samples > 10 * avctx->frame_size) {
avctx->internal->skip_samples = 0;
return AVERROR_INVALIDDATA;
}
if (p->inited) {
p->energy = p->energy / 2 + p->target_energy / 2;
for (i = 0; i < p->order; i++)
p->refl_coef[i] = 0.6 *p->refl_coef[i] + 0.4 * p->target_refl_coef[i];
} else {
p->energy = p->target_energy;
memcpy(p->refl_coef, p->target_refl_coef, p->order * sizeof(*p->refl_coef));
p->inited = 1;
}
make_lpc_coefs(p->lpc_coef, p->refl_coef, p->order);
for (i = 0; i < p->order; i++)
e *= 1.0 - p->refl_coef[i]*p->refl_coef[i];
scaling = sqrt(e * p->energy / 1081109975);
for (i = 0; i < avctx->frame_size; i++) {
int r = (av_lfg_get(&p->lfg) & 0xffff) - 0x8000;
p->excitation[i] = scaling * r;
}
ff_celp_lp_synthesis_filterf(p->filter_out + p->order, p->lpc_coef,
p->excitation, avctx->frame_size, p->order);
frame->nb_samples = avctx->frame_size;
if ((ret = ff_get_buffer(avctx, frame, 0)) < 0)
return ret;
buf_out = (int16_t *)frame->data[0];
for (i = 0; i < avctx->frame_size; i++)
buf_out[i] = p->filter_out[i + p->order];
memcpy(p->filter_out, p->filter_out + avctx->frame_size,
p->order * sizeof(*p->filter_out));
*got_frame_ptr = 1;
return buf_size;
}
| 21,032 |
FFmpeg | 344d6db978af251e32b886f4c54e993771004fc0 | 1 | static av_cold int vdadec_init(AVCodecContext *avctx)
{
VDADecoderContext *ctx = avctx->priv_data;
struct vda_context *vda_ctx = &ctx->vda_ctx;
OSStatus status;
int ret;
ctx->h264_initialized = 0;
/* init pix_fmts of codec */
if (!ff_h264_vda_decoder.pix_fmts) {
if (kCFCoreFoundationVersionNumber < kCFCoreFoundationVersionNumber10_7)
ff_h264_vda_decoder.pix_fmts = vda_pixfmts_prior_10_7;
else
ff_h264_vda_decoder.pix_fmts = vda_pixfmts;
/* init vda */
memset(vda_ctx, 0, sizeof(struct vda_context));
vda_ctx->width = avctx->width;
vda_ctx->height = avctx->height;
vda_ctx->format = 'avc1';
vda_ctx->use_sync_decoding = 1;
vda_ctx->use_ref_buffer = 1;
ctx->pix_fmt = avctx->get_format(avctx, avctx->codec->pix_fmts);
switch (ctx->pix_fmt) {
case AV_PIX_FMT_UYVY422:
vda_ctx->cv_pix_fmt_type = '2vuy';
break;
case AV_PIX_FMT_YUYV422:
vda_ctx->cv_pix_fmt_type = 'yuvs';
break;
case AV_PIX_FMT_NV12:
vda_ctx->cv_pix_fmt_type = '420v';
break;
case AV_PIX_FMT_YUV420P:
vda_ctx->cv_pix_fmt_type = 'y420';
break;
default:
av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format: %d\n", avctx->pix_fmt);
status = ff_vda_create_decoder(vda_ctx,
avctx->extradata, avctx->extradata_size);
if (status != kVDADecoderNoErr) {
av_log(avctx, AV_LOG_ERROR,
"Failed to init VDA decoder: %d.\n", status);
/* init H.264 decoder */
set_context(avctx);
ret = ff_h264_decoder.init(avctx);
restore_context(avctx);
if (ret < 0) {
av_log(avctx, AV_LOG_ERROR, "Failed to open H.264 decoder.\n");
ctx->h264_initialized = 1;
return 0;
failed:
vdadec_close(avctx);
return -1; | 21,035 |
FFmpeg | 3e0f7126b53b395d9e79df57b2e626eb99ad846b | 1 | static void inner_add_yblock_bw_16_obmc_32_mmx(const uint8_t *obmc, const long obmc_stride, uint8_t * * block, int b_w, long b_h,
int src_x, int src_y, long src_stride, slice_buffer * sb, int add, uint8_t * dst8){
snow_inner_add_yblock_mmx_header
snow_inner_add_yblock_mmx_start("mm1", "mm5", "3", "0", "0")
snow_inner_add_yblock_mmx_accum("2", "16", "0")
snow_inner_add_yblock_mmx_accum("1", "512", "0")
snow_inner_add_yblock_mmx_accum("0", "528", "0")
snow_inner_add_yblock_mmx_mix("0", "0")
snow_inner_add_yblock_mmx_start("mm1", "mm5", "3", "8", "8")
snow_inner_add_yblock_mmx_accum("2", "24", "8")
snow_inner_add_yblock_mmx_accum("1", "520", "8")
snow_inner_add_yblock_mmx_accum("0", "536", "8")
snow_inner_add_yblock_mmx_mix("32", "8")
snow_inner_add_yblock_mmx_end("32")
}
| 21,036 |
FFmpeg | 2cbe6bac0337939f023bd1c37a9c455e6d535f3a | 1 | static void blend_frames16_c(BLEND_FUNC_PARAMS)
{
int line, pixel;
uint16_t *dstw = (uint16_t *)dst;
uint16_t *src1w = (uint16_t *)src1;
uint16_t *src2w = (uint16_t *)src2;
width /= 2;
src1_linesize /= 2;
src2_linesize /= 2;
dst_linesize /= 2;
for (line = 0; line < height; line++) {
for (pixel = 0; pixel < width; pixel++)
dstw[pixel] = ((src1w[pixel] * factor1) + (src2w[pixel] * factor2) + half) >> shift;
src1w += src1_linesize;
src2w += src2_linesize;
dstw += dst_linesize;
}
}
| 21,038 |
FFmpeg | 7da9f4523159670d577a2808d4481e64008a8894 | 1 | static int cinepak_encode_frame(AVCodecContext *avctx, unsigned char *buf, int buf_size, void *data)
{
CinepakEncContext *s = avctx->priv_data;
AVFrame *frame = data;
int ret;
s->lambda = frame->quality ? frame->quality - 1 : 2 * FF_LAMBDA_SCALE;
frame->key_frame = s->curframe == 0;
frame->pict_type = frame->key_frame ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;
ret = rd_frame(s, frame, buf, buf_size);
FFSWAP(AVFrame, s->last_frame, s->best_frame);
if (++s->curframe >= s->keyint)
s->curframe = 0;
return ret;
}
| 21,039 |
qemu | e8d3b1a25f284cdf9705b7cf0412281cc9ee3a36 | 1 | void qemu_bh_schedule_idle(QEMUBH *bh)
{
if (bh->scheduled)
return;
bh->idle = 1;
/* Make sure that idle & any writes needed by the callback are done
* before the locations are read in the aio_bh_poll.
*/
smp_wmb();
bh->scheduled = 1;
}
| 21,040 |
FFmpeg | dce25564cc554cc85c8c28928b1b8d3f965c1b16 | 1 | static int mov_read_mdhd(MOVContext *c, ByteIOContext *pb, MOVAtom atom)
{
AVStream *st = c->fc->streams[c->fc->nb_streams-1];
MOVStreamContext *sc = st->priv_data;
int version = get_byte(pb);
int lang;
if (version > 1)
return -1; /* unsupported */
get_be24(pb); /* flags */
if (version == 1) {
get_be64(pb);
get_be64(pb);
} else {
get_be32(pb); /* creation time */
get_be32(pb); /* modification time */
}
sc->time_scale = get_be32(pb);
st->duration = (version == 1) ? get_be64(pb) : get_be32(pb); /* duration */
lang = get_be16(pb); /* language */
ff_mov_lang_to_iso639(lang, st->language);
get_be16(pb); /* quality */
return 0;
}
| 21,041 |
qemu | 1e9b65bb1bad51735cab6c861c29b592dccabf0e | 1 | void error_set(Error **errp, ErrorClass err_class, const char *fmt, ...)
{
va_list ap;
va_start(ap, fmt);
error_setv(errp, err_class, fmt, ap);
va_end(ap);
}
| 21,042 |
qemu | e0dadc1e9ef1f35208e5d2af9c7740c18a0b769f | 1 | void aux_init_mmio(AUXSlave *aux_slave, MemoryRegion *mmio)
{
assert(!aux_slave->mmio);
aux_slave->mmio = mmio;
}
| 21,043 |
qemu | efec3dd631d94160288392721a5f9c39e50fb2bc | 1 | static void s390_virtio_bridge_class_init(ObjectClass *klass, void *data)
{
DeviceClass *dc = DEVICE_CLASS(klass);
SysBusDeviceClass *k = SYS_BUS_DEVICE_CLASS(klass);
k->init = s390_virtio_bridge_init;
dc->no_user = 1;
}
| 21,044 |
qemu | 7364dbdabb7824d5bde1e341bb6d928282f01c83 | 1 | static void start_auth_vencrypt_subauth(VncState *vs)
{
switch (vs->subauth) {
case VNC_AUTH_VENCRYPT_TLSNONE:
case VNC_AUTH_VENCRYPT_X509NONE:
VNC_DEBUG("Accept TLS auth none\n");
vnc_write_u32(vs, 0); /* Accept auth completion */
start_client_init(vs);
break;
case VNC_AUTH_VENCRYPT_TLSVNC:
case VNC_AUTH_VENCRYPT_X509VNC:
VNC_DEBUG("Start TLS auth VNC\n");
start_auth_vnc(vs);
break;
#ifdef CONFIG_VNC_SASL
case VNC_AUTH_VENCRYPT_TLSSASL:
case VNC_AUTH_VENCRYPT_X509SASL:
VNC_DEBUG("Start TLS auth SASL\n");
start_auth_sasl(vs);
break;
#endif /* CONFIG_VNC_SASL */
default: /* Should not be possible, but just in case */
VNC_DEBUG("Reject subauth %d server bug\n", vs->auth);
vnc_write_u8(vs, 1);
if (vs->minor >= 8) {
static const char err[] = "Unsupported authentication type";
vnc_write_u32(vs, sizeof(err));
vnc_write(vs, err, sizeof(err));
}
vnc_client_error(vs);
}
}
| 21,046 |
FFmpeg | e7c1e38ba632f7315e332dd350b38f782f428884 | 1 | static void qpeg_decode_intra(const uint8_t *src, uint8_t *dst, int size,
int stride, int width, int height)
{
int i;
int code;
int c0, c1;
int run, copy;
int filled = 0;
int rows_to_go;
rows_to_go = height;
height--;
dst = dst + height * stride;
while((size > 0) && (rows_to_go > 0)) {
code = *src++;
size--;
run = copy = 0;
if(code == 0xFC) /* end-of-picture code */
break;
if(code >= 0xF8) { /* very long run */
c0 = *src++;
c1 = *src++;
size -= 2;
run = ((code & 0x7) << 16) + (c0 << 8) + c1 + 2;
} else if (code >= 0xF0) { /* long run */
c0 = *src++;
size--;
run = ((code & 0xF) << 8) + c0 + 2;
} else if (code >= 0xE0) { /* short run */
run = (code & 0x1F) + 2;
} else if (code >= 0xC0) { /* very long copy */
c0 = *src++;
c1 = *src++;
size -= 2;
copy = ((code & 0x3F) << 16) + (c0 << 8) + c1 + 1;
} else if (code >= 0x80) { /* long copy */
c0 = *src++;
size--;
copy = ((code & 0x7F) << 8) + c0 + 1;
} else { /* short copy */
copy = code + 1;
}
/* perform actual run or copy */
if(run) {
int p;
p = *src++;
size--;
for(i = 0; i < run; i++) {
dst[filled++] = p;
if (filled >= width) {
filled = 0;
dst -= stride;
rows_to_go--;
if(rows_to_go <= 0)
break;
}
}
} else {
size -= copy;
for(i = 0; i < copy; i++) {
dst[filled++] = *src++;
if (filled >= width) {
filled = 0;
dst -= stride;
rows_to_go--;
if(rows_to_go <= 0)
break;
}
}
}
}
}
| 21,047 |
qemu | 8daea510951dd309a44cea8de415c685c43851cf | 1 | static void parse_drive(DeviceState *dev, const char *str, void **ptr,
const char *propname, Error **errp)
{
BlockBackend *blk;
blk = blk_by_name(str);
if (!blk) {
error_setg(errp, "Property '%s.%s' can't find value '%s'",
object_get_typename(OBJECT(dev)), propname, str);
return;
}
if (blk_attach_dev(blk, dev) < 0) {
DriveInfo *dinfo = blk_legacy_dinfo(blk);
if (dinfo && dinfo->type != IF_NONE) {
error_setg(errp, "Drive '%s' is already in use because "
"it has been automatically connected to another "
"device (did you need 'if=none' in the drive options?)",
str);
} else {
error_setg(errp, "Drive '%s' is already in use by another device",
str);
}
return;
}
*ptr = blk;
}
| 21,048 |
FFmpeg | 4bed06637729ab000b79250c67d53078300e37c4 | 1 | int ff_vp56_decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
AVPacket *avpkt)
{
const uint8_t *buf = avpkt->data;
VP56Context *s = avctx->priv_data;
AVFrame *const p = s->frames[VP56_FRAME_CURRENT];
int remaining_buf_size = avpkt->size;
int av_uninit(alpha_offset);
int i, res;
int ret;
if (s->has_alpha) {
if (remaining_buf_size < 3)
return AVERROR_INVALIDDATA;
alpha_offset = bytestream_get_be24(&buf);
remaining_buf_size -= 3;
if (remaining_buf_size < alpha_offset)
return AVERROR_INVALIDDATA;
}
res = s->parse_header(s, buf, remaining_buf_size);
if (res < 0)
return res;
if (res == VP56_SIZE_CHANGE) {
for (i = 0; i < 4; i++) {
av_frame_unref(s->frames[i]);
if (s->alpha_context)
av_frame_unref(s->alpha_context->frames[i]);
}
}
ret = ff_get_buffer(avctx, p, AV_GET_BUFFER_FLAG_REF);
if (ret < 0)
return ret;
if (avctx->pix_fmt == AV_PIX_FMT_YUVA420P) {
av_frame_unref(s->alpha_context->frames[VP56_FRAME_CURRENT]);
if ((ret = av_frame_ref(s->alpha_context->frames[VP56_FRAME_CURRENT], p)) < 0) {
av_frame_unref(p);
return ret;
}
}
if (res == VP56_SIZE_CHANGE) {
if (vp56_size_changed(s)) {
av_frame_unref(p);
return AVERROR_INVALIDDATA;
}
}
if (avctx->pix_fmt == AV_PIX_FMT_YUVA420P) {
int bak_w = avctx->width;
int bak_h = avctx->height;
int bak_cw = avctx->coded_width;
int bak_ch = avctx->coded_height;
buf += alpha_offset;
remaining_buf_size -= alpha_offset;
res = s->alpha_context->parse_header(s->alpha_context, buf, remaining_buf_size);
if (res != 0) {
if(res==VP56_SIZE_CHANGE) {
av_log(avctx, AV_LOG_ERROR, "Alpha reconfiguration\n");
avctx->width = bak_w;
avctx->height = bak_h;
avctx->coded_width = bak_cw;
avctx->coded_height = bak_ch;
}
av_frame_unref(p);
return AVERROR_INVALIDDATA;
}
}
avctx->execute2(avctx, ff_vp56_decode_mbs, 0, 0, (avctx->pix_fmt == AV_PIX_FMT_YUVA420P) + 1);
if ((res = av_frame_ref(data, p)) < 0)
return res;
*got_frame = 1;
return avpkt->size;
}
| 21,049 |
FFmpeg | 1509c018bd5b054a2354e20021ccbac9c934d213 | 1 | static int analyze(const uint8_t *buf, int size, int packet_size, int *index)
{
int stat[TS_MAX_PACKET_SIZE];
int i;
int x = 0;
int best_score = 0;
memset(stat, 0, packet_size * sizeof(int));
for (x = i = 0; i < size - 3; i++) {
if (buf[i] == 0x47 && !(buf[i + 1] & 0x80) && (buf[i + 3] & 0x30)) {
stat[x]++;
if (stat[x] > best_score) {
best_score = stat[x];
if (index)
*index = x;
}
}
x++;
if (x == packet_size)
x = 0;
}
return best_score;
}
| 21,051 |
qemu | d663640c04f2aab810915c556390211d75457704 | 1 | static int coroutine_fn qcow2_co_is_allocated(BlockDriverState *bs,
int64_t sector_num, int nb_sectors, int *pnum)
{
BDRVQcowState *s = bs->opaque;
uint64_t cluster_offset;
int ret;
*pnum = nb_sectors;
/* FIXME We can get errors here, but the bdrv_co_is_allocated interface
* can't pass them on today */
qemu_co_mutex_lock(&s->lock);
ret = qcow2_get_cluster_offset(bs, sector_num << 9, pnum, &cluster_offset);
qemu_co_mutex_unlock(&s->lock);
if (ret < 0) {
*pnum = 0;
}
return (cluster_offset != 0) || (ret == QCOW2_CLUSTER_ZERO);
}
| 21,052 |
qemu | 2e29bd04786003561303dcad940b38afe790fb9b | 1 | static void pci_unin_config_writel (void *opaque, target_phys_addr_t addr,
uint32_t val)
{
UNINState *s = opaque;
s->config_reg = val;
}
| 21,053 |
FFmpeg | f6774f905fb3cfdc319523ac640be30b14c1bc55 | 1 | static int mpeg_field_start(MpegEncContext *s, const uint8_t *buf, int buf_size)
{
AVCodecContext *avctx = s->avctx;
Mpeg1Context *s1 = (Mpeg1Context *) s;
/* start frame decoding */
if (s->first_field || s->picture_structure == PICT_FRAME) {
AVFrameSideData *pan_scan;
if (ff_MPV_frame_start(s, avctx) < 0)
return -1;
ff_mpeg_er_frame_start(s);
/* first check if we must repeat the frame */
s->current_picture_ptr->f.repeat_pict = 0;
if (s->repeat_first_field) {
if (s->progressive_sequence) {
if (s->top_field_first)
s->current_picture_ptr->f.repeat_pict = 4;
else
s->current_picture_ptr->f.repeat_pict = 2;
} else if (s->progressive_frame) {
s->current_picture_ptr->f.repeat_pict = 1;
}
}
pan_scan = av_frame_new_side_data(&s->current_picture_ptr->f,
AV_FRAME_DATA_PANSCAN,
sizeof(s1->pan_scan));
if (!pan_scan)
return AVERROR(ENOMEM);
memcpy(pan_scan->data, &s1->pan_scan, sizeof(s1->pan_scan));
if (s1->a53_caption) {
AVFrameSideData *sd = av_frame_new_side_data(
&s->current_picture_ptr->f, AV_FRAME_DATA_A53_CC,
s1->a53_caption_size);
if (sd)
memcpy(sd->data, s1->a53_caption, s1->a53_caption_size);
av_freep(&s1->a53_caption);
}
if (s1->has_stereo3d) {
AVStereo3D *stereo = av_stereo3d_create_side_data(&s->current_picture_ptr->f);
if (!stereo)
return AVERROR(ENOMEM);
*stereo = s1->stereo3d;
s1->has_stereo3d = 0;
}
if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_FRAME))
ff_thread_finish_setup(avctx);
} else { // second field
int i;
if (!s->current_picture_ptr) {
av_log(s->avctx, AV_LOG_ERROR, "first field missing\n");
return -1;
}
if (s->avctx->hwaccel &&
(s->avctx->slice_flags & SLICE_FLAG_ALLOW_FIELD)) {
if (s->avctx->hwaccel->end_frame(s->avctx) < 0)
av_log(avctx, AV_LOG_ERROR,
"hardware accelerator failed to decode first field\n");
}
for (i = 0; i < 4; i++) {
s->current_picture.f.data[i] = s->current_picture_ptr->f.data[i];
if (s->picture_structure == PICT_BOTTOM_FIELD)
s->current_picture.f.data[i] +=
s->current_picture_ptr->f.linesize[i];
}
}
if (avctx->hwaccel) {
if (avctx->hwaccel->start_frame(avctx, buf, buf_size) < 0)
return -1;
}
#if FF_API_XVMC
FF_DISABLE_DEPRECATION_WARNINGS
// MPV_frame_start will call this function too,
// but we need to call it on every field
if (CONFIG_MPEG_XVMC_DECODER && s->avctx->xvmc_acceleration)
if (ff_xvmc_field_start(s, avctx) < 0)
return -1;
FF_ENABLE_DEPRECATION_WARNINGS
#endif /* FF_API_XVMC */
return 0;
}
| 21,054 |
FFmpeg | ce0453aac99c826ba0093f701e4dea67718ce457 | 1 | static int rm_assemble_video_frame(AVFormatContext *s, ByteIOContext *pb,
RMDemuxContext *rm, RMStream *vst,
AVPacket *pkt, int len)
{
int hdr, seq, pic_num, len2, pos;
int type;
hdr = get_byte(pb); len--;
type = hdr >> 6;
if(type != 3){ // not frame as a part of packet
seq = get_byte(pb); len--;
}
if(type != 1){ // not whole frame
len2 = get_num(pb, &len);
pos = get_num(pb, &len);
pic_num = get_byte(pb); len--;
}
if(len<0)
return -1;
rm->remaining_len = len;
if(type&1){ // frame, not slice
if(type == 3) // frame as a part of packet
len= len2;
if(rm->remaining_len < len)
return -1;
rm->remaining_len -= len;
if(av_new_packet(pkt, len + 9) < 0)
return AVERROR(EIO);
pkt->data[0] = 0;
AV_WL32(pkt->data + 1, 1);
AV_WL32(pkt->data + 5, 0);
get_buffer(pb, pkt->data + 9, len);
return 0;
}
//now we have to deal with single slice
if((seq & 0x7F) == 1 || vst->curpic_num != pic_num){
vst->slices = ((hdr & 0x3F) << 1) + 1;
vst->videobufsize = len2 + 8*vst->slices + 1;
av_free_packet(&vst->pkt); //FIXME this should be output.
if(av_new_packet(&vst->pkt, vst->videobufsize) < 0)
return AVERROR(ENOMEM);
vst->videobufpos = 8*vst->slices + 1;
vst->cur_slice = 0;
vst->curpic_num = pic_num;
vst->pktpos = url_ftell(pb);
}
if(type == 2)
len = FFMIN(len, pos);
if(++vst->cur_slice > vst->slices)
return 1;
AV_WL32(vst->pkt.data - 7 + 8*vst->cur_slice, 1);
AV_WL32(vst->pkt.data - 3 + 8*vst->cur_slice, vst->videobufpos - 8*vst->slices - 1);
if(vst->videobufpos + len > vst->videobufsize)
return 1;
if (get_buffer(pb, vst->pkt.data + vst->videobufpos, len) != len)
return AVERROR(EIO);
vst->videobufpos += len;
rm->remaining_len-= len;
if(type == 2 || (vst->videobufpos) == vst->videobufsize){
vst->pkt.data[0] = vst->cur_slice-1;
*pkt= vst->pkt;
vst->pkt.data=
vst->pkt.size= 0;
if(vst->slices != vst->cur_slice) //FIXME find out how to set slices correct from the begin
memmove(pkt->data + 1 + 8*vst->cur_slice, pkt->data + 1 + 8*vst->slices,
vst->videobufpos - 1 - 8*vst->slices);
pkt->size += 8*(vst->cur_slice - vst->slices);
pkt->pts = AV_NOPTS_VALUE;
pkt->pos = vst->pktpos;
return 0;
}
return 1;
}
| 21,055 |
qemu | 06ef8604e92964cbf30084b7d31091aa7cbbb62f | 1 | static ExitStatus gen_call_pal(DisasContext *ctx, int palcode)
{
/* We're emulating OSF/1 PALcode. Many of these are trivial access
to internal cpu registers. */
/* Unprivileged PAL call */
if (palcode >= 0x80 && palcode < 0xC0) {
switch (palcode) {
case 0x86:
/* IMB */
/* No-op inside QEMU. */
break;
case 0x9E:
/* RDUNIQUE */
tcg_gen_mov_i64(cpu_ir[IR_V0], cpu_unique);
break;
case 0x9F:
/* WRUNIQUE */
tcg_gen_mov_i64(cpu_unique, cpu_ir[IR_A0]);
break;
default:
palcode &= 0xbf;
goto do_call_pal;
}
return NO_EXIT;
}
#ifndef CONFIG_USER_ONLY
/* Privileged PAL code */
if (palcode < 0x40 && (ctx->tb->flags & TB_FLAGS_USER_MODE) == 0) {
switch (palcode) {
case 0x01:
/* CFLUSH */
/* No-op inside QEMU. */
break;
case 0x02:
/* DRAINA */
/* No-op inside QEMU. */
break;
case 0x2D:
/* WRVPTPTR */
tcg_gen_st_i64(cpu_ir[IR_A0], cpu_env, offsetof(CPUAlphaState, vptptr));
break;
case 0x31:
/* WRVAL */
tcg_gen_mov_i64(cpu_sysval, cpu_ir[IR_A0]);
break;
case 0x32:
/* RDVAL */
tcg_gen_mov_i64(cpu_ir[IR_V0], cpu_sysval);
break;
case 0x35: {
/* SWPIPL */
TCGv tmp;
/* Note that we already know we're in kernel mode, so we know
that PS only contains the 3 IPL bits. */
tcg_gen_ld8u_i64(cpu_ir[IR_V0], cpu_env, offsetof(CPUAlphaState, ps));
/* But make sure and store only the 3 IPL bits from the user. */
tmp = tcg_temp_new();
tcg_gen_andi_i64(tmp, cpu_ir[IR_A0], PS_INT_MASK);
tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, ps));
tcg_temp_free(tmp);
break;
}
case 0x36:
/* RDPS */
tcg_gen_ld8u_i64(cpu_ir[IR_V0], cpu_env, offsetof(CPUAlphaState, ps));
break;
case 0x38:
/* WRUSP */
tcg_gen_mov_i64(cpu_usp, cpu_ir[IR_A0]);
break;
case 0x3A:
/* RDUSP */
tcg_gen_mov_i64(cpu_ir[IR_V0], cpu_usp);
break;
case 0x3C:
/* WHAMI */
tcg_gen_ld32s_i64(cpu_ir[IR_V0], cpu_env,
-offsetof(AlphaCPU, env) + offsetof(CPUState, cpu_index));
break;
default:
palcode &= 0x3f;
goto do_call_pal;
}
return NO_EXIT;
}
#endif
return gen_invalid(ctx);
do_call_pal:
#ifdef CONFIG_USER_ONLY
return gen_excp(ctx, EXCP_CALL_PAL, palcode);
#else
{
TCGv pc = tcg_const_i64(ctx->pc);
TCGv entry = tcg_const_i64(palcode & 0x80
? 0x2000 + (palcode - 0x80) * 64
: 0x1000 + palcode * 64);
gen_helper_call_pal(cpu_env, pc, entry);
tcg_temp_free(entry);
tcg_temp_free(pc);
/* Since the destination is running in PALmode, we don't really
need the page permissions check. We'll see the existence of
the page when we create the TB, and we'll flush all TBs if
we change the PAL base register. */
if (!ctx->singlestep_enabled && !(ctx->tb->cflags & CF_LAST_IO)) {
tcg_gen_goto_tb(0);
tcg_gen_exit_tb((uintptr_t)ctx->tb);
return EXIT_GOTO_TB;
}
return EXIT_PC_UPDATED;
}
#endif
}
| 21,056 |
qemu | fed105e2756dde98efa5e80baca02ae516dd1e51 | 1 | void virtio_blk_data_plane_destroy(VirtIOBlockDataPlane *s)
{
if (!s) {
return;
}
virtio_blk_data_plane_stop(s);
blk_op_unblock_all(s->conf->conf.blk, s->blocker);
error_free(s->blocker);
object_unref(OBJECT(s->iothread));
qemu_bh_delete(s->bh);
g_free(s);
}
| 21,057 |
qemu | 6c2d1c32d084320081b0cd047f8cacd6e722d03a | 1 | static void uhci_class_init(ObjectClass *klass, void *data)
{
DeviceClass *dc = DEVICE_CLASS(klass);
PCIDeviceClass *k = PCI_DEVICE_CLASS(klass);
UHCIPCIDeviceClass *u = container_of(k, UHCIPCIDeviceClass, parent_class);
UHCIInfo *info = data;
k->init = info->initfn ? info->initfn : usb_uhci_common_initfn;
k->exit = info->unplug ? usb_uhci_exit : NULL;
k->vendor_id = info->vendor_id;
k->device_id = info->device_id;
k->revision = info->revision;
k->class_id = PCI_CLASS_SERIAL_USB;
dc->vmsd = &vmstate_uhci;
dc->props = uhci_properties;
u->info = *info;
} | 21,058 |
qemu | 788cf9f8c8cbda53843e060540f3e91a060eb744 | 1 | static int openfile(char *name, int flags, bool writethrough, bool force_share,
QDict *opts)
{
Error *local_err = NULL;
BlockDriverState *bs;
if (qemuio_blk) {
error_report("file open already, try 'help close'");
QDECREF(opts);
return 1;
}
if (force_share) {
if (!opts) {
opts = qdict_new();
}
if (qdict_haskey(opts, BDRV_OPT_FORCE_SHARE)
&& !qdict_get_bool(opts, BDRV_OPT_FORCE_SHARE)) {
error_report("-U conflicts with image options");
QDECREF(opts);
return 1;
}
qdict_put_bool(opts, BDRV_OPT_FORCE_SHARE, true);
}
qemuio_blk = blk_new_open(name, NULL, opts, flags, &local_err);
if (!qemuio_blk) {
error_reportf_err(local_err, "can't open%s%s: ",
name ? " device " : "", name ?: "");
return 1;
}
bs = blk_bs(qemuio_blk);
if (bdrv_is_encrypted(bs) && bdrv_key_required(bs)) {
char password[256];
printf("Disk image '%s' is encrypted.\n", name);
if (qemu_read_password(password, sizeof(password)) < 0) {
error_report("No password given");
goto error;
}
if (bdrv_set_key(bs, password) < 0) {
error_report("invalid password");
goto error;
}
}
blk_set_enable_write_cache(qemuio_blk, !writethrough);
return 0;
error:
blk_unref(qemuio_blk);
qemuio_blk = NULL;
return 1;
}
| 21,060 |
qemu | 7843c0d60db694b6d97e14ec5538fb97424016c1 | 1 | static void powerpc_get_compat(Object *obj, Visitor *v, const char *name,
void *opaque, Error **errp)
{
char *value = (char *)"";
Property *prop = opaque;
uint32_t *max_compat = qdev_get_prop_ptr(DEVICE(obj), prop);
switch (*max_compat) {
case CPU_POWERPC_LOGICAL_2_05:
value = (char *)"power6";
break;
case CPU_POWERPC_LOGICAL_2_06:
value = (char *)"power7";
break;
case CPU_POWERPC_LOGICAL_2_07:
value = (char *)"power8";
break;
case 0:
break;
default:
error_report("Internal error: compat is set to %x", *max_compat);
abort();
break;
}
visit_type_str(v, name, &value, errp);
}
| 21,062 |
qemu | 6d3cb1f970ee85361618f7ff02869180394e012d | 1 | int cache_insert(PageCache *cache, uint64_t addr, uint8_t *pdata)
{
CacheItem *it = NULL;
g_assert(cache);
g_assert(cache->page_cache);
/* actual update of entry */
it = cache_get_by_addr(cache, addr);
/* allocate page */
if (!it->it_data) {
it->it_data = g_try_malloc(cache->page_size);
if (!it->it_data) {
DPRINTF("Error allocating page\n");
return -1;
}
cache->num_items++;
}
memcpy(it->it_data, pdata, cache->page_size);
it->it_age = ++cache->max_item_age;
it->it_addr = addr;
return 0;
}
| 21,063 |
FFmpeg | 5029a406334ad0eaf92130e23d596e405a8a5aa0 | 1 | int ff_alloc_picture(MpegEncContext *s, Picture *pic, int shared){
const int big_mb_num= s->mb_stride*(s->mb_height+1) + 1; //the +1 is needed so memset(,,stride*height) does not sig11
const int mb_array_size= s->mb_stride*s->mb_height;
const int b8_array_size= s->b8_stride*s->mb_height*2;
const int b4_array_size= s->b4_stride*s->mb_height*4;
int i;
int r= -1;
if(shared){
assert(pic->data[0]);
assert(pic->type == 0 || pic->type == FF_BUFFER_TYPE_SHARED);
pic->type= FF_BUFFER_TYPE_SHARED;
}else{
assert(!pic->data[0]);
if (alloc_frame_buffer(s, pic) < 0)
return -1;
s->linesize = pic->linesize[0];
s->uvlinesize= pic->linesize[1];
}
if(pic->qscale_table==NULL){
if (s->encoding) {
FF_ALLOCZ_OR_GOTO(s->avctx, pic->mb_var , mb_array_size * sizeof(int16_t) , fail)
FF_ALLOCZ_OR_GOTO(s->avctx, pic->mc_mb_var, mb_array_size * sizeof(int16_t) , fail)
FF_ALLOCZ_OR_GOTO(s->avctx, pic->mb_mean , mb_array_size * sizeof(int8_t ) , fail)
}
FF_ALLOCZ_OR_GOTO(s->avctx, pic->mbskip_table , mb_array_size * sizeof(uint8_t)+2, fail) //the +2 is for the slice end check
FF_ALLOCZ_OR_GOTO(s->avctx, pic->qscale_table , mb_array_size * sizeof(uint8_t) , fail)
FF_ALLOCZ_OR_GOTO(s->avctx, pic->mb_type_base , (big_mb_num + s->mb_stride) * sizeof(uint32_t), fail)
pic->mb_type= pic->mb_type_base + 2*s->mb_stride+1;
if(s->out_format == FMT_H264){
for(i=0; i<2; i++){
FF_ALLOCZ_OR_GOTO(s->avctx, pic->motion_val_base[i], 2 * (b4_array_size+4) * sizeof(int16_t), fail)
pic->motion_val[i]= pic->motion_val_base[i]+4;
FF_ALLOCZ_OR_GOTO(s->avctx, pic->ref_index[i], 4*mb_array_size * sizeof(uint8_t), fail)
}
pic->motion_subsample_log2= 2;
}else if(s->out_format == FMT_H263 || s->encoding || (s->avctx->debug&FF_DEBUG_MV) || (s->avctx->debug_mv)){
for(i=0; i<2; i++){
FF_ALLOCZ_OR_GOTO(s->avctx, pic->motion_val_base[i], 2 * (b8_array_size+4) * sizeof(int16_t), fail)
pic->motion_val[i]= pic->motion_val_base[i]+4;
FF_ALLOCZ_OR_GOTO(s->avctx, pic->ref_index[i], 4*mb_array_size * sizeof(uint8_t), fail)
}
pic->motion_subsample_log2= 3;
}
if(s->avctx->debug&FF_DEBUG_DCT_COEFF) {
FF_ALLOCZ_OR_GOTO(s->avctx, pic->dct_coeff, 64 * mb_array_size * sizeof(DCTELEM)*6, fail)
}
pic->qstride= s->mb_stride;
FF_ALLOCZ_OR_GOTO(s->avctx, pic->pan_scan , 1 * sizeof(AVPanScan), fail)
}
/* It might be nicer if the application would keep track of these
* but it would require an API change. */
memmove(s->prev_pict_types+1, s->prev_pict_types, PREV_PICT_TYPES_BUFFER_SIZE-1);
s->prev_pict_types[0]= s->dropable ? AV_PICTURE_TYPE_B : s->pict_type;
if(pic->age < PREV_PICT_TYPES_BUFFER_SIZE && s->prev_pict_types[pic->age] == AV_PICTURE_TYPE_B)
pic->age= INT_MAX; // Skipped MBs in B-frames are quite rare in MPEG-1/2 and it is a bit tricky to skip them anyway.
pic->owner2 = NULL;
return 0;
fail: //for the FF_ALLOCZ_OR_GOTO macro
if(r>=0)
free_frame_buffer(s, pic);
return -1;
}
| 21,064 |
qemu | 2b218f5dbcca5fe728b1852d161d7a21fd02b2f5 | 1 | static int raw_open_common(BlockDriverState *bs, QDict *options,
int bdrv_flags, int open_flags, Error **errp)
{
BDRVRawState *s = bs->opaque;
QemuOpts *opts;
Error *local_err = NULL;
const char *filename = NULL;
BlockdevAioOptions aio, aio_default;
int fd, ret;
struct stat st;
OnOffAuto locking;
opts = qemu_opts_create(&raw_runtime_opts, NULL, 0, &error_abort);
qemu_opts_absorb_qdict(opts, options, &local_err);
if (local_err) {
error_propagate(errp, local_err);
ret = -EINVAL;
goto fail;
}
filename = qemu_opt_get(opts, "filename");
ret = raw_normalize_devicepath(&filename);
if (ret != 0) {
error_setg_errno(errp, -ret, "Could not normalize device path");
goto fail;
}
aio_default = (bdrv_flags & BDRV_O_NATIVE_AIO)
? BLOCKDEV_AIO_OPTIONS_NATIVE
: BLOCKDEV_AIO_OPTIONS_THREADS;
aio = qapi_enum_parse(BlockdevAioOptions_lookup, qemu_opt_get(opts, "aio"),
BLOCKDEV_AIO_OPTIONS__MAX, aio_default, &local_err);
if (local_err) {
error_propagate(errp, local_err);
ret = -EINVAL;
goto fail;
}
s->use_linux_aio = (aio == BLOCKDEV_AIO_OPTIONS_NATIVE);
locking = qapi_enum_parse(OnOffAuto_lookup, qemu_opt_get(opts, "locking"),
ON_OFF_AUTO__MAX, ON_OFF_AUTO_AUTO, &local_err);
if (local_err) {
error_propagate(errp, local_err);
ret = -EINVAL;
goto fail;
}
switch (locking) {
case ON_OFF_AUTO_ON:
s->use_lock = true;
#ifndef F_OFD_SETLK
fprintf(stderr,
"File lock requested but OFD locking syscall is unavailable, "
"falling back to POSIX file locks.\n"
"Due to the implementation, locks can be lost unexpectedly.\n");
#endif
break;
case ON_OFF_AUTO_OFF:
s->use_lock = false;
break;
case ON_OFF_AUTO_AUTO:
#ifdef F_OFD_SETLK
s->use_lock = true;
#else
s->use_lock = false;
#endif
break;
default:
abort();
}
s->open_flags = open_flags;
raw_parse_flags(bdrv_flags, &s->open_flags);
s->fd = -1;
fd = qemu_open(filename, s->open_flags, 0644);
if (fd < 0) {
ret = -errno;
error_setg_errno(errp, errno, "Could not open '%s'", filename);
if (ret == -EROFS) {
ret = -EACCES;
}
goto fail;
}
s->fd = fd;
s->lock_fd = -1;
if (s->use_lock) {
fd = qemu_open(filename, s->open_flags);
if (fd < 0) {
ret = -errno;
error_setg_errno(errp, errno, "Could not open '%s' for locking",
filename);
qemu_close(s->fd);
goto fail;
}
s->lock_fd = fd;
}
s->perm = 0;
s->shared_perm = BLK_PERM_ALL;
#ifdef CONFIG_LINUX_AIO
/* Currently Linux does AIO only for files opened with O_DIRECT */
if (s->use_linux_aio && !(s->open_flags & O_DIRECT)) {
error_setg(errp, "aio=native was specified, but it requires "
"cache.direct=on, which was not specified.");
ret = -EINVAL;
goto fail;
}
#else
if (s->use_linux_aio) {
error_setg(errp, "aio=native was specified, but is not supported "
"in this build.");
ret = -EINVAL;
goto fail;
}
#endif /* !defined(CONFIG_LINUX_AIO) */
s->has_discard = true;
s->has_write_zeroes = true;
bs->supported_zero_flags = BDRV_REQ_MAY_UNMAP;
if ((bs->open_flags & BDRV_O_NOCACHE) != 0) {
s->needs_alignment = true;
}
if (fstat(s->fd, &st) < 0) {
ret = -errno;
error_setg_errno(errp, errno, "Could not stat file");
goto fail;
}
if (S_ISREG(st.st_mode)) {
s->discard_zeroes = true;
s->has_fallocate = true;
}
if (S_ISBLK(st.st_mode)) {
#ifdef BLKDISCARDZEROES
unsigned int arg;
if (ioctl(s->fd, BLKDISCARDZEROES, &arg) == 0 && arg) {
s->discard_zeroes = true;
}
#endif
#ifdef __linux__
/* On Linux 3.10, BLKDISCARD leaves stale data in the page cache. Do
* not rely on the contents of discarded blocks unless using O_DIRECT.
* Same for BLKZEROOUT.
*/
if (!(bs->open_flags & BDRV_O_NOCACHE)) {
s->discard_zeroes = false;
s->has_write_zeroes = false;
}
#endif
}
#ifdef __FreeBSD__
if (S_ISCHR(st.st_mode)) {
/*
* The file is a char device (disk), which on FreeBSD isn't behind
* a pager, so force all requests to be aligned. This is needed
* so QEMU makes sure all IO operations on the device are aligned
* to sector size, or else FreeBSD will reject them with EINVAL.
*/
s->needs_alignment = true;
}
#endif
#ifdef CONFIG_XFS
if (platform_test_xfs_fd(s->fd)) {
s->is_xfs = true;
}
#endif
ret = 0;
fail:
if (filename && (bdrv_flags & BDRV_O_TEMPORARY)) {
unlink(filename);
}
qemu_opts_del(opts);
return ret;
}
| 21,065 |
FFmpeg | ebbcdc9ac0ea190748a1605bda86ce84466c8b4e | 0 | int ff_pre_estimate_p_frame_motion(MpegEncContext * s,
int mb_x, int mb_y)
{
int mx, my, range, dmin;
int xmin, ymin, xmax, ymax;
int rel_xmin, rel_ymin, rel_xmax, rel_ymax;
int pred_x=0, pred_y=0;
int P[10][2];
const int shift= 1+s->quarter_sample;
uint16_t * const mv_penalty= s->me.mv_penalty[s->f_code] + MAX_MV;
const int mv_stride= s->mb_width + 2;
const int xy= mb_x + 1 + (mb_y + 1)*mv_stride;
assert(s->quarter_sample==0 || s->quarter_sample==1);
s->me.pre_penalty_factor = get_penalty_factor(s, s->avctx->me_pre_cmp);
get_limits(s, &range, &xmin, &ymin, &xmax, &ymax, s->f_code);
rel_xmin= xmin - mb_x*16;
rel_xmax= xmax - mb_x*16;
rel_ymin= ymin - mb_y*16;
rel_ymax= ymax - mb_y*16;
s->me.skip=0;
P_LEFT[0] = s->p_mv_table[xy + 1][0];
P_LEFT[1] = s->p_mv_table[xy + 1][1];
if(P_LEFT[0] < (rel_xmin<<shift)) P_LEFT[0] = (rel_xmin<<shift);
/* special case for first line */
if (mb_y == s->mb_height-1) {
pred_x= P_LEFT[0];
pred_y= P_LEFT[1];
P_TOP[0]= P_TOPRIGHT[0]= P_MEDIAN[0]=
P_TOP[1]= P_TOPRIGHT[1]= P_MEDIAN[1]= 0; //FIXME
} else {
P_TOP[0] = s->p_mv_table[xy + mv_stride ][0];
P_TOP[1] = s->p_mv_table[xy + mv_stride ][1];
P_TOPRIGHT[0] = s->p_mv_table[xy + mv_stride - 1][0];
P_TOPRIGHT[1] = s->p_mv_table[xy + mv_stride - 1][1];
if(P_TOP[1] < (rel_ymin<<shift)) P_TOP[1] = (rel_ymin<<shift);
if(P_TOPRIGHT[0] > (rel_xmax<<shift)) P_TOPRIGHT[0]= (rel_xmax<<shift);
if(P_TOPRIGHT[1] < (rel_ymin<<shift)) P_TOPRIGHT[1]= (rel_ymin<<shift);
P_MEDIAN[0]= mid_pred(P_LEFT[0], P_TOP[0], P_TOPRIGHT[0]);
P_MEDIAN[1]= mid_pred(P_LEFT[1], P_TOP[1], P_TOPRIGHT[1]);
pred_x = P_MEDIAN[0];
pred_y = P_MEDIAN[1];
}
dmin = s->me.pre_motion_search(s, 0, &mx, &my, P, pred_x, pred_y, rel_xmin, rel_ymin, rel_xmax, rel_ymax,
&s->last_picture, s->p_mv_table, (1<<16)>>shift, mv_penalty);
s->p_mv_table[xy][0] = mx<<shift;
s->p_mv_table[xy][1] = my<<shift;
return dmin;
}
| 21,066 |
FFmpeg | d1adad3cca407f493c3637e20ecd4f7124e69212 | 0 | static void RENAME(chrRangeFromJpeg)(int16_t *dst, int width)
{
int i;
for (i = 0; i < width; i++) {
dst[i ] = (dst[i ]*1799 + 4081085)>>11; //1469
dst[i+VOFW] = (dst[i+VOFW]*1799 + 4081085)>>11; //1469
}
}
| 21,067 |
FFmpeg | e0db41316a94d85c1d6ab7ebeaf1f4b5e0f3c76a | 0 | int ff_draw_init(FFDrawContext *draw, enum AVPixelFormat format, unsigned flags)
{
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(format);
const AVComponentDescriptor *c;
unsigned i, nb_planes = 0;
int pixelstep[MAX_PLANES] = { 0 };
if (!desc->name)
return AVERROR(EINVAL);
if (desc->flags & ~(AV_PIX_FMT_FLAG_PLANAR | AV_PIX_FMT_FLAG_RGB | AV_PIX_FMT_FLAG_PSEUDOPAL | AV_PIX_FMT_FLAG_ALPHA))
return AVERROR(ENOSYS);
for (i = 0; i < desc->nb_components; i++) {
c = &desc->comp[i];
/* for now, only 8-bits formats */
if (c->depth_minus1 != 8 - 1)
return AVERROR(ENOSYS);
if (c->plane >= MAX_PLANES)
return AVERROR(ENOSYS);
/* strange interleaving */
if (pixelstep[c->plane] != 0 &&
pixelstep[c->plane] != c->step_minus1 + 1)
return AVERROR(ENOSYS);
pixelstep[c->plane] = c->step_minus1 + 1;
if (pixelstep[c->plane] >= 8)
return AVERROR(ENOSYS);
nb_planes = FFMAX(nb_planes, c->plane + 1);
}
if ((desc->log2_chroma_w || desc->log2_chroma_h) && nb_planes < 3)
return AVERROR(ENOSYS); /* exclude NV12 and NV21 */
memset(draw, 0, sizeof(*draw));
draw->desc = desc;
draw->format = format;
draw->nb_planes = nb_planes;
memcpy(draw->pixelstep, pixelstep, sizeof(draw->pixelstep));
draw->hsub[1] = draw->hsub[2] = draw->hsub_max = desc->log2_chroma_w;
draw->vsub[1] = draw->vsub[2] = draw->vsub_max = desc->log2_chroma_h;
for (i = 0; i < ((desc->nb_components - 1) | 1); i++)
draw->comp_mask[desc->comp[i].plane] |=
1 << (desc->comp[i].offset_plus1 - 1);
return 0;
}
| 21,069 |
qemu | c573fc03da65e35bf44bce0448ea12801e3631ac | 1 | static int htab_save_complete(QEMUFile *f, void *opaque)
{
sPAPRMachineState *spapr = opaque;
int fd;
/* Iteration header */
qemu_put_be32(f, 0);
if (!spapr->htab) {
int rc;
assert(kvm_enabled());
fd = get_htab_fd(spapr);
if (fd < 0) {
return fd;
}
rc = kvmppc_save_htab(f, fd, MAX_KVM_BUF_SIZE, -1);
if (rc < 0) {
return rc;
}
close_htab_fd(spapr);
} else {
if (spapr->htab_first_pass) {
htab_save_first_pass(f, spapr, -1);
}
htab_save_later_pass(f, spapr, -1);
}
/* End marker */
qemu_put_be32(f, 0);
qemu_put_be16(f, 0);
qemu_put_be16(f, 0);
return 0;
}
| 21,071 |
qemu | 187337f8b0ec0813dd3876d1efe37d415fb81c2e | 1 | struct pxa2xx_gpio_info_s *pxa2xx_gpio_init(target_phys_addr_t base,
CPUState *env, qemu_irq *pic, int lines)
{
int iomemtype;
struct pxa2xx_gpio_info_s *s;
s = (struct pxa2xx_gpio_info_s *)
qemu_mallocz(sizeof(struct pxa2xx_gpio_info_s));
memset(s, 0, sizeof(struct pxa2xx_gpio_info_s));
s->base = base;
s->pic = pic;
s->lines = lines;
s->cpu_env = env;
iomemtype = cpu_register_io_memory(0, pxa2xx_gpio_readfn,
pxa2xx_gpio_writefn, s);
cpu_register_physical_memory(base, 0x00000fff, iomemtype);
register_savevm("pxa2xx_gpio", 0, 0,
pxa2xx_gpio_save, pxa2xx_gpio_load, s);
return s;
}
| 21,072 |
FFmpeg | 79a0ec1af4817bb7b989803b9f460d1e4acaf7b7 | 1 | AVFilterFormats *avfilter_merge_formats(AVFilterFormats *a, AVFilterFormats *b)
{
AVFilterFormats *ret;
unsigned i, j, k = 0;
ret = av_mallocz(sizeof(AVFilterFormats));
/* merge list of formats */
ret->formats = av_malloc(sizeof(*ret->formats) * FFMIN(a->format_count,
b->format_count));
for(i = 0; i < a->format_count; i ++)
for(j = 0; j < b->format_count; j ++)
if(a->formats[i] == b->formats[j])
ret->formats[k++] = a->formats[i];
ret->format_count = k;
/* check that there was at least one common format */
if(!ret->format_count) {
av_free(ret->formats);
av_free(ret);
return NULL;
}
ret->refs = av_malloc(sizeof(AVFilterFormats**)*(a->refcount+b->refcount));
merge_ref(ret, a);
merge_ref(ret, b);
return ret;
} | 21,073 |
qemu | 8bd6b06d7b718b3e595aab279699ef3651ce2e48 | 1 | void console_select(unsigned int index)
{
TextConsole *s;
if (index >= MAX_CONSOLES)
return;
if (active_console) {
active_console->g_width = ds_get_width(active_console->ds);
active_console->g_height = ds_get_height(active_console->ds);
}
s = consoles[index];
if (s) {
DisplayState *ds = s->ds;
if (active_console->cursor_timer) {
qemu_del_timer(active_console->cursor_timer);
}
active_console = s;
if (ds_get_bits_per_pixel(s->ds)) {
ds->surface = qemu_resize_displaysurface(ds, s->g_width, s->g_height);
} else {
s->ds->surface->width = s->width;
s->ds->surface->height = s->height;
}
if (s->cursor_timer) {
qemu_mod_timer(s->cursor_timer,
qemu_get_clock_ms(rt_clock) + CONSOLE_CURSOR_PERIOD / 2);
}
dpy_resize(s->ds);
vga_hw_invalidate();
}
}
| 21,074 |
FFmpeg | b8664c929437d6d079e16979c496a2db40cf2324 | 0 | static av_always_inline int simple_limit(uint8_t *p, ptrdiff_t stride, int flim)
{
LOAD_PIXELS
return 2*FFABS(p0-q0) + (FFABS(p1-q1) >> 1) <= flim;
}
| 21,075 |
FFmpeg | 190f6135b48a97dadd7586f154640bec6468df1b | 0 | dshow_read_close(AVFormatContext *s)
{
struct dshow_ctx *ctx = s->priv_data;
AVPacketList *pktl;
if (ctx->control) {
IMediaControl_Stop(ctx->control);
IMediaControl_Release(ctx->control);
}
if (ctx->graph) {
IEnumFilters *fenum;
int r;
r = IGraphBuilder_EnumFilters(ctx->graph, &fenum);
if (r == S_OK) {
IBaseFilter *f;
IEnumFilters_Reset(fenum);
while (IEnumFilters_Next(fenum, 1, &f, NULL) == S_OK) {
if (IGraphBuilder_RemoveFilter(ctx->graph, f) == S_OK)
IEnumFilters_Reset(fenum); /* When a filter is removed,
* the list must be reset. */
IBaseFilter_Release(f);
}
IEnumFilters_Release(fenum);
}
IGraphBuilder_Release(ctx->graph);
}
if (ctx->capture_pin[VideoDevice])
libAVPin_Release(ctx->capture_pin[VideoDevice]);
if (ctx->capture_pin[AudioDevice])
libAVPin_Release(ctx->capture_pin[AudioDevice]);
if (ctx->capture_filter[VideoDevice])
libAVFilter_Release(ctx->capture_filter[VideoDevice]);
if (ctx->capture_filter[AudioDevice])
libAVFilter_Release(ctx->capture_filter[AudioDevice]);
if (ctx->device_pin[VideoDevice])
IPin_Release(ctx->device_pin[VideoDevice]);
if (ctx->device_pin[AudioDevice])
IPin_Release(ctx->device_pin[AudioDevice]);
if (ctx->device_filter[VideoDevice])
IBaseFilter_Release(ctx->device_filter[VideoDevice]);
if (ctx->device_filter[AudioDevice])
IBaseFilter_Release(ctx->device_filter[AudioDevice]);
if (ctx->device_name[0])
av_free(ctx->device_name[0]);
if (ctx->device_name[1])
av_free(ctx->device_name[1]);
if(ctx->mutex)
CloseHandle(ctx->mutex);
if(ctx->event)
CloseHandle(ctx->event);
pktl = ctx->pktl;
while (pktl) {
AVPacketList *next = pktl->next;
av_destruct_packet(&pktl->pkt);
av_free(pktl);
pktl = next;
}
return 0;
}
| 21,076 |
FFmpeg | 247e658784ead984f96021acb9c95052ba599f26 | 0 | static int ftp_conn_control_block_control(void *data)
{
FTPContext *s = data;
return s->conn_control_block_flag;
}
| 21,077 |
FFmpeg | 3a8c95f730732b9f1ffacdbfbf79a01b202a67af | 0 | static int probe_file(const char *filename)
{
AVFormatContext *fmt_ctx;
int ret, i;
if ((ret = open_input_file(&fmt_ctx, filename)))
return ret;
if (do_show_packets)
show_packets(fmt_ctx);
if (do_show_streams)
for (i = 0; i < fmt_ctx->nb_streams; i++)
show_stream(fmt_ctx, i);
if (do_show_format)
show_format(fmt_ctx);
close_input_file(&fmt_ctx);
return 0;
}
| 21,078 |
FFmpeg | 212c6a1d70df011b6f2a2aa02f7677503287bd00 | 1 | av_cold int ff_mjpeg_decode_init(AVCodecContext *avctx)
{
MJpegDecodeContext *s = avctx->priv_data;
if (!s->picture_ptr) {
s->picture = av_frame_alloc();
if (!s->picture)
return AVERROR(ENOMEM);
s->picture_ptr = s->picture;
}
s->avctx = avctx;
ff_blockdsp_init(&s->bdsp, avctx);
ff_hpeldsp_init(&s->hdsp, avctx->flags);
ff_idctdsp_init(&s->idsp, avctx);
ff_init_scantable(s->idsp.idct_permutation, &s->scantable,
ff_zigzag_direct);
s->buffer_size = 0;
s->buffer = NULL;
s->start_code = -1;
s->first_picture = 1;
s->org_height = avctx->coded_height;
avctx->chroma_sample_location = AVCHROMA_LOC_CENTER;
avctx->colorspace = AVCOL_SPC_BT470BG;
build_basic_mjpeg_vlc(s);
if (s->extern_huff) {
int ret;
av_log(avctx, AV_LOG_INFO, "mjpeg: using external huffman table\n");
init_get_bits(&s->gb, avctx->extradata, avctx->extradata_size * 8);
if ((ret = ff_mjpeg_decode_dht(s))) {
av_log(avctx, AV_LOG_ERROR,
"mjpeg: error using external huffman table\n");
return ret;
}
}
if (avctx->field_order == AV_FIELD_BB) { /* quicktime icefloe 019 */
s->interlace_polarity = 1; /* bottom field first */
av_log(avctx, AV_LOG_DEBUG, "mjpeg bottom field first\n");
}
if (avctx->codec->id == AV_CODEC_ID_AMV)
s->flipped = 1;
return 0;
}
| 21,079 |
qemu | 79368c81bf8cf93864d7afc88b81b05d8f0a2c90 | 1 | int bdrv_open(BlockDriverState *bs, const char *filename, int flags,
BlockDriver *drv)
{
int ret;
int probed = 0;
if (flags & BDRV_O_SNAPSHOT) {
BlockDriverState *bs1;
int64_t total_size;
int is_protocol = 0;
BlockDriver *bdrv_qcow2;
QEMUOptionParameter *options;
char tmp_filename[PATH_MAX];
char backing_filename[PATH_MAX];
/* if snapshot, we create a temporary backing file and open it
instead of opening 'filename' directly */
/* if there is a backing file, use it */
bs1 = bdrv_new("");
ret = bdrv_open(bs1, filename, 0, drv);
if (ret < 0) {
bdrv_delete(bs1);
return ret;
}
total_size = bdrv_getlength(bs1) & BDRV_SECTOR_MASK;
if (bs1->drv && bs1->drv->protocol_name)
is_protocol = 1;
bdrv_delete(bs1);
get_tmp_filename(tmp_filename, sizeof(tmp_filename));
/* Real path is meaningless for protocols */
if (is_protocol)
snprintf(backing_filename, sizeof(backing_filename),
"%s", filename);
else if (!realpath(filename, backing_filename))
return -errno;
bdrv_qcow2 = bdrv_find_format("qcow2");
options = parse_option_parameters("", bdrv_qcow2->create_options, NULL);
set_option_parameter_int(options, BLOCK_OPT_SIZE, total_size);
set_option_parameter(options, BLOCK_OPT_BACKING_FILE, backing_filename);
if (drv) {
set_option_parameter(options, BLOCK_OPT_BACKING_FMT,
drv->format_name);
}
ret = bdrv_create(bdrv_qcow2, tmp_filename, options);
free_option_parameters(options);
if (ret < 0) {
return ret;
}
filename = tmp_filename;
drv = bdrv_qcow2;
bs->is_temporary = 1;
}
/* Find the right image format driver */
if (!drv) {
drv = find_image_format(filename);
probed = 1;
}
if (!drv) {
ret = -ENOENT;
goto unlink_and_fail;
}
/* Open the image */
ret = bdrv_open_common(bs, filename, flags, drv);
if (ret < 0) {
goto unlink_and_fail;
}
/* If there is a backing file, use it */
if ((flags & BDRV_O_NO_BACKING) == 0 && bs->backing_file[0] != '\0') {
char backing_filename[PATH_MAX];
int back_flags;
BlockDriver *back_drv = NULL;
bs->backing_hd = bdrv_new("");
path_combine(backing_filename, sizeof(backing_filename),
filename, bs->backing_file);
if (bs->backing_format[0] != '\0')
back_drv = bdrv_find_format(bs->backing_format);
/* backing files always opened read-only */
back_flags =
flags & ~(BDRV_O_RDWR | BDRV_O_SNAPSHOT | BDRV_O_NO_BACKING);
ret = bdrv_open(bs->backing_hd, backing_filename, back_flags, back_drv);
if (ret < 0) {
bdrv_close(bs);
return ret;
}
if (bs->is_temporary) {
bs->backing_hd->keep_read_only = !(flags & BDRV_O_RDWR);
} else {
/* base image inherits from "parent" */
bs->backing_hd->keep_read_only = bs->keep_read_only;
}
}
if (!bdrv_key_required(bs)) {
/* call the change callback */
bs->media_changed = 1;
if (bs->change_cb)
bs->change_cb(bs->change_opaque);
}
return 0;
unlink_and_fail:
if (bs->is_temporary) {
unlink(filename);
}
return ret;
} | 21,080 |
FFmpeg | 668673f10ce225d26a96f1aeb62066e8c641c85a | 1 | static int parse_outputs(const char **buf, AVFilterInOut **curr_inputs,
AVFilterInOut **open_inputs,
AVFilterInOut **open_outputs, AVClass *log_ctx)
{
int ret, pad = 0;
while (**buf == '[') {
char *name = parse_link_name(buf, log_ctx);
AVFilterInOut *match;
AVFilterInOut *input = *curr_inputs;
*curr_inputs = (*curr_inputs)->next;
if (!name)
/* First check if the label is not in the open_inputs list */
match = extract_inout(name, open_inputs);
if (match) {
if ((ret = link_filter(input->filter_ctx, input->pad_idx,
match->filter_ctx, match->pad_idx, log_ctx)) < 0)
return ret;
av_free(match->name);
av_free(name);
av_free(match);
av_free(input);
} else {
/* Not in the list, so add the first input as a open_output */
input->name = name;
insert_inout(open_outputs, input);
*buf += strspn(*buf, WHITESPACES);
pad++;
return pad;
| 21,081 |
FFmpeg | a37fd7f9578d2dfbe20a109aae91e5f0a4b58874 | 1 | static inline void rgb2rgb_init_c(void)
{
rgb15to16 = rgb15to16_c;
rgb15tobgr24 = rgb15tobgr24_c;
rgb15to32 = rgb15to32_c;
rgb16tobgr24 = rgb16tobgr24_c;
rgb16to32 = rgb16to32_c;
rgb16to15 = rgb16to15_c;
rgb24tobgr16 = rgb24tobgr16_c;
rgb24tobgr15 = rgb24tobgr15_c;
rgb24tobgr32 = rgb24tobgr32_c;
rgb32to16 = rgb32to16_c;
rgb32to15 = rgb32to15_c;
rgb32tobgr24 = rgb32tobgr24_c;
rgb24to15 = rgb24to15_c;
rgb24to16 = rgb24to16_c;
rgb24tobgr24 = rgb24tobgr24_c;
shuffle_bytes_2103 = shuffle_bytes_2103_c;
rgb32tobgr16 = rgb32tobgr16_c;
rgb32tobgr15 = rgb32tobgr15_c;
yv12toyuy2 = yv12toyuy2_c;
yv12touyvy = yv12touyvy_c;
yuv422ptoyuy2 = yuv422ptoyuy2_c;
yuv422ptouyvy = yuv422ptouyvy_c;
yuy2toyv12 = yuy2toyv12_c;
planar2x = planar2x_c;
rgb24toyv12 = rgb24toyv12_c;
interleaveBytes = interleaveBytes_c;
vu9_to_vu12 = vu9_to_vu12_c;
yvu9_to_yuy2 = yvu9_to_yuy2_c;
uyvytoyuv420 = uyvytoyuv420_c;
uyvytoyuv422 = uyvytoyuv422_c;
yuyvtoyuv420 = yuyvtoyuv420_c;
yuyvtoyuv422 = yuyvtoyuv422_c;
}
| 21,082 |
qemu | cbd8acf38f37544b830086af840bfb1015ce10e0 | 1 | static Visitor *visitor_input_test_init_internal(TestInputVisitorData *data,
const char *json_string,
va_list *ap)
{
visitor_input_teardown(data, NULL);
data->obj = qobject_from_jsonv(json_string, ap);
g_assert(data->obj);
data->qiv = qobject_input_visitor_new(data->obj);
g_assert(data->qiv);
return data->qiv;
}
| 21,083 |
FFmpeg | ce928d7d2b3bd68a222cfbc19a44805ac4cdaf14 | 1 | static int audio_decode_frame(VideoState *is)
{
int data_size, resampled_data_size;
int64_t dec_channel_layout;
int got_frame = 0;
av_unused double audio_clock0;
int wanted_nb_samples;
AVRational tb;
int ret;
int reconfigure;
if (!is->frame)
if (!(is->frame = av_frame_alloc()))
return AVERROR(ENOMEM);
for (;;) {
if (is->audioq.serial != is->auddec.pkt_serial)
is->audio_buf_frames_pending = got_frame = 0;
if (!got_frame)
av_frame_unref(is->frame);
if (is->paused)
return -1;
while (is->audio_buf_frames_pending || got_frame) {
if (!is->audio_buf_frames_pending) {
got_frame = 0;
tb = (AVRational){1, is->frame->sample_rate};
#if CONFIG_AVFILTER
dec_channel_layout = get_valid_channel_layout(is->frame->channel_layout, av_frame_get_channels(is->frame));
reconfigure =
cmp_audio_fmts(is->audio_filter_src.fmt, is->audio_filter_src.channels,
is->frame->format, av_frame_get_channels(is->frame)) ||
is->audio_filter_src.channel_layout != dec_channel_layout ||
is->audio_filter_src.freq != is->frame->sample_rate ||
is->auddec.pkt_serial != is->audio_last_serial;
if (reconfigure) {
char buf1[1024], buf2[1024];
av_get_channel_layout_string(buf1, sizeof(buf1), -1, is->audio_filter_src.channel_layout);
av_get_channel_layout_string(buf2, sizeof(buf2), -1, dec_channel_layout);
av_log(NULL, AV_LOG_DEBUG,
"Audio frame changed from rate:%d ch:%d fmt:%s layout:%s serial:%d to rate:%d ch:%d fmt:%s layout:%s serial:%d\n",
is->audio_filter_src.freq, is->audio_filter_src.channels, av_get_sample_fmt_name(is->audio_filter_src.fmt), buf1, is->audio_last_serial,
is->frame->sample_rate, av_frame_get_channels(is->frame), av_get_sample_fmt_name(is->frame->format), buf2, is->auddec.pkt_serial);
is->audio_filter_src.fmt = is->frame->format;
is->audio_filter_src.channels = av_frame_get_channels(is->frame);
is->audio_filter_src.channel_layout = dec_channel_layout;
is->audio_filter_src.freq = is->frame->sample_rate;
is->audio_last_serial = is->auddec.pkt_serial;
if ((ret = configure_audio_filters(is, afilters, 1)) < 0)
return ret;
}
if ((ret = av_buffersrc_add_frame(is->in_audio_filter, is->frame)) < 0)
return ret;
#endif
}
#if CONFIG_AVFILTER
if ((ret = av_buffersink_get_frame_flags(is->out_audio_filter, is->frame, 0)) < 0) {
if (ret == AVERROR(EAGAIN)) {
is->audio_buf_frames_pending = 0;
continue;
}
if (ret == AVERROR_EOF)
is->auddec.finished = is->auddec.pkt_serial;
return ret;
}
is->audio_buf_frames_pending = 1;
tb = is->out_audio_filter->inputs[0]->time_base;
#endif
data_size = av_samples_get_buffer_size(NULL, av_frame_get_channels(is->frame),
is->frame->nb_samples,
is->frame->format, 1);
dec_channel_layout =
(is->frame->channel_layout && av_frame_get_channels(is->frame) == av_get_channel_layout_nb_channels(is->frame->channel_layout)) ?
is->frame->channel_layout : av_get_default_channel_layout(av_frame_get_channels(is->frame));
wanted_nb_samples = synchronize_audio(is, is->frame->nb_samples);
if (is->frame->format != is->audio_src.fmt ||
dec_channel_layout != is->audio_src.channel_layout ||
is->frame->sample_rate != is->audio_src.freq ||
(wanted_nb_samples != is->frame->nb_samples && !is->swr_ctx)) {
swr_free(&is->swr_ctx);
is->swr_ctx = swr_alloc_set_opts(NULL,
is->audio_tgt.channel_layout, is->audio_tgt.fmt, is->audio_tgt.freq,
dec_channel_layout, is->frame->format, is->frame->sample_rate,
0, NULL);
if (!is->swr_ctx || swr_init(is->swr_ctx) < 0) {
av_log(NULL, AV_LOG_ERROR,
"Cannot create sample rate converter for conversion of %d Hz %s %d channels to %d Hz %s %d channels!\n",
is->frame->sample_rate, av_get_sample_fmt_name(is->frame->format), av_frame_get_channels(is->frame),
is->audio_tgt.freq, av_get_sample_fmt_name(is->audio_tgt.fmt), is->audio_tgt.channels);
break;
}
is->audio_src.channel_layout = dec_channel_layout;
is->audio_src.channels = av_frame_get_channels(is->frame);
is->audio_src.freq = is->frame->sample_rate;
is->audio_src.fmt = is->frame->format;
}
if (is->swr_ctx) {
const uint8_t **in = (const uint8_t **)is->frame->extended_data;
uint8_t **out = &is->audio_buf1;
int out_count = (int64_t)wanted_nb_samples * is->audio_tgt.freq / is->frame->sample_rate + 256;
int out_size = av_samples_get_buffer_size(NULL, is->audio_tgt.channels, out_count, is->audio_tgt.fmt, 0);
int len2;
if (out_size < 0) {
av_log(NULL, AV_LOG_ERROR, "av_samples_get_buffer_size() failed\n");
break;
}
if (wanted_nb_samples != is->frame->nb_samples) {
if (swr_set_compensation(is->swr_ctx, (wanted_nb_samples - is->frame->nb_samples) * is->audio_tgt.freq / is->frame->sample_rate,
wanted_nb_samples * is->audio_tgt.freq / is->frame->sample_rate) < 0) {
av_log(NULL, AV_LOG_ERROR, "swr_set_compensation() failed\n");
break;
}
}
av_fast_malloc(&is->audio_buf1, &is->audio_buf1_size, out_size);
if (!is->audio_buf1)
return AVERROR(ENOMEM);
len2 = swr_convert(is->swr_ctx, out, out_count, in, is->frame->nb_samples);
if (len2 < 0) {
av_log(NULL, AV_LOG_ERROR, "swr_convert() failed\n");
break;
}
if (len2 == out_count) {
av_log(NULL, AV_LOG_WARNING, "audio buffer is probably too small\n");
swr_init(is->swr_ctx);
}
is->audio_buf = is->audio_buf1;
resampled_data_size = len2 * is->audio_tgt.channels * av_get_bytes_per_sample(is->audio_tgt.fmt);
} else {
is->audio_buf = is->frame->data[0];
resampled_data_size = data_size;
}
audio_clock0 = is->audio_clock;
/* update the audio clock with the pts */
if (is->frame->pts != AV_NOPTS_VALUE)
is->audio_clock = is->frame->pts * av_q2d(tb) + (double) is->frame->nb_samples / is->frame->sample_rate;
else
is->audio_clock = NAN;
is->audio_clock_serial = is->auddec.pkt_serial;
#ifdef DEBUG
{
static double last_clock;
printf("audio: delay=%0.3f clock=%0.3f clock0=%0.3f\n",
is->audio_clock - last_clock,
is->audio_clock, audio_clock0);
last_clock = is->audio_clock;
}
#endif
return resampled_data_size;
}
if ((got_frame = decoder_decode_frame(&is->auddec, is->frame, NULL)) < 0)
return -1;
if (is->auddec.flushed)
is->audio_buf_frames_pending = 0;
}
}
| 21,084 |
qemu | 31e8fd86f24b4eec8a1708d712bf0532460bb0a5 | 1 | static void scsi_do_read(void *opaque, int ret)
{
SCSIDiskReq *r = opaque;
SCSIDiskState *s = DO_UPCAST(SCSIDiskState, qdev, r->req.dev);
uint32_t n;
if (r->req.aiocb != NULL) {
r->req.aiocb = NULL;
bdrv_acct_done(s->qdev.conf.bs, &r->acct);
if (ret < 0) {
if (scsi_handle_rw_error(r, -ret)) {
goto done;
if (r->req.sg) {
dma_acct_start(s->qdev.conf.bs, &r->acct, r->req.sg, BDRV_ACCT_READ);
r->req.resid -= r->req.sg->size;
r->req.aiocb = dma_bdrv_read(s->qdev.conf.bs, r->req.sg, r->sector,
scsi_dma_complete, r);
} else {
n = scsi_init_iovec(r, SCSI_DMA_BUF_SIZE);
bdrv_acct_start(s->qdev.conf.bs, &r->acct, n * BDRV_SECTOR_SIZE, BDRV_ACCT_READ);
r->req.aiocb = bdrv_aio_readv(s->qdev.conf.bs, r->sector, &r->qiov, n,
scsi_read_complete, r);
done:
if (!r->req.io_canceled) {
scsi_req_unref(&r->req); | 21,085 |
qemu | 498f21405a286f718a0767c791b7d2db19f4e5bd | 0 | static void coroutine_fn sd_write_done(SheepdogAIOCB *acb)
{
BDRVSheepdogState *s = acb->common.bs->opaque;
struct iovec iov;
AIOReq *aio_req;
uint32_t offset, data_len, mn, mx;
mn = s->min_dirty_data_idx;
mx = s->max_dirty_data_idx;
if (mn <= mx) {
/* we need to update the vdi object. */
offset = sizeof(s->inode) - sizeof(s->inode.data_vdi_id) +
mn * sizeof(s->inode.data_vdi_id[0]);
data_len = (mx - mn + 1) * sizeof(s->inode.data_vdi_id[0]);
s->min_dirty_data_idx = UINT32_MAX;
s->max_dirty_data_idx = 0;
iov.iov_base = &s->inode;
iov.iov_len = sizeof(s->inode);
aio_req = alloc_aio_req(s, acb, vid_to_vdi_oid(s->inode.vdi_id),
data_len, offset, 0, false, 0, offset);
QLIST_INSERT_HEAD(&s->inflight_aio_head, aio_req, aio_siblings);
add_aio_request(s, aio_req, &iov, 1, AIOCB_WRITE_UDATA);
acb->aio_done_func = sd_finish_aiocb;
acb->aiocb_type = AIOCB_WRITE_UDATA;
return;
}
sd_finish_aiocb(acb);
}
| 21,086 |
qemu | 85c97ca7a10b93216bc95052e9dabe3a4bb8736a | 0 | static int coroutine_fn bdrv_aligned_pwritev(BlockDriverState *bs,
BdrvTrackedRequest *req, int64_t offset, unsigned int bytes,
int64_t align, QEMUIOVector *qiov, int flags)
{
BlockDriver *drv = bs->drv;
bool waited;
int ret;
int64_t start_sector = offset >> BDRV_SECTOR_BITS;
int64_t end_sector = DIV_ROUND_UP(offset + bytes, BDRV_SECTOR_SIZE);
uint64_t bytes_remaining = bytes;
int max_transfer;
assert(is_power_of_2(align));
assert((offset & (align - 1)) == 0);
assert((bytes & (align - 1)) == 0);
assert(!qiov || bytes == qiov->size);
assert((bs->open_flags & BDRV_O_NO_IO) == 0);
assert(!(flags & ~BDRV_REQ_MASK));
max_transfer = QEMU_ALIGN_DOWN(MIN_NON_ZERO(bs->bl.max_transfer, INT_MAX),
align);
waited = wait_serialising_requests(req);
assert(!waited || !req->serialising);
assert(req->overlap_offset <= offset);
assert(offset + bytes <= req->overlap_offset + req->overlap_bytes);
ret = notifier_with_return_list_notify(&bs->before_write_notifiers, req);
if (!ret && bs->detect_zeroes != BLOCKDEV_DETECT_ZEROES_OPTIONS_OFF &&
!(flags & BDRV_REQ_ZERO_WRITE) && drv->bdrv_co_pwrite_zeroes &&
qemu_iovec_is_zero(qiov)) {
flags |= BDRV_REQ_ZERO_WRITE;
if (bs->detect_zeroes == BLOCKDEV_DETECT_ZEROES_OPTIONS_UNMAP) {
flags |= BDRV_REQ_MAY_UNMAP;
}
}
if (ret < 0) {
/* Do nothing, write notifier decided to fail this request */
} else if (flags & BDRV_REQ_ZERO_WRITE) {
bdrv_debug_event(bs, BLKDBG_PWRITEV_ZERO);
ret = bdrv_co_do_pwrite_zeroes(bs, offset, bytes, flags);
} else if (flags & BDRV_REQ_WRITE_COMPRESSED) {
ret = bdrv_driver_pwritev_compressed(bs, offset, bytes, qiov);
} else if (bytes <= max_transfer) {
bdrv_debug_event(bs, BLKDBG_PWRITEV);
ret = bdrv_driver_pwritev(bs, offset, bytes, qiov, flags);
} else {
bdrv_debug_event(bs, BLKDBG_PWRITEV);
while (bytes_remaining) {
int num = MIN(bytes_remaining, max_transfer);
QEMUIOVector local_qiov;
int local_flags = flags;
assert(num);
if (num < bytes_remaining && (flags & BDRV_REQ_FUA) &&
!(bs->supported_write_flags & BDRV_REQ_FUA)) {
/* If FUA is going to be emulated by flush, we only
* need to flush on the last iteration */
local_flags &= ~BDRV_REQ_FUA;
}
qemu_iovec_init(&local_qiov, qiov->niov);
qemu_iovec_concat(&local_qiov, qiov, bytes - bytes_remaining, num);
ret = bdrv_driver_pwritev(bs, offset + bytes - bytes_remaining,
num, &local_qiov, local_flags);
qemu_iovec_destroy(&local_qiov);
if (ret < 0) {
break;
}
bytes_remaining -= num;
}
}
bdrv_debug_event(bs, BLKDBG_PWRITEV_DONE);
++bs->write_gen;
bdrv_set_dirty(bs, start_sector, end_sector - start_sector);
if (bs->wr_highest_offset < offset + bytes) {
bs->wr_highest_offset = offset + bytes;
}
if (ret >= 0) {
bs->total_sectors = MAX(bs->total_sectors, end_sector);
ret = 0;
}
return ret;
}
| 21,087 |
qemu | 32a2003af9cb0cb11b3992fd3248cb89752c53e9 | 0 | static target_ulong get_sigframe(struct target_sigaction *ka,
CPUPPCState *env,
int frame_size)
{
target_ulong oldsp, newsp;
oldsp = env->gpr[1];
if ((ka->sa_flags & TARGET_SA_ONSTACK) &&
(sas_ss_flags(oldsp))) {
oldsp = (target_sigaltstack_used.ss_sp
+ target_sigaltstack_used.ss_size);
}
newsp = (oldsp - frame_size) & ~0xFUL;
return newsp;
}
| 21,088 |
qemu | 9eca6cc64392b4ad8bd8723e840f491fa36524ad | 0 | vsprintf_len(string, format, args)
char *string;
const char *format;
va_list args;
{
vsprintf(string, format, args);
return strlen(string);
}
| 21,089 |
qemu | 8d2f850a5ab7579a852f23b28273940a47dfd7ff | 0 | void HELPER(idte)(CPUS390XState *env, uint64_t r1, uint64_t r2, uint32_t m4)
{
CPUState *cs = CPU(s390_env_get_cpu(env));
const uintptr_t ra = GETPC();
uint64_t table, entry, raddr;
uint16_t entries, i, index = 0;
if (r2 & 0xff000) {
cpu_restore_state(cs, ra);
program_interrupt(env, PGM_SPECIFICATION, 4);
}
if (!(r2 & 0x800)) {
/* invalidation-and-clearing operation */
table = r1 & _ASCE_ORIGIN;
entries = (r2 & 0x7ff) + 1;
switch (r1 & _ASCE_TYPE_MASK) {
case _ASCE_TYPE_REGION1:
index = (r2 >> 53) & 0x7ff;
break;
case _ASCE_TYPE_REGION2:
index = (r2 >> 42) & 0x7ff;
break;
case _ASCE_TYPE_REGION3:
index = (r2 >> 31) & 0x7ff;
break;
case _ASCE_TYPE_SEGMENT:
index = (r2 >> 20) & 0x7ff;
break;
}
for (i = 0; i < entries; i++) {
/* addresses are not wrapped in 24/31bit mode but table index is */
raddr = table + ((index + i) & 0x7ff) * sizeof(entry);
entry = cpu_ldq_real_ra(env, raddr, ra);
if (!(entry & _REGION_ENTRY_INV)) {
/* we are allowed to not store if already invalid */
entry |= _REGION_ENTRY_INV;
cpu_stq_real_ra(env, raddr, entry, ra);
}
}
}
/* We simply flush the complete tlb, therefore we can ignore r3. */
if (m4 & 1) {
tlb_flush(cs);
} else {
tlb_flush_all_cpus_synced(cs);
}
}
| 21,090 |
qemu | 72cf2d4f0e181d0d3a3122e04129c58a95da713e | 0 | CharDriverState *qemu_chr_open_opts(QemuOpts *opts,
void (*init)(struct CharDriverState *s))
{
CharDriverState *chr;
int i;
if (qemu_opts_id(opts) == NULL) {
fprintf(stderr, "chardev: no id specified\n");
return NULL;
}
for (i = 0; i < ARRAY_SIZE(backend_table); i++) {
if (strcmp(backend_table[i].name, qemu_opt_get(opts, "backend")) == 0)
break;
}
if (i == ARRAY_SIZE(backend_table)) {
fprintf(stderr, "chardev: backend \"%s\" not found\n",
qemu_opt_get(opts, "backend"));
return NULL;
}
chr = backend_table[i].open(opts);
if (!chr) {
fprintf(stderr, "chardev: opening backend \"%s\" failed\n",
qemu_opt_get(opts, "backend"));
return NULL;
}
if (!chr->filename)
chr->filename = qemu_strdup(qemu_opt_get(opts, "backend"));
chr->init = init;
TAILQ_INSERT_TAIL(&chardevs, chr, next);
if (qemu_opt_get_bool(opts, "mux", 0)) {
CharDriverState *base = chr;
int len = strlen(qemu_opts_id(opts)) + 6;
base->label = qemu_malloc(len);
snprintf(base->label, len, "%s-base", qemu_opts_id(opts));
chr = qemu_chr_open_mux(base);
chr->filename = base->filename;
TAILQ_INSERT_TAIL(&chardevs, chr, next);
}
chr->label = qemu_strdup(qemu_opts_id(opts));
return chr;
}
| 21,093 |
FFmpeg | 486637af8ef29ec215e0e0b7ecd3b5470f0e04e5 | 0 | static int ac3_decode_frame(AVCodecContext * avctx, void *data, int *data_size, uint8_t *buf, int buf_size)
{
AC3DecodeContext *ctx = (AC3DecodeContext *)avctx->priv_data;
ac3_audio_block *ab = &ctx->audio_block;
int frame_start;
int i, j, k, l, value;
float tmp_block_first_half[128], tmp_block_second_half[128];
int16_t *out_samples = (int16_t *)data;
int nfchans;
//Synchronize the frame.
frame_start = ac3_synchronize(buf, buf_size);
if (frame_start == -1) {
av_log(avctx, AV_LOG_ERROR, "frame is not synchronized\n");
*data_size = 0;
return buf_size;
}
//Initialize the GetBitContext with the start of valid AC3 Frame.
init_get_bits(&(ctx->gb), buf + frame_start, (buf_size - frame_start) * 8);
//Parse the syncinfo.
//If 'fscod' or 'bsid' is not valid the decoder shall mute as per the standard.
if (!ac3_parse_sync_info(ctx)) {
av_log(avctx, AV_LOG_ERROR, "\n");
*data_size = 0;
return -1;
}
//Check for the errors.
/* if (ac3_error_check(ctx)) {
*data_size = 0;
return -1;
} */
//Parse the BSI.
//If 'bsid' is not valid decoder shall not decode the audio as per the standard.
if (ac3_parse_bsi(ctx)) {
av_log(avctx, AV_LOG_ERROR, "bsid is not valid\n");
*data_size = 0;
return -1;
}
for (i = 0; i < MAX_BLOCKS; i++)
memset(ctx->delay[i], 0, sizeof(ctx->delay[i]));
avctx->sample_rate = ctx->sync_info.sampling_rate;
avctx->bit_rate = ctx->sync_info.bit_rate;
if (avctx->channels == 0) {
//avctx->channels = ctx->bsi.nfchans + ((ctx->bsi.flags & AC3_BSI_LFEON) ? 1 : 0);
ctx->output = AC3_OUTPUT_UNMODIFIED;
}
else if ((ctx->bsi.nfchans + ((ctx->bsi.flags & AC3_BSI_LFEON) ? 1 : 0)) < avctx->channels) {
av_log(avctx, AV_LOG_INFO, "ac3_decoder: AC3 Source Channels Are Less Then Specified %d: Output to %d Channels\n",
avctx->channels, (ctx->bsi.nfchans + ((ctx->bsi.flags & AC3_BSI_LFEON) ? 1 : 0)));
//avctx->channels = ctx->bsi.nfchans + ((ctx->bsi.flags & AC3_BSI_LFEON) ? 1 : 0);
ctx->output = AC3_OUTPUT_UNMODIFIED;
}
else if (avctx->channels == 1) {
ctx->output = AC3_OUTPUT_MONO;
} else if (avctx->channels == 2) {
if (ctx->bsi.dsurmod == 0x02)
ctx->output = AC3_OUTPUT_DOLBY;
else
ctx->output = AC3_OUTPUT_STEREO;
}
av_log(avctx, AV_LOG_INFO, "channels = %d \t bit rate = %d \t sampling rate = %d \n", avctx->channels, avctx->sample_rate, avctx->bit_rate);
//Parse the Audio Blocks.
*data_size = 0;
for (i = 0; i < 6; i++) {
if (ac3_parse_audio_block(ctx, i)) {
av_log(avctx, AV_LOG_ERROR, "error parsing the audio block\n");
*data_size = 0;
return -1;
}
av_log(NULL, AV_LOG_INFO, "doing imdct\n");
if (ctx->bsi.flags & AC3_BSI_LFEON) {
ff_imdct_calc(&ctx->imdct_ctx_512, ctx->tmp_output, ab->transform_coeffs[0], ctx->tmp_imdct);
for (l = 0; l < 256; l++)
ab->block_output[0][l] = ctx->tmp_output[l] * window[l] + ctx->delay[0][l] * window[255 -l];
memcpy(ctx->delay[0], ctx->tmp_output + 256, sizeof(ctx->delay[0]));
}
for (j = 0; j < ctx->bsi.nfchans; j++) {
if (ctx->audio_block.blksw & (1 << j)) {
for (k = 0; k < 128; k++) {
tmp_block_first_half[k] = ab->transform_coeffs[j + 1][2 * k];
tmp_block_second_half[k] = ab->transform_coeffs[j + 1][2 * k + 1];
}
ff_imdct_calc(&ctx->imdct_ctx_256, ctx->tmp_output, tmp_block_first_half, ctx->tmp_imdct);
for (l = 0; l < 256; l++)
ab->block_output[j + 1][l] = ctx->tmp_output[l] * window[l] + ctx->delay[j + 1][l] * window[255 - l];
ff_imdct_calc(&ctx->imdct_ctx_256, ctx->delay[j + 1], tmp_block_second_half, ctx->tmp_imdct);
} else {
ff_imdct_calc(&ctx->imdct_ctx_512, ctx->tmp_output, ab->transform_coeffs[j + 1], ctx->tmp_imdct);
for (l = 0; l < 256; l++)
ab->block_output[j + 1][l] = ctx->tmp_output[l] * window[l] + ctx->delay[j + 1][l] * window[255 - l];
memcpy(ctx->delay[j + 1], ctx->tmp_output + 256, sizeof(ctx->delay[j + 1]));
}
}
if (ctx->bsi.flags & AC3_BSI_LFEON) {
for (l = 0; l < 256; l++) {
value = lrint(ab->block_output[0][l]);
if (value < -32768)
value = -32768;
else if (value > 32767)
value = 32767;
*(out_samples++) = value;
}
*data_size += 256 * sizeof(int16_t);
}
do_downmix(ctx);
if (ctx->output == AC3_OUTPUT_UNMODIFIED)
nfchans = ctx->bsi.nfchans;
else
nfchans = avctx->channels;
for (k = 0; k < nfchans; k++)
for (l = 0; l < 256; l++) {
value = lrint(ab->block_output[k + 1][l]);
if (value < -32768)
value = -32768;
else if (value > 32767)
value = 32767;
*(out_samples++) = value;
}
*data_size += nfchans * 256 * sizeof (int16_t);
}
return ctx->sync_info.framesize;
}
| 21,094 |
qemu | 10ee2aaa417d8d8978cdb2bbed55ebb152df5f6b | 0 | static void ac97_save (QEMUFile *f, void *opaque)
{
size_t i;
uint8_t active[LAST_INDEX];
AC97LinkState *s = opaque;
pci_device_save (s->pci_dev, f);
qemu_put_be32s (f, &s->glob_cnt);
qemu_put_be32s (f, &s->glob_sta);
qemu_put_be32s (f, &s->cas);
for (i = 0; i < ARRAY_SIZE (s->bm_regs); ++i) {
AC97BusMasterRegs *r = &s->bm_regs[i];
qemu_put_be32s (f, &r->bdbar);
qemu_put_8s (f, &r->civ);
qemu_put_8s (f, &r->lvi);
qemu_put_be16s (f, &r->sr);
qemu_put_be16s (f, &r->picb);
qemu_put_8s (f, &r->piv);
qemu_put_8s (f, &r->cr);
qemu_put_be32s (f, &r->bd_valid);
qemu_put_be32s (f, &r->bd.addr);
qemu_put_be32s (f, &r->bd.ctl_len);
}
qemu_put_buffer (f, s->mixer_data, sizeof (s->mixer_data));
active[PI_INDEX] = AUD_is_active_in (s->voice_pi) ? 1 : 0;
active[PO_INDEX] = AUD_is_active_out (s->voice_po) ? 1 : 0;
active[MC_INDEX] = AUD_is_active_in (s->voice_mc) ? 1 : 0;
qemu_put_buffer (f, active, sizeof (active));
}
| 21,095 |
qemu | b03b2e48cb322cb695ff7a6666b25712140ea3c9 | 0 | static void timer_save(QEMUFile *f, void *opaque)
{
if (cpu_ticks_enabled) {
hw_error("cannot save state if virtual timers are running");
}
qemu_put_be64(f, cpu_ticks_offset);
qemu_put_be64(f, ticks_per_sec);
qemu_put_be64(f, cpu_clock_offset);
}
| 21,096 |
qemu | eb700029c7836798046191d62d595363d92c84d4 | 0 | void eth_get_protocols(const uint8_t *headers,
uint32_t hdr_length,
bool *isip4, bool *isip6,
bool *isudp, bool *istcp)
{
int proto;
size_t l2hdr_len = eth_get_l2_hdr_length(headers);
assert(hdr_length >= eth_get_l2_hdr_length(headers));
*isip4 = *isip6 = *isudp = *istcp = false;
proto = eth_get_l3_proto(headers, l2hdr_len);
if (proto == ETH_P_IP) {
*isip4 = true;
struct ip_header *iphdr;
assert(hdr_length >=
eth_get_l2_hdr_length(headers) + sizeof(struct ip_header));
iphdr = PKT_GET_IP_HDR(headers);
if (IP_HEADER_VERSION(iphdr) == IP_HEADER_VERSION_4) {
if (iphdr->ip_p == IP_PROTO_TCP) {
*istcp = true;
} else if (iphdr->ip_p == IP_PROTO_UDP) {
*isudp = true;
}
}
} else if (proto == ETH_P_IPV6) {
uint8_t l4proto;
size_t full_ip6hdr_len;
struct iovec hdr_vec;
hdr_vec.iov_base = (void *) headers;
hdr_vec.iov_len = hdr_length;
*isip6 = true;
if (eth_parse_ipv6_hdr(&hdr_vec, 1, l2hdr_len,
&l4proto, &full_ip6hdr_len)) {
if (l4proto == IP_PROTO_TCP) {
*istcp = true;
} else if (l4proto == IP_PROTO_UDP) {
*isudp = true;
}
}
}
}
| 21,097 |
qemu | bd269ebc82fbaa5fe7ce5bc7c1770ac8acecd884 | 0 | VncInfo *qmp_query_vnc(Error **errp)
{
VncInfo *info = g_malloc0(sizeof(*info));
VncDisplay *vd = vnc_display_find(NULL);
SocketAddressLegacy *addr = NULL;
if (vd == NULL || !vd->nlsock) {
info->enabled = false;
} else {
info->enabled = true;
/* for compatibility with the original command */
info->has_clients = true;
info->clients = qmp_query_client_list(vd);
if (vd->lsock == NULL) {
return info;
}
addr = qio_channel_socket_get_local_address(vd->lsock[0], errp);
if (!addr) {
goto out_error;
}
switch (addr->type) {
case SOCKET_ADDRESS_LEGACY_KIND_INET:
info->host = g_strdup(addr->u.inet.data->host);
info->service = g_strdup(addr->u.inet.data->port);
if (addr->u.inet.data->ipv6) {
info->family = NETWORK_ADDRESS_FAMILY_IPV6;
} else {
info->family = NETWORK_ADDRESS_FAMILY_IPV4;
}
break;
case SOCKET_ADDRESS_LEGACY_KIND_UNIX:
info->host = g_strdup("");
info->service = g_strdup(addr->u.q_unix.data->path);
info->family = NETWORK_ADDRESS_FAMILY_UNIX;
break;
case SOCKET_ADDRESS_LEGACY_KIND_VSOCK:
case SOCKET_ADDRESS_LEGACY_KIND_FD:
error_setg(errp, "Unsupported socket address type %s",
SocketAddressLegacyKind_lookup[addr->type]);
goto out_error;
default:
abort();
}
info->has_host = true;
info->has_service = true;
info->has_family = true;
info->has_auth = true;
info->auth = g_strdup(vnc_auth_name(vd));
}
qapi_free_SocketAddressLegacy(addr);
return info;
out_error:
qapi_free_SocketAddressLegacy(addr);
qapi_free_VncInfo(info);
return NULL;
}
| 21,098 |
qemu | b018ddf633f77195e9ae859c6d940a334e68879f | 0 | static void unassigned_mem_write(void *opaque, hwaddr addr,
uint64_t val, unsigned size)
{
#ifdef DEBUG_UNASSIGNED
printf("Unassigned mem write " TARGET_FMT_plx " = 0x%"PRIx64"\n", addr, val);
#endif
#if defined(TARGET_ALPHA) || defined(TARGET_SPARC) || defined(TARGET_MICROBLAZE)
cpu_unassigned_access(cpu_single_env, addr, 1, 0, 0, size);
#endif
}
| 21,099 |
qemu | 2c62f08ddbf3fa80dc7202eb9a2ea60ae44e2cc5 | 0 | static int milkymist_vgafb_init(SysBusDevice *dev)
{
MilkymistVgafbState *s = FROM_SYSBUS(typeof(*s), dev);
memory_region_init_io(&s->regs_region, &vgafb_mmio_ops, s,
"milkymist-vgafb", R_MAX * 4);
sysbus_init_mmio(dev, &s->regs_region);
s->con = graphic_console_init(vgafb_update_display,
vgafb_invalidate_display,
NULL, NULL, s);
return 0;
}
| 21,101 |
qemu | 374f2981d1f10bc4307f250f24b2a7ddb9b14be0 | 0 | static void memory_init(void)
{
qemu_mutex_init(&flat_view_mutex);
}
| 21,103 |
qemu | eabb7b91b36b202b4dac2df2d59d698e3aff197a | 0 | static void tcg_reg_alloc_bb_end(TCGContext *s, TCGRegSet allocated_regs)
{
int i;
for (i = s->nb_globals; i < s->nb_temps; i++) {
TCGTemp *ts = &s->temps[i];
if (ts->temp_local) {
temp_save(s, ts, allocated_regs);
} else {
#ifdef USE_LIVENESS_ANALYSIS
/* ??? Liveness does not yet incorporate indirect bases. */
if (!ts->indirect_base) {
/* The liveness analysis already ensures that temps are dead.
Keep an assert for safety. */
assert(ts->val_type == TEMP_VAL_DEAD);
continue;
}
#endif
temp_dead(s, ts);
}
}
save_globals(s, allocated_regs);
}
| 21,104 |
FFmpeg | e5540b3fd30367ce3cc33b2f34a04b660dbc4b38 | 0 | static int bitplane_decoding(uint8_t* plane, int width, int height, VC9Context *v)
{
int imode, x, y, i, code, use_vertical_tile, tile_w, tile_h;
uint8_t invert, *planep = plane;
int stride= width;
invert = get_bits(&v->gb, 1);
imode = get_vlc2(&v->gb, vc9_imode_vlc.table, VC9_IMODE_VLC_BITS, 2);
av_log(v->avctx, AV_LOG_DEBUG, "Bitplane: imode=%i, invert=%i\n",
imode, invert);
switch (imode)
{
case IMODE_RAW:
for (y=0; y<height; y++)
{
for (x=0; x<width; x++)
planep[x] = (-get_bits(&v->gb, 1)); //-1=0xFF
planep += stride;
}
invert=0; //spec says ignore invert if raw
break;
case IMODE_DIFF2:
case IMODE_NORM2:
if ((height*width) & 1) *(++planep) = get_bits(&v->gb, 1);
for(i=0; i<(height*width)>>1; i++){
code = get_vlc2(&v->gb, vc9_norm2_vlc.table, VC9_NORM2_VLC_BITS, 2);
*(++planep) = code&1; //lsb => left
*(++planep) = code&2; //msb => right - this is a bitplane, so only !0 matters
//FIXME width->stride
}
break;
case IMODE_DIFF6:
case IMODE_NORM6:
use_vertical_tile= height%3==0 && width%3!=0;
tile_w= use_vertical_tile ? 2 : 3;
tile_h= use_vertical_tile ? 3 : 2;
for(y= height%tile_h; y<height; y+=tile_h){
for(x= width%tile_w; x<width; x+=tile_w){
code = get_vlc2(&v->gb, vc9_norm6_vlc.table, VC9_NORM6_VLC_BITS, 2);
//FIXME following is a pure guess and probably wrong
planep[x + 0*stride]= (code>>0)&1;
planep[x + 1 + 0*stride]= (code>>1)&1;
if(use_vertical_tile){
planep[x + 0 + 1*stride]= (code>>2)&1;
planep[x + 1 + 1*stride]= (code>>3)&1;
planep[x + 0 + 2*stride]= (code>>4)&1;
planep[x + 1 + 2*stride]= (code>>5)&1;
}else{
planep[x + 2 + 0*stride]= (code>>2)&1;
planep[x + 0 + 1*stride]= (code>>3)&1;
planep[x + 1 + 1*stride]= (code>>4)&1;
planep[x + 2 + 1*stride]= (code>>5)&1;
}
}
}
x= width % tile_w;
decode_colskip(plane , x, height , stride, v);
decode_rowskip(plane+x, width - x, height % tile_h, stride, v);
break;
case IMODE_ROWSKIP:
decode_rowskip(plane, width, height, stride, v);
break;
case IMODE_COLSKIP: //Teh ugly
decode_colskip(plane, width, height, stride, v);
break;
default: break;
}
/* Applying diff operator */
if (imode == IMODE_DIFF2 || imode == IMODE_DIFF6)
{
planep = plane;
planep[0] ^= invert;
for (x=1; x<width; x++)
planep[x] ^= planep[x-1];
for (y=1; y<height; y++)
{
planep += stride;
planep[0] ^= planep[-stride];
for (x=1; x<width; x++)
{
if (planep[x-1] != planep[x-stride]) planep[x] ^= invert;
else planep[x] ^= planep[x-1];
}
}
}
else if (invert)
{
planep = plane;
for (x=0; x<width*height; x++) planep[x] = !planep[x]; //FIXME stride
}
return 0;
}
| 21,105 |
qemu | 1ea879e5580f63414693655fcf0328559cdce138 | 0 | static int qpa_init_out (HWVoiceOut *hw, audsettings_t *as)
{
int error;
static pa_sample_spec ss;
audsettings_t obt_as = *as;
PAVoiceOut *pa = (PAVoiceOut *) hw;
ss.format = audfmt_to_pa (as->fmt, as->endianness);
ss.channels = as->nchannels;
ss.rate = as->freq;
obt_as.fmt = pa_to_audfmt (ss.format, &obt_as.endianness);
pa->s = pa_simple_new (
conf.server,
"qemu",
PA_STREAM_PLAYBACK,
conf.sink,
"pcm.playback",
&ss,
NULL, /* channel map */
NULL, /* buffering attributes */
&error
);
if (!pa->s) {
qpa_logerr (error, "pa_simple_new for playback failed\n");
goto fail1;
}
audio_pcm_init_info (&hw->info, &obt_as);
hw->samples = conf.samples;
pa->pcm_buf = audio_calloc (AUDIO_FUNC, hw->samples, 1 << hw->info.shift);
if (!pa->pcm_buf) {
dolog ("Could not allocate buffer (%d bytes)\n",
hw->samples << hw->info.shift);
goto fail2;
}
if (audio_pt_init (&pa->pt, qpa_thread_out, hw, AUDIO_CAP, AUDIO_FUNC)) {
goto fail3;
}
return 0;
fail3:
free (pa->pcm_buf);
pa->pcm_buf = NULL;
fail2:
pa_simple_free (pa->s);
pa->s = NULL;
fail1:
return -1;
}
| 21,107 |
qemu | 494a8ebe713055d3946183f4b395f85a18b43e9e | 0 | static int proxy_name_to_path(FsContext *ctx, V9fsPath *dir_path,
const char *name, V9fsPath *target)
{
if (dir_path) {
v9fs_string_sprintf((V9fsString *)target, "%s/%s",
dir_path->data, name);
} else {
v9fs_string_sprintf((V9fsString *)target, "%s", name);
}
/* Bump the size for including terminating NULL */
target->size++;
return 0;
}
| 21,108 |
qemu | 364031f17932814484657e5551ba12957d993d7e | 0 | static int v9fs_synth_mknod(FsContext *fs_ctx, V9fsPath *path,
const char *buf, FsCred *credp)
{
errno = EPERM;
return -1;
}
| 21,109 |
FFmpeg | 3beb9cbad35218ed1fb3473eeb3cfc97a931bff4 | 0 | static int roq_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
const AVFrame *frame, int *got_packet)
{
RoqContext *enc = avctx->priv_data;
int size, ret;
enc->avctx = avctx;
enc->frame_to_enc = frame;
if (frame->quality)
enc->lambda = frame->quality - 1;
else
enc->lambda = 2*ROQ_LAMBDA_SCALE;
/* 138 bits max per 8x8 block +
* 256 codebooks*(6 bytes 2x2 + 4 bytes 4x4) + 8 bytes frame header */
size = ((enc->width * enc->height / 64) * 138 + 7) / 8 + 256 * (6 + 4) + 8;
if ((ret = ff_alloc_packet(pkt, size)) < 0) {
av_log(avctx, AV_LOG_ERROR, "Error getting output packet with size %d.\n", size);
return ret;
}
enc->out_buf = pkt->data;
/* Check for I frame */
if (enc->framesSinceKeyframe == avctx->gop_size)
enc->framesSinceKeyframe = 0;
if (enc->first_frame) {
/* Alloc memory for the reconstruction data (we must know the stride
for that) */
if (ff_get_buffer(avctx, enc->current_frame, 0) ||
ff_get_buffer(avctx, enc->last_frame, 0)) {
av_log(avctx, AV_LOG_ERROR, " RoQ: get_buffer() failed\n");
return -1;
}
/* Before the first video frame, write a "video info" chunk */
roq_write_video_info_chunk(enc);
enc->first_frame = 0;
}
/* Encode the actual frame */
roq_encode_video(enc);
pkt->size = enc->out_buf - pkt->data;
if (enc->framesSinceKeyframe == 1)
pkt->flags |= AV_PKT_FLAG_KEY;
*got_packet = 1;
return 0;
}
| 21,111 |
FFmpeg | d3e18ad02795f9761b7e5a5c018dfef786046acf | 0 | static int swf_write_video(AVFormatContext *s,
AVCodecContext *enc, const uint8_t *buf, int size)
{
SWFContext *swf = s->priv_data;
ByteIOContext *pb = &s->pb;
int c = 0;
int outSize = 0;
int outSamples = 0;
/* Flash Player limit */
if ( swf->swf_frame_number >= 16000 ) {
return 0;
}
/* Store video data in queue */
if ( enc->codec_type == CODEC_TYPE_VIDEO ) {
SWFFrame *new_frame = av_malloc(sizeof(SWFFrame));
new_frame->prev = 0;
new_frame->next = swf->frame_head;
new_frame->data = av_malloc(size);
new_frame->size = size;
memcpy(new_frame->data,buf,size);
swf->frame_head = new_frame;
if ( swf->frame_tail == 0 ) {
swf->frame_tail = new_frame;
}
}
if ( swf->audio_type ) {
/* Prescan audio data for this swf frame */
retry_swf_audio_packet:
if ( ( swf->audio_size-outSize ) >= 4 ) {
int mp3FrameSize = 0;
int mp3SampleRate = 0;
int mp3IsMono = 0;
int mp3SamplesPerFrame = 0;
/* copy out mp3 header from ring buffer */
uint8_t header[4];
for (c=0; c<4; c++) {
header[c] = swf->audio_fifo[(swf->audio_in_pos+outSize+c) % AUDIO_FIFO_SIZE];
}
if ( swf_mp3_info(header,&mp3FrameSize,&mp3SamplesPerFrame,&mp3SampleRate,&mp3IsMono) ) {
if ( ( swf->audio_size-outSize ) >= mp3FrameSize ) {
outSize += mp3FrameSize;
outSamples += mp3SamplesPerFrame;
if ( ( swf->sound_samples + outSamples + swf->samples_per_frame ) < swf->video_samples ) {
goto retry_swf_audio_packet;
}
}
} else {
/* invalid mp3 data, skip forward
we need to do this since the Flash Player
does not like custom headers */
swf->audio_in_pos ++;
swf->audio_size --;
swf->audio_in_pos %= AUDIO_FIFO_SIZE;
goto retry_swf_audio_packet;
}
}
/* audio stream is behind video stream, bail */
if ( ( swf->sound_samples + outSamples + swf->samples_per_frame ) < swf->video_samples ) {
return 0;
}
/* compute audio/video drift */
if ( enc->codec_type == CODEC_TYPE_VIDEO ) {
swf->skip_samples = (int)( ( (double)(swf->swf_frame_number) * (double)enc->frame_rate_base * 44100. ) / (double)(enc->frame_rate) );
swf->skip_samples -= swf->video_samples;
}
}
/* check if we need to insert a padding frame */
if (swf->skip_samples <= ( swf->samples_per_frame / 2 ) ) {
/* no, it is time for a real frame, check if one is available */
if ( swf->frame_tail ) {
if ( swf->video_type == CODEC_ID_FLV1 ) {
if ( swf->video_frame_number == 0 ) {
/* create a new video object */
put_swf_tag(s, TAG_VIDEOSTREAM);
put_le16(pb, VIDEO_ID);
put_le16(pb, 15000 ); /* hard flash player limit */
put_le16(pb, enc->width);
put_le16(pb, enc->height);
put_byte(pb, 0);
put_byte(pb, SWF_VIDEO_CODEC_FLV1);
put_swf_end_tag(s);
/* place the video object for the first time */
put_swf_tag(s, TAG_PLACEOBJECT2);
put_byte(pb, 0x36);
put_le16(pb, 1);
put_le16(pb, VIDEO_ID);
put_swf_matrix(pb, 1 << FRAC_BITS, 0, 0, 1 << FRAC_BITS, 0, 0);
put_le16(pb, swf->video_frame_number );
put_byte(pb, 'v');
put_byte(pb, 'i');
put_byte(pb, 'd');
put_byte(pb, 'e');
put_byte(pb, 'o');
put_byte(pb, 0x00);
put_swf_end_tag(s);
} else {
/* mark the character for update */
put_swf_tag(s, TAG_PLACEOBJECT2);
put_byte(pb, 0x11);
put_le16(pb, 1);
put_le16(pb, swf->video_frame_number );
put_swf_end_tag(s);
}
// write out pending frames
for (; ( enc->frame_number - swf->video_frame_number ) > 0;) {
/* set video frame data */
put_swf_tag(s, TAG_VIDEOFRAME | TAG_LONG);
put_le16(pb, VIDEO_ID);
put_le16(pb, swf->video_frame_number++ );
put_buffer(pb, swf->frame_tail->data, swf->frame_tail->size);
put_swf_end_tag(s);
}
} else if ( swf->video_type == CODEC_ID_MJPEG ) {
if (swf->swf_frame_number > 0) {
/* remove the shape */
put_swf_tag(s, TAG_REMOVEOBJECT);
put_le16(pb, SHAPE_ID); /* shape ID */
put_le16(pb, 1); /* depth */
put_swf_end_tag(s);
/* free the bitmap */
put_swf_tag(s, TAG_FREECHARACTER);
put_le16(pb, BITMAP_ID);
put_swf_end_tag(s);
}
put_swf_tag(s, TAG_JPEG2 | TAG_LONG);
put_le16(pb, BITMAP_ID); /* ID of the image */
/* a dummy jpeg header seems to be required */
put_byte(pb, 0xff);
put_byte(pb, 0xd8);
put_byte(pb, 0xff);
put_byte(pb, 0xd9);
/* write the jpeg image */
put_buffer(pb, swf->frame_tail->data, swf->frame_tail->size);
put_swf_end_tag(s);
/* draw the shape */
put_swf_tag(s, TAG_PLACEOBJECT);
put_le16(pb, SHAPE_ID); /* shape ID */
put_le16(pb, 1); /* depth */
put_swf_matrix(pb, 20 << FRAC_BITS, 0, 0, 20 << FRAC_BITS, 0, 0);
put_swf_end_tag(s);
} else {
/* invalid codec */
}
av_free(swf->frame_tail->data);
swf->frame_tail = swf->frame_tail->prev;
if ( swf->frame_tail ) {
if ( swf->frame_tail->next ) {
av_free(swf->frame_tail->next);
}
swf->frame_tail->next = 0;
} else {
swf->frame_head = 0;
}
swf->swf_frame_number ++;
}
}
swf->video_samples += swf->samples_per_frame;
/* streaming sound always should be placed just before showframe tags */
if ( outSize > 0 ) {
put_swf_tag(s, TAG_STREAMBLOCK | TAG_LONG);
put_le16(pb, outSamples);
put_le16(pb, 0);
for (c=0; c<outSize; c++) {
put_byte(pb,swf->audio_fifo[(swf->audio_in_pos+c) % AUDIO_FIFO_SIZE]);
}
put_swf_end_tag(s);
/* update FIFO */
swf->sound_samples += outSamples;
swf->audio_in_pos += outSize;
swf->audio_size -= outSize;
swf->audio_in_pos %= AUDIO_FIFO_SIZE;
}
/* output the frame */
put_swf_tag(s, TAG_SHOWFRAME);
put_swf_end_tag(s);
put_flush_packet(&s->pb);
return 0;
}
| 21,112 |
FFmpeg | cc13bc8c4f0f4afa30d0b94c3f3a369ccd2aaf0b | 0 | static int parse_nal_units(AVCodecParserContext *s, const uint8_t *buf,
int buf_size, AVCodecContext *avctx)
{
HEVCParserContext *ctx = s->priv_data;
int ret, i;
ret = ff_h2645_packet_split(&ctx->pkt, buf, buf_size, avctx, 0, 0,
AV_CODEC_ID_HEVC);
if (ret < 0)
return ret;
for (i = 0; i < ctx->pkt.nb_nals; i++) {
H2645NAL *nal = &ctx->pkt.nals[i];
/* ignore everything except parameter sets and VCL NALUs */
switch (nal->type) {
case NAL_VPS: ff_hevc_decode_nal_vps(&nal->gb, avctx, &ctx->ps); break;
case NAL_SPS: ff_hevc_decode_nal_sps(&nal->gb, avctx, &ctx->ps, 1); break;
case NAL_PPS: ff_hevc_decode_nal_pps(&nal->gb, avctx, &ctx->ps); break;
case NAL_TRAIL_R:
case NAL_TRAIL_N:
case NAL_TSA_N:
case NAL_TSA_R:
case NAL_STSA_N:
case NAL_STSA_R:
case NAL_BLA_W_LP:
case NAL_BLA_W_RADL:
case NAL_BLA_N_LP:
case NAL_IDR_W_RADL:
case NAL_IDR_N_LP:
case NAL_CRA_NUT:
case NAL_RADL_N:
case NAL_RADL_R:
case NAL_RASL_N:
case NAL_RASL_R:
if (buf == avctx->extradata) {
av_log(avctx, AV_LOG_ERROR, "Invalid NAL unit: %d\n", nal->type);
return AVERROR_INVALIDDATA;
}
hevc_parse_slice_header(s, nal, avctx);
break;
}
}
return 0;
}
| 21,113 |
FFmpeg | 9f6431c8f6c4e92e3f6ea2f3bc8f58677a7e7ce3 | 0 | static int get_channel_idx(char **map, int *ch, char delim, int max_ch)
{
char *next = split(*map, delim);
int len;
int n = 0;
if (!next && delim == '-')
return AVERROR(EINVAL);
if (!*map)
return AVERROR(EINVAL);
len = strlen(*map);
sscanf(*map, "%d%n", ch, &n);
if (n != len)
return AVERROR(EINVAL);
if (*ch < 0 || *ch > max_ch)
return AVERROR(EINVAL);
*map = next;
return 0;
}
| 21,114 |
FFmpeg | 4a71da0f3ab7f5542decd11c81994f849d5b2c78 | 1 | static int decode_residual_block(AVSContext *h, GetBitContext *gb,
const struct dec_2dvlc *r, int esc_golomb_order,
int qp, uint8_t *dst, int stride) {
int i, level_code, esc_code, level, run, mask;
DCTELEM level_buf[65];
uint8_t run_buf[65];
DCTELEM *block = h->block;
for(i=0;i<65;i++) {
level_code = get_ue_code(gb,r->golomb_order);
if(level_code >= ESCAPE_CODE) {
run = ((level_code - ESCAPE_CODE) >> 1) + 1;
esc_code = get_ue_code(gb,esc_golomb_order);
level = esc_code + (run > r->max_run ? 1 : r->level_add[run]);
while(level > r->inc_limit)
r++;
mask = -(level_code & 1);
level = (level^mask) - mask;
} else {
level = r->rltab[level_code][0];
if(!level) //end of block signal
break;
run = r->rltab[level_code][1];
r += r->rltab[level_code][2];
}
level_buf[i] = level;
run_buf[i] = run;
}
if(dequant(h,level_buf, run_buf, block, ff_cavs_dequant_mul[qp],
ff_cavs_dequant_shift[qp], i))
return -1;
h->cdsp.cavs_idct8_add(dst,block,stride);
h->s.dsp.clear_block(block);
return 0;
}
| 21,115 |
qemu | 36cccb8c575b74a691f685911fbb0301af19f924 | 1 | static void device_set_hotplugged(Object *obj, bool value, Error **err)
{
DeviceState *dev = DEVICE(obj);
dev->hotplugged = value;
}
| 21,116 |
qemu | d8f94e1bb275ab6a14a15220fd6afd0d04324aeb | 1 | static void mips_fulong2e_init(MachineState *machine)
{
ram_addr_t ram_size = machine->ram_size;
const char *cpu_model = machine->cpu_model;
const char *kernel_filename = machine->kernel_filename;
const char *kernel_cmdline = machine->kernel_cmdline;
const char *initrd_filename = machine->initrd_filename;
char *filename;
MemoryRegion *address_space_mem = get_system_memory();
MemoryRegion *ram = g_new(MemoryRegion, 1);
MemoryRegion *bios = g_new(MemoryRegion, 1);
long bios_size;
int64_t kernel_entry;
qemu_irq *i8259;
qemu_irq *cpu_exit_irq;
PCIBus *pci_bus;
ISABus *isa_bus;
I2CBus *smbus;
int i;
DriveInfo *hd[MAX_IDE_BUS * MAX_IDE_DEVS];
MIPSCPU *cpu;
CPUMIPSState *env;
/* init CPUs */
if (cpu_model == NULL) {
cpu_model = "Loongson-2E";
}
cpu = cpu_mips_init(cpu_model);
if (cpu == NULL) {
fprintf(stderr, "Unable to find CPU definition\n");
exit(1);
}
env = &cpu->env;
qemu_register_reset(main_cpu_reset, cpu);
/* fulong 2e has 256M ram. */
ram_size = 256 * 1024 * 1024;
/* fulong 2e has a 1M flash.Winbond W39L040AP70Z */
bios_size = 1024 * 1024;
/* allocate RAM */
memory_region_init_ram(ram, NULL, "fulong2e.ram", ram_size, &error_abort);
vmstate_register_ram_global(ram);
memory_region_init_ram(bios, NULL, "fulong2e.bios", bios_size,
&error_abort);
vmstate_register_ram_global(bios);
memory_region_set_readonly(bios, true);
memory_region_add_subregion(address_space_mem, 0, ram);
memory_region_add_subregion(address_space_mem, 0x1fc00000LL, bios);
/* We do not support flash operation, just loading pmon.bin as raw BIOS.
* Please use -L to set the BIOS path and -bios to set bios name. */
if (kernel_filename) {
loaderparams.ram_size = ram_size;
loaderparams.kernel_filename = kernel_filename;
loaderparams.kernel_cmdline = kernel_cmdline;
loaderparams.initrd_filename = initrd_filename;
kernel_entry = load_kernel (env);
write_bootloader(env, memory_region_get_ram_ptr(bios), kernel_entry);
} else {
if (bios_name == NULL) {
bios_name = FULONG_BIOSNAME;
}
filename = qemu_find_file(QEMU_FILE_TYPE_BIOS, bios_name);
if (filename) {
bios_size = load_image_targphys(filename, 0x1fc00000LL,
BIOS_SIZE);
g_free(filename);
} else {
bios_size = -1;
}
if ((bios_size < 0 || bios_size > BIOS_SIZE) &&
!kernel_filename && !qtest_enabled()) {
error_report("Could not load MIPS bios '%s'", bios_name);
exit(1);
}
}
/* Init internal devices */
cpu_mips_irq_init_cpu(env);
cpu_mips_clock_init(env);
/* North bridge, Bonito --> IP2 */
pci_bus = bonito_init((qemu_irq *)&(env->irq[2]));
/* South bridge */
ide_drive_get(hd, MAX_IDE_BUS);
isa_bus = vt82c686b_init(pci_bus, PCI_DEVFN(FULONG2E_VIA_SLOT, 0));
if (!isa_bus) {
fprintf(stderr, "vt82c686b_init error\n");
exit(1);
}
/* Interrupt controller */
/* The 8259 -> IP5 */
i8259 = i8259_init(isa_bus, env->irq[5]);
isa_bus_irqs(isa_bus, i8259);
vt82c686b_ide_init(pci_bus, hd, PCI_DEVFN(FULONG2E_VIA_SLOT, 1));
pci_create_simple(pci_bus, PCI_DEVFN(FULONG2E_VIA_SLOT, 2),
"vt82c686b-usb-uhci");
pci_create_simple(pci_bus, PCI_DEVFN(FULONG2E_VIA_SLOT, 3),
"vt82c686b-usb-uhci");
smbus = vt82c686b_pm_init(pci_bus, PCI_DEVFN(FULONG2E_VIA_SLOT, 4),
0xeee1, NULL);
/* TODO: Populate SPD eeprom data. */
smbus_eeprom_init(smbus, 1, eeprom_spd, sizeof(eeprom_spd));
/* init other devices */
pit = pit_init(isa_bus, 0x40, 0, NULL);
cpu_exit_irq = qemu_allocate_irqs(cpu_request_exit, NULL, 1);
DMA_init(0, cpu_exit_irq);
/* Super I/O */
isa_create_simple(isa_bus, "i8042");
rtc_init(isa_bus, 2000, NULL);
for(i = 0; i < MAX_SERIAL_PORTS; i++) {
if (serial_hds[i]) {
serial_isa_init(isa_bus, i, serial_hds[i]);
}
}
if (parallel_hds[0]) {
parallel_init(isa_bus, 0, parallel_hds[0]);
}
/* Sound card */
audio_init(pci_bus);
/* Network card */
network_init(pci_bus);
}
| 21,117 |