
ffmpeg + SDL player (latest code)


#include "libavformat/avformat.h"

#include "libswscale/swscale.h"

#include "libavcodec/avcodec.h"

#include <SDL.h>

#include <SDL_thread.h>

#include <stdio.h>

#include <stdlib.h>

#include <string.h>

#include <math.h>

#ifdef main

#undef main

#endif

#define SDL_AUDIO_BUFFER_SIZE 1024

#define MAX_AUDIOQ_SIZE (5 * 16 * 1024)

#define MAX_VIDEOQ_SIZE (5 * 256 * 1024)

#define FF_ALLOC_EVENT (SDL_USEREVENT)

#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)

#define FF_QUIT_EVENT (SDL_USEREVENT + 2)

#define VIDEO_PICTURE_QUEUE_SIZE 1


static int sws_flags = SWS_BICUBIC;

typedef struct PacketQueue

{

AVPacketList *first_pkt, *last_pkt;

int nb_packets;

int size;

SDL_mutex *mutex;

SDL_cond *cond;

} PacketQueue;

typedef struct VideoPicture

{

SDL_Overlay *bmp;

int width, height;

int allocated;

} VideoPicture;

typedef struct VideoState

{

AVFormatContext *pFormatCtx;

int videoStream, audioStream;

AVStream *audio_st;

PacketQueue audioq;

uint8_t audio_buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];

unsigned int audio_buf_size;

unsigned int audio_buf_index;

AVPacket audio_pkt;

uint8_t *audio_pkt_data;

int audio_pkt_size;

AVStream *video_st;

PacketQueue videoq;

VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];

int pictq_size, pictq_rindex, pictq_windex;

SDL_mutex *pictq_mutex;

SDL_cond *pictq_cond;

SDL_Thread *parse_tid;

SDL_Thread *video_tid;

char filename[1024];

int quit;

} VideoState;

SDL_Surface *screen;

VideoState *global_video_state;

void packet_queue_init(PacketQueue *q)

{

memset(q, 0, sizeof(PacketQueue));

q->mutex = SDL_CreateMutex();

q->cond = SDL_CreateCond();

}

int packet_queue_put(PacketQueue *q, AVPacket *pkt)

{

AVPacketList *pkt1;

if(av_dup_packet(pkt) < 0)

{

return -1;

}

pkt1 = (AVPacketList *)av_malloc(sizeof(AVPacketList));

if (!pkt1)

return -1;

pkt1->pkt = *pkt;

pkt1->next = NULL;

SDL_LockMutex(q->mutex);

if (!q->last_pkt)

q->first_pkt = pkt1;

else

q->last_pkt->next = pkt1;

q->last_pkt = pkt1;

q->nb_packets++;

q->size += pkt1->pkt.size;

SDL_CondSignal(q->cond);

SDL_UnlockMutex(q->mutex);

return 0;

}

static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block) {

AVPacketList *pkt1;

int ret;

SDL_LockMutex(q->mutex);

for(;;)

{

if(global_video_state->quit)

{

ret = -1;

break;

}

pkt1 = q->first_pkt;

if (pkt1)

{

q->first_pkt = pkt1->next;

if (!q->first_pkt)

q->last_pkt = NULL;

q->nb_packets--;

q->size -= pkt1->pkt.size;

*pkt = pkt1->pkt;

av_free(pkt1);

ret = 1;

break;

}

else if (!block)

{

ret = 0;

break;

}

else

{

SDL_CondWait(q->cond, q->mutex);

}

}

SDL_UnlockMutex(q->mutex);

return ret;

}

int audio_decode_frame(VideoState *is, uint8_t *audio_buf, int buf_size) {

int len1, data_size;

AVPacket *pkt = &is->audio_pkt;

for(;;)

{

while(is->audio_pkt_size > 0)

{

data_size = buf_size;

len1 = avcodec_decode_audio2(is->audio_st->codec,

(int16_t *)audio_buf, &data_size,

is->audio_pkt_data, is->audio_pkt_size);

if(len1 < 0)

{

is->audio_pkt_size = 0;

break;

}

is->audio_pkt_data += len1;

is->audio_pkt_size -= len1;

if(data_size <= 0)

{

continue;

}

return data_size;

}

if(pkt->data)

av_free_packet(pkt);

if(is->quit)

{

return -1;

}

if(packet_queue_get(&is->audioq, pkt, 1) < 0)

{

return -1;

}

is->audio_pkt_data = pkt->data;

is->audio_pkt_size = pkt->size;

}

}

void audio_callback(void *userdata, Uint8 *stream, int len)

{

VideoState *is = (VideoState *)userdata;

int len1, audio_size;

while(len > 0)

{

if(is->audio_buf_index >= is->audio_buf_size)

{

audio_size = audio_decode_frame(is, is->audio_buf, sizeof(is->audio_buf));

if(audio_size < 0)

{

is->audio_buf_size = 1024;

memset(is->audio_buf, 0, is->audio_buf_size);

}

else

{

is->audio_buf_size = audio_size;

}

is->audio_buf_index = 0;

}

len1 = is->audio_buf_size - is->audio_buf_index;

if(len1 > len)

len1 = len;

memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);

len -= len1;

stream += len1;

is->audio_buf_index += len1;

}

}

static Uint32 sdl_refresh_timer_cb(Uint32 interval, void *opaque)

{

//printf("sdl_refresh_timer_cb called:interval--%d\n",interval);

SDL_Event event;

event.type = FF_REFRESH_EVENT;

event.user.data1 = opaque;

SDL_PushEvent(&event); // dispatch FF_REFRESH_EVENT to the main event loop

return 0;

}

static void schedule_refresh(VideoState *is, int delay)

{

//printf("schedule_refresh called:delay--%d\n",delay);

SDL_AddTimer(delay, sdl_refresh_timer_cb, is); // fires sdl_refresh_timer_cb exactly once, delay ms from now; is is passed through as the callback's opaque argument

}

void video_display(VideoState *is)

{

//printf("video_display called\n");

SDL_Rect rect;

VideoPicture *vp;

AVPicture pict;

float aspect_ratio;

int w, h, x, y;

int i;

vp = &is->pictq[is->pictq_rindex];

if(vp->bmp)

{

if(is->video_st->codec->sample_aspect_ratio.num == 0)

{

aspect_ratio = 0;

}

else

{

aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio) *

is->video_st->codec->width / is->video_st->codec->height;

}

if(aspect_ratio <= 0.0) // aspect_ratio is the display width/height ratio

{

aspect_ratio = (float)is->video_st->codec->width /

(float)is->video_st->codec->height;

}

h = screen->h;

w = ((int)(h * aspect_ratio)) & -3;

if(w > screen->w)

{

w = screen->w;

h = ((int)(w / aspect_ratio)) & -3;

}

x = (screen->w - w) / 2;

y = (screen->h - h) / 2;

rect.x = x;

rect.y = y;

rect.w = w;

rect.h = h;

SDL_DisplayYUVOverlay(vp->bmp, &rect);

}

}

void video_refresh_timer(void *userdata)

{

VideoState *is = (VideoState *)userdata;

VideoPicture *vp;

if(is->video_st)

{

if(is->pictq_size == 0)

{

schedule_refresh(is, 1);

}

else

{

vp = &is->pictq[is->pictq_rindex];

schedule_refresh(is, 80);

video_display(is);

if(++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)

{

is->pictq_rindex = 0;

}

SDL_LockMutex(is->pictq_mutex);

is->pictq_size--;

SDL_CondSignal(is->pictq_cond);

SDL_UnlockMutex(is->pictq_mutex);

}

}

else

{

schedule_refresh(is, 100);

}

}

void alloc_picture(void *userdata)

{

VideoState *is = (VideoState *)userdata;

VideoPicture *vp;

vp = &is->pictq[is->pictq_windex];

if(vp->bmp)

{

// we already have one make another, bigger/smaller

SDL_FreeYUVOverlay(vp->bmp);

}

// Allocate a place to put our YUV image on that screen

vp->bmp = SDL_CreateYUVOverlay(is->video_st->codec->width,

is->video_st->codec->height,

SDL_YV12_OVERLAY,

screen);

vp->width = is->video_st->codec->width;

vp->height = is->video_st->codec->height;

SDL_LockMutex(is->pictq_mutex);

vp->allocated = 1;

SDL_CondSignal(is->pictq_cond);

SDL_UnlockMutex(is->pictq_mutex);

}

int queue_picture(VideoState *is, AVFrame *pFrame)

{

//printf("queue_picture called\n");

VideoPicture *vp;

int dst_pix_fmt;

AVPicture pict;

static struct SwsContext *img_convert_ctx;

if (img_convert_ctx == NULL)

{

img_convert_ctx = sws_getContext(is->video_st->codec->width, is->video_st->codec->height,

is->video_st->codec->pix_fmt,

is->video_st->codec->width,

is->video_st->codec->height,

PIX_FMT_YUV420P,

sws_flags, NULL, NULL, NULL);

if (img_convert_ctx == NULL)

{

fprintf(stderr, "Cannot initialize the conversion context\n");

exit(1);

}

}

SDL_LockMutex(is->pictq_mutex);

while(is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&

!is->quit)

{

SDL_CondWait(is->pictq_cond, is->pictq_mutex);

}

SDL_UnlockMutex(is->pictq_mutex);

if(is->quit)

return -1;

// windex is set to 0 initially

vp = &is->pictq[is->pictq_windex];

if(!vp->bmp ||

vp->width != is->video_st->codec->width ||

vp->height != is->video_st->codec->height)

{

SDL_Event event;

vp->allocated = 0;

event.type = FF_ALLOC_EVENT;

event.user.data1 = is;

SDL_PushEvent(&event);

SDL_LockMutex(is->pictq_mutex);

while(!vp->allocated && !is->quit)

{

SDL_CondWait(is->pictq_cond, is->pictq_mutex); // releases the mutex while waiting and re-acquires it when signalled; paired with the SDL_CondSignal in alloc_picture

}

SDL_UnlockMutex(is->pictq_mutex);

if(is->quit)

{

return -1;

}

}

if(vp->bmp)

{

SDL_LockYUVOverlay(vp->bmp);

dst_pix_fmt = PIX_FMT_YUV420P;

pict.data[0] = vp->bmp->pixels[0];

pict.data[1] = vp->bmp->pixels[2];

pict.data[2] = vp->bmp->pixels[1];

pict.linesize[0] = vp->bmp->pitches[0];

pict.linesize[1] = vp->bmp->pitches[2];

pict.linesize[2] = vp->bmp->pitches[1];

// Convert the image into YUV format that SDL uses

sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize,

0, is->video_st->codec->height, pict.data, pict.linesize);

SDL_UnlockYUVOverlay(vp->bmp);

if(++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)

{

is->pictq_windex = 0;

}

SDL_LockMutex(is->pictq_mutex);

is->pictq_size++;

SDL_UnlockMutex(is->pictq_mutex);

}

return 0;

}

int video_thread(void *arg)

{

//printf("video_thread called");

VideoState *is = (VideoState *)arg;

AVPacket pkt1, *packet = &pkt1;

int len1, frameFinished;

AVFrame *pFrame;

pFrame = avcodec_alloc_frame();

for(;;)

{

if(packet_queue_get(&is->videoq, packet, 1) < 0)

{

// means we quit getting packets

break;

}

// Decode video frame

len1 = avcodec_decode_video(is->video_st->codec, pFrame, &frameFinished, packet->data, packet->size);

// Did we get a video frame?

if(frameFinished)

{

if(queue_picture(is, pFrame) < 0)

{

break;

}

}

av_free_packet(packet);

}

av_free(pFrame);

return 0;

}

int stream_component_open(VideoState *is, int stream_index)

{

AVFormatContext *pFormatCtx = is->pFormatCtx;

AVCodecContext *codecCtx;

AVCodec *codec;

SDL_AudioSpec wanted_spec, spec;

if(stream_index < 0 || stream_index >= pFormatCtx->nb_streams)

{

return -1;

}

// Get a pointer to the codec context for the video stream

codecCtx = pFormatCtx->streams[stream_index]->codec;

if(codecCtx->codec_type == CODEC_TYPE_AUDIO)

{

// Set audio settings from codec info

wanted_spec.freq = codecCtx->sample_rate;

wanted_spec.format = AUDIO_S16SYS;

wanted_spec.channels = codecCtx->channels;

wanted_spec.silence = 0;

wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;

wanted_spec.callback = audio_callback;

wanted_spec.userdata = is;

if(SDL_OpenAudio(&wanted_spec, &spec) < 0)

{

fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());

return -1;

}

}

codec = avcodec_find_decoder(codecCtx->codec_id);

if(!codec || (avcodec_open(codecCtx, codec) < 0))

{

fprintf(stderr, "Unsupported codec!\n");

return -1;

}

switch(codecCtx->codec_type)

{

case CODEC_TYPE_AUDIO:

is->audioStream = stream_index;

is->audio_st = pFormatCtx->streams[stream_index];

is->audio_buf_size = 0;

is->audio_buf_index = 0;

memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));

packet_queue_init(&is->audioq);

SDL_PauseAudio(0);

break;

case CODEC_TYPE_VIDEO:

is->videoStream = stream_index;

is->video_st = pFormatCtx->streams[stream_index];

packet_queue_init(&is->videoq);

is->video_tid = SDL_CreateThread(video_thread, is);

break;

default:

break;

}

return 0;

}

int decode_interrupt_cb(void)

{

return (global_video_state && global_video_state->quit);

}

int decode_thread(void *arg)

{

VideoState *is = (VideoState *)arg;

AVFormatContext *pFormatCtx;

AVPacket pkt1, *packet = &pkt1;

int video_index = -1;

int audio_index = -1;

int i;

is->videoStream=-1;

is->audioStream=-1;

global_video_state = is;

// will interrupt blocking functions if we quit!

url_set_interrupt_cb(decode_interrupt_cb);

// Open video file

if(av_open_input_file(&pFormatCtx, is->filename, NULL, 0, NULL)!=0) return -1; // Couldn't open file

is->pFormatCtx = pFormatCtx;

// Retrieve stream information

if(av_find_stream_info(pFormatCtx)<0)

return -1; // Couldn't find stream information

// Dump information about file onto standard error

dump_format(pFormatCtx, 0, is->filename, 0);

// Find the first video stream

for(i=0; i<pFormatCtx->nb_streams; i++)

{

if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO && video_index < 0)

{

video_index=i;

}

if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_AUDIO && audio_index < 0)

{

audio_index=i;

}

}

if(audio_index >= 0)

{

stream_component_open(is, audio_index);

}

if(video_index >= 0)

{

stream_component_open(is, video_index);

}

if(is->videoStream < 0 || is->audioStream < 0)

{

fprintf(stderr, "%s: could not open codecs\n", is->filename);

goto fail;

}

for(;;)

{

if(is->quit)

{

break;

}

// seek stuff goes here

if(is->audioq.size > MAX_AUDIOQ_SIZE ||

is->videoq.size > MAX_VIDEOQ_SIZE) {

SDL_Delay(10);

continue;

}

if(av_read_frame(is->pFormatCtx, packet) < 0)

{

if(url_ferror(pFormatCtx->pb) == 0)

{

SDL_Delay(100);

continue;

}

else

{

break;

}

}

// Is this a packet from the video stream?

if(packet->stream_index == is->videoStream)

{

packet_queue_put(&is->videoq, packet);

}

else if(packet->stream_index == is->audioStream) {

packet_queue_put(&is->audioq, packet);

}

else

{

av_free_packet(packet);

}

}

while(!is->quit)

{

SDL_Delay(100);

}

fail:

{

SDL_Event event;

event.type = FF_QUIT_EVENT;

event.user.data1 = is;

SDL_PushEvent(&event);

}

return 0;

}

int main(int argc, char *argv[])

{

SDL_Event event;

VideoState *is;

is = (VideoState *)av_mallocz(sizeof(VideoState));

if(argc < 2)

{

fprintf(stderr, "Usage: test \n");

exit(1);

}

// Register all formats and codecs

av_register_all();

if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {

fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());

exit(1);

}

// Make a screen to put our video

#ifndef __DARWIN__

screen = SDL_SetVideoMode(640, 480, 0, 0);

#else

screen = SDL_SetVideoMode(640, 480, 24, 0);

#endif

if(!screen)

{

fprintf(stderr, "SDL: could not set video mode - exiting\n");

exit(1);

}

//pstrcpy(is->filename, sizeof(is->filename), argv[1]);

strcpy(is->filename,argv[1]);

is->pictq_mutex = SDL_CreateMutex();

is->pictq_cond = SDL_CreateCond();

schedule_refresh(is, 40);

is->parse_tid = SDL_CreateThread(decode_thread, is);

if(!is->parse_tid)

{

av_free(is);

return -1;

}

for(;;)

{

SDL_WaitEvent(&event);

switch(event.type)

{

case FF_QUIT_EVENT:

printf("FF_QUIT_EVENT recieved");

case SDL_QUIT:

printf("SDL_QUIT recieved");

is->quit = 1;

SDL_Quit();

return 0;

break;

case FF_ALLOC_EVENT:

alloc_picture(event.user.data1);

break;

case FF_REFRESH_EVENT:

video_refresh_timer(event.user.data1);

break;

default:

break;

}

}

return 0;

}
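A note on building (this is not part of the original listing, so treat the exact flags as an assumption): the code targets the old FFmpeg API (av_open_input_file, avcodec_decode_video, avcodec_decode_audio2) and SDL 1.2 overlays, so it must be compiled against library versions that still provide those symbols. If the file is saved as, say, player.c, a command along the lines of gcc player.c -o player `sdl-config --cflags --libs` -lavformat -lavcodec -lswscale -lavutil -lm should build it; adjust the library names and paths to your installation.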

Program structure analysis:

The main function does three things:

1. It creates the decode_thread thread.

2. It calls schedule_refresh.

3. It enters an infinite event loop that handles the events dispatched while the program runs: the quit event, the overlay-allocation event and the screen-refresh event.

Start with the simplest piece, the schedule_refresh function.

schedule_refresh calls the SDL library function SDL_AddTimer(delay, sdl_refresh_timer_cb, is). SDL_AddTimer runs sdl_refresh_timer_cb exactly once, delay milliseconds later; the is argument is the big VideoState struct that holds all of the playback state and is handed straight through to the callback. sdl_refresh_timer_cb in turn pushes an FF_REFRESH_EVENT carrying that same VideoState pointer, the event loop in main picks it up, and video_refresh_timer gets called.

The video_refresh_timer function

video_refresh_timer is responsible for showing each video frame. If the frame queue is->pictq holds a displayable frame, it calls video_display to show it, then advances is->pictq_rindex (the read index used by video_display) and decrements is->pictq_size (the queue length), which in turn determines when queue_picture can proceed.

Whenever video_refresh_timer runs, it calls schedule_refresh once more, which dispatches another FF_REFRESH_EVENT, which gives video_refresh_timer another chance to run. In effect this is a hidden loop that keeps calling video_refresh_timer, even though there is no for or while keyword anywhere; a stand-alone sketch of the pattern follows.
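To make that hidden loop concrete, here is a minimal, self-contained sketch of the same SDL_AddTimer / user-event pattern in isolation. The names TICK_EVENT, tick_cb and schedule_tick are made up for this demo; it is not part of the player and only assumes SDL 1.2 is installed:

/* A self-rescheduling "tick": the one-shot timer callback pushes a user event,
 * and the event handler arms the next timer, so the refresh keeps running
 * without any explicit loop around the timer itself. */
#include <SDL.h>
#include <stdio.h>

#ifdef main
#undef main
#endif

#define TICK_EVENT (SDL_USEREVENT)

static Uint32 tick_cb(Uint32 interval, void *opaque)
{
    SDL_Event event;
    event.type = TICK_EVENT;
    event.user.data1 = opaque;
    SDL_PushEvent(&event);
    return 0; /* 0 = do not rearm; the handler schedules the next tick */
}

static void schedule_tick(int delay_ms, void *opaque)
{
    SDL_AddTimer(delay_ms, tick_cb, opaque);
}

int main(int argc, char *argv[])
{
    int count = 0;
    SDL_Event event;
    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_TIMER) != 0)
        return 1;
    schedule_tick(40, &count);
    for (;;) {
        SDL_WaitEvent(&event);
        if (event.type == TICK_EVENT) {
            int *n = (int *)event.user.data1;
            printf("tick %d\n", ++*n);
            if (*n == 10)
                break;            /* stop after ten refreshes */
            schedule_tick(40, n); /* the "hidden loop" */
        } else if (event.type == SDL_QUIT) {
            break;
        }
    }
    SDL_Quit();
    return 0;
}

Returning the interval from the callback instead of 0 would let SDL rearm the timer by itself; the player reschedules from the handler instead so that the delay can be chosen per frame (1, 80 or 100 ms in video_refresh_timer).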

The decode_thread thread function

decode_thread first opens the video file named on the command line with the FFmpeg function av_open_input_file, then calls stream_component_open, which sets up the two workers that handle audio and video. After that it enters an infinite loop that keeps calling av_read_frame to read data into the packet variable and demultiplexes on packet->stream_index: video packets go into the video packet queue is->videoq, audio packets into the audio packet queue is->audioq. The packets in is->videoq still have to be decoded and colour-converted before they land in the frame queue is->pictq; only then does video_refresh_timer have frames to read and display.
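The demultiplexing step can also be exercised on its own. The sketch below uses the same old FFmpeg calls as the player (av_open_input_file, av_find_stream_info, av_read_frame, av_free_packet) but, instead of queueing the packets, simply counts how many packets each stream produces; the file name comes from argv[1], and the 32-stream cap is an arbitrary choice for this demo:

#include "libavformat/avformat.h"
#include <stdio.h>

#define MAX_STREAMS_COUNTED 32

int main(int argc, char *argv[])
{
    AVFormatContext *fmt = NULL;
    AVPacket pkt;
    int counts[MAX_STREAMS_COUNTED] = {0};
    unsigned int i;

    if (argc < 2) {
        fprintf(stderr, "Usage: countpackets <file>\n");
        return 1;
    }
    av_register_all();
    if (av_open_input_file(&fmt, argv[1], NULL, 0, NULL) != 0)
        return 1; /* could not open file */
    if (av_find_stream_info(fmt) < 0)
        return 1; /* could not find stream information */
    while (av_read_frame(fmt, &pkt) >= 0) {
        /* the same decision decode_thread makes when it picks a queue */
        if (pkt.stream_index < MAX_STREAMS_COUNTED)
            counts[pkt.stream_index]++;
        av_free_packet(&pkt);
    }
    for (i = 0; i < fmt->nb_streams && i < MAX_STREAMS_COUNTED; i++)
        printf("stream %u: %d packets\n", i, counts[i]);
    av_close_input_file(fmt);
    return 0;
}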

The stream_component_open function

Depending on the stream type it sets up two workers: the SDL audio callback audio_callback, which decodes and plays the audio, and the video_thread thread, which decodes the video and converts its pixel format (the actual display happens in video_refresh_timer).

The video_thread function

video_thread first calls packet_queue_get to fetch a video packet, then decodes it with the library function avcodec_decode_video. Once a complete frame has been decoded, the frame data pFrame is handed to queue_picture for format conversion; queue_picture stores the converted frame in the frame queue is->pictq, and is->pictq_windex indicates which slot to write into.

The queue_picture function

Before writing a frame into is->pictq, queue_picture checks the queue length is->pictq_size; if the queue is already full it blocks in SDL_CondWait until video_refresh_timer has displayed a frame and reduced the queue size.

Coordination between the threads

The program has four working threads: the main thread, decode_thread, audio_callback (driven by SDL's audio thread) and video_thread. Without coordination they would step on each other and the program could not work. The coordination state lives in the VideoState struct: audioq and videoq coordinate decode_thread with its two consumers (audio_callback and video_thread) as packets are put and taken.

pictq and pictq_size coordinate the main thread and video_thread as frames are written and read.

The mutex pictq_mutex and the condition variable pictq_cond keep these shared variables safe: SDL_LockMutex and SDL_UnlockMutex guard the reads and writes, while SDL_CondWait and SDL_CondSignal block a waiting thread and wake it up again. A distilled, stand-alone example of this put/get handshake is sketched below.
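The sketch uses hypothetical names, a plain int queue instead of AVPacket, and LIFO order for brevity; it mirrors PacketQueue's locking, not the player itself, and only assumes SDL 1.2:

#include <SDL.h>
#include <SDL_thread.h>
#include <stdio.h>

#ifdef main
#undef main
#endif

typedef struct IntQueue {
    int items[16];
    int count;
    SDL_mutex *mutex;
    SDL_cond *cond;
} IntQueue;

static IntQueue q;

static void queue_put(IntQueue *iq, int v)
{
    SDL_LockMutex(iq->mutex);      /* guard count/items, like packet_queue_put */
    iq->items[iq->count++] = v;
    SDL_CondSignal(iq->cond);      /* wake the blocked consumer */
    SDL_UnlockMutex(iq->mutex);
}

static int queue_get(IntQueue *iq)
{
    int v;
    SDL_LockMutex(iq->mutex);
    while (iq->count == 0)
        SDL_CondWait(iq->cond, iq->mutex); /* releases the mutex while waiting */
    v = iq->items[--iq->count];
    SDL_UnlockMutex(iq->mutex);
    return v;
}

static int producer(void *arg)
{
    int i;
    for (i = 1; i <= 5; i++) {
        SDL_Delay(100);            /* pretend to demux/decode something */
        queue_put(&q, i);
    }
    return 0;
}

int main(int argc, char *argv[])
{
    SDL_Thread *tid;
    int i;
    SDL_Init(0);
    q.count = 0;
    q.mutex = SDL_CreateMutex();
    q.cond = SDL_CreateCond();
    tid = SDL_CreateThread(producer, NULL);
    for (i = 0; i < 5; i++)
        printf("consumed %d\n", queue_get(&q));
    SDL_WaitThread(tid, NULL);
    SDL_Quit();
    return 0;
}

The same shape appears three times in the player: packet_queue_put/packet_queue_get for audioq and videoq, queue_picture/video_refresh_timer for pictq, and alloc_picture signalling queue_picture when the overlay has been allocated.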
