├── README.md ├── .gitignore ├── play_vedio.c ├── play_audio.c ├── multi_thread.c ├── sync_video_to_audio_clock.c ├── multi_clock.c └── LICENSE /README.md: -------------------------------------------------------------------------------- 1 | # ffmpeg-tutorial 2 | Tutorial examples based on Dranger's tutorial, reimplemented against a recent FFmpeg version 3 | --------------------------------------------------------------------------------
/.gitignore: -------------------------------------------------------------------------------- 1 | 2 | /bin 3 | lib_finder.script 4 | player.cbp 5 | player.depend 6 | player.layout 7 | 8 | # Object files 9 | *.o 10 | *.ko 11 | *.obj 12 | *.elf 13 | 14 | # Precompiled Headers 15 | *.gch 16 | *.pch 17 | 18 | # Libraries 19 | *.lib 20 | *.a 21 | *.la 22 | *.lo 23 | 24 | # Shared objects (inc. Windows DLLs) 25 | *.dll 26 | *.so 27 | *.so.* 28 | *.dylib 29 | 30 | # Executables 31 | *.exe 32 | *.out 33 | *.app 34 | *.i*86 35 | *.x86_64 36 | *.hex 37 | 38 | # Debug files 39 | *.dSYM/ 40 | *.su 41 | --------------------------------------------------------------------------------
/play_vedio.c: -------------------------------------------------------------------------------- 1 | #include <libavcodec/avcodec.h> 2 | #include <libavformat/avformat.h> 3 | #include <libswscale/swscale.h> 4 | 5 | #include <SDL/SDL.h> 6 | #include <SDL/SDL_thread.h> 7 | #include <stdio.h> 8 | 9 | int main(int argc, char *argv[]) { 10 | AVFormatContext *pFormatCtx = NULL; 11 | int i, videoStream; 12 | AVCodecContext *pCodecCtx = NULL; 13 | AVCodec *pCodec = NULL; 14 | AVFrame *pFrame = NULL; 15 | AVPacket packet; 16 | int frameFinished; 17 | 18 | AVDictionary *optionDict = NULL; 19 | struct SwsContext *sws_ctx = NULL; 20 | 21 | SDL_Overlay *bmp = NULL; 22 | SDL_Surface *screen = NULL; 23 | SDL_Rect rect; 24 | SDL_Event event; 25 | 26 | if(argc < 2){ 27 | fprintf(stderr, "Usage: test <file>\n"); 28 | exit(1); 29 | } 30 | 31 | av_register_all(); 32 | 33 | if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)){ 34 | fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError()); 35 | exit(1); 36 | } 37 | 38 | /** 39 | *Open the input file 40 | */ 41 | if(avformat_open_input(&pFormatCtx, argv[1], NULL, NULL) != 0) 42 | return -1; 43 | 44 | /** 45 | *Fill pFormatCtx->streams with the correct stream information 46 | */ 47 | if(avformat_find_stream_info(pFormatCtx, NULL) < 0) 48 | return -1; 49 | 50 | /** 51 | *Debug helper: dump the file information to the terminal 52 | */ 53 | av_dump_format(pFormatCtx, 0, argv[1], 0); 54 | 55 | 56 | videoStream=-1; 57 | for ( i = 0; i < pFormatCtx->nb_streams; i++) 58 | if(pFormatCtx -> streams[i] -> codec -> codec_type == AVMEDIA_TYPE_VIDEO) { 59 | videoStream = i; 60 | break; 61 | } 62 | 63 | if(videoStream == -1) 64 | return -1; 65 | 66 | /** 67 | *Get a pointer to the codec context of the video stream 68 | */ 69 | pCodecCtx = pFormatCtx -> streams[videoStream] -> codec; 70 | 71 | /** 72 | *Find the decoder that matches codec_id 73 | */ 74 | pCodec = avcodec_find_decoder(pCodecCtx -> codec_id); 75 | 76 | if(pCodec == NULL){ 77 | fprintf(stderr, "Unsupported codec !
\n"); 78 | return -1; 79 | } 80 | 81 | /** 82 | * 打开解码器 83 | */ 84 | if(avcodec_open2(pCodecCtx, pCodec, &optionDict) <0 ) 85 | return -1; 86 | 87 | /** 88 | * 为frame 申请内存 89 | */ 90 | pFrame = av_frame_alloc(); 91 | 92 | #ifdef __DARWIN__ 93 | screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0); 94 | #else 95 | screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0); 96 | #endif // __DARWIN__ 97 | 98 | if(!screen){ 99 | fprintf(stderr, "SDL : could not set video mode - exiting \n"); 100 | exit(1); 101 | } 102 | 103 | /** 104 | * 申请一个 overlay , 将 yuv数据给 screen 105 | */ 106 | bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, SDL_YV12_OVERLAY, screen); 107 | 108 | sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, 109 | AV_PIX_FMT_YUV420P, SWS_BILINEAR, NULL, NULL, NULL); 110 | 111 | i = 0; 112 | while (av_read_frame(pFormatCtx, &packet) >= 0){ 113 | 114 | if(packet.stream_index == videoStream){ 115 | printf("\n"); 116 | printf("packet pts: %d \n", packet.pts); 117 | printf("packet dts: %d \n", packet.dts); 118 | printf("packet size: %d \n", packet.size); 119 | //printf("packet duration: %d \n", packet.duration); 120 | printf("packet pos: %d \n", packet.pos); 121 | 122 | printf("\n"); 123 | //为视频流解码 124 | avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); 125 | 126 | printf("frame pts: %d \n", pFrame->pts); 127 | printf("frame pkt_dts: %d \n", pFrame->pkt_dts); 128 | printf("frame coded_picture_number: %d \n", pFrame->coded_picture_number); 129 | printf("frame pkt_pos: %d \n", pFrame->pkt_pos); 130 | printf("frame pkt_duration: %d \n", pFrame->pkt_duration); 131 | printf("frame pkt_size: %d \n", pFrame->pkt_size); 132 | 133 | if(frameFinished){ 134 | SDL_LockYUVOverlay(bmp); 135 | 136 | printf("finish one frame \n"); 137 | printf("\n"); 138 | /** 139 | *AVPicture 结构体有一个数据指针指向一个有 4 个元素的指针数组。由于我们处理的是 YUV420P,所以 140 | *我们只需要 3 个通道即只要三组数据。其它的格式可能需要第四个指针来表示 alpha 通道或者其它参数。行尺寸 141 | *正如它的名字表示的意义一样。在 YUV 覆盖中相同功能的结构体是像素(pixel)和间距(pitch)。(“间距”是 142 | *在 SDL 里用来表示指定行数据宽度的值)。所以我们现在做的是让我们的 pict.data 中的三个数组指针指向我们的 143 | *覆盖,这样当我们写(数据)到 pict 的时候,实际上是写入到我们的覆盖中,当然要先申请必要的空间。 144 | */ 145 | 146 | AVPicture pict; 147 | pict.data[0] = bmp->pixels[0]; 148 | pict.data[1] = bmp->pixels[2]; 149 | pict.data[2] = bmp->pixels[1]; 150 | 151 | pict.linesize[0] = bmp->pitches[0]; 152 | pict.linesize[1] = bmp->pitches[2]; 153 | pict.linesize[2] = bmp->pitches[1]; 154 | 155 | sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data, pFrame->linesize, 156 | 0, pCodecCtx->height, pict.data, pict.linesize); 157 | 158 | SDL_UnlockYUVOverlay(bmp); 159 | 160 | rect.x = 0; 161 | rect.y = 0; 162 | rect.w = pCodecCtx->width; 163 | rect.h = pCodecCtx->height; 164 | 165 | SDL_DisplayYUVOverlay(bmp, &rect); 166 | SDL_Delay(10); 167 | 168 | } 169 | } 170 | 171 | av_free_packet(&packet); 172 | SDL_PollEvent(&event); 173 | 174 | switch (event.type) { 175 | 176 | case SDL_QUIT: 177 | SDL_Quit(); 178 | exit(0); 179 | break; 180 | 181 | default: 182 | break; 183 | } 184 | 185 | } 186 | 187 | 188 | av_free(pFrame); 189 | 190 | avcodec_close(pCodecCtx); 191 | 192 | avformat_close_input(&pFormatCtx); 193 | 194 | return 0; 195 | } 196 | -------------------------------------------------------------------------------- /play_audio.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | #include 9 | #include 
10 | 11 | #include 12 | 13 | #define SDL_AUDIO_BUFFER_SIZE 1024 14 | #define MAX_AUDIO_FRAME_SIZE 192000 15 | 16 | #define FIX_INPUT 1 17 | 18 | typedef struct PacketQueue 19 | { 20 | AVPacketList *first_pkt, *last_pkt; 21 | int nb_packets; 22 | int size; 23 | SDL_mutex *mutex; 24 | SDL_cond *cond; 25 | } PacketQueue; 26 | 27 | PacketQueue audioq; 28 | 29 | int quit = 0; 30 | 31 | AVFrame wanted_frame; 32 | 33 | void packet_queue_init (PacketQueue *q) 34 | { 35 | memset(q, 0, sizeof(PacketQueue)); 36 | q->mutex = SDL_CreateMutex(); 37 | q->cond = SDL_CreateCond(); 38 | } 39 | 40 | int packet_queue_put(PacketQueue *q, AVPacket *pkt) 41 | { 42 | 43 | AVPacketList *pkt1; 44 | if(av_dup_packet(pkt) < 0) 45 | { 46 | return -1; 47 | } 48 | 49 | pkt1 = av_malloc(sizeof(AVPacketList)); 50 | if(!pkt1) 51 | return -1; 52 | pkt1->pkt = *pkt; 53 | pkt1->next = NULL; 54 | 55 | SDL_LockMutex(q->mutex); 56 | 57 | if(!q->last_pkt) 58 | { 59 | q->first_pkt = pkt1; 60 | } 61 | else 62 | { 63 | q->last_pkt->next = pkt1; 64 | } 65 | 66 | q->last_pkt = pkt1; 67 | q->nb_packets++; 68 | q->size += pkt1->pkt.size; 69 | SDL_CondSignal(q->cond); 70 | 71 | SDL_UnlockMutex(q->mutex); 72 | 73 | return 0; 74 | } 75 | 76 | int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block) 77 | { 78 | AVPacketList *pkt1; 79 | int ret; 80 | 81 | for(;;) 82 | { 83 | 84 | if(quit) 85 | { 86 | ret = -1; 87 | break; 88 | } 89 | 90 | pkt1 = q->first_pkt; 91 | if(pkt1) 92 | { 93 | q->first_pkt = pkt1->next; 94 | if(!q->first_pkt) 95 | q->last_pkt = NULL; 96 | 97 | q->nb_packets--; 98 | q->size -= pkt1->pkt.size; 99 | 100 | *pkt = pkt1->pkt; 101 | 102 | av_free(pkt1); 103 | ret = 1; 104 | break; 105 | } 106 | else if(!block) 107 | { 108 | ret = 0; 109 | break; 110 | } 111 | else 112 | { 113 | SDL_CondWait(q->cond, q->mutex); 114 | } 115 | } 116 | 117 | SDL_UnlockMutex(q->mutex); 118 | 119 | return ret; 120 | } 121 | 122 | int audio_decode_frame(AVCodecContext *aCodecCtx, uint8_t *audio_buf, int buf_size) 123 | { 124 | 125 | static AVPacket pkt; 126 | static int audio_pkt_size = 0; 127 | static AVFrame frame; 128 | 129 | int len1, data_size = 0; 130 | 131 | SwrContext *swr_ctx = NULL; 132 | 133 | int resampled_data_size; 134 | 135 | for(;;) 136 | { 137 | if(pkt.data) 138 | av_free_packet(&pkt); 139 | 140 | 141 | if(quit) 142 | { 143 | return -1; 144 | } 145 | 146 | if(packet_queue_get(&audioq, &pkt, 1) < 0) 147 | { 148 | return -1; 149 | } 150 | 151 | audio_pkt_size = pkt.size; 152 | 153 | while(audio_pkt_size > 0) 154 | { 155 | 156 | int got_frame = 0; 157 | len1 = avcodec_decode_audio4(aCodecCtx, &frame, &got_frame, &pkt); 158 | 159 | // printf( "pkt size=%d \n", pkt.size); 160 | //printf( "frame size=%u \n", frame.data); 161 | //printf( "codec size=%d \n",len1); 162 | //printf("got_frame=%d \n", got_frame); 163 | 164 | if(len1 < 0) 165 | { 166 | audio_pkt_size = 0; 167 | break; 168 | } 169 | 170 | audio_pkt_size -= len1; 171 | 172 | if(!got_frame) 173 | continue; 174 | 175 | data_size = av_samples_get_buffer_size(NULL, aCodecCtx->channels, frame.nb_samples, 176 | AUDIO_S16SYS, 1); 177 | 178 | if (frame.channels > 0 && frame.channel_layout == 0) 179 | frame.channel_layout = av_get_default_channel_layout(frame.channels); 180 | else if (frame.channels == 0 && frame.channel_layout > 0) 181 | frame.channels = av_get_channel_layout_nb_channels(frame.channel_layout); 182 | 183 | printf("frame.sample_rate = %d \n", frame.sample_rate); 184 | printf("frame.format = %d \n", frame.format); 185 | printf("frame.format bits = %d \n", 
av_get_bytes_per_sample(frame.format)); 186 | printf("frame.channels = %d \n", frame.channels); 187 | printf("frame.channel_layout = %d \n", frame.channel_layout); 188 | printf("frame.nb_samples = %d \n", frame.nb_samples); 189 | printf("\n"); 190 | 191 | /** 192 | * 接下来判断我们之前设置SDL时设置的声音格式(AV_SAMPLE_FMT_S16),声道布局, 193 | * 采样频率,每个AVFrame的每个声道采样数与 194 | * 得到的该AVFrame分别是否相同,如有任意不同,我们就需要swr_convert该AvFrame, 195 | * 然后才能符合之前设置好的SDL的需要,才能播放 196 | */ 197 | if(frame.format != AUDIO_S16SYS 198 | || frame.channel_layout != aCodecCtx->channel_layout 199 | || frame.sample_rate != aCodecCtx->sample_rate 200 | || frame.nb_samples != SDL_AUDIO_BUFFER_SIZE) 201 | { 202 | 203 | if (swr_ctx != NULL) 204 | { 205 | swr_free(&swr_ctx); 206 | swr_ctx = NULL; 207 | } 208 | 209 | swr_ctx = swr_alloc_set_opts(NULL, wanted_frame.channel_layout, (enum AVSampleFormat)wanted_frame.format, wanted_frame.sample_rate, 210 | frame.channel_layout, (enum AVSampleFormat)frame.format, frame.sample_rate, 0, NULL); 211 | 212 | if (swr_ctx == NULL || swr_init(swr_ctx) < 0) 213 | { 214 | fprintf(stderr, "swr_init failed: \n" ); 215 | break; 216 | } 217 | } 218 | 219 | if(swr_ctx) 220 | { 221 | int dst_nb_samples = av_rescale_rnd(swr_get_delay(swr_ctx, frame.sample_rate) + frame.nb_samples, 222 | wanted_frame.sample_rate, wanted_frame.format, AV_ROUND_INF); 223 | printf("swr convert ! \n"); 224 | printf("dst_nb_samples : %d \n", dst_nb_samples); 225 | /** 226 | * 转换该AVFrame到设置好的SDL需要的样子,有些旧的代码示例最主要就是少了这一部分, 227 | * 往往一些音频能播,一些不能播,这就是原因,比如有些源文件音频恰巧是AV_SAMPLE_FMT_S16的。 228 | * swr_convert 返回的是转换后每个声道(channel)的采样数 229 | */ 230 | int len2 = swr_convert(swr_ctx, &audio_buf, dst_nb_samples,(const uint8_t**)frame.data, frame.nb_samples); 231 | if (len2 < 0) 232 | { 233 | fprintf(stderr, "swr_convert failed \n" ); 234 | break; 235 | } 236 | 237 | resampled_data_size = wanted_frame.channels * len2 * av_get_bytes_per_sample((enum AVSampleFormat)wanted_frame.format); 238 | }else{ 239 | resampled_data_size = data_size; 240 | } 241 | 242 | 243 | 244 | return resampled_data_size; 245 | } 246 | 247 | } 248 | } 249 | 250 | void audio_callback(void *userdata, Uint8 *stream, int len) 251 | { 252 | 253 | AVCodecContext *aCodecCtx = (AVCodecContext *) userdata; 254 | int len1, audio_size; 255 | 256 | static uint8_t audio_buf[(MAX_AUDIO_FRAME_SIZE * 3) / 2]; 257 | static unsigned int audio_buf_size = 0; 258 | static unsigned int audio_buf_index = 0; 259 | 260 | SDL_memset(stream, 0, len); 261 | 262 | //printf("audio_callback len=%d \n", len); 263 | 264 | //向设备发送长度为len的数据 265 | while(len > 0) 266 | { 267 | //缓冲区中无数据 268 | if(audio_buf_index >= audio_buf_size) 269 | { 270 | //从packet中解码数据 271 | audio_size = audio_decode_frame(aCodecCtx, audio_buf, audio_buf_size); 272 | //printf("audio_decode_frame finish audio_size=%d \n", audio_size); 273 | if(audio_size < 0) //没有解码到数据或者出错,填充0 274 | { 275 | audio_buf_size = 1024; 276 | memset(audio_buf, 0, audio_buf_size); 277 | } 278 | else 279 | { 280 | audio_buf_size = audio_size; 281 | } 282 | 283 | audio_buf_index = 0; 284 | } 285 | 286 | len1 = audio_buf_size - audio_buf_index; 287 | if(len1 > len) 288 | len1 = len; 289 | 290 | //memcpy(stream, (uint8_t *)audio_buf + audio_buf_index, len1); 291 | SDL_MixAudio(stream, audio_buf + audio_buf_index, len1, SDL_MIX_MAXVOLUME); 292 | 293 | len -= len1; 294 | stream += len1; 295 | audio_buf_index += len1; 296 | } 297 | } 298 | 299 | 300 | int main(int argc, char *argv[]) 301 | { 302 | AVFormatContext *pFormatCtx = NULL; 303 | int i, audioStream; 304 | 305 | AVPacket 
packet; 306 | 307 | AVCodecContext *aCodecCtx = NULL; 308 | AVCodec *aCodec = NULL; 309 | 310 | SDL_AudioSpec wanted_spec, spec; 311 | 312 | SDL_Event event; 313 | 314 | char filename[100]; 315 | 316 | #ifdef FIX_INPUT 317 | strcpy(filename, "/home/wanghuatian/oceans.mp4"); 318 | #else 319 | if(argc < 2) 320 | { 321 | fprintf(stderr, "Usage: test \n"); 322 | exit(1); 323 | } 324 | 325 | strcpy(filename, argv[1]); 326 | #endif // FIX_INPUT 327 | 328 | 329 | 330 | av_register_all(); 331 | 332 | if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) 333 | { 334 | fprintf(stderr,"Could not initialize SDL - %s " + *SDL_GetError()); 335 | exit(1); 336 | } 337 | 338 | // 读取文件头,将格式相关信息存放在AVFormatContext结构体中 339 | if(avformat_open_input(&pFormatCtx, filename, NULL, NULL) != 0) 340 | return -1; 341 | // 检测文件的流信息 342 | if(avformat_find_stream_info(pFormatCtx, NULL) < 0) 343 | return -1; 344 | 345 | // 在控制台输出文件信息 346 | av_dump_format(pFormatCtx, 0, filename, 0); 347 | 348 | for(i = 0; i < pFormatCtx->nb_streams; i++) 349 | { 350 | if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) 351 | audioStream = i; 352 | } 353 | 354 | aCodecCtx = pFormatCtx->streams[audioStream]->codec; 355 | 356 | aCodec = avcodec_find_decoder(aCodecCtx->codec_id); 357 | if(!aCodec) 358 | { 359 | fprintf(stderr, "Unsupported codec ! \n"); 360 | return -1; 361 | } 362 | 363 | wanted_spec.freq = aCodecCtx->sample_rate; 364 | wanted_spec.format = AUDIO_S16SYS; 365 | wanted_spec.channels = aCodecCtx->channels; 366 | wanted_spec.silence = 0; 367 | wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE; 368 | wanted_spec.callback = audio_callback; 369 | wanted_spec.userdata = aCodecCtx; 370 | 371 | printf("codecCtx sample_rate = %d \n", aCodecCtx->sample_rate); 372 | printf("codecCtx channels = %d \n", aCodecCtx->channels); 373 | printf("codecCtx sample_fmt = %d \n", aCodecCtx->sample_fmt); 374 | printf("AUDIO_S16SYS = %d \n", AUDIO_S16SYS); 375 | printf("\n"); 376 | 377 | /** 378 | *SDL_OpenAudio 函数通过wanted_spec来打开音频设备,成功返回零,将实际的硬件参数传递给spec的指向的结构体。 379 | *如果spec为NULL,audio data将通过callback函数,保证将自动转换成硬件音频格式。 380 | * 381 | *音频设备刚开始播放静音,当callback变得可用时,通过调用SDL_PauseAudio(0)来开始播放。 382 | *由于audio diver 可能修改音频缓存的请求大小,所以你应该申请任何的混合缓存(mixing buffers),在你打开音频设备之后。*/ 383 | if(SDL_OpenAudio(&wanted_spec, &spec) < 0) 384 | { 385 | fprintf(stderr, "SDL_OpenAudio: %s \n", SDL_GetError()); 386 | return -1; 387 | } 388 | 389 | printf("spec freq = %d \n", spec.freq); 390 | printf("spec format = %d \n", spec.format); 391 | printf("spec channels = %d \n", spec.channels); 392 | printf("spec samples = %d \n", spec.samples); 393 | printf("spec silence = %d \n", spec.silence); 394 | printf("spec padding = %d \n", spec.padding); 395 | printf("spec size = %d \n", spec.size); 396 | printf("\n"); 397 | 398 | printf("AV_SAMPLE_FMT_S16 = %d \n", AV_SAMPLE_FMT_S16); 399 | wanted_frame.format = AV_SAMPLE_FMT_S16; 400 | wanted_frame.sample_rate = spec.freq; 401 | wanted_frame.channel_layout = av_get_default_channel_layout(spec.channels); 402 | wanted_frame.channels = spec.channels; 403 | 404 | avcodec_open2(aCodecCtx, aCodec, NULL); 405 | 406 | packet_queue_init(&audioq); 407 | SDL_PauseAudio(0); 408 | 409 | 410 | while (av_read_frame(pFormatCtx, &packet) >= 0) 411 | { 412 | if (packet.stream_index == audioStream) 413 | packet_queue_put(&audioq, &packet); 414 | else 415 | av_free_packet(&packet); 416 | 417 | SDL_PollEvent(&event); 418 | 419 | switch (event.type) { 420 | 421 | case SDL_QUIT: 422 | quit = 1; 423 | SDL_Quit(); 424 | exit(0); 425 | break; 426 
| 427 | default: 428 | break; 429 | } 430 | } 431 | 432 | getchar(); 433 | // avformat_close_input(&pFormatCtx); 434 | 435 | return 0; 436 | } 437 | -------------------------------------------------------------------------------- /multi_thread.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | 10 | #include 11 | #include 12 | 13 | #include 14 | #include 15 | 16 | #define SDL_AUDIO_BUFFER_SIZE 1024 17 | #define SDL_AUDIO_FRAME_SIZE 192000 18 | 19 | #define MAX_AUDIOQ_SIZE (5 * 16 *1024) 20 | #define MAX_VIDEOQ_SIZE (5 * 256 * 1024) 21 | 22 | #define FF_ALLOC_EVENT (SDL_USEREVENT) 23 | #define FF_REFRESH_EVENT (SDL_USEREVENT + 1) 24 | #define FF_QUIT_EVENT (SDL_USEREVENT + 2) 25 | 26 | #define VIDEO_PICTURE_QUEUE_SIZE 1 27 | 28 | #define FIX_INPUT 0 29 | 30 | typedef struct PacketQueue 31 | { 32 | AVPacketList *first_pkt, *last_pkt; 33 | int nb_packets; 34 | int size; 35 | SDL_mutex *mutex; 36 | SDL_cond *cond; 37 | } PacketQueue; 38 | 39 | typedef struct VideoPicture 40 | { 41 | SDL_Overlay *bmp; 42 | int width, height; 43 | int allocated; 44 | } VideoPicture; 45 | 46 | typedef struct VideoState 47 | { 48 | 49 | AVFormatContext *pFormatCtx; 50 | int videoStream, audioStream; 51 | 52 | AVStream *audio_st; 53 | PacketQueue audioq; 54 | uint8_t audio_buf[(192000 * 3) / 2]; 55 | unsigned int audio_buf_size; 56 | unsigned int audio_buf_index; 57 | AVFrame audio_frame, wanted_frame; 58 | AVPacket audio_pkt; 59 | uint8_t *audio_pkt_data; 60 | int audio_pkt_size; 61 | 62 | AVStream *video_st; 63 | PacketQueue videoq; 64 | 65 | VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE]; 66 | int pictq_size, pictq_rindex, pictq_windex; 67 | SDL_mutex *pictq_mutex; 68 | SDL_cond *pictq_cond; 69 | 70 | SDL_Thread *parse_tid; 71 | SDL_Thread *video_tid; 72 | 73 | char filename[1024]; 74 | int quit; 75 | 76 | AVIOContext *io_context; 77 | struct SwsContext *sws_ctx; 78 | 79 | } VideoState; 80 | 81 | SDL_Surface *screen; 82 | VideoState *global_video_state; 83 | 84 | void packet_queue_init(PacketQueue *q) 85 | { 86 | memset(q, 0, sizeof(PacketQueue)); 87 | q->mutex = SDL_CreateMutex(); 88 | q->cond = SDL_CreateCond(); 89 | } 90 | 91 | int packet_queue_put(PacketQueue *q, AVPacket *pkt) 92 | { 93 | 94 | AVPacketList *pkt_list; 95 | if(av_dup_packet(pkt) < 0) 96 | { 97 | return -1; 98 | } 99 | 100 | pkt_list = av_malloc(sizeof(AVPacketList)); 101 | if(!pkt_list) 102 | return -1; 103 | pkt_list->pkt = *pkt; 104 | pkt_list->next = NULL; 105 | 106 | SDL_LockMutex(q->mutex); 107 | 108 | if(!q->last_pkt) 109 | q->first_pkt = pkt_list; 110 | else 111 | q->last_pkt->next = pkt_list; 112 | 113 | q->last_pkt = pkt_list; 114 | q->nb_packets++; 115 | q->size += pkt->size; 116 | SDL_CondSignal(q->cond); 117 | 118 | SDL_UnlockMutex(q->mutex); 119 | return 0; 120 | } 121 | 122 | static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block) 123 | { 124 | AVPacketList *pkt_list; 125 | int ret; 126 | 127 | SDL_LockMutex(q->mutex); 128 | 129 | for(;;) 130 | { 131 | 132 | if(global_video_state->quit) 133 | { 134 | ret = -1; 135 | break; 136 | } 137 | 138 | pkt_list = q->first_pkt; 139 | if(pkt_list) 140 | { 141 | q->first_pkt = pkt_list->next; 142 | if(!q->first_pkt) 143 | q->last_pkt = NULL; 144 | 145 | q->nb_packets--; 146 | q->size -= pkt_list->pkt.size; 147 | 148 | *pkt = pkt_list->pkt; 149 | av_free(pkt_list); 150 | ret = 1; 151 | break; 152 | } 153 | else if(!block) 154 | { 155 | ret = 
0; 156 | break; 157 | } 158 | else 159 | { 160 | SDL_CondWait(q->cond, q->mutex); 161 | } 162 | } 163 | 164 | SDL_UnlockMutex(q->mutex); 165 | return ret; 166 | } 167 | 168 | int audio_decode_frame(VideoState *is) 169 | { 170 | 171 | AVPacket *pkt = &is->audio_pkt; 172 | 173 | int len1, data_size = 0; 174 | 175 | SwrContext *swr_ctx = NULL; 176 | 177 | int resampled_data_size; 178 | 179 | uint8_t *buf = &is->audio_buf; 180 | AVFrame *frame = &is->audio_frame; 181 | 182 | for(;;) 183 | { 184 | if(pkt->data) 185 | av_free_packet(pkt); 186 | 187 | 188 | if(is->quit) 189 | { 190 | return -1; 191 | } 192 | 193 | if(packet_queue_get(&is->audioq, pkt, 1) < 0) 194 | { 195 | return -1; 196 | } 197 | 198 | is->audio_pkt_size = pkt->size; 199 | 200 | while(is->audio_pkt_size > 0) 201 | { 202 | 203 | int got_frame = 0; 204 | len1 = avcodec_decode_audio4(is->audio_st->codec, &is->audio_frame, &got_frame, pkt); 205 | 206 | if(len1 < 0) 207 | { 208 | is->audio_pkt_size = 0; 209 | break; 210 | } 211 | 212 | is->audio_pkt_size -= len1; 213 | 214 | if(!got_frame) 215 | continue; 216 | 217 | 218 | if (is->audio_frame.channels > 0 && is->audio_frame.channel_layout == 0) 219 | is->audio_frame.channel_layout = av_get_default_channel_layout(is->audio_frame.channels); 220 | else if (is->audio_frame.channels == 0 && is->audio_frame.channel_layout > 0) 221 | is->audio_frame.channels = av_get_channel_layout_nb_channels(is->audio_frame.channel_layout); 222 | 223 | /** 224 | * 接下来判断我们之前设置SDL时设置的声音格式(AV_SAMPLE_FMT_S16),声道布局, 225 | * 采样频率,每个AVFrame的每个声道采样数与 226 | * 得到的该AVFrame分别是否相同,如有任意不同,我们就需要swr_convert该AvFrame, 227 | * 然后才能符合之前设置好的SDL的需要,才能播放 228 | */ 229 | if(is->audio_frame.format != is->wanted_frame.format 230 | || is->audio_frame.channel_layout != is->wanted_frame.channel_layout 231 | || is->audio_frame.sample_rate != is->wanted_frame.sample_rate 232 | || is->audio_frame.nb_samples != SDL_AUDIO_BUFFER_SIZE) 233 | { 234 | 235 | if (swr_ctx != NULL) 236 | { 237 | swr_free(&swr_ctx); 238 | swr_ctx = NULL; 239 | } 240 | 241 | swr_ctx = swr_alloc_set_opts(NULL, is->wanted_frame.channel_layout, (enum AVSampleFormat)is->wanted_frame.format, is->wanted_frame.sample_rate, 242 | is->audio_frame.channel_layout, (enum AVSampleFormat)is->audio_frame.format, is->audio_frame.sample_rate, 0, NULL); 243 | 244 | if (swr_ctx == NULL || swr_init(swr_ctx) < 0) 245 | { 246 | fprintf(stderr, "swr_init failed: \n" ); 247 | break; 248 | } 249 | } 250 | 251 | if(swr_ctx) 252 | { 253 | int dst_nb_samples = av_rescale_rnd(swr_get_delay(swr_ctx, is->audio_frame.sample_rate) + is->audio_frame.nb_samples, 254 | is->wanted_frame.sample_rate, is->wanted_frame.format, AV_ROUND_INF); 255 | printf("swr convert !%d \n", dst_nb_samples); 256 | printf("audio_frame.nb_samples : %d \n", is->audio_frame.nb_samples); 257 | printf("is->audio_buf : %d \n", *is->audio_buf); 258 | printf("is->audio_buf : %d \n", &is->audio_buf); 259 | printf("is->buf : %d \n", &buf); 260 | printf("is->buf : %d \n", *buf); 261 | /** 262 | * 转换该AVFrame到设置好的SDL需要的样子,有些旧的代码示例最主要就是少了这一部分, 263 | * 往往一些音频能播,一些不能播,这就是原因,比如有些源文件音频恰巧是AV_SAMPLE_FMT_S16的。 264 | * swr_convert 返回的是转换后每个声道(channel)的采样数 265 | */ 266 | int len2 = swr_convert(swr_ctx, &buf, dst_nb_samples,(const uint8_t**)&is->audio_frame.data, is->audio_frame.nb_samples); 267 | if (len2 < 0) 268 | { 269 | fprintf(stderr, "swr_convert failed \n" ); 270 | break; 271 | } 272 | 273 | resampled_data_size = is->wanted_frame.channels * len2 * av_get_bytes_per_sample((enum AVSampleFormat)is->wanted_frame.format); 274 | } 275 | else 
276 | { 277 | data_size = av_samples_get_buffer_size(NULL, is->audio_st->codec->channels, is->audio_frame.nb_samples, 278 | AUDIO_S16SYS, 1); 279 | resampled_data_size = data_size; 280 | } 281 | 282 | 283 | 284 | return resampled_data_size; 285 | } 286 | 287 | } 288 | } 289 | 290 | void audio_callback(void *userdata, Uint8 *stream, int len) 291 | { 292 | 293 | VideoState *is = (VideoState *) userdata; 294 | int len1, audio_size; 295 | 296 | SDL_memset(stream, 0, len); 297 | 298 | printf("audio_callback len=%d \n", len); 299 | 300 | //向设备发送长度为len的数据 301 | while(len > 0) 302 | { 303 | //缓冲区中无数据 304 | if(is->audio_buf_index >= is->audio_buf_size) 305 | { 306 | //从packet中解码数据 307 | audio_size = audio_decode_frame(is); 308 | printf("audio_decode_frame finish audio_size=%d \n", audio_size); 309 | if(audio_size < 0) //没有解码到数据或者出错,填充0 310 | { 311 | is->audio_buf_size = 1024; 312 | memset(is->audio_buf, 0, is->audio_buf_size); 313 | } 314 | else 315 | { 316 | is->audio_buf_size = audio_size; 317 | } 318 | 319 | is->audio_buf_index = 0; 320 | } 321 | 322 | len1 = is->audio_buf_size - is->audio_buf_index; 323 | if(len1 > len) 324 | len1 = len; 325 | 326 | //memcpy(stream, (uint8_t *)audio_buf + audio_buf_index, len1); 327 | SDL_MixAudio(stream, is->audio_buf + is->audio_buf_index, len1, SDL_MIX_MAXVOLUME); 328 | 329 | len -= len1; 330 | stream += len1; 331 | is->audio_buf_index += len1; 332 | } 333 | } 334 | 335 | static Uint32 sdl_refresh_timer_cb(Uint32 interval, void *opaque) 336 | { 337 | SDL_Event event; 338 | event.type = FF_REFRESH_EVENT; 339 | event.user.data1 = opaque; 340 | SDL_PushEvent(&event); 341 | return 0; 342 | } 343 | 344 | //让视频按固定延迟时间刷新 345 | static void schedule_refresh(VideoState *is, int delay) 346 | { 347 | SDL_AddTimer(delay, sdl_refresh_timer_cb, is); 348 | } 349 | 350 | void video_display(VideoState *is) 351 | { 352 | 353 | SDL_Rect rect; 354 | VideoPicture *vp; 355 | 356 | float aspect_ratio;//宽高比例 357 | int w, h, x, y; 358 | 359 | vp = &is->pictq[is->pictq_rindex]; 360 | if(vp->bmp) 361 | { 362 | if(is->video_st->codec->sample_aspect_ratio.num == 0) 363 | { 364 | aspect_ratio = 0; 365 | } 366 | else 367 | { 368 | aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio) * 369 | is->video_st->codec->width / is->video_st->codec->height; 370 | } 371 | 372 | if(aspect_ratio <= 0.0) 373 | { 374 | aspect_ratio = (float)is->video_st->codec->width / (float)is->video_st->codec->height; 375 | } 376 | 377 | h = screen->h; 378 | w = ((int) rint(h * aspect_ratio)) & -3; 379 | if(w > screen->w) 380 | { 381 | w = screen->w; 382 | h = ((int) rint(w / aspect_ratio)) & -3; 383 | } 384 | 385 | x = (screen->w - w) / 2; 386 | y = (screen->h - h) / 2; 387 | 388 | rect.x = x; 389 | rect.y = y; 390 | rect.w = w; 391 | rect.h = h; 392 | SDL_DisplayYUVOverlay(vp->bmp, &rect); 393 | } 394 | } 395 | 396 | 397 | void video_refresh_timer(void *userdata) 398 | { 399 | 400 | VideoState *is = (VideoState *)userdata; 401 | 402 | if(is->video_st) 403 | { 404 | if(is->pictq_size == 0) 405 | { 406 | schedule_refresh(is, 1); 407 | } 408 | else 409 | { 410 | schedule_refresh(is, 80); 411 | 412 | video_display(is); 413 | 414 | if(++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE) 415 | { 416 | is->pictq_rindex = 0; 417 | } 418 | 419 | SDL_LockMutex(is->pictq_mutex); 420 | is->pictq_size--; 421 | SDL_CondSignal(is->pictq_cond); 422 | SDL_UnlockMutex(is->pictq_mutex); 423 | } 424 | } 425 | else 426 | { 427 | schedule_refresh(is, 100); 428 | } 429 | } 430 | 431 | void alloc_picture(void *userdata) 432 | { 433 | 
VideoState *is = (VideoState *) userdata; 434 | VideoPicture *vp; 435 | 436 | vp = &is->pictq[is->pictq_windex]; 437 | if(vp->bmp) 438 | SDL_FreeYUVOverlay(vp->bmp); 439 | 440 | vp->bmp = SDL_CreateYUVOverlay(is->video_st->codec->width, 441 | is->video_st->codec->height, 442 | SDL_YV12_OVERLAY, 443 | screen); 444 | 445 | vp->width = is->video_st->codec->width; 446 | vp->height = is->video_st->codec->height; 447 | 448 | SDL_LockMutex(is->pictq_mutex); 449 | vp->allocated = 1; 450 | SDL_CondSignal(is->pictq_cond); 451 | SDL_UnlockMutex(is->pictq_mutex); 452 | } 453 | 454 | /*为is->pictq生成并填充VideoPicture */ 455 | int queue_picture(VideoState *is, AVFrame *pFrame) 456 | { 457 | 458 | VideoPicture *vp; 459 | AVPicture pict; 460 | 461 | SDL_LockMutex(is->pictq_mutex); 462 | while(is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE && !is->quit) 463 | { 464 | SDL_CondWait(is->pictq_cond, is->pictq_mutex); 465 | } 466 | SDL_UnlockMutex(is->pictq_mutex); 467 | 468 | if(is->quit) 469 | return -1; 470 | 471 | vp = &is->pictq[is->pictq_windex]; 472 | 473 | if(!vp->bmp || vp->width != is->video_st->codec->width 474 | || vp->height != is->video_st->codec->height) 475 | { 476 | SDL_Event event; 477 | 478 | vp->allocated = 0; 479 | //在主线程操作 480 | event.type = FF_ALLOC_EVENT; 481 | event.user.data1 = is; 482 | SDL_PushEvent(&event); 483 | 484 | //等待直到我们申请到一个picture的内存 485 | SDL_LockMutex(is->pictq_mutex); 486 | while(!vp->allocated && !is->quit) 487 | { 488 | SDL_CondWait(is->pictq_cond, is->pictq_mutex); 489 | } 490 | SDL_UnlockMutex(is->pictq_mutex); 491 | 492 | if(is->quit) 493 | return -1; 494 | } 495 | 496 | if(vp->bmp) 497 | { 498 | SDL_LockYUVOverlay(vp->bmp); 499 | 500 | pict.data[0] = vp->bmp->pixels[0]; 501 | pict.data[1] = vp->bmp->pixels[2]; 502 | pict.data[2] = vp->bmp->pixels[1]; 503 | 504 | pict.linesize[0] = vp->bmp->pitches[0]; 505 | pict.linesize[1] = vp->bmp->pitches[2]; 506 | pict.linesize[2] = vp->bmp->pitches[1]; 507 | 508 | sws_scale(is->sws_ctx, (uint8_t const * const *)pFrame->data, pFrame->linesize, 509 | 0, is->video_st->codec->height, pict.data, pict.linesize); 510 | 511 | SDL_UnlockYUVOverlay(vp->bmp); 512 | 513 | if(++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE) 514 | is->pictq_windex = 0; 515 | 516 | SDL_LockMutex(is->pictq_mutex); 517 | is->pictq_size++; 518 | SDL_UnlockMutex(is->pictq_mutex); 519 | } 520 | return 0; 521 | } 522 | 523 | int video_thread(void *arg){ 524 | VideoState *is = (VideoState *)arg; 525 | AVPacket pkt1, *packet = &pkt1; 526 | int frameFinished; 527 | AVFrame *pFrame; 528 | 529 | pFrame = av_frame_alloc(); 530 | 531 | for(;;){ 532 | if(packet_queue_get(&is->videoq, packet, 1) < 0) 533 | break; 534 | 535 | avcodec_decode_video2(is->video_st->codec, pFrame, &frameFinished, packet); 536 | 537 | if(frameFinished){ 538 | if(queue_picture(is, pFrame) < 0) 539 | break; 540 | } 541 | 542 | av_free_packet(packet); 543 | } 544 | 545 | av_free(pFrame); 546 | return 0; 547 | } 548 | 549 | int stream_commponent_open(VideoState *is, int stream_index){ 550 | AVFormatContext *pFormatCtx = is->pFormatCtx; 551 | AVCodecContext *codecCtx = NULL; 552 | AVCodec *codec = NULL; 553 | AVDictionary *optionDict = NULL; 554 | SDL_AudioSpec wanted_spec, spec; 555 | 556 | if(stream_index < 0 || stream_index >= pFormatCtx->nb_streams) 557 | return -1; 558 | 559 | codecCtx = pFormatCtx->streams[stream_index]->codec; 560 | 561 | if(codecCtx->codec_type == AVMEDIA_TYPE_AUDIO){ 562 | wanted_spec.freq = codecCtx->sample_rate; 563 | wanted_spec.format = AUDIO_S16SYS; 564 | wanted_spec.channels = 
codecCtx->channels; 565 | wanted_spec.silence = 0; 566 | wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE; 567 | wanted_spec.callback = audio_callback; 568 | wanted_spec.userdata = is; 569 | 570 | if(SDL_OpenAudio(&wanted_spec, &spec)){ 571 | fprintf(stderr, "SDL_OpenAudio: %s \n", SDL_GetError()); 572 | return -1; 573 | } 574 | 575 | printf("spec format: %d \n", spec.format); 576 | is->wanted_frame.format = AV_SAMPLE_FMT_S16; 577 | is->wanted_frame.sample_rate = spec.freq; 578 | is->wanted_frame.channel_layout = av_get_default_channel_layout(spec.channels); 579 | is->wanted_frame.channels = spec.channels; 580 | } 581 | 582 | codec = avcodec_find_decoder(codecCtx->codec_id); 583 | if(!codec || (avcodec_open2(codecCtx, codec, &optionDict) < 0)){ 584 | fprintf(stderr, "Unsupported codec! \n"); 585 | return -1; 586 | } 587 | 588 | switch(codecCtx->codec_type){ 589 | case AVMEDIA_TYPE_AUDIO: 590 | is->audioStream = stream_index; 591 | is->audio_st = pFormatCtx->streams[stream_index]; 592 | is->audio_buf_size = 0; 593 | is->audio_buf_index = 0; 594 | memset(&is->audio_pkt, 0, sizeof(is->audio_pkt)); 595 | packet_queue_init(&is->audioq); 596 | SDL_PauseAudio(0); 597 | break; 598 | 599 | case AVMEDIA_TYPE_VIDEO: 600 | is->videoStream = stream_index; 601 | is->video_st = pFormatCtx->streams[stream_index]; 602 | 603 | packet_queue_init(&is->videoq); 604 | is->video_tid = SDL_CreateThread(video_thread,is); 605 | is->sws_ctx = sws_getContext(is->video_st->codec->width, 606 | is->video_st->codec->height, 607 | is->video_st->codec->pix_fmt, 608 | is->video_st->codec->width, 609 | is->video_st->codec->height, 610 | AV_PIX_FMT_YUV420P, 611 | SWS_BILINEAR,NULL,NULL,NULL); 612 | 613 | break; 614 | 615 | default: 616 | break; 617 | } 618 | return 0; 619 | } 620 | 621 | int decode_interrupt_cb(void *opaque){ 622 | return (global_video_state && global_video_state->quit); 623 | } 624 | 625 | int decode_thread(void *arg){ 626 | VideoState *is = (VideoState *)arg; 627 | AVFormatContext *pFormatCtx = NULL; 628 | AVPacket pkt1, *packet = &pkt1; 629 | 630 | int video_index = -1; 631 | int audio_index = -1; 632 | int i; 633 | 634 | AVDictionary *io_dict = NULL; 635 | AVIOInterruptCB callback; 636 | 637 | is->videoStream = -1; 638 | is->audioStream = -1; 639 | 640 | global_video_state = is; 641 | callback.callback = decode_interrupt_cb; 642 | callback.opaque = is; 643 | 644 | if(avio_open2(&is->io_context, is->filename, 0, &callback, &io_dict) != 0){ 645 | fprintf(stderr, "Unable to open I/O for %s \n", is->filename); 646 | return -1; 647 | } 648 | 649 | if(avformat_open_input(&pFormatCtx, is->filename, NULL, NULL) != 0) 650 | return -1; 651 | 652 | is->pFormatCtx = pFormatCtx; 653 | 654 | if(avformat_find_stream_info(pFormatCtx, NULL) < 0) 655 | return -1; 656 | 657 | av_dump_format(pFormatCtx, 0, is->filename, 0); 658 | 659 | for(i=0; i < pFormatCtx->nb_streams; i++){ 660 | if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO && video_index < 0) 661 | video_index = i; 662 | 663 | if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO && audio_index < 0) 664 | audio_index = i; 665 | } 666 | 667 | printf("audio_index:%d, video_index:%d \n", audio_index, video_index); 668 | 669 | if(audio_index >= 0) 670 | stream_commponent_open(is, audio_index); 671 | 672 | if(video_index >= 0) 673 | stream_commponent_open(is, video_index); 674 | 675 | if(is->videoStream < 0 || is->audioStream < 0){ 676 | fprintf(stderr, "%s: could not open codecs \n", is->filename); 677 | goto fail; 678 | } 679 | 680 | //main 
decode loop 681 | for(;;){ 682 | if(is->quit) 683 | break; 684 | 685 | if(is->audioq.size > MAX_AUDIOQ_SIZE || is->videoq.size > MAX_VIDEOQ_SIZE){ 686 | SDL_Delay(10); 687 | continue; 688 | } 689 | 690 | if(av_read_frame(is->pFormatCtx, packet) < 0){ 691 | if(is->pFormatCtx->pb->error == 0){ 692 | SDL_Delay(100); 693 | continue; 694 | }else{ 695 | break; 696 | } 697 | } 698 | 699 | if(packet->stream_index == is->videoStream){ 700 | packet_queue_put(&is->videoq, packet); 701 | }else if(packet->stream_index == is->audioStream){ 702 | packet_queue_put(&is->audioq, packet); 703 | }else{ 704 | av_free_packet(packet); 705 | } 706 | } 707 | 708 | while(!is->quit) 709 | SDL_Delay(100); 710 | 711 | fail: 712 | if(1){ 713 | SDL_Event event; 714 | event.type = FF_QUIT_EVENT; 715 | event.user.data1 = is; 716 | SDL_PushEvent(&event); 717 | } 718 | 719 | return 0; 720 | } 721 | 722 | int main(int argc, char* argv[]){ 723 | 724 | #if !FIX_INPUT 725 | if(argc < 2){ 726 | fprintf(stderr, "Usage: test \n"); 727 | exit(1); 728 | } 729 | #endif // FIX_INPUT 730 | 731 | SDL_Event event; 732 | 733 | VideoState *is; 734 | 735 | is = (VideoState *)av_mallocz(sizeof(VideoState)); 736 | 737 | if (is == NULL) 738 | { 739 | fprintf(stderr, "malloc ps error\n"); 740 | } 741 | 742 | av_register_all(); 743 | 744 | if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)){ 745 | fprintf(stderr, "Could not initialize SDL - %s \n", SDL_GetError()); 746 | exit(1); 747 | } 748 | 749 | #ifndef __DARWIN__ 750 | screen = SDL_SetVideoMode(640, 480, 0, 0); 751 | #else 752 | screen = SDL_SetVideoMode(640, 480, 24, 0); 753 | #endif 754 | 755 | if(!screen){ 756 | fprintf(stderr, "SDL: could not set video mode - exiting \n"); 757 | exit(1); 758 | } 759 | 760 | #if FIX_INPUT 761 | strcpy(is->filename, "/home/wanghuatian/oceans.mp4"); 762 | #else 763 | av_strlcpy(is->filename, argv[1], 1024); 764 | #endif // FIX_INPUT 765 | //av_strlcpy(is->filename, argv[1], 1024); 766 | //char url[] = "/home/wanghuatian/oceans.mp4"; 767 | //av_strlcpy(is->filename, url, 1024); 768 | 769 | is->pictq_mutex = SDL_CreateMutex(); 770 | is->pictq_cond = SDL_CreateCond(); 771 | 772 | schedule_refresh(is, 40); 773 | 774 | is->parse_tid = SDL_CreateThread(decode_thread, is); 775 | if(!is->parse_tid){ 776 | av_free(is); 777 | return -1; 778 | } 779 | 780 | for(;;){ 781 | SDL_WaitEvent(&event); 782 | 783 | switch (event.type){ 784 | case FF_QUIT_EVENT: 785 | case SDL_QUIT: 786 | is->quit = 1; 787 | SDL_CondSignal(is->audioq.cond); 788 | SDL_CondSignal(is->videoq.cond); 789 | return 0; 790 | break; 791 | 792 | case FF_ALLOC_EVENT: 793 | alloc_picture(event.user.data1); 794 | break; 795 | 796 | case FF_REFRESH_EVENT: 797 | video_refresh_timer(event.user.data1); 798 | break; 799 | 800 | default: 801 | break; 802 | } 803 | } 804 | 805 | getchar(); 806 | 807 | return 0; 808 | } 809 | -------------------------------------------------------------------------------- /sync_video_to_audio_clock.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include 12 | #include 13 | 14 | #include 15 | #include 16 | 17 | #define SDL_AUDIO_BUFFER_SIZE 1024 18 | #define SDL_AUDIO_FRAME_SIZE 192000 19 | 20 | #define MAX_AUDIOQ_SIZE (5 * 16 *1024) 21 | #define MAX_VIDEOQ_SIZE (5 * 256 * 1024) 22 | 23 | #define AV_SYNC_THRESHOLD 0.01 24 | #define AV_NOSYNC_THRESHOLD 10.0 25 | 26 | #define FF_ALLOC_EVENT (SDL_USEREVENT) 27 | 
#define FF_REFRESH_EVENT (SDL_USEREVENT + 1) 28 | #define FF_QUIT_EVENT (SDL_USEREVENT + 2) 29 | 30 | #define VIDEO_PICTURE_QUEUE_SIZE 1 31 | 32 | #define FIX_INPUT 0 33 | 34 | typedef struct PacketQueue 35 | { 36 | AVPacketList *first_pkt, *last_pkt; 37 | int nb_packets; 38 | int size; 39 | SDL_mutex *mutex; 40 | SDL_cond *cond; 41 | } PacketQueue; 42 | 43 | typedef struct VideoPicture 44 | { 45 | SDL_Overlay *bmp; 46 | int width, height; 47 | int allocated; 48 | double pts; 49 | } VideoPicture; 50 | 51 | typedef struct VideoState 52 | { 53 | 54 | AVFormatContext *pFormatCtx; 55 | int videoStream, audioStream; 56 | 57 | AVStream *audio_st; 58 | PacketQueue audioq; 59 | uint8_t audio_buf[(192000 * 3) / 2]; 60 | unsigned int audio_buf_size; 61 | unsigned int audio_buf_index; 62 | AVFrame audio_frame, wanted_frame; 63 | AVPacket audio_pkt; 64 | uint8_t *audio_pkt_data; 65 | int audio_pkt_size; 66 | /* 时钟同步变量 */ 67 | double audio_clock; 68 | int audio_hw_buf_size; 69 | double frame_timer; 70 | double frame_last_pts; 71 | double frame_last_delay; 72 | double video_clock; 73 | 74 | AVStream *video_st; 75 | PacketQueue videoq; 76 | 77 | VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE]; 78 | int pictq_size, pictq_rindex, pictq_windex; 79 | SDL_mutex *pictq_mutex; 80 | SDL_cond *pictq_cond; 81 | 82 | SDL_Thread *parse_tid; 83 | SDL_Thread *video_tid; 84 | 85 | char filename[1024]; 86 | int quit; 87 | 88 | AVIOContext *io_context; 89 | struct SwsContext *sws_ctx; 90 | 91 | } VideoState; 92 | 93 | SDL_Surface *screen; 94 | VideoState *global_video_state; 95 | 96 | void packet_queue_init(PacketQueue *q) 97 | { 98 | memset(q, 0, sizeof(PacketQueue)); 99 | q->mutex = SDL_CreateMutex(); 100 | q->cond = SDL_CreateCond(); 101 | } 102 | 103 | int packet_queue_put(PacketQueue *q, AVPacket *pkt) 104 | { 105 | 106 | AVPacketList *pkt_list; 107 | if(av_dup_packet(pkt) < 0) 108 | { 109 | return -1; 110 | } 111 | 112 | pkt_list = av_malloc(sizeof(AVPacketList)); 113 | if(!pkt_list) 114 | return -1; 115 | pkt_list->pkt = *pkt; 116 | pkt_list->next = NULL; 117 | 118 | SDL_LockMutex(q->mutex); 119 | 120 | if(!q->last_pkt) 121 | q->first_pkt = pkt_list; 122 | else 123 | q->last_pkt->next = pkt_list; 124 | 125 | q->last_pkt = pkt_list; 126 | q->nb_packets++; 127 | q->size += pkt->size; 128 | SDL_CondSignal(q->cond); 129 | 130 | SDL_UnlockMutex(q->mutex); 131 | return 0; 132 | } 133 | 134 | static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block) 135 | { 136 | AVPacketList *pkt_list; 137 | int ret; 138 | 139 | SDL_LockMutex(q->mutex); 140 | 141 | for(;;) 142 | { 143 | 144 | if(global_video_state->quit) 145 | { 146 | ret = -1; 147 | break; 148 | } 149 | 150 | pkt_list = q->first_pkt; 151 | if(pkt_list) 152 | { 153 | q->first_pkt = pkt_list->next; 154 | if(!q->first_pkt) 155 | q->last_pkt = NULL; 156 | 157 | q->nb_packets--; 158 | q->size -= pkt_list->pkt.size; 159 | 160 | *pkt = pkt_list->pkt; 161 | av_free(pkt_list); 162 | ret = 1; 163 | break; 164 | } 165 | else if(!block) 166 | { 167 | ret = 0; 168 | break; 169 | } 170 | else 171 | { 172 | SDL_CondWait(q->cond, q->mutex); 173 | } 174 | } 175 | 176 | SDL_UnlockMutex(q->mutex); 177 | return ret; 178 | } 179 | 180 | double get_audio_clock(VideoState *is){ 181 | double pts; 182 | int hw_buf_size, bytes_per_sec, n; 183 | 184 | pts = is->audio_clock; 185 | hw_buf_size = is->audio_buf_size - is->audio_buf_index; 186 | bytes_per_sec = 0; 187 | n = is->audio_st->codec->channels * 2; 188 | 189 | if(is->audio_st){ 190 | bytes_per_sec = is->audio_st->codec->sample_rate 
* n; 191 | } 192 | 193 | if(bytes_per_sec){ 194 | pts -= (double)hw_buf_size / bytes_per_sec; 195 | } 196 | 197 | return pts; 198 | } 199 | 200 | int audio_decode_frame(VideoState *is) 201 | { 202 | 203 | AVPacket *pkt = &is->audio_pkt; 204 | 205 | int len1, data_size = 0; 206 | 207 | SwrContext *swr_ctx = NULL; 208 | 209 | int resampled_data_size; 210 | 211 | uint8_t *buf = &is->audio_buf; 212 | AVFrame *frame = &is->audio_frame; 213 | 214 | for(;;) 215 | { 216 | if(pkt->data) 217 | av_free_packet(pkt); 218 | 219 | 220 | if(is->quit) 221 | { 222 | return -1; 223 | } 224 | 225 | if(packet_queue_get(&is->audioq, pkt, 1) < 0) 226 | { 227 | return -1; 228 | } 229 | 230 | is->audio_pkt_size = pkt->size; 231 | 232 | /* if update, update the audio clock w/pts */ 233 | if(pkt->pts != AV_NOPTS_VALUE) { 234 | is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts; 235 | } 236 | 237 | while(is->audio_pkt_size > 0) 238 | { 239 | 240 | int got_frame = 0; 241 | len1 = avcodec_decode_audio4(is->audio_st->codec, &is->audio_frame, &got_frame, pkt); 242 | 243 | if(len1 < 0) 244 | { 245 | is->audio_pkt_size = 0; 246 | break; 247 | } 248 | 249 | is->audio_pkt_size -= len1; 250 | 251 | if(!got_frame) 252 | continue; 253 | 254 | 255 | if (is->audio_frame.channels > 0 && is->audio_frame.channel_layout == 0) 256 | is->audio_frame.channel_layout = av_get_default_channel_layout(is->audio_frame.channels); 257 | else if (is->audio_frame.channels == 0 && is->audio_frame.channel_layout > 0) 258 | is->audio_frame.channels = av_get_channel_layout_nb_channels(is->audio_frame.channel_layout); 259 | 260 | /** 261 | * 接下来判断我们之前设置SDL时设置的声音格式(AV_SAMPLE_FMT_S16),声道布局, 262 | * 采样频率,每个AVFrame的每个声道采样数与 263 | * 得到的该AVFrame分别是否相同,如有任意不同,我们就需要swr_convert该AvFrame, 264 | * 然后才能符合之前设置好的SDL的需要,才能播放 265 | */ 266 | if(is->audio_frame.format != is->wanted_frame.format 267 | || is->audio_frame.channel_layout != is->wanted_frame.channel_layout 268 | || is->audio_frame.sample_rate != is->wanted_frame.sample_rate 269 | || is->audio_frame.nb_samples != SDL_AUDIO_BUFFER_SIZE) 270 | { 271 | 272 | if (swr_ctx != NULL) 273 | { 274 | swr_free(&swr_ctx); 275 | swr_ctx = NULL; 276 | } 277 | 278 | swr_ctx = swr_alloc_set_opts(NULL, is->wanted_frame.channel_layout, (enum AVSampleFormat)is->wanted_frame.format, is->wanted_frame.sample_rate, 279 | is->audio_frame.channel_layout, (enum AVSampleFormat)is->audio_frame.format, is->audio_frame.sample_rate, 0, NULL); 280 | 281 | if (swr_ctx == NULL || swr_init(swr_ctx) < 0) 282 | { 283 | fprintf(stderr, "swr_init failed: \n" ); 284 | break; 285 | } 286 | } 287 | 288 | if(swr_ctx) 289 | { 290 | int dst_nb_samples = av_rescale_rnd(swr_get_delay(swr_ctx, is->audio_frame.sample_rate) + is->audio_frame.nb_samples, 291 | is->wanted_frame.sample_rate, is->wanted_frame.format, AV_ROUND_INF); 292 | printf("swr convert !%d \n", dst_nb_samples); 293 | printf("audio_frame.nb_samples : %d \n", is->audio_frame.nb_samples); 294 | printf("is->audio_buf : %d \n", *is->audio_buf); 295 | printf("is->audio_buf : %d \n", &is->audio_buf); 296 | printf("is->buf : %d \n", &buf); 297 | printf("is->buf : %d \n", *buf); 298 | /** 299 | * 转换该AVFrame到设置好的SDL需要的样子,有些旧的代码示例最主要就是少了这一部分, 300 | * 往往一些音频能播,一些不能播,这就是原因,比如有些源文件音频恰巧是AV_SAMPLE_FMT_S16的。 301 | * swr_convert 返回的是转换后每个声道(channel)的采样数 302 | */ 303 | int len2 = swr_convert(swr_ctx, &buf, dst_nb_samples,(const uint8_t**)&is->audio_frame.data, is->audio_frame.nb_samples); 304 | if (len2 < 0) 305 | { 306 | fprintf(stderr, "swr_convert failed \n" ); 307 | break; 308 | } 309 | 310 | 
resampled_data_size = is->wanted_frame.channels * len2 * av_get_bytes_per_sample((enum AVSampleFormat)is->wanted_frame.format); 311 | } 312 | else 313 | { 314 | data_size = av_samples_get_buffer_size(NULL, is->audio_st->codec->channels, is->audio_frame.nb_samples, 315 | AUDIO_S16SYS, 1); 316 | resampled_data_size = data_size; 317 | } 318 | 319 | is->audio_clock += (double)resampled_data_size / 320 | (double)(2 * is->audio_st->codec->channels * is->audio_st->codec->sample_rate); 321 | 322 | return resampled_data_size; 323 | } 324 | 325 | } 326 | } 327 | 328 | void audio_callback(void *userdata, Uint8 *stream, int len) 329 | { 330 | 331 | VideoState *is = (VideoState *) userdata; 332 | int len1, audio_size; 333 | 334 | SDL_memset(stream, 0, len); 335 | 336 | printf("audio_callback len=%d \n", len); 337 | 338 | //向设备发送长度为len的数据 339 | while(len > 0) 340 | { 341 | //缓冲区中无数据 342 | if(is->audio_buf_index >= is->audio_buf_size) 343 | { 344 | //从packet中解码数据 345 | audio_size = audio_decode_frame(is); 346 | printf("audio_decode_frame finish audio_size=%d \n", audio_size); 347 | if(audio_size < 0) //没有解码到数据或者出错,填充0 348 | { 349 | is->audio_buf_size = 1024; 350 | memset(is->audio_buf, 0, is->audio_buf_size); 351 | } 352 | else 353 | { 354 | is->audio_buf_size = audio_size; 355 | } 356 | 357 | is->audio_buf_index = 0; 358 | } 359 | 360 | len1 = is->audio_buf_size - is->audio_buf_index; 361 | if(len1 > len) 362 | len1 = len; 363 | 364 | //memcpy(stream, (uint8_t *)audio_buf + audio_buf_index, len1); 365 | SDL_MixAudio(stream, is->audio_buf + is->audio_buf_index, len1, SDL_MIX_MAXVOLUME); 366 | 367 | len -= len1; 368 | stream += len1; 369 | is->audio_buf_index += len1; 370 | } 371 | } 372 | 373 | static Uint32 sdl_refresh_timer_cb(Uint32 interval, void *opaque) 374 | { 375 | SDL_Event event; 376 | event.type = FF_REFRESH_EVENT; 377 | event.user.data1 = opaque; 378 | SDL_PushEvent(&event); 379 | return 0; 380 | } 381 | 382 | //让视频按固定延迟时间刷新 383 | static void schedule_refresh(VideoState *is, int delay) 384 | { 385 | SDL_AddTimer(delay, sdl_refresh_timer_cb, is); 386 | } 387 | 388 | void video_display(VideoState *is) 389 | { 390 | 391 | SDL_Rect rect; 392 | VideoPicture *vp; 393 | 394 | float aspect_ratio;//宽高比例 395 | int w, h, x, y; 396 | 397 | vp = &is->pictq[is->pictq_rindex]; 398 | if(vp->bmp) 399 | { 400 | if(is->video_st->codec->sample_aspect_ratio.num == 0) 401 | { 402 | aspect_ratio = 0; 403 | } 404 | else 405 | { 406 | aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio) * 407 | is->video_st->codec->width / is->video_st->codec->height; 408 | } 409 | 410 | if(aspect_ratio <= 0.0) 411 | { 412 | aspect_ratio = (float)is->video_st->codec->width / (float)is->video_st->codec->height; 413 | } 414 | 415 | h = screen->h; 416 | w = ((int) rint(h * aspect_ratio)) & -3; 417 | if(w > screen->w) 418 | { 419 | w = screen->w; 420 | h = ((int) rint(w / aspect_ratio)) & -3; 421 | } 422 | 423 | x = (screen->w - w) / 2; 424 | y = (screen->h - h) / 2; 425 | 426 | rect.x = x; 427 | rect.y = y; 428 | rect.w = w; 429 | rect.h = h; 430 | SDL_DisplayYUVOverlay(vp->bmp, &rect); 431 | } 432 | } 433 | 434 | 435 | void video_refresh_timer(void *userdata) 436 | { 437 | 438 | VideoState *is = (VideoState *)userdata; 439 | VideoPicture *vp; 440 | double actual_delay, delay, sync_threshold, ref_clock, diff; 441 | 442 | if(is->video_st) 443 | { 444 | if(is->pictq_size == 0) 445 | { 446 | schedule_refresh(is, 1); 447 | } 448 | else 449 | { 450 | vp = &is->pictq[is->pictq_rindex]; 451 | 452 | //设置延迟,首先和上次的pts对比得出延迟,更新延迟和pts; 
453 | //通过与音频时钟比较,得到更精确的延迟 454 | //最后与外部时钟对比,得出最终可用的延迟,并刷新视频 455 | delay = vp->pts - is->frame_last_pts; 456 | if(delay <= 0 || delay >= 1.0){ 457 | delay = is->frame_last_delay;//如果延迟不正确,我们使用上一个延迟 458 | } 459 | 460 | is->frame_last_delay = delay; 461 | is->frame_last_pts = vp->pts; 462 | 463 | ref_clock = get_audio_clock(is); 464 | diff = vp->pts - ref_clock; 465 | 466 | sync_threshold = (delay > AV_SYNC_THRESHOLD) ? delay : AV_SYNC_THRESHOLD; 467 | if(fabs(diff) < AV_NOSYNC_THRESHOLD){ 468 | if(diff <= -sync_threshold){//音频快于视频 469 | delay = 0; 470 | }else if(diff >= sync_threshold){//视频快于音频 471 | delay = 2 * delay; 472 | } 473 | } 474 | 475 | is->frame_timer += delay; 476 | 477 | actual_delay = is->frame_timer - (av_gettime() / 1000000.0); 478 | if(actual_delay < 0.010){ 479 | actual_delay = 0.010; 480 | } 481 | 482 | schedule_refresh(is, (int)(actual_delay * 1000 + 0.5)); 483 | 484 | video_display(is); 485 | 486 | if(++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE) 487 | { 488 | is->pictq_rindex = 0; 489 | } 490 | 491 | SDL_LockMutex(is->pictq_mutex); 492 | is->pictq_size--; 493 | SDL_CondSignal(is->pictq_cond); 494 | SDL_UnlockMutex(is->pictq_mutex); 495 | } 496 | } 497 | else 498 | { 499 | schedule_refresh(is, 100); 500 | } 501 | } 502 | 503 | void alloc_picture(void *userdata) 504 | { 505 | VideoState *is = (VideoState *) userdata; 506 | VideoPicture *vp; 507 | 508 | vp = &is->pictq[is->pictq_windex]; 509 | if(vp->bmp) 510 | SDL_FreeYUVOverlay(vp->bmp); 511 | 512 | vp->bmp = SDL_CreateYUVOverlay(is->video_st->codec->width, 513 | is->video_st->codec->height, 514 | SDL_YV12_OVERLAY, 515 | screen); 516 | 517 | vp->width = is->video_st->codec->width; 518 | vp->height = is->video_st->codec->height; 519 | 520 | SDL_LockMutex(is->pictq_mutex); 521 | vp->allocated = 1; 522 | SDL_CondSignal(is->pictq_cond); 523 | SDL_UnlockMutex(is->pictq_mutex); 524 | } 525 | 526 | int queue_picture(VideoState *is, AVFrame *pFrame, double pts) 527 | { 528 | 529 | VideoPicture *vp; 530 | AVPicture pict; 531 | 532 | SDL_LockMutex(is->pictq_mutex); 533 | while(is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE && !is->quit) 534 | { 535 | SDL_CondWait(is->pictq_cond, is->pictq_mutex); 536 | } 537 | SDL_UnlockMutex(is->pictq_mutex); 538 | 539 | if(is->quit) 540 | return -1; 541 | 542 | vp = &is->pictq[is->pictq_windex]; 543 | 544 | if(!vp->bmp || vp->width != is->video_st->codec->width 545 | || vp->height != is->video_st->codec->height) 546 | { 547 | SDL_Event event; 548 | 549 | vp->allocated = 0; 550 | //在主线程操作 551 | event.type = FF_ALLOC_EVENT; 552 | event.user.data1 = is; 553 | SDL_PushEvent(&event); 554 | 555 | //等待直到我们申请到一个picture的内存 556 | SDL_LockMutex(is->pictq_mutex); 557 | while(!vp->allocated && !is->quit) 558 | { 559 | SDL_CondWait(is->pictq_cond, is->pictq_mutex); 560 | } 561 | SDL_UnlockMutex(is->pictq_mutex); 562 | 563 | if(is->quit) 564 | return -1; 565 | } 566 | 567 | if(vp->bmp) 568 | { 569 | SDL_LockYUVOverlay(vp->bmp); 570 | 571 | pict.data[0] = vp->bmp->pixels[0]; 572 | pict.data[1] = vp->bmp->pixels[2]; 573 | pict.data[2] = vp->bmp->pixels[1]; 574 | 575 | pict.linesize[0] = vp->bmp->pitches[0]; 576 | pict.linesize[1] = vp->bmp->pitches[2]; 577 | pict.linesize[2] = vp->bmp->pitches[1]; 578 | 579 | sws_scale(is->sws_ctx, (uint8_t const * const *)pFrame->data, pFrame->linesize, 580 | 0, is->video_st->codec->height, pict.data, pict.linesize); 581 | 582 | SDL_UnlockYUVOverlay(vp->bmp); 583 | vp->pts = pts; 584 | 585 | if(++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE) 586 | is->pictq_windex = 0; 587 | 588 
| SDL_LockMutex(is->pictq_mutex); 589 | is->pictq_size++; 590 | SDL_UnlockMutex(is->pictq_mutex); 591 | } 592 | return 0; 593 | } 594 | 595 | /* 将 PTS 传入大结构体,并计算延迟 */ 596 | double synchronize_video(VideoState *is, AVFrame *src_frame, double pts){ 597 | 598 | double frame_delay; 599 | 600 | if(pts != 0){ 601 | is->video_clock = pts; 602 | }else{ 603 | pts = is->video_clock; 604 | } 605 | 606 | frame_delay = av_q2d(is->video_st->codec->time_base); 607 | 608 | frame_delay += src_frame->repeat_pict * (frame_delay * 0.5); 609 | is->video_clock += frame_delay; 610 | 611 | return pts; 612 | } 613 | 614 | int video_thread(void *arg){ 615 | VideoState *is = (VideoState *)arg; 616 | AVPacket pkt1, *packet = &pkt1; 617 | int frameFinished; 618 | AVFrame *pFrame; 619 | double pts; 620 | 621 | pFrame = av_frame_alloc(); 622 | 623 | for(;;){ 624 | if(packet_queue_get(&is->videoq, packet, 1) < 0) 625 | break; 626 | 627 | pts = 0; 628 | 629 | avcodec_decode_video2(is->video_st->codec, pFrame, &frameFinished, packet); 630 | 631 | if(packet->dts == AV_NOPTS_VALUE && packet->pts && packet->pts != AV_NOPTS_VALUE){ 632 | pts = packet->pts; 633 | }else if(packet->dts != AV_NOPTS_VALUE){ 634 | pts = packet->dts; 635 | }else{ 636 | pts = 0; 637 | } 638 | pts *= av_q2d(is->video_st->time_base); 639 | 640 | if(frameFinished){ 641 | pts = synchronize_video(is, pFrame, pts); 642 | if(queue_picture(is, pFrame, pts) < 0) 643 | break; 644 | } 645 | 646 | av_free_packet(packet); 647 | } 648 | 649 | av_free(pFrame); 650 | return 0; 651 | } 652 | 653 | int stream_commponent_open(VideoState *is, int stream_index){ 654 | AVFormatContext *pFormatCtx = is->pFormatCtx; 655 | AVCodecContext *codecCtx = NULL; 656 | AVCodec *codec = NULL; 657 | AVDictionary *optionDict = NULL; 658 | SDL_AudioSpec wanted_spec, spec; 659 | 660 | if(stream_index < 0 || stream_index >= pFormatCtx->nb_streams) 661 | return -1; 662 | 663 | codecCtx = pFormatCtx->streams[stream_index]->codec; 664 | 665 | if(codecCtx->codec_type == AVMEDIA_TYPE_AUDIO){ 666 | wanted_spec.freq = codecCtx->sample_rate; 667 | wanted_spec.format = AUDIO_S16SYS; 668 | wanted_spec.channels = codecCtx->channels; 669 | wanted_spec.silence = 0; 670 | wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE; 671 | wanted_spec.callback = audio_callback; 672 | wanted_spec.userdata = is; 673 | 674 | if(SDL_OpenAudio(&wanted_spec, &spec)){ 675 | fprintf(stderr, "SDL_OpenAudio: %s \n", SDL_GetError()); 676 | return -1; 677 | } 678 | 679 | printf("spec format: %d \n", spec.format); 680 | is->wanted_frame.format = AV_SAMPLE_FMT_S16; 681 | is->wanted_frame.sample_rate = spec.freq; 682 | is->wanted_frame.channel_layout = av_get_default_channel_layout(spec.channels); 683 | is->wanted_frame.channels = spec.channels; 684 | } 685 | 686 | codec = avcodec_find_decoder(codecCtx->codec_id); 687 | if(!codec || (avcodec_open2(codecCtx, codec, &optionDict) < 0)){ 688 | fprintf(stderr, "Unsupported codec! 
\n"); 689 | return -1; 690 | } 691 | 692 | switch(codecCtx->codec_type){ 693 | case AVMEDIA_TYPE_AUDIO: 694 | is->audioStream = stream_index; 695 | is->audio_st = pFormatCtx->streams[stream_index]; 696 | is->audio_buf_size = 0; 697 | is->audio_buf_index = 0; 698 | memset(&is->audio_pkt, 0, sizeof(is->audio_pkt)); 699 | packet_queue_init(&is->audioq); 700 | SDL_PauseAudio(0); 701 | break; 702 | 703 | case AVMEDIA_TYPE_VIDEO: 704 | is->videoStream = stream_index; 705 | is->video_st = pFormatCtx->streams[stream_index]; 706 | 707 | is->frame_timer = (double) av_gettime() / 1000000.0; 708 | is->frame_last_delay = 40e-3; 709 | 710 | packet_queue_init(&is->videoq); 711 | is->video_tid = SDL_CreateThread(video_thread,is); 712 | is->sws_ctx = sws_getContext(is->video_st->codec->width, 713 | is->video_st->codec->height, 714 | is->video_st->codec->pix_fmt, 715 | is->video_st->codec->width, 716 | is->video_st->codec->height, 717 | AV_PIX_FMT_YUV420P, 718 | SWS_BILINEAR,NULL,NULL,NULL); 719 | 720 | break; 721 | 722 | default: 723 | break; 724 | } 725 | return 0; 726 | } 727 | 728 | int decode_interrupt_cb(void *opaque){ 729 | return (global_video_state && global_video_state->quit); 730 | } 731 | 732 | int decode_thread(void *arg){ 733 | VideoState *is = (VideoState *)arg; 734 | AVFormatContext *pFormatCtx = NULL; 735 | AVPacket pkt1, *packet = &pkt1; 736 | 737 | int video_index = -1; 738 | int audio_index = -1; 739 | int i; 740 | 741 | AVDictionary *io_dict = NULL; 742 | AVIOInterruptCB callback; 743 | 744 | is->videoStream = -1; 745 | is->audioStream = -1; 746 | 747 | global_video_state = is; 748 | callback.callback = decode_interrupt_cb; 749 | callback.opaque = is; 750 | 751 | if(avio_open2(&is->io_context, is->filename, 0, &callback, &io_dict) != 0){ 752 | fprintf(stderr, "Unable to open I/O for %s \n", is->filename); 753 | return -1; 754 | } 755 | 756 | if(avformat_open_input(&pFormatCtx, is->filename, NULL, NULL) != 0) 757 | return -1; 758 | 759 | is->pFormatCtx = pFormatCtx; 760 | 761 | if(avformat_find_stream_info(pFormatCtx, NULL) < 0) 762 | return -1; 763 | 764 | av_dump_format(pFormatCtx, 0, is->filename, 0); 765 | 766 | for(i=0; i < pFormatCtx->nb_streams; i++){ 767 | if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO && video_index < 0) 768 | video_index = i; 769 | 770 | if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO && audio_index < 0) 771 | audio_index = i; 772 | } 773 | 774 | printf("audio_index:%d, video_index:%d \n", audio_index, video_index); 775 | 776 | if(audio_index >= 0) 777 | stream_commponent_open(is, audio_index); 778 | 779 | if(video_index >= 0) 780 | stream_commponent_open(is, video_index); 781 | 782 | if(is->videoStream < 0 || is->audioStream < 0){ 783 | fprintf(stderr, "%s: could not open codecs \n", is->filename); 784 | goto fail; 785 | } 786 | 787 | //main decode loop 788 | for(;;){ 789 | if(is->quit) 790 | break; 791 | 792 | if(is->audioq.size > MAX_AUDIOQ_SIZE || is->videoq.size > MAX_VIDEOQ_SIZE){ 793 | SDL_Delay(10); 794 | continue; 795 | } 796 | 797 | if(av_read_frame(is->pFormatCtx, packet) < 0){ 798 | if(is->pFormatCtx->pb->error == 0){ 799 | SDL_Delay(100); 800 | continue; 801 | }else{ 802 | break; 803 | } 804 | } 805 | 806 | if(packet->stream_index == is->videoStream){ 807 | packet_queue_put(&is->videoq, packet); 808 | }else if(packet->stream_index == is->audioStream){ 809 | packet_queue_put(&is->audioq, packet); 810 | }else{ 811 | av_free_packet(packet); 812 | } 813 | } 814 | 815 | while(!is->quit) 816 | SDL_Delay(100); 817 | 
818 | fail: 819 | if(1){ 820 | SDL_Event event; 821 | event.type = FF_QUIT_EVENT; 822 | event.user.data1 = is; 823 | SDL_PushEvent(&event); 824 | } 825 | 826 | return 0; 827 | } 828 | 829 | int main(int argc, char* argv[]){ 830 | 831 | #if !FIX_INPUT 832 | if(argc < 2){ 833 | fprintf(stderr, "Usage: test \n"); 834 | exit(1); 835 | } 836 | #endif // FIX_INPUT 837 | 838 | SDL_Event event; 839 | 840 | VideoState *is; 841 | 842 | is = (VideoState *)av_mallocz(sizeof(VideoState)); 843 | 844 | if (is == NULL) 845 | { 846 | fprintf(stderr, "malloc ps error\n"); 847 | } 848 | 849 | av_register_all(); 850 | 851 | if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)){ 852 | fprintf(stderr, "Could not initialize SDL - %s \n", SDL_GetError()); 853 | exit(1); 854 | } 855 | 856 | #ifndef __DARWIN__ 857 | screen = SDL_SetVideoMode(640, 480, 0, 0); 858 | #else 859 | screen = SDL_SetVideoMode(640, 480, 24, 0); 860 | #endif 861 | 862 | if(!screen){ 863 | fprintf(stderr, "SDL: could not set video mode - exiting \n"); 864 | exit(1); 865 | } 866 | 867 | #if FIX_INPUT 868 | strcpy(is->filename, "/home/wanghuatian/oceans.mp4"); 869 | #else 870 | av_strlcpy(is->filename, argv[1], 1024); 871 | #endif // FIX_INPUT 872 | //av_strlcpy(is->filename, argv[1], 1024); 873 | //char url[] = "/home/wanghuatian/oceans.mp4"; 874 | //av_strlcpy(is->filename, url, 1024); 875 | 876 | is->pictq_mutex = SDL_CreateMutex(); 877 | is->pictq_cond = SDL_CreateCond(); 878 | 879 | schedule_refresh(is, 40); 880 | 881 | is->parse_tid = SDL_CreateThread(decode_thread, is); 882 | if(!is->parse_tid){ 883 | av_free(is); 884 | return -1; 885 | } 886 | 887 | for(;;){ 888 | SDL_WaitEvent(&event); 889 | 890 | switch (event.type){ 891 | case FF_QUIT_EVENT: 892 | case SDL_QUIT: 893 | is->quit = 1; 894 | SDL_CondSignal(is->audioq.cond); 895 | SDL_CondSignal(is->videoq.cond); 896 | return 0; 897 | break; 898 | 899 | case FF_ALLOC_EVENT: 900 | alloc_picture(event.user.data1); 901 | break; 902 | 903 | case FF_REFRESH_EVENT: 904 | video_refresh_timer(event.user.data1); 905 | break; 906 | 907 | default: 908 | break; 909 | } 910 | } 911 | 912 | getchar(); 913 | 914 | return 0; 915 | } 916 | -------------------------------------------------------------------------------- /multi_clock.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include 12 | #include 13 | 14 | #include 15 | #include 16 | 17 | #define SDL_AUDIO_BUFFER_SIZE 1024 18 | #define SDL_AUDIO_FRAME_SIZE 192000 19 | 20 | #define MAX_AUDIOQ_SIZE (5 * 16 *1024) 21 | #define MAX_VIDEOQ_SIZE (5 * 256 * 1024) 22 | 23 | #define AV_SYNC_THRESHOLD 0.01 24 | #define AV_NOSYNC_THRESHOLD 10.0 25 | 26 | #define FF_ALLOC_EVENT (SDL_USEREVENT) 27 | #define FF_REFRESH_EVENT (SDL_USEREVENT + 1) 28 | #define FF_QUIT_EVENT (SDL_USEREVENT + 2) 29 | 30 | #define VIDEO_PICTURE_QUEUE_SIZE 1 31 | 32 | #define FIX_INPUT 0 33 | 34 | typedef struct PacketQueue 35 | { 36 | AVPacketList *first_pkt, *last_pkt; 37 | int nb_packets; 38 | int size; 39 | SDL_mutex *mutex; 40 | SDL_cond *cond; 41 | } PacketQueue; 42 | 43 | typedef struct VideoPicture 44 | { 45 | SDL_Overlay *bmp; 46 | int width, height; 47 | int allocated; 48 | double pts; 49 | } VideoPicture; 50 | 51 | typedef struct VideoState 52 | { 53 | 54 | AVFormatContext *pFormatCtx; 55 | int videoStream, audioStream; 56 | 57 | AVStream *audio_st; 58 | PacketQueue audioq; 59 | uint8_t audio_buf[(192000 * 
3) / 2]; 60 | unsigned int audio_buf_size; 61 | unsigned int audio_buf_index; 62 | AVFrame audio_frame, wanted_frame; 63 | AVPacket audio_pkt; 64 | uint8_t *audio_pkt_data; 65 | int audio_pkt_size; 66 | /* 时钟同步变量 */ 67 | int av_sync_type; 68 | double external_clock; 69 | int64_t external_clock_time; 70 | 71 | double audio_clock; 72 | int audio_hw_buf_size; 73 | 74 | double audio_diff_cum; 75 | double audio_diff_avg_coef; 76 | double audio_diff_threshold; 77 | double audio_diff_avg_count; 78 | 79 | double frame_timer; 80 | double frame_last_pts; 81 | double frame_last_delay; 82 | double video_clock;//最后解码的那一帧的pts 83 | double video_current_pts;//当前播放的pts 84 | int64_t video_current_pts_time;//time (av_gettime)更新video_current_pts时的时间 85 | 86 | AVStream *video_st; 87 | PacketQueue videoq; 88 | 89 | VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE]; 90 | int pictq_size, pictq_rindex, pictq_windex; 91 | SDL_mutex *pictq_mutex; 92 | SDL_cond *pictq_cond; 93 | 94 | SDL_Thread *parse_tid; 95 | SDL_Thread *video_tid; 96 | 97 | char filename[1024]; 98 | int quit; 99 | 100 | AVIOContext *io_context; 101 | struct SwsContext *sws_ctx; 102 | 103 | } VideoState; 104 | 105 | enum { 106 | AV_SYNC_AUDIO_MASTER, 107 | AV_SYNC_VIDEO_MASTER, 108 | AV_SYNC_EXTERNAL_MASTER, 109 | }; 110 | 111 | SDL_Surface *screen; 112 | VideoState *global_video_state; 113 | 114 | void packet_queue_init(PacketQueue *q) 115 | { 116 | memset(q, 0, sizeof(PacketQueue)); 117 | q->mutex = SDL_CreateMutex(); 118 | q->cond = SDL_CreateCond(); 119 | } 120 | 121 | int packet_queue_put(PacketQueue *q, AVPacket *pkt) 122 | { 123 | 124 | AVPacketList *pkt_list; 125 | if(av_dup_packet(pkt) < 0) 126 | { 127 | return -1; 128 | } 129 | 130 | pkt_list = av_malloc(sizeof(AVPacketList)); 131 | if(!pkt_list) 132 | return -1; 133 | pkt_list->pkt = *pkt; 134 | pkt_list->next = NULL; 135 | 136 | SDL_LockMutex(q->mutex); 137 | 138 | if(!q->last_pkt) 139 | q->first_pkt = pkt_list; 140 | else 141 | q->last_pkt->next = pkt_list; 142 | 143 | q->last_pkt = pkt_list; 144 | q->nb_packets++; 145 | q->size += pkt->size; 146 | SDL_CondSignal(q->cond); 147 | 148 | SDL_UnlockMutex(q->mutex); 149 | return 0; 150 | } 151 | 152 | static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block) 153 | { 154 | AVPacketList *pkt_list; 155 | int ret; 156 | 157 | SDL_LockMutex(q->mutex); 158 | 159 | for(;;) 160 | { 161 | 162 | if(global_video_state->quit) 163 | { 164 | ret = -1; 165 | break; 166 | } 167 | 168 | pkt_list = q->first_pkt; 169 | if(pkt_list) 170 | { 171 | q->first_pkt = pkt_list->next; 172 | if(!q->first_pkt) 173 | q->last_pkt = NULL; 174 | 175 | q->nb_packets--; 176 | q->size -= pkt_list->pkt.size; 177 | 178 | *pkt = pkt_list->pkt; 179 | av_free(pkt_list); 180 | ret = 1; 181 | break; 182 | } 183 | else if(!block) 184 | { 185 | ret = 0; 186 | break; 187 | } 188 | else 189 | { 190 | SDL_CondWait(q->cond, q->mutex); 191 | } 192 | } 193 | 194 | SDL_UnlockMutex(q->mutex); 195 | return ret; 196 | } 197 | 198 | double get_audio_clock(VideoState *is){ 199 | double pts; 200 | int hw_buf_size, bytes_per_sec, n; 201 | 202 | pts = is->audio_clock; 203 | hw_buf_size = is->audio_buf_size - is->audio_buf_index; 204 | bytes_per_sec = 0; 205 | n = is->audio_st->codec->channels * 2; 206 | 207 | if(is->audio_st){ 208 | bytes_per_sec = is->audio_st->codec->sample_rate * n; 209 | } 210 | 211 | if(bytes_per_sec){ 212 | pts -= (double)hw_buf_size / bytes_per_sec; 213 | } 214 | 215 | return pts; 216 | } 217 | 218 | double get_video_clock(VideoState *is){ 219 | double delta; 220 | 221 
| delta = (av_gettime() - is->video_current_pts_time) / 1000000.0; 222 | return is->video_current_pts + delta; 223 | } 224 | 225 | double get_external_clock(VideoState *is){ 226 | return av_gettime() / 1000000.0; 227 | } 228 | 229 | double get_master_clock(VideoState *is){ 230 | if(is->av_sync_type == AV_SYNC_VIDEO_MASTER){ 231 | return get_video_clock(is); 232 | }else if(is->av_sync_type == AV_SYNC_AUDIO_MASTER){ 233 | return get_audio_clock(is); 234 | }else{ 235 | return get_external_clock(is); 236 | } 237 | } 238 | 239 | //添加或减少samples来更好的同步,返回一个新的音频缓存的大小 240 | int synchronize_audio(VideoState *is, short *samples, 241 | int samples_size, double pts){ 242 | int n; 243 | double ref_clock; 244 | 245 | n = 2 * is->audio_st->codec->channels; 246 | 247 | if(is->av_sync_type != AV_SYNC_AUDIO_MASTER){//不是以音频时钟来同步 248 | double diff, avg_diff; 249 | int wanted_size, min_size, max_size; 250 | 251 | ref_clock = get_master_clock(is); 252 | diff = get_audio_clock(is) - ref_clock;//与主时钟对比 253 | 254 | if(diff < AV_NOSYNC_THRESHOLD){ 255 | is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum; 256 | } 257 | } 258 | } 259 | 260 | int audio_decode_frame(VideoState *is) 261 | { 262 | 263 | AVPacket *pkt = &is->audio_pkt; 264 | 265 | int len1, data_size = 0; 266 | 267 | SwrContext *swr_ctx = NULL; 268 | 269 | int resampled_data_size; 270 | 271 | uint8_t *buf = &is->audio_buf; 272 | AVFrame *frame = &is->audio_frame; 273 | 274 | for(;;) 275 | { 276 | if(pkt->data) 277 | av_free_packet(pkt); 278 | 279 | 280 | if(is->quit) 281 | { 282 | return -1; 283 | } 284 | 285 | if(packet_queue_get(&is->audioq, pkt, 1) < 0) 286 | { 287 | return -1; 288 | } 289 | 290 | is->audio_pkt_size = pkt->size; 291 | 292 | /* if update, update the audio clock w/pts */ 293 | if(pkt->pts != AV_NOPTS_VALUE) { 294 | is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts; 295 | } 296 | 297 | while(is->audio_pkt_size > 0) 298 | { 299 | 300 | int got_frame = 0; 301 | len1 = avcodec_decode_audio4(is->audio_st->codec, &is->audio_frame, &got_frame, pkt); 302 | 303 | if(len1 < 0) 304 | { 305 | is->audio_pkt_size = 0; 306 | break; 307 | } 308 | 309 | is->audio_pkt_size -= len1; 310 | 311 | if(!got_frame) 312 | continue; 313 | 314 | 315 | if (is->audio_frame.channels > 0 && is->audio_frame.channel_layout == 0) 316 | is->audio_frame.channel_layout = av_get_default_channel_layout(is->audio_frame.channels); 317 | else if (is->audio_frame.channels == 0 && is->audio_frame.channel_layout > 0) 318 | is->audio_frame.channels = av_get_channel_layout_nb_channels(is->audio_frame.channel_layout); 319 | 320 | /** 321 | * 接下来判断我们之前设置SDL时设置的声音格式(AV_SAMPLE_FMT_S16),声道布局, 322 | * 采样频率,每个AVFrame的每个声道采样数与 323 | * 得到的该AVFrame分别是否相同,如有任意不同,我们就需要swr_convert该AvFrame, 324 | * 然后才能符合之前设置好的SDL的需要,才能播放 325 | */ 326 | if(is->audio_frame.format != is->wanted_frame.format 327 | || is->audio_frame.channel_layout != is->wanted_frame.channel_layout 328 | || is->audio_frame.sample_rate != is->wanted_frame.sample_rate 329 | || is->audio_frame.nb_samples != SDL_AUDIO_BUFFER_SIZE) 330 | { 331 | 332 | if (swr_ctx != NULL) 333 | { 334 | swr_free(&swr_ctx); 335 | swr_ctx = NULL; 336 | } 337 | 338 | swr_ctx = swr_alloc_set_opts(NULL, is->wanted_frame.channel_layout, (enum AVSampleFormat)is->wanted_frame.format, is->wanted_frame.sample_rate, 339 | is->audio_frame.channel_layout, (enum AVSampleFormat)is->audio_frame.format, is->audio_frame.sample_rate, 0, NULL); 340 | 341 | if (swr_ctx == NULL || swr_init(swr_ctx) < 0) 342 | { 343 | fprintf(stderr, "swr_init failed: 
\n" ); 344 | break; 345 | } 346 | } 347 | 348 | if(swr_ctx) 349 | { 350 | int dst_nb_samples = av_rescale_rnd(swr_get_delay(swr_ctx, is->audio_frame.sample_rate) + is->audio_frame.nb_samples, 351 | is->wanted_frame.sample_rate, is->wanted_frame.format, AV_ROUND_INF); 352 | printf("swr convert !%d \n", dst_nb_samples); 353 | printf("audio_frame.nb_samples : %d \n", is->audio_frame.nb_samples); 354 | printf("is->audio_buf : %d \n", *is->audio_buf); 355 | printf("is->audio_buf : %d \n", &is->audio_buf); 356 | printf("is->buf : %d \n", &buf); 357 | printf("is->buf : %d \n", *buf); 358 | /** 359 | * 转换该AVFrame到设置好的SDL需要的样子,有些旧的代码示例最主要就是少了这一部分, 360 | * 往往一些音频能播,一些不能播,这就是原因,比如有些源文件音频恰巧是AV_SAMPLE_FMT_S16的。 361 | * swr_convert 返回的是转换后每个声道(channel)的采样数 362 | */ 363 | int len2 = swr_convert(swr_ctx, &buf, dst_nb_samples,(const uint8_t**)&is->audio_frame.data, is->audio_frame.nb_samples); 364 | if (len2 < 0) 365 | { 366 | fprintf(stderr, "swr_convert failed \n" ); 367 | break; 368 | } 369 | 370 | resampled_data_size = is->wanted_frame.channels * len2 * av_get_bytes_per_sample((enum AVSampleFormat)is->wanted_frame.format); 371 | } 372 | else 373 | { 374 | data_size = av_samples_get_buffer_size(NULL, is->audio_st->codec->channels, is->audio_frame.nb_samples, 375 | AUDIO_S16SYS, 1); 376 | resampled_data_size = data_size; 377 | } 378 | 379 | is->audio_clock += (double)resampled_data_size / 380 | (double)(2 * is->audio_st->codec->channels * is->audio_st->codec->sample_rate); 381 | 382 | return resampled_data_size; 383 | } 384 | 385 | } 386 | } 387 | 388 | void audio_callback(void *userdata, Uint8 *stream, int len) 389 | { 390 | 391 | VideoState *is = (VideoState *) userdata; 392 | int len1, audio_size; 393 | 394 | SDL_memset(stream, 0, len); 395 | 396 | printf("audio_callback len=%d \n", len); 397 | 398 | //向设备发送长度为len的数据 399 | while(len > 0) 400 | { 401 | //缓冲区中无数据 402 | if(is->audio_buf_index >= is->audio_buf_size) 403 | { 404 | //从packet中解码数据 405 | audio_size = audio_decode_frame(is); 406 | printf("audio_decode_frame finish audio_size=%d \n", audio_size); 407 | if(audio_size < 0) //没有解码到数据或者出错,填充0 408 | { 409 | is->audio_buf_size = 1024; 410 | memset(is->audio_buf, 0, is->audio_buf_size); 411 | } 412 | else 413 | { 414 | is->audio_buf_size = audio_size; 415 | } 416 | 417 | is->audio_buf_index = 0; 418 | } 419 | 420 | len1 = is->audio_buf_size - is->audio_buf_index; 421 | if(len1 > len) 422 | len1 = len; 423 | 424 | //memcpy(stream, (uint8_t *)audio_buf + audio_buf_index, len1); 425 | SDL_MixAudio(stream, is->audio_buf + is->audio_buf_index, len1, SDL_MIX_MAXVOLUME); 426 | 427 | len -= len1; 428 | stream += len1; 429 | is->audio_buf_index += len1; 430 | } 431 | } 432 | 433 | static Uint32 sdl_refresh_timer_cb(Uint32 interval, void *opaque) 434 | { 435 | SDL_Event event; 436 | event.type = FF_REFRESH_EVENT; 437 | event.user.data1 = opaque; 438 | SDL_PushEvent(&event); 439 | return 0; 440 | } 441 | 442 | //让视频按固定延迟时间刷新 443 | static void schedule_refresh(VideoState *is, int delay) 444 | { 445 | SDL_AddTimer(delay, sdl_refresh_timer_cb, is); 446 | } 447 | 448 | void video_display(VideoState *is) 449 | { 450 | 451 | SDL_Rect rect; 452 | VideoPicture *vp; 453 | 454 | float aspect_ratio;//宽高比例 455 | int w, h, x, y; 456 | 457 | vp = &is->pictq[is->pictq_rindex]; 458 | if(vp->bmp) 459 | { 460 | if(is->video_st->codec->sample_aspect_ratio.num == 0) 461 | { 462 | aspect_ratio = 0; 463 | } 464 | else 465 | { 466 | aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio) * 467 | is->video_st->codec->width / 
is->video_st->codec->height; 468 | } 469 | 470 | if(aspect_ratio <= 0.0) 471 | { 472 | aspect_ratio = (float)is->video_st->codec->width / (float)is->video_st->codec->height; 473 | } 474 | 475 | h = screen->h; 476 | w = ((int) rint(h * aspect_ratio)) & -3; 477 | if(w > screen->w) 478 | { 479 | w = screen->w; 480 | h = ((int) rint(w / aspect_ratio)) & -3; 481 | } 482 | 483 | x = (screen->w - w) / 2; 484 | y = (screen->h - h) / 2; 485 | 486 | rect.x = x; 487 | rect.y = y; 488 | rect.w = w; 489 | rect.h = h; 490 | SDL_DisplayYUVOverlay(vp->bmp, &rect); 491 | } 492 | } 493 | 494 | 495 | void video_refresh_timer(void *userdata) 496 | { 497 | 498 | VideoState *is = (VideoState *)userdata; 499 | VideoPicture *vp; 500 | double actual_delay, delay, sync_threshold, ref_clock, diff; 501 | 502 | if(is->video_st) 503 | { 504 | if(is->pictq_size == 0) 505 | { 506 | schedule_refresh(is, 1); 507 | } 508 | else 509 | { 510 | vp = &is->pictq[is->pictq_rindex]; 511 | 512 | delay = vp->pts - is->frame_last_pts; 513 | if(delay <= 0 || delay >= 1.0){ 514 | delay = is->frame_last_delay;//如果延迟不正确,我们使用上一个延迟 515 | } 516 | 517 | is->frame_last_delay = delay; 518 | is->frame_last_pts = vp->pts; 519 | 520 | ref_clock = get_audio_clock(is); 521 | diff = vp->pts - ref_clock; 522 | 523 | sync_threshold = (delay > AV_SYNC_THRESHOLD) ? delay : AV_SYNC_THRESHOLD; 524 | if(fabs(diff) < AV_NOSYNC_THRESHOLD){ 525 | if(diff <= -sync_threshold){//音频快于视频 526 | delay = 0; 527 | }else if(diff >= sync_threshold){//视频快于音频 528 | delay = 2 * delay; 529 | } 530 | } 531 | 532 | is->frame_timer += delay; 533 | 534 | actual_delay = is->frame_timer - (av_gettime() / 1000000.0); 535 | if(actual_delay < 0.010){ 536 | actual_delay = 0.010; 537 | } 538 | 539 | schedule_refresh(is, (int)(actual_delay * 1000 + 0.5)); 540 | 541 | video_display(is); 542 | 543 | if(++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE) 544 | { 545 | is->pictq_rindex = 0; 546 | } 547 | 548 | SDL_LockMutex(is->pictq_mutex); 549 | is->pictq_size--; 550 | SDL_CondSignal(is->pictq_cond); 551 | SDL_UnlockMutex(is->pictq_mutex); 552 | } 553 | } 554 | else 555 | { 556 | schedule_refresh(is, 100); 557 | } 558 | } 559 | 560 | void alloc_picture(void *userdata) 561 | { 562 | VideoState *is = (VideoState *) userdata; 563 | VideoPicture *vp; 564 | 565 | vp = &is->pictq[is->pictq_windex]; 566 | if(vp->bmp) 567 | SDL_FreeYUVOverlay(vp->bmp); 568 | 569 | vp->bmp = SDL_CreateYUVOverlay(is->video_st->codec->width, 570 | is->video_st->codec->height, 571 | SDL_YV12_OVERLAY, 572 | screen); 573 | 574 | vp->width = is->video_st->codec->width; 575 | vp->height = is->video_st->codec->height; 576 | 577 | SDL_LockMutex(is->pictq_mutex); 578 | vp->allocated = 1; 579 | SDL_CondSignal(is->pictq_cond); 580 | SDL_UnlockMutex(is->pictq_mutex); 581 | } 582 | 583 | int queue_picture(VideoState *is, AVFrame *pFrame, double pts) 584 | { 585 | 586 | VideoPicture *vp; 587 | AVPicture pict; 588 | 589 | SDL_LockMutex(is->pictq_mutex); 590 | while(is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE && !is->quit) 591 | { 592 | SDL_CondWait(is->pictq_cond, is->pictq_mutex); 593 | } 594 | SDL_UnlockMutex(is->pictq_mutex); 595 | 596 | if(is->quit) 597 | return -1; 598 | 599 | vp = &is->pictq[is->pictq_windex]; 600 | 601 | if(!vp->bmp || vp->width != is->video_st->codec->width 602 | || vp->height != is->video_st->codec->height) 603 | { 604 | SDL_Event event; 605 | 606 | vp->allocated = 0; 607 | //在主线程操作 608 | event.type = FF_ALLOC_EVENT; 609 | event.user.data1 = is; 610 | SDL_PushEvent(&event); 611 | 612 | 
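/*
 * The SDL overlay is (re)created on the main thread: queue_picture only
 * pushes an FF_ALLOC_EVENT here and then blocks on pictq_cond until
 * alloc_picture(), driven by the event loop in main(), has created the
 * overlay and set vp->allocated. is->quit is checked as well so a pending
 * shutdown can interrupt the wait.
 */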
//等待直到我们申请到一个picture的内存 613 | SDL_LockMutex(is->pictq_mutex); 614 | while(!vp->allocated && !is->quit) 615 | { 616 | SDL_CondWait(is->pictq_cond, is->pictq_mutex); 617 | } 618 | SDL_UnlockMutex(is->pictq_mutex); 619 | 620 | if(is->quit) 621 | return -1; 622 | } 623 | 624 | if(vp->bmp) 625 | { 626 | SDL_LockYUVOverlay(vp->bmp); 627 | 628 | pict.data[0] = vp->bmp->pixels[0]; 629 | pict.data[1] = vp->bmp->pixels[2]; 630 | pict.data[2] = vp->bmp->pixels[1]; 631 | 632 | pict.linesize[0] = vp->bmp->pitches[0]; 633 | pict.linesize[1] = vp->bmp->pitches[2]; 634 | pict.linesize[2] = vp->bmp->pitches[1]; 635 | 636 | sws_scale(is->sws_ctx, (uint8_t const * const *)pFrame->data, pFrame->linesize, 637 | 0, is->video_st->codec->height, pict.data, pict.linesize); 638 | 639 | SDL_UnlockYUVOverlay(vp->bmp); 640 | vp->pts = pts; 641 | 642 | if(++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE) 643 | is->pictq_windex = 0; 644 | 645 | SDL_LockMutex(is->pictq_mutex); 646 | is->pictq_size++; 647 | SDL_UnlockMutex(is->pictq_mutex); 648 | } 649 | return 0; 650 | } 651 | 652 | /* 将 PTS 传入大结构体,并计算延迟 */ 653 | double synchronize_video(VideoState *is, AVFrame *src_frame, double pts){ 654 | 655 | double frame_delay; 656 | 657 | if(pts != 0){ 658 | is->video_clock = pts; 659 | }else{ 660 | pts = is->video_clock; 661 | } 662 | 663 | frame_delay = av_q2d(is->video_st->codec->time_base); 664 | 665 | frame_delay += src_frame->repeat_pict * (frame_delay * 0.5); 666 | is->video_clock += frame_delay; 667 | 668 | return pts; 669 | } 670 | 671 | int video_thread(void *arg){ 672 | VideoState *is = (VideoState *)arg; 673 | AVPacket pkt1, *packet = &pkt1; 674 | int frameFinished; 675 | AVFrame *pFrame; 676 | double pts; 677 | 678 | pFrame = av_frame_alloc(); 679 | 680 | for(;;){ 681 | if(packet_queue_get(&is->videoq, packet, 1) < 0) 682 | break; 683 | 684 | pts = 0; 685 | 686 | avcodec_decode_video2(is->video_st->codec, pFrame, &frameFinished, packet); 687 | 688 | if(packet->dts == AV_NOPTS_VALUE && packet->pts && packet->pts != AV_NOPTS_VALUE){ 689 | pts = packet->pts; 690 | }else if(packet->dts != AV_NOPTS_VALUE){ 691 | pts = packet->dts; 692 | }else{ 693 | pts = 0; 694 | } 695 | pts *= av_q2d(is->video_st->time_base); 696 | 697 | if(frameFinished){ 698 | pts = synchronize_video(is, pFrame, pts); 699 | if(queue_picture(is, pFrame, pts) < 0) 700 | break; 701 | } 702 | 703 | av_free_packet(packet); 704 | } 705 | 706 | av_free(pFrame); 707 | return 0; 708 | } 709 | 710 | int stream_commponent_open(VideoState *is, int stream_index){ 711 | AVFormatContext *pFormatCtx = is->pFormatCtx; 712 | AVCodecContext *codecCtx = NULL; 713 | AVCodec *codec = NULL; 714 | AVDictionary *optionDict = NULL; 715 | SDL_AudioSpec wanted_spec, spec; 716 | 717 | if(stream_index < 0 || stream_index >= pFormatCtx->nb_streams) 718 | return -1; 719 | 720 | codecCtx = pFormatCtx->streams[stream_index]->codec; 721 | 722 | if(codecCtx->codec_type == AVMEDIA_TYPE_AUDIO){ 723 | wanted_spec.freq = codecCtx->sample_rate; 724 | wanted_spec.format = AUDIO_S16SYS; 725 | wanted_spec.channels = codecCtx->channels; 726 | wanted_spec.silence = 0; 727 | wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE; 728 | wanted_spec.callback = audio_callback; 729 | wanted_spec.userdata = is; 730 | 731 | if(SDL_OpenAudio(&wanted_spec, &spec)){ 732 | fprintf(stderr, "SDL_OpenAudio: %s \n", SDL_GetError()); 733 | return -1; 734 | } 735 | 736 | printf("spec format: %d \n", spec.format); 737 | is->wanted_frame.format = AV_SAMPLE_FMT_S16; 738 | is->wanted_frame.sample_rate = spec.freq; 739 | 
is->wanted_frame.channel_layout = av_get_default_channel_layout(spec.channels); 740 | is->wanted_frame.channels = spec.channels; 741 | } 742 | 743 | codec = avcodec_find_decoder(codecCtx->codec_id); 744 | if(!codec || (avcodec_open2(codecCtx, codec, &optionDict) < 0)){ 745 | fprintf(stderr, "Unsupported codec! \n"); 746 | return -1; 747 | } 748 | 749 | switch(codecCtx->codec_type){ 750 | case AVMEDIA_TYPE_AUDIO: 751 | is->audioStream = stream_index; 752 | is->audio_st = pFormatCtx->streams[stream_index]; 753 | is->audio_buf_size = 0; 754 | is->audio_buf_index = 0; 755 | memset(&is->audio_pkt, 0, sizeof(is->audio_pkt)); 756 | packet_queue_init(&is->audioq); 757 | SDL_PauseAudio(0); 758 | break; 759 | 760 | case AVMEDIA_TYPE_VIDEO: 761 | is->videoStream = stream_index; 762 | is->video_st = pFormatCtx->streams[stream_index]; 763 | 764 | is->frame_timer = (double) av_gettime() / 1000000.0; 765 | is->frame_last_delay = 40e-3; 766 | 767 | packet_queue_init(&is->videoq); 768 | is->video_tid = SDL_CreateThread(video_thread,is); 769 | is->sws_ctx = sws_getContext(is->video_st->codec->width, 770 | is->video_st->codec->height, 771 | is->video_st->codec->pix_fmt, 772 | is->video_st->codec->width, 773 | is->video_st->codec->height, 774 | AV_PIX_FMT_YUV420P, 775 | SWS_BILINEAR,NULL,NULL,NULL); 776 | 777 | break; 778 | 779 | default: 780 | break; 781 | } 782 | return 0; 783 | } 784 | 785 | int decode_interrupt_cb(void *opaque){ 786 | return (global_video_state && global_video_state->quit); 787 | } 788 | 789 | int decode_thread(void *arg){ 790 | VideoState *is = (VideoState *)arg; 791 | AVFormatContext *pFormatCtx = NULL; 792 | AVPacket pkt1, *packet = &pkt1; 793 | 794 | int video_index = -1; 795 | int audio_index = -1; 796 | int i; 797 | 798 | AVDictionary *io_dict = NULL; 799 | AVIOInterruptCB callback; 800 | 801 | is->videoStream = -1; 802 | is->audioStream = -1; 803 | 804 | global_video_state = is; 805 | callback.callback = decode_interrupt_cb; 806 | callback.opaque = is; 807 | 808 | if(avio_open2(&is->io_context, is->filename, 0, &callback, &io_dict) != 0){ 809 | fprintf(stderr, "Unable to open I/O for %s \n", is->filename); 810 | return -1; 811 | } 812 | 813 | if(avformat_open_input(&pFormatCtx, is->filename, NULL, NULL) != 0) 814 | return -1; 815 | 816 | is->pFormatCtx = pFormatCtx; 817 | 818 | if(avformat_find_stream_info(pFormatCtx, NULL) < 0) 819 | return -1; 820 | 821 | av_dump_format(pFormatCtx, 0, is->filename, 0); 822 | 823 | for(i=0; i < pFormatCtx->nb_streams; i++){ 824 | if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO && video_index < 0) 825 | video_index = i; 826 | 827 | if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO && audio_index < 0) 828 | audio_index = i; 829 | } 830 | 831 | printf("audio_index:%d, video_index:%d \n", audio_index, video_index); 832 | 833 | if(audio_index >= 0) 834 | stream_commponent_open(is, audio_index); 835 | 836 | if(video_index >= 0) 837 | stream_commponent_open(is, video_index); 838 | 839 | if(is->videoStream < 0 || is->audioStream < 0){ 840 | fprintf(stderr, "%s: could not open codecs \n", is->filename); 841 | goto fail; 842 | } 843 | 844 | //main decode loop 845 | for(;;){ 846 | if(is->quit) 847 | break; 848 | 849 | if(is->audioq.size > MAX_AUDIOQ_SIZE || is->videoq.size > MAX_VIDEOQ_SIZE){ 850 | SDL_Delay(10); 851 | continue; 852 | } 853 | 854 | if(av_read_frame(is->pFormatCtx, packet) < 0){ 855 | if(is->pFormatCtx->pb->error == 0){ 856 | SDL_Delay(100); 857 | continue; 858 | }else{ 859 | break; 860 | } 861 | } 862 | 
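/*
 * Demux step: each packet read above is routed by stream_index into the
 * matching PacketQueue (videoq is drained by video_thread, audioq by the
 * SDL audio callback); packets from any other stream are released at once.
 * The size check at the top of the loop keeps both queues below
 * MAX_AUDIOQ_SIZE / MAX_VIDEOQ_SIZE by sleeping 10 ms before reading more.
 */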
863 | if(packet->stream_index == is->videoStream){ 864 | packet_queue_put(&is->videoq, packet); 865 | }else if(packet->stream_index == is->audioStream){ 866 | packet_queue_put(&is->audioq, packet); 867 | }else{ 868 | av_free_packet(packet); 869 | } 870 | } 871 | 872 | while(!is->quit) 873 | SDL_Delay(100); 874 | 875 | fail: 876 | if(1){ 877 | SDL_Event event; 878 | event.type = FF_QUIT_EVENT; 879 | event.user.data1 = is; 880 | SDL_PushEvent(&event); 881 | } 882 | 883 | return 0; 884 | } 885 | 886 | int main(int argc, char* argv[]){ 887 | 888 | #if !FIX_INPUT 889 | if(argc < 2){ 890 | fprintf(stderr, "Usage: test \n"); 891 | exit(1); 892 | } 893 | #endif // FIX_INPUT 894 | 895 | SDL_Event event; 896 | 897 | VideoState *is; 898 | 899 | is = (VideoState *)av_mallocz(sizeof(VideoState)); 900 | 901 | if (is == NULL) 902 | { 903 | fprintf(stderr, "malloc ps error\n"); 904 | } 905 | 906 | av_register_all(); 907 | 908 | if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)){ 909 | fprintf(stderr, "Could not initialize SDL - %s \n", SDL_GetError()); 910 | exit(1); 911 | } 912 | 913 | #ifndef __DARWIN__ 914 | screen = SDL_SetVideoMode(640, 480, 0, 0); 915 | #else 916 | screen = SDL_SetVideoMode(640, 480, 24, 0); 917 | #endif 918 | 919 | if(!screen){ 920 | fprintf(stderr, "SDL: could not set video mode - exiting \n"); 921 | exit(1); 922 | } 923 | 924 | #if FIX_INPUT 925 | strcpy(is->filename, "/home/wanghuatian/oceans.mp4"); 926 | #else 927 | av_strlcpy(is->filename, argv[1], 1024); 928 | #endif // FIX_INPUT 929 | //av_strlcpy(is->filename, argv[1], 1024); 930 | //char url[] = "/home/wanghuatian/oceans.mp4"; 931 | //av_strlcpy(is->filename, url, 1024); 932 | 933 | is->pictq_mutex = SDL_CreateMutex(); 934 | is->pictq_cond = SDL_CreateCond(); 935 | 936 | schedule_refresh(is, 40); 937 | 938 | is->parse_tid = SDL_CreateThread(decode_thread, is); 939 | if(!is->parse_tid){ 940 | av_free(is); 941 | return -1; 942 | } 943 | 944 | for(;;){ 945 | SDL_WaitEvent(&event); 946 | 947 | switch (event.type){ 948 | case FF_QUIT_EVENT: 949 | case SDL_QUIT: 950 | is->quit = 1; 951 | SDL_CondSignal(is->audioq.cond); 952 | SDL_CondSignal(is->videoq.cond); 953 | return 0; 954 | break; 955 | 956 | case FF_ALLOC_EVENT: 957 | alloc_picture(event.user.data1); 958 | break; 959 | 960 | case FF_REFRESH_EVENT: 961 | video_refresh_timer(event.user.data1); 962 | break; 963 | 964 | default: 965 | break; 966 | } 967 | } 968 | 969 | getchar(); 970 | 971 | return 0; 972 | } 973 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. 
You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 
83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. 
For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 
204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 
268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. 
But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 
387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. 
You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. 
"Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 
564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 
628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | {one line to give the program's name and a brief idea of what it does.} 635 | Copyright (C) {year} {name of author} 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see <http://www.gnu.org/licenses/>. 649 | 650 | Also add information on how to contact you by electronic and paper mail. 651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | {project} Copyright (C) {year} {fullname} 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | <http://www.gnu.org/licenses/>. 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | <http://www.gnu.org/philosophy/why-not-lgpl.html>. 675 | --------------------------------------------------------------------------------