├── .gitignore
├── Makefile
├── README.md
├── h26x_sps_dec.c
├── libs
│   └── .gitkeep
├── live.2019.08.12.tar.gz
├── rtsp_to_h264.cpp
├── shmem.c
├── shmem.h
└── test.h264

/.gitignore:
--------------------------------------------------------------------------------
libs/*
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------

### arm ###
# cross=arm-linux-gnueabihf-
# CXX=$(cross)g++
# live555Type=armlinux

### ubuntu ###
CXX=g++
live555Type=linux

RPATH=$(shell pwd)

INC += -I$(RPATH)/libs/include/BasicUsageEnvironment
INC += -I$(RPATH)/libs/include/liveMedia
INC += -I$(RPATH)/libs/include/groupsock
INC += -I$(RPATH)/libs/include/UsageEnvironment
LIB += -L$(RPATH)/libs/lib
CFLAGS += -lliveMedia -lgroupsock -lBasicUsageEnvironment -lUsageEnvironment -lpthread

target:
	@$(CXX) -O3 -Wall -o demo $(RPATH)/rtsp_to_h264.cpp $(RPATH)/h26x_sps_dec.c $(RPATH)/shmem.c $(INC) $(LIB) $(CFLAGS)

live555:
	@tar -xzf $(RPATH)/live.2019.08.12.tar.gz -C $(RPATH)/libs && \
	cd $(RPATH)/libs/live && \
	sed -i "s#CROSS_COMPILE?= arm-elf-#CROSS_COMPILE?=$(cross)#g" ./config.armlinux && \
	./genMakefiles $(live555Type) && \
	chmod -R 777 ./* && \
	sed -i "s#/usr/local#$(RPATH)/libs#g" ./BasicUsageEnvironment/Makefile && \
	sed -i "s#/usr/local#$(RPATH)/libs#g" ./liveMedia/Makefile && \
	sed -i "s#/usr/local#$(RPATH)/libs#g" ./liveMedia/Makefile.head && \
	sed -i "s#/usr/local#$(RPATH)/libs#g" ./mediaServer/Makefile && \
	sed -i "s#/usr/local#$(RPATH)/libs#g" ./mediaServer/Makefile.tail && \
	sed -i "s#/usr/local#$(RPATH)/libs#g" ./proxyServer/Makefile && \
	sed -i "s#/usr/local#$(RPATH)/libs#g" ./proxyServer/Makefile.tail && \
	sed -i "s#/usr/local#$(RPATH)/libs#g" ./testProgs/Makefile && \
	sed -i "s#/usr/local#$(RPATH)/libs#g" ./testProgs/Makefile.tail && \
	sed -i "s#/usr/local#$(RPATH)/libs#g" ./UsageEnvironment/Makefile && \
	sed -i "s#/usr/local#$(RPATH)/libs#g" ./UsageEnvironment/Makefile.head && \
	sed -i "s#/usr/local#$(RPATH)/libs#g" ./groupsock/Makefile && \
	sed -i "s#/usr/local#$(RPATH)/libs#g" ./groupsock/Makefile.head && \
	make -j4 && make install && \
	cd -
# cd - && \
# rm $(RPATH)/libs/live -rf

clean:
	@rm -rf ./demo

cleanall:
	@rm -rf ./libs/* ./demo

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------

# Build the dependency library (live555)
make live555
# Build the demo
make

--------------------------------------------------------------------------------
/h26x_sps_dec.c:
--------------------------------------------------------------------------------

/*
 *
 * adapted from https://github.com/stephenyin/h264_sps_decoder
 *
 * 2019.9.19
 *
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>

void get_profile(int profile_idc, char* profile_str)
{
    switch(profile_idc){
        case 66:
            strcpy(profile_str, "Baseline");
            break;
        case 77:
            strcpy(profile_str, "Main");
            break;
        case 88:
            strcpy(profile_str, "Extended");
            break;
        case 100:
            strcpy(profile_str, "High(FRExt)");
            break;
        case 110:
            strcpy(profile_str, "High10(FRExt)");
            break;
        case 122:
            strcpy(profile_str, "High4:2:2(FRExt)");
            break;
        case 144:
            strcpy(profile_str, "High4:4:4(FRExt)");
            break;
        default:
            strcpy(profile_str, "Unknown");
    }
}
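/*
 * Illustrative use of get_profile() (not part of the original sources; the
 * buffer just needs to fit the longest name above):
 */
#if 0
char profile_name[32] = {0};
get_profile(100, profile_name); /* -> "High(FRExt)" */
printf("profile: %s\n", profile_name);
#endif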
unsigned int Ue(unsigned char *pBuff, unsigned int nLen, unsigned int *nStartBit)
{
    // count the leading zero bits
    unsigned int nZeroNum = 0;
    while (*nStartBit < nLen * 8)
    {
        if (pBuff[*nStartBit / 8] & (0x80 >> (*nStartBit % 8)))
        {
            break;
        }
        nZeroNum++;
        *nStartBit += 1;
    }
    *nStartBit += 1; // skip the terminating '1' bit

    // read the nZeroNum-bit suffix
    unsigned long dwRet = 0;
    for (unsigned int i = 0; i < nZeroNum; i++)
    {
        dwRet <<= 1;
        if (pBuff[*nStartBit / 8] & (0x80 >> (*nStartBit % 8)))
        {
            dwRet += 1;
        }
        *nStartBit += 1;
    }
    return (1 << nZeroNum) - 1 + dwRet;
}

int Se(unsigned char *pBuff, unsigned int nLen, unsigned int *nStartBit)
{
    int UeVal = Ue(pBuff, nLen, nStartBit);
    double k = UeVal;
    int nValue = ceil(k / 2); // mapped signed value: +1, -1, +2, -2, ...
    if (UeVal % 2 == 0)
        nValue = -nValue;
    return nValue;
}

unsigned long u(unsigned int BitCount, unsigned char * buf, unsigned int *nStartBit)
{
    unsigned long dwRet = 0;
    for (unsigned int i = 0; i < BitCount; i++)
    {
        dwRet <<= 1;
        if (buf[*nStartBit / 8] & (0x80 >> (*nStartBit % 8)))
        {
            dwRet += 1;
        }
        *nStartBit += 1;
    }
    return dwRet;
}

void de_emulation_prevention(unsigned char* buf, unsigned int* buf_size)
{
    unsigned int i = 0, j = 0;
    unsigned char* tmp_ptr = NULL;
    unsigned int tmp_buf_size = 0;
    int val = 0;

    tmp_ptr = buf;
    tmp_buf_size = *buf_size;
    for (i = 0; i < (tmp_buf_size - 2); i++)
    {
        // check for the 0x000003 emulation-prevention pattern
        val = (tmp_ptr[i] ^ 0x00) + (tmp_ptr[i+1] ^ 0x00) + (tmp_ptr[i+2] ^ 0x03);
        if (val == 0)
        {
            // kick out the 0x03 byte
            for (j = i + 2; j < tmp_buf_size - 1; j++)
                tmp_ptr[j] = tmp_ptr[j+1];
            // and shrink the buffer by one
            tmp_buf_size--;
            (*buf_size)--;
        }
    }
}
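/*
 * Worked Exp-Golomb example (illustrative): ue(v) stores a value v as
 * <n zeros><1><n suffix bits>, with v = 2^n - 1 + suffix. The byte 0x28 is
 * 0010 1000 in binary, so Ue() sees two zeros, the marker '1', then the
 * suffix 01b = 1, and returns (1<<2) - 1 + 1 = 4 after consuming 5 bits.
 */
#if 0
unsigned char bits[1] = {0x28};
unsigned int startBit = 0;
unsigned int v = Ue(bits, 1, &startBit); /* v == 4, startBit == 5 */
#endif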
//return: 0/false 1/success
int h265_decode_sps(unsigned char * buf, unsigned int nLen, int *width, int *height, int *fps)
{
    unsigned int StartBit = 0;
    de_emulation_prevention(buf, &nLen);

    // NAL unit header (2 bytes)
    int forbidden_zero_bit = u(1, buf, &StartBit);
    int nal_unit_type = u(6, buf, &StartBit);
    int nuh_layer_id = u(6, buf, &StartBit);
    int nuh_temporal_id_plus1 = u(3, buf, &StartBit);
    if (nal_unit_type != 33) // 33 = SPS
        return 0;

    int sps_video_parameter_set_id = u(4, buf, &StartBit);
    int sps_max_sub_layers_minus1 = u(3, buf, &StartBit);
    int sps_temporal_id_nesting_flag = u(1, buf, &StartBit);

    // profile_tier_level()
    {
        int general_profile_space = u(2, buf, &StartBit);
        int general_tier_flag = u(1, buf, &StartBit);
        int general_profile_idc = u(5, buf, &StartBit);
        int general_profile_compatibility_flag[32];
        for (int i = 0; i < 32; i++)
            general_profile_compatibility_flag[i] = u(1, buf, &StartBit);
        int general_progressive_source_flag = u(1, buf, &StartBit);
        int general_interlaced_source_flag = u(1, buf, &StartBit);
        int general_non_packed_constraint_flag = u(1, buf, &StartBit);
        int general_frame_only_constraint_flag = u(1, buf, &StartBit);
        if(general_profile_idc == 4 || general_profile_compatibility_flag[4] ||
           general_profile_idc == 5 || general_profile_compatibility_flag[5] ||
           general_profile_idc == 6 || general_profile_compatibility_flag[6] ||
           general_profile_idc == 7 || general_profile_compatibility_flag[7] ||
           general_profile_idc == 8 || general_profile_compatibility_flag[8] ||
           general_profile_idc == 9 || general_profile_compatibility_flag[9] ||
           general_profile_idc == 10 || general_profile_compatibility_flag[10])
        {
            // printf("> hit 1-1\n");
            int general_max_12bit_constraint_flag = u(1,buf,&StartBit);
            int general_max_10bit_constraint_flag = u(1,buf,&StartBit);
            int general_max_8bit_constraint_flag = u(1,buf,&StartBit);
            int general_max_422chroma_constraint_flag = u(1,buf,&StartBit);
            int general_max_420chroma_constraint_flag = u(1,buf,&StartBit);
            int general_max_monochrome_constraint_flag = u(1,buf,&StartBit);
            int general_intra_constraint_flag = u(1,buf,&StartBit);
            int general_one_picture_only_constraint_flag = u(1,buf,&StartBit);
            int general_lower_bit_rate_constraint_flag = u(1,buf,&StartBit);
            if(general_profile_idc == 5 || general_profile_compatibility_flag[5] ||
               general_profile_idc == 9 || general_profile_compatibility_flag[9] ||
               general_profile_idc == 10 || general_profile_compatibility_flag[10])
            {
                int general_max_14bit_constraint_flag = u(1,buf,&StartBit);
                int general_reserved_zero_33bits = u(33,buf,&StartBit);
            }
            else
            {
                int general_reserved_zero_34bits = u(34,buf,&StartBit);
            }
        }
        else if(general_profile_idc == 2 || general_profile_compatibility_flag[2])
        {
            // printf("> hit 1-2\n");
            int general_reserved_zero_7bits = u(7,buf,&StartBit);
            int general_one_picture_only_constraint_flag = u(1,buf,&StartBit);
            int general_reserved_zero_35bits = u(35,buf,&StartBit);
        }
        else
        {
            // printf("> hit 1-3\n");
            int general_reserved_zero_43bits = u(43,buf,&StartBit);
        }
        if((general_profile_idc >= 1 && general_profile_idc <= 5) ||
           general_profile_idc == 9 ||
           general_profile_compatibility_flag[1] || general_profile_compatibility_flag[2] ||
           general_profile_compatibility_flag[3] || general_profile_compatibility_flag[4] ||
           general_profile_compatibility_flag[5] || general_profile_compatibility_flag[9])
        {
            // printf("> hit 2-1\n");
            int general_inbld_flag = u(1,buf,&StartBit);
        }
        else
        {
            // printf("> hit 2-2\n");
            int general_reserved_zero_bit = u(1,buf,&StartBit);
        }
        int general_level_idc = u(8,buf,&StartBit);
        if(sps_max_sub_layers_minus1 > 0)
        {
            // sub-layer profile/level parsing is not implemented here
            fprintf(stderr, "error: sps_max_sub_layers_minus1 must be 0 (%d)\n",
                    sps_max_sub_layers_minus1);
            return 0;
        }
    }
    int sps_seq_parameter_set_id = Ue(buf,nLen,&StartBit);
    int chroma_format_idc = Ue(buf,nLen,&StartBit);
    if(chroma_format_idc == 3)
    {
        int separate_colour_plane_flag = u(1,buf,&StartBit);
    }
    int pic_width_in_luma_samples = Ue(buf,nLen,&StartBit);
    int pic_height_in_luma_samples = Ue(buf,nLen,&StartBit);
    int conformance_window_flag = u(1,buf,&StartBit);

    int conf_win_left_offset = 0;
    int conf_win_right_offset = 0;
    int conf_win_top_offset = 0;
    int conf_win_bottom_offset = 0;
    if(conformance_window_flag)
    {
        conf_win_left_offset = Ue(buf,nLen,&StartBit);
        conf_win_right_offset = Ue(buf,nLen,&StartBit);
        conf_win_top_offset = Ue(buf,nLen,&StartBit);
        conf_win_bottom_offset = Ue(buf,nLen,&StartBit);
    }

    // printf("forbidden_zero_bit/%d,\n"
    //        "nal_unit_type/%d, nuh_layer_id/%d,\n"
    //        "sps_video_parameter_set_id/%d,\n"
    //        "sps_max_sub_layers_minus1/%d,\n"
    //        "sps_temporal_id_nesting_flag/%d\n"
    //        "sps_seq_parameter_set_id/%d\n"
    //        "chroma_format_idc/%d\n",
    //        forbidden_zero_bit,
    //        nal_unit_type,
    //        nuh_layer_id,
    //        sps_video_parameter_set_id,
    //        sps_max_sub_layers_minus1,
    //        sps_temporal_id_nesting_flag,
    //        sps_seq_parameter_set_id,
    //        chroma_format_idc);

    // the conformance-window offsets are counted in chroma sample units
    int sub_width_c = (chroma_format_idc == 1 || chroma_format_idc == 2) ? 2 : 1;
    int sub_height_c = (chroma_format_idc == 1) ? 2 : 1;

    *width = pic_width_in_luma_samples - sub_width_c * (conf_win_left_offset + conf_win_right_offset);
    *height = pic_height_in_luma_samples - sub_height_c * (conf_win_top_offset + conf_win_bottom_offset);
    *fps = 0; // VUI timing is not parsed, so the frame rate stays unknown

    return 1;
}
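/*
 * Worked example for the conformance-window cropping above (illustrative
 * numbers): a 4:2:0 stream coded as 1920x1088 with conf_win_bottom_offset = 4
 * gives sub_height_c = 2, so *height = 1088 - 2*(0 + 4) = 1080.
 */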
//return: 0/false 1/success
int h264_decode_sps(unsigned char * buf,unsigned int nLen,int *width,int *height,int *fps)
{
    unsigned int StartBit=0;
    de_emulation_prevention(buf,&nLen);

    int timing_info_present_flag = 0;
    int forbidden_zero_bit=u(1,buf,&StartBit);
    int nal_ref_idc=u(2,buf,&StartBit);
    int nal_unit_type=u(5,buf,&StartBit);
    if(nal_unit_type==7)
    {
        int profile_idc=u(8,buf,&StartBit);
        int constraint_set0_flag=u(1,buf,&StartBit);//(buf[1] & 0x80)>>7;
        int constraint_set1_flag=u(1,buf,&StartBit);//(buf[1] & 0x40)>>6;
        int constraint_set2_flag=u(1,buf,&StartBit);//(buf[1] & 0x20)>>5;
        int constraint_set3_flag=u(1,buf,&StartBit);//(buf[1] & 0x10)>>4;
        int reserved_zero_4bits=u(4,buf,&StartBit);
        int level_idc=u(8,buf,&StartBit);

        int seq_parameter_set_id=Ue(buf,nLen,&StartBit);

        int chroma_format_idc = 1;
        // if( profile_idc == 100 || profile_idc == 110 ||
        //     profile_idc == 122 || profile_idc == 144 )
        if (profile_idc == 100 || profile_idc == 110 || profile_idc == 122 ||
            profile_idc == 244 || profile_idc == 44 || profile_idc == 83 ||
            profile_idc == 86 || profile_idc == 118 || profile_idc == 128 ||
            profile_idc == 144 || profile_idc == 138 || profile_idc == 139 ||
            profile_idc == 134 || profile_idc == 135)
        {
            chroma_format_idc=Ue(buf,nLen,&StartBit);
            if( chroma_format_idc == 3 )
            {
                unsigned long residual_colour_transform_flag=u(1,buf,&StartBit);
            }
            int bit_depth_luma_minus8=Ue(buf,nLen,&StartBit);
            int bit_depth_chroma_minus8=Ue(buf,nLen,&StartBit);
            int qpprime_y_zero_transform_bypass_flag=u(1,buf,&StartBit);
            int seq_scaling_matrix_present_flag=u(1,buf,&StartBit);
            if( seq_scaling_matrix_present_flag )
            {
                // skip the scaling lists so the fields that follow stay bit-aligned
                int list_count = (chroma_format_idc != 3) ? 8 : 12;
                for( int i = 0; i < list_count; i++ )
                {
                    int seq_scaling_list_present_flag=u(1,buf,&StartBit);
                    if( seq_scaling_list_present_flag )
                    {
                        int size_of_scaling_list = (i < 6) ? 16 : 64;
                        int last_scale = 8, next_scale = 8;
                        for( int j = 0; j < size_of_scaling_list; j++ )
                        {
                            if( next_scale != 0 )
                                next_scale = (last_scale + Se(buf,nLen,&StartBit) + 256) % 256;
                            if( next_scale != 0 )
                                last_scale = next_scale;
                        }
                    }
                }
            }
        }
        int log2_max_frame_num_minus4=Ue(buf,nLen,&StartBit);
        int pic_order_cnt_type=Ue(buf,nLen,&StartBit);
        if( pic_order_cnt_type == 0 )
        {
            unsigned int log2_max_pic_order_cnt_lsb_minus4=Ue(buf,nLen,&StartBit);
        }
        else if( pic_order_cnt_type == 1 )
        {
            int delta_pic_order_always_zero_flag=u(1,buf,&StartBit);
            int offset_for_non_ref_pic=Se(buf,nLen,&StartBit);
            int offset_for_top_to_bottom_field=Se(buf,nLen,&StartBit);
            int num_ref_frames_in_pic_order_cnt_cycle=Ue(buf,nLen,&StartBit);

            int *offset_for_ref_frame = (int*)calloc(num_ref_frames_in_pic_order_cnt_cycle, sizeof(int));
            for( int i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; i++ )
                offset_for_ref_frame[i]=Se(buf,nLen,&StartBit);
            free(offset_for_ref_frame);
        }
        int num_ref_frames=Ue(buf,nLen,&StartBit);
        int gaps_in_frame_num_value_allowed_flag=u(1,buf,&StartBit);
        int pic_width_in_mbs_minus1=Ue(buf,nLen,&StartBit);
        int pic_height_in_map_units_minus1=Ue(buf,nLen,&StartBit);

        int frame_mbs_only_flag=u(1,buf,&StartBit);
        if(!frame_mbs_only_flag)
        {
            unsigned long mb_adaptive_frame_field_flag=u(1,buf,&StartBit);
        }
        int direct_8x8_inference_flag=u(1,buf,&StartBit);
        int frame_cropping_flag=u(1,buf,&StartBit);

        int frame_crop_left_offset=0;
        int frame_crop_right_offset=0;
        int frame_crop_top_offset=0;
        int frame_crop_bottom_offset=0;
        if(frame_cropping_flag)
        {
            frame_crop_left_offset=Ue(buf,nLen,&StartBit);
            frame_crop_right_offset=Ue(buf,nLen,&StartBit);
            frame_crop_top_offset=Ue(buf,nLen,&StartBit);
            frame_crop_bottom_offset=Ue(buf,nLen,&StartBit);
        }
        int vui_parameter_present_flag=u(1,buf,&StartBit);
        if(vui_parameter_present_flag)
        {
            int aspect_ratio_info_present_flag=u(1,buf,&StartBit);
            if(aspect_ratio_info_present_flag)
            {
                int aspect_ratio_idc=u(8,buf,&StartBit);
                if(aspect_ratio_idc==255) // Extended_SAR
                {
                    int sar_width=u(16,buf,&StartBit);
                    int sar_height=u(16,buf,&StartBit);
                }
            }
            int overscan_info_present_flag=u(1,buf,&StartBit);
            if(overscan_info_present_flag)
            {
                int overscan_appropriate_flag=u(1,buf,&StartBit);
            }
            int video_signal_type_present_flag=u(1,buf,&StartBit);
            if(video_signal_type_present_flag)
            {
                int video_format=u(3,buf,&StartBit);
                int video_full_range_flag=u(1,buf,&StartBit);
                int colour_description_present_flag=u(1,buf,&StartBit);
                if(colour_description_present_flag)
                {
                    int colour_primaries=u(8,buf,&StartBit);
                    int transfer_characteristics=u(8,buf,&StartBit);
                    int matrix_coefficients=u(8,buf,&StartBit);
                }
            }
            int chroma_loc_info_present_flag=u(1,buf,&StartBit);
            if(chroma_loc_info_present_flag)
            {
                int chroma_sample_loc_type_top_field=Ue(buf,nLen,&StartBit);
                int chroma_sample_loc_type_bottom_field=Ue(buf,nLen,&StartBit);
            }
            timing_info_present_flag=u(1,buf,&StartBit);
            if(timing_info_present_flag)
            {
                int num_units_in_tick=u(32,buf,&StartBit);
                int time_scale=u(32,buf,&StartBit);
                *fps=time_scale/num_units_in_tick;
                int fixed_frame_rate_flag=u(1,buf,&StartBit);
                if(fixed_frame_rate_flag)
                {
                    // ticks count fields here: two ticks per frame
                    *fps = (*fps)/2;
                }
            }
        }

        //Source, decoded, and output picture formats
        int crop_unit_x = 1;
        int crop_unit_y = 2 - frame_mbs_only_flag; //monochrome or 4:4:4
        if (chroma_format_idc == 1) { //4:2:0
            crop_unit_x = 2;
            crop_unit_y = 2 * (2 - frame_mbs_only_flag);
        }else if (chroma_format_idc == 2) { //4:2:2
            crop_unit_x = 2;
            crop_unit_y = 2 - frame_mbs_only_flag;
        }

        *width=(pic_width_in_mbs_minus1+1)*16;
        *height=(2-frame_mbs_only_flag)*(pic_height_in_map_units_minus1+1)*16;

        *width-=crop_unit_x*(frame_crop_left_offset+frame_crop_right_offset);
        *height-=crop_unit_y*(frame_crop_top_offset+frame_crop_bottom_offset);

        if(!timing_info_present_flag)
            *fps = 0; // no VUI timing info: frame rate unknown

        // char profile_str[32] = {0};
        // get_profile(profile_idc, &profile_str[0]);
        // if(timing_info_present_flag){
        //     printf("H.264 SPS: -> video size %dx%d, %d fps, profile(%d) %s\n",
        //            *width, *height, *fps, profile_idc, profile_str);
        // } else {
        //     printf("H.264 SPS: -> video size %dx%d, unknown fps, profile(%d) %s\n",
        //            *width, *height, profile_idc, profile_str);
        // }
        return 1;
    }
    else
        return 0;
}
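/*
 * Illustrative call (hypothetical `sps`/`sps_len`): the buffer must point at
 * the NAL header byte, i.e. just past the 00 00 00 01 start code, which is
 * exactly how h26x_get_width_height() below slices the bitstream.
 */
#if 0
int w = 0, h = 0, fps = 0;
if (h264_decode_sps(sps, sps_len, &w, &h, &fps))
    printf("SPS: %dx%d @ %d fps\n", w, h, fps);
#endif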
#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/types.h>

//return: 0/false 1/success
int h26x_get_width_height(char *filePath, int *width, int *height, char isH264)
{
    unsigned char buff[1024] = {0};
    int fps, fd, ret;
    int retFinal = 0;
    if((fd = open(filePath, O_RDONLY)) < 0)
    {
        fprintf(stderr, "h26x_get_width_height: open %s err !\n", filePath);
        return retFinal;
    }
    if((ret = read(fd, buff, 1024)) > 0)
    {
        int i, step = 0;
        if(isH264)
        {
            for(i = 0; i < ret - 10; i++)
            {
                if(buff[i] == 0 && buff[i+1] == 0 && buff[i+2] == 0 && buff[i+3] == 1)
                {
                    // first start code whose NAL type is 7 (SPS): remember where the payload begins
                    if(step == 0 && (buff[i+4]&0x1F) == 7)
                    {
                        i += 4;
                        step = i;
                    }
                    // next start code: everything in between is the SPS payload
                    else if(step)
                    {
                        retFinal = h264_decode_sps(&buff[step], i-step, width, height, &fps);
                        break;
                    }
                }
            }
        }
        else
        {
            for(i = 0; i < ret - 10; i++)
            {
                if(buff[i] == 0 && buff[i+1] == 0 && buff[i+2] == 0 && buff[i+3] == 1)
                {
                    // H.265 keeps the NAL type in bits 1..6 of the first header byte; 33 = SPS
                    if(step == 0 && ((buff[i+4]&0x7E)>>1) == 33)
                    {
                        i += 4;
                        step = i;
                    }
                    else if(step)
                    {
                        retFinal = h265_decode_sps(&buff[step], i-step, width, height, &fps);
                        break;
                    }
                }
            }
        }
    }
    close(fd);
    return retFinal;
}
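/*
 * Usage sketch: probe the sample clip shipped with this repo (isH264 = 1
 * selects the H.264 scan):
 */
#if 0
int w = 0, h = 0;
if (h26x_get_width_height("test.h264", &w, &h, 1))
    printf("test.h264: %dx%d\n", w, h);
#endif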
//return: 0/false 1/success
int mp4_get_width_height(char *filePath, int *width, int *height)
{
    unsigned char buff[9] = {0};
    int fd, ret;
    unsigned int size;
    char *type;
    int retFinal = 0;

    type = (char*)&buff[4];

    if((fd = open(filePath, O_RDONLY)) < 0)
    {
        fprintf(stderr, "mp4_get_width_height: open %s err !\n", filePath);
        return retFinal;
    }

    while((ret = read(fd, buff, 8)) > 0)
    {
        size = (buff[0]<<24)|(buff[1]<<16)|(buff[2]<<8)|buff[3];
        // printf(" type: %s size: %d\n", type, size);

        if(strncmp(type, "tkhd", 4) == 0)
        {
            // the width/height live in the last 8 bytes of this box
            if(lseek(fd, size - ret - 8, SEEK_CUR) < 0)
            {
                fprintf(stderr, "mp4_get_width_height: lseek err\n");
                break;
            }
            if((ret = read(fd, buff, 8)) == 8)
            {
                // 16.16 fixed point: the integer part is the high 16 bits
                if(width)
                    *width = (buff[0]<<8)|buff[1];
                if(height)
                    *height = (buff[4]<<8)|buff[5];

                retFinal = 1;
            }
            break;
        }
        // moov/trak are tkhd's parent boxes: descend by skipping just the 8-byte header
        else if(strncmp(type, "moov", 4) == 0 || strncmp(type, "trak", 4) == 0)
            size = ret;
        // an mdat with size == 1 uses "largesize": the next 8 bytes hold the real size
        else if(size == 1 && strncmp(type, "mdat", 4) == 0)
        {
            if(read(fd, buff, 8) != 8)
            {
                fprintf(stderr, "mp4_get_width_height: read err\n");
                break;
            }
            // how many bytes of this mdat box remain
            int i;
            unsigned long long lsize = 0;
            for(i = 0, lsize = 0; i < 8; i++)
            {
                lsize <<= 8;
                lsize |= buff[i];
            }
            // printf(" type: %s largesize: %llu\n", type, lsize);
            lsize = lsize - ret - 8;
            // skip in bounded chunks in case off_t is only 32-bit on this platform
            unsigned int tsize = 0;
            while(lsize > 0)
            {
                if(lsize > 0x7FFFFFFF)
                    tsize = 0x7FFFFFFF;
                else
                    tsize = (unsigned int)lsize;
                lsize -= tsize;
                if(lseek(fd, tsize, SEEK_CUR) < 0)
                    break;
            }
            // nothing further to skip for this box
            size = ret;
        }

        if(size < (unsigned int)ret)
        {
            fprintf(stderr, "mp4_get_width_height: format err at addr: %ld, size = %u\n",
                    (long)lseek(fd, 0, SEEK_CUR) - ret, size);
            break;
        }
        else if(lseek(fd, size - ret, SEEK_CUR) < 0)
        {
            fprintf(stderr, "mp4_get_width_height: lseek to final\n");
            break;
        }
    }
    close(fd);
    return retFinal;
}
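/*
 * For reference (illustrative decode of the layout the functions above and
 * below rely on): every MP4 box starts with an 8-byte header, a 4-byte
 * big-endian size followed by a 4-byte type tag ("moov", "trak", "tkhd",
 * "mdat", ...). A size of 1 means a 64-bit "largesize" follows the type.
 */
#if 0
unsigned char hdr[8];
read(fd, hdr, 8); /* fd: an open MP4 file */
unsigned int box_size = (hdr[0]<<24)|(hdr[1]<<16)|(hdr[2]<<8)|hdr[3];
char *box_type = (char*)&hdr[4]; /* not NUL-terminated: compare with strncmp */
#endif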
static int mp4_fd = 0;
static unsigned long long mdat_size = 0;

void mp4_close(void)
{
    if(mp4_fd > 0)
        close(mp4_fd);
    mp4_fd = 0;
    mdat_size = 0;
}

void mp4_open(char *filePath)
{
    unsigned char buff[9] = {0};
    char *type;
    int ret;

    type = (char*)&buff[4];

    if(mp4_fd)
        mp4_close();
    mp4_fd = open(filePath, O_RDONLY);
    if(mp4_fd > 0)
    {
        // walk the top-level boxes until we find mdat
        while((ret = read(mp4_fd, buff, 8)) > 0)
        {
            mdat_size = (buff[0]<<24)|(buff[1]<<16)|(buff[2]<<8)|buff[3];

            if(strncmp(type, "mdat", 4) == 0)
            {
                if(mdat_size == 1) // largesize mode: the next 8 bytes hold the real size
                {
                    if(read(mp4_fd, buff, 8) != 8)
                    {
                        fprintf(stderr, "mp4_open: read err\n");
                        mp4_close();
                        break;
                    }
                    int i;
                    for(i = 0, mdat_size = 0; i < 8; i++)
                    {
                        mdat_size <<= 8;
                        mdat_size |= buff[i];
                    }
                    mdat_size = mdat_size - ret - 8;
                }
                break;
            }

            if(lseek(mp4_fd, mdat_size - ret, SEEK_CUR) < 0)
            {
                fprintf(stderr, "mp4_open: lseek to final\n");
                mp4_close();
                break;
            }
        }
    }
}

//return: <=0 final or error
int mp4_read_frame(unsigned char *data, int dataMaxLen)
{
    if(mp4_fd < 1 || mdat_size < 1)
    {
        mp4_close();
        return -1;
    }

    unsigned char buff[9] = {0};
    unsigned int size = 0;
    int retFinal = 0, ret;

    if(read(mp4_fd, buff, 4) == 4)
    {
        // each frame in mdat is prefixed by a 4-byte big-endian length
        size = (buff[0]<<24)|(buff[1]<<16)|(buff[2]<<8)|buff[3];
        if(size < 1)
            goto error_return;

        // printf("mp4_read_frame: size %u / remain %llu\n", size, mdat_size);

        mdat_size = mdat_size - 4 - size;

        if(size > (unsigned int)dataMaxLen)
        {
            retFinal = read(mp4_fd, data, dataMaxLen);
            // skip the tail that doesn't fit instead of reading it anywhere
            lseek(mp4_fd, size - dataMaxLen, SEEK_CUR);
        }
        else
            retFinal = read(mp4_fd, data, size);

        if(retFinal < 1)
            goto error_return;

        // this mdat box is exhausted: look for the next one
        if(mdat_size == 0)
        {
            char *type;
            type = (char*)&buff[4];

            while((ret = read(mp4_fd, buff, 8)) > 0)
            {
                mdat_size = (buff[0]<<24)|(buff[1]<<16)|(buff[2]<<8)|buff[3];

                if(strncmp(type, "mdat", 4) == 0)
                {
                    if(mdat_size == 1)
                    {
                        if(read(mp4_fd, buff, 8) != 8)
                        {
                            fprintf(stderr, "mp4_read_frame: read err\n");
                            mp4_close();
                            break;
                        }
                        int i;
                        for(i = 0, mdat_size = 0; i < 8; i++)
                        {
                            mdat_size <<= 8;
                            mdat_size |= buff[i];
                        }
                        mdat_size = mdat_size - ret - 8;
                    }
                    break;
                }

                if(lseek(mp4_fd, mdat_size - ret, SEEK_CUR) < 0)
                {
                    fprintf(stderr, "mp4_read_frame: lseek to final\n");
                    mp4_close();
                    break;
                }
            }
        }

        return retFinal;
    }

error_return:
    //error
    mp4_close();
    return 0;
}
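/*
 * Hypothetical driver loop for the two functions above ("video.mp4" is a
 * placeholder path):
 */
#if 0
unsigned char frame[512*1024];
int n;
mp4_open("video.mp4");
while((n = mp4_read_frame(frame, sizeof(frame))) > 0)
{
    /* consume n bytes of frame payload here */
}
mp4_close();
#endif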
--------------------------------------------------------------------------------
/libs/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wexiangis/rtsp_to_h264/6826f646306f3458d0a773bcd45a1ca861eff13f/libs/.gitkeep
--------------------------------------------------------------------------------
/live.2019.08.12.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wexiangis/rtsp_to_h264/6826f646306f3458d0a773bcd45a1ca861eff13f/live.2019.08.12.tar.gz
--------------------------------------------------------------------------------
/rtsp_to_h264.cpp:
--------------------------------------------------------------------------------
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 3 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// Copyright (c) 1996-2019, Live Networks, Inc.  All rights reserved
// A demo application, showing how to create and run a RTSP client (that can potentially receive multiple streams concurrently).
//
// NOTE: This code - although it builds a running application - is intended only to illustrate how to develop your own RTSP
// client application.  For a full-featured RTSP client application - with much more functionality, and many options - see
// "openRTSP": http://www.live555.com/openRTSP/

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

#include "shmem.h"

typedef struct{
  char stepCount;
  int cI, cB, cP;
  bool isH264;

  FILE *fp;
  char tar_file_name[128];

  bool slave_mode; // slave mode: after connecting, dump frame data to stdout; redirect with '>>' to write it to a file

  int shm_fd;
  ShmData_Struct *shm_dat;
  char shm_path[64];
  char shm_flag[2];
  bool shm_mode;

  bool debug;
  int frameType;
  char head[4];

  RTSPClient* rtspClient;
  unsigned int rtspClientCount;

  char argv0[128];
  char argvX[128];
}Main_Pro;

static Main_Pro main_pro = {
  .stepCount = 0,
  .cI = 0,
  .cB = 0,
  .cP = 0,
  .isH264 = false,

  .fp = NULL,
  .tar_file_name = {0},//"test",

  .slave_mode = false,

  .shm_fd = 0,
  .shm_dat = NULL,
  .shm_path = {0},//"/tmp",
  .shm_flag = {0},//"s",
  .shm_mode = 0,

  .debug = false,
  .frameType = 0,
  .head = {0x00, 0x00, 0x00, 0x01},

  .rtspClient = NULL,
  .rtspClientCount = 0,

  .argv0 = {0},
  .argvX = {0},
};

// Forward function definitions:

// RTSP 'response handlers':
void continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode, char* resultString);
void continueAfterSETUP(RTSPClient* rtspClient, int resultCode, char* resultString);
void continueAfterPLAY(RTSPClient* rtspClient, int resultCode, char* resultString);

// Other event handler functions:
void subsessionAfterPlaying(void* clientData); // called when a stream's subsession (e.g., audio or video substream) ends
void subsessionByeHandler(void* clientData, char const* reason);
  // called when a RTCP "BYE" is received for a subsession
void streamTimerHandler(void* clientData);
  // called at the end of a stream's expected duration (if the stream has not already signaled its end using a RTCP "BYE")

// The main streaming routine (for each "rtsp://" URL):
void openURL(UsageEnvironment& env, char const* progName, char const* rtspURL);

// Used to iterate through each stream's 'subsessions', setting up each one:
void setupNextSubsession(RTSPClient* rtspClient);

// Used to shut down and close a stream (including its "RTSPClient" object):
void shutdownStream(RTSPClient* rtspClient, int exitCode = 1);

// A function that outputs a string that identifies each stream (for debugging output). Modify this if you wish:
UsageEnvironment& operator<<(UsageEnvironment& env, const RTSPClient& rtspClient) {
  return env << "[URL:\"" << rtspClient.url() << "\"]: ";
}
// A function that outputs a string that identifies each subsession (for debugging output). Modify this if you wish:
UsageEnvironment& operator<<(UsageEnvironment& env, const MediaSubsession& subsession) {
  return env << subsession.mediumName() << "/" << subsession.codecName();
}

// Define a class to hold per-stream state that we maintain throughout each stream's lifetime:

class StreamClientState {
public:
  StreamClientState();
  virtual ~StreamClientState();

public:
  MediaSubsessionIterator* iter;
  MediaSession* session;
  MediaSubsession* subsession;
  TaskToken streamTimerTask;
  double duration;
};

// If you're streaming just a single stream (i.e., just from a single URL, once), then you can define and use just a single
// "StreamClientState" structure, as a global variable in your application. However, because - in this demo application - we're
// showing how to play multiple streams, concurrently, we can't do that. Instead, we have to have a separate "StreamClientState"
// structure for each "RTSPClient". To do this, we subclass "RTSPClient", and add a "StreamClientState" field to the subclass:

class ourRTSPClient: public RTSPClient {
public:
  static ourRTSPClient* createNew(UsageEnvironment& env, char const* rtspURL,
                                  int verbosityLevel = 0,
                                  char const* applicationName = NULL,
                                  portNumBits tunnelOverHTTPPortNum = 0);

  void reconnect();

protected:
  ourRTSPClient(UsageEnvironment& env, char const* rtspURL,
                int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum);
  // called only by createNew();
  virtual ~ourRTSPClient();

public:
  StreamClientState scs;
};
// Define a data sink (a subclass of "MediaSink") to receive the data for each subsession (i.e., each audio or video 'substream').
// In practice, this might be a class (or a chain of classes) that decodes and then renders the incoming audio or video.
// Or it might be a "FileSink", for outputting the received data into a file (as is done by the "openRTSP" application).
// In this example code, however, we define a simple 'dummy' sink that receives incoming data, but does nothing with it.

class DummySink: public MediaSink {
public:
  static DummySink* createNew(UsageEnvironment& env,
                              MediaSubsession& subsession, // identifies the kind of data that's being received
                              char const* streamId = NULL); // identifies the stream itself (optional)

private:
  DummySink(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId);
  // called only by "createNew()"
  virtual ~DummySink();

  static void afterGettingFrame(void* clientData, unsigned frameSize,
                                unsigned numTruncatedBytes,
                                struct timeval presentationTime,
                                unsigned durationInMicroseconds);
  void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
                         struct timeval presentationTime, unsigned durationInMicroseconds);

private:
  // redefined virtual functions:
  virtual Boolean continuePlaying();

private:
  u_int8_t* fReceiveBuffer;
  MediaSubsession& fSubsession;
  char* fStreamId;
};

#define RTSP_CLIENT_VERBOSITY_LEVEL 0 // set to 1 to print verbose output from each "RTSPClient"

void openURL(UsageEnvironment& env, char const* progName, char const* rtspURL) {
  // Begin by creating a "RTSPClient" object. Note that there is a separate "RTSPClient" object for each stream that we wish
  // to receive (even if more than one stream uses the same "rtsp://" URL).
  main_pro.rtspClient = ourRTSPClient::createNew(env, rtspURL, RTSP_CLIENT_VERBOSITY_LEVEL, progName);
  if (main_pro.rtspClient == NULL) {
    env << "Failed to create a RTSP client for URL \"" << rtspURL << "\": " << env.getResultMsg() << "\n";
    return;
  }

  ++main_pro.rtspClientCount;

  // Next, send a RTSP "DESCRIBE" command, to get a SDP description for the stream.
  // Note that this command - like all RTSP commands - is sent asynchronously; we do not block, waiting for a response.
  // Instead, the following function call returns immediately, and we handle the RTSP response later, from within the event loop:
  main_pro.rtspClient->sendDescribeCommand(continueAfterDESCRIBE);
}
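// Sketch of the scaffolding that typically drives openURL() (it mirrors
// live555's testRTSPClient demo; the URL below is a placeholder):
#if 0
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
openURL(*env, "demo", "rtsp://192.168.1.10:554/stream");
// eventLoopWatchVariable is defined near the end of this file
env->taskScheduler().doEventLoop(&eventLoopWatchVariable);
#endif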
// Implementation of the RTSP 'response handlers':

void continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode, char* resultString) {
  do {
    UsageEnvironment& env = rtspClient->envir(); // alias
    StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

    if (resultCode != 0) {
      env << *rtspClient << "Failed to get a SDP description: " << resultString << "\n";
      delete[] resultString;
      break;
    }

    char* const sdpDescription = resultString;
    env << *rtspClient << "Got a SDP description:\n" << sdpDescription << "\n";

    // Create a media session object from this SDP description:
    scs.session = MediaSession::createNew(env, sdpDescription);
    delete[] sdpDescription; // because we don't need it anymore
    if (scs.session == NULL) {
      env << *rtspClient << "Failed to create a MediaSession object from the SDP description: " << env.getResultMsg() << "\n";
      break;
    } else if (!scs.session->hasSubsessions()) {
      env << *rtspClient << "This session has no media subsessions (i.e., no \"m=\" lines)\n";
      break;
    }

    // Then, create and set up our data source objects for the session. We do this by iterating over the session's 'subsessions',
    // calling "MediaSubsession::initiate()", and then sending a RTSP "SETUP" command, on each one.
    // (Each 'subsession' will have its own data source.)
    scs.iter = new MediaSubsessionIterator(*scs.session);
    setupNextSubsession(rtspClient);
    return;
  } while (0);

  // An unrecoverable error occurred with this stream.
  shutdownStream(rtspClient);
}

// By default, we request that the server stream its data using RTP/UDP.
// If, instead, you want to request that the server stream via RTP-over-TCP, change the following to True:
#define REQUEST_STREAMING_OVER_TCP False

void setupNextSubsession(RTSPClient* rtspClient) {
  UsageEnvironment& env = rtspClient->envir(); // alias
  StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

  scs.subsession = scs.iter->next();
  if (scs.subsession != NULL) {
    if (!scs.subsession->initiate()) {
      env << *rtspClient << "Failed to initiate the \"" << *scs.subsession << "\" subsession: " << env.getResultMsg() << "\n";
      setupNextSubsession(rtspClient); // give up on this subsession; go to the next one
    } else {
      env << *rtspClient << "Initiated the \"" << *scs.subsession << "\" subsession (";
      if (scs.subsession->rtcpIsMuxed()) {
        env << "client port " << scs.subsession->clientPortNum();
      } else {
        env << "client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1;
      }
      env << ")\n";

      // Continue setting up this subsession, by sending a RTSP "SETUP" command:
      rtspClient->sendSetupCommand(*scs.subsession, continueAfterSETUP, False, REQUEST_STREAMING_OVER_TCP);
    }
    return;
  }

  // We've finished setting up all of the subsessions. Now, send a RTSP "PLAY" command to start the streaming:
  if (scs.session->absStartTime() != NULL) {
    // Special case: The stream is indexed by 'absolute' time, so send an appropriate "PLAY" command:
    rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY, scs.session->absStartTime(), scs.session->absEndTime());
  } else {
    scs.duration = scs.session->playEndTime() - scs.session->playStartTime();
    rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY);
  }
}
void continueAfterSETUP(RTSPClient* rtspClient, int resultCode, char* resultString) {
  do {
    UsageEnvironment& env = rtspClient->envir(); // alias
    StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

    if (resultCode != 0) {
      env << *rtspClient << "Failed to set up the \"" << *scs.subsession << "\" subsession: " << resultString << "\n";
      break;
    }

    env << *rtspClient << "Set up the \"" << *scs.subsession << "\" subsession (";
    if (scs.subsession->rtcpIsMuxed()) {
      env << "client port " << scs.subsession->clientPortNum();
    } else {
      env << "client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1;
    }
    env << ")\n";

    // Having successfully setup the subsession, create a data sink for it, and call "startPlaying()" on it.
    // (This will prepare the data sink to receive data; the actual flow of data from the client won't start happening until later,
    // after we've sent a RTSP "PLAY" command.)

    scs.subsession->sink = DummySink::createNew(env, *scs.subsession, rtspClient->url());
    // perhaps use your own custom "MediaSink" subclass instead
    if (scs.subsession->sink == NULL) {
      env << *rtspClient << "Failed to create a data sink for the \"" << *scs.subsession
          << "\" subsession: " << env.getResultMsg() << "\n";
      break;
    }

    env << *rtspClient << "Created a data sink for the \"" << *scs.subsession << "\" subsession\n";
    scs.subsession->miscPtr = rtspClient; // a hack to let subsession handler functions get the "RTSPClient" from the subsession
    scs.subsession->sink->startPlaying(*(scs.subsession->readSource()),
                                       subsessionAfterPlaying, scs.subsession);
    // Also set a handler to be called if a RTCP "BYE" arrives for this subsession:
    if (scs.subsession->rtcpInstance() != NULL) {
      scs.subsession->rtcpInstance()->setByeWithReasonHandler(subsessionByeHandler, scs.subsession);
    }
  } while (0);
  delete[] resultString;

  // Set up the next subsession, if any:
  setupNextSubsession(rtspClient);
}
void continueAfterPLAY(RTSPClient* rtspClient, int resultCode, char* resultString) {
  Boolean success = False;

  do {
    UsageEnvironment& env = rtspClient->envir(); // alias
    StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

    if (resultCode != 0) {
      env << *rtspClient << "Failed to start playing session: " << resultString << "\n";
      break;
    }

    // Set a timer to be handled at the end of the stream's expected duration (if the stream does not already signal its end
    // using a RTCP "BYE"). This is optional. If, instead, you want to keep the stream active - e.g., so you can later
    // 'seek' back within it and do another RTSP "PLAY" - then you can omit this code.
    // (Alternatively, if you don't want to receive the entire stream, you could set this timer for some shorter value.)
    if (scs.duration > 0) {
      unsigned const delaySlop = 2; // number of seconds extra to delay, after the stream's expected duration. (This is optional.)
      scs.duration += delaySlop;
      unsigned uSecsToDelay = (unsigned)(scs.duration*1000000);
      scs.streamTimerTask = env.taskScheduler().scheduleDelayedTask(uSecsToDelay, (TaskFunc*)streamTimerHandler, rtspClient);
    }

    env << *rtspClient << "Started playing session";
    if (scs.duration > 0) {
      env << " (for up to " << scs.duration << " seconds)";
    }
    env << "...\n";

    success = True;
  } while (0);
  delete[] resultString;

  if (!success) {
    // An unrecoverable error occurred with this stream.
    shutdownStream(rtspClient);
  }
}


// Implementation of the other event handlers:

void subsessionAfterPlaying(void* clientData) {
  MediaSubsession* subsession = (MediaSubsession*)clientData;
  RTSPClient* rtspClient = (RTSPClient*)(subsession->miscPtr);

  // Begin by closing this subsession's stream:
  Medium::close(subsession->sink);
  subsession->sink = NULL;

  // Next, check whether *all* subsessions' streams have now been closed:
  MediaSession& session = subsession->parentSession();
  MediaSubsessionIterator iter(session);
  while ((subsession = iter.next()) != NULL) {
    if (subsession->sink != NULL) return; // this subsession is still active
  }

  // All subsessions' streams have now been closed, so shutdown the client:
  shutdownStream(rtspClient);
}

void subsessionByeHandler(void* clientData, char const* reason) {
  MediaSubsession* subsession = (MediaSubsession*)clientData;
  RTSPClient* rtspClient = (RTSPClient*)subsession->miscPtr;
  UsageEnvironment& env = rtspClient->envir(); // alias

  env << *rtspClient << "Received RTCP \"BYE\"";
  if (reason != NULL) {
    env << " (reason:\"" << reason << "\")";
    delete[] reason;
  }
  env << " on \"" << *subsession << "\" subsession\n";

  // Now act as if the subsession had closed:
  subsessionAfterPlaying(subsession);
}

void streamTimerHandler(void* clientData) {
  ourRTSPClient* rtspClient = (ourRTSPClient*)clientData;
  StreamClientState& scs = rtspClient->scs; // alias

  scs.streamTimerTask = NULL;

  // Shut down the stream:
  shutdownStream(rtspClient);
}
void shutdownStream(RTSPClient* rtspClient, int exitCode) {
  UsageEnvironment& env = rtspClient->envir(); // alias
  StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

  // First, check whether any subsessions have still to be closed:
  if (scs.session != NULL) {
    Boolean someSubsessionsWereActive = False;
    MediaSubsessionIterator iter(*scs.session);
    MediaSubsession* subsession;

    while ((subsession = iter.next()) != NULL) {
      if (subsession->sink != NULL) {
        Medium::close(subsession->sink);
        subsession->sink = NULL;

        if (subsession->rtcpInstance() != NULL) {
          subsession->rtcpInstance()->setByeHandler(NULL, NULL); // in case the server sends a RTCP "BYE" while handling "TEARDOWN"
        }

        someSubsessionsWereActive = True;
      }
    }

    if (someSubsessionsWereActive) {
      // Send a RTSP "TEARDOWN" command, to tell the server to shutdown the stream.
      // Don't bother handling the response to the "TEARDOWN".
      rtspClient->sendTeardownCommand(*scs.session, NULL);
    }
  }

  env << *rtspClient << "Closing the stream.\n";
  Medium::close(rtspClient);
  // Note that this will also cause this stream's "StreamClientState" structure to get reclaimed.

  if (--main_pro.rtspClientCount == 0) {
    // The final stream has ended, so exit the application now.
    // (Of course, if you're embedding this code into your own application, you might want to comment this out,
    // and replace it with "eventLoopWatchVariable = 1;", so that we leave the LIVE555 event loop, and continue running "main()".)
    // exit(exitCode);
  }
}


// Implementation of "ourRTSPClient":

ourRTSPClient* ourRTSPClient::createNew(UsageEnvironment& env, char const* rtspURL,
                                        int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum) {
  return new ourRTSPClient(env, rtspURL, verbosityLevel, applicationName, tunnelOverHTTPPortNum);
}

ourRTSPClient::ourRTSPClient(UsageEnvironment& env, char const* rtspURL,
                             int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum)
  : RTSPClient(env, rtspURL, verbosityLevel, applicationName, tunnelOverHTTPPortNum, -1) {
}

ourRTSPClient::~ourRTSPClient() {
}

void ourRTSPClient::reconnect()
{
  reset();
}

// Implementation of "StreamClientState":

StreamClientState::StreamClientState()
  : iter(NULL), session(NULL), subsession(NULL), streamTimerTask(NULL), duration(0.0) {
}

StreamClientState::~StreamClientState() {
  delete iter;
  if (session != NULL) {
    // We also need to delete "session", and unschedule "streamTimerTask" (if set)
    UsageEnvironment& env = session->envir(); // alias

    env.taskScheduler().unscheduleDelayedTask(streamTimerTask);
    Medium::close(session);
  }
}


// Implementation of "DummySink":

// Even though we're not going to be doing anything with the incoming data, we still need to receive it.
// Define the size of the buffer that we'll use:
#define DUMMY_SINK_RECEIVE_BUFFER_SIZE 524275 // 512*1024 - 13 = 524275

DummySink* DummySink::createNew(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId) {
  return new DummySink(env, subsession, streamId);
}

DummySink::DummySink(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId)
  : MediaSink(env),
    fSubsession(subsession) {
  fStreamId = strDup(streamId);
  fReceiveBuffer = new u_int8_t[DUMMY_SINK_RECEIVE_BUFFER_SIZE];
}

DummySink::~DummySink() {
  delete[] fReceiveBuffer;
  delete[] fStreamId;
}

void DummySink::afterGettingFrame(void* clientData, unsigned frameSize, unsigned numTruncatedBytes,
                                  struct timeval presentationTime, unsigned durationInMicroseconds) {
  DummySink* sink = (DummySink*)clientData;
  sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds);
}
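// Frames larger than DUMMY_SINK_RECEIVE_BUFFER_SIZE are delivered truncated;
// live555 reports the overflow via "numTruncatedBytes". A guard one could add
// at the top of DummySink::afterGettingFrame() below (sketch):
#if 0
if (numTruncatedBytes > 0)
  envir() << "warning: frame truncated by " << numTruncatedBytes << " bytes; enlarge the receive buffer\n";
#endif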
"afterGettingFrame()" will get called later, when it arrives: 531 | fSource->getNextFrame(fReceiveBuffer, DUMMY_SINK_RECEIVE_BUFFER_SIZE, 532 | afterGettingFrame, this, 533 | onSourceClosure, this); 534 | return True; 535 | } 536 | 537 | 538 | //---------------------------------------- 分割线 ---------------------------------------- 539 | 540 | extern int h264_decode_sps(unsigned char * buf,unsigned int nLen,int *width,int *height,int *fps); 541 | extern int h265_decode_sps(unsigned char * buf,unsigned int nLen,int *width,int *height,int *fps); 542 | 543 | void DummySink::afterGettingFrame( 544 | unsigned frameSize, 545 | unsigned numTruncatedBytes, 546 | struct timeval presentationTime, 547 | unsigned /*durationInMicroseconds*/) 548 | { 549 | // printf("head/0x%X len/%d\n", fReceiveBuffer[0], frameSize); 550 | 551 | // We've just received a frame of data. (Optionally) print out information about it: 552 | // if(main_pro.stepCount == 0) 553 | // { 554 | // if (fStreamId != NULL) 555 | // envir() << "Stream \"" << fStreamId << "\"; "; 556 | // envir() << fSubsession.mediumName() << "/" << fSubsession.codecName() << ":\tReceived " << frameSize << " bytes"; 557 | // if (numTruncatedBytes > 0) 558 | // envir() << " (with " << numTruncatedBytes << " bytes truncated)"; 559 | // char uSecsStr[6+1]; // used to output the 'microseconds' part of the presentation time 560 | // sprintf(uSecsStr, "%06u", (unsigned)presentationTime.tv_usec); 561 | // envir() << ".\tPresentation time: " << (unsigned)presentationTime.tv_sec << "." << uSecsStr; 562 | // if (fSubsession.rtpSource() != NULL && !fSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP()) 563 | // envir() << "!"; // mark the debugging output to indicate that this presentation time is not RTCP-synchronized 564 | // envir() << "\n"; 565 | // } 566 | 567 | //save to file 568 | // if(!strcmp(fSubsession.mediumName(), "video")) 569 | { 570 | //写数据到共享内存 571 | if(main_pro.shm_dat) 572 | { 573 | if(main_pro.shm_dat->ready) 574 | usleep(1000); 575 | *((unsigned int*)main_pro.shm_dat->len) = frameSize; 576 | memcpy(main_pro.shm_dat->data, fReceiveBuffer, frameSize); 577 | main_pro.shm_dat->order++; 578 | main_pro.shm_dat->ready = 1; 579 | } 580 | 581 | if(main_pro.stepCount == 0) 582 | { 583 | //流类型判断 584 | if(strstr(fSubsession.codecName(), "265")) 585 | main_pro.isH264 = false; 586 | else if(strstr(fSubsession.codecName(), "264")) 587 | main_pro.isH264 = true; 588 | //fp准备 589 | if(main_pro.slave_mode) 590 | main_pro.fp = stdout; 591 | else if(main_pro.tar_file_name[0]) 592 | { 593 | if(main_pro.isH264) 594 | { 595 | strcpy(&main_pro.tar_file_name[strlen(main_pro.tar_file_name)], ".h264"); 596 | main_pro.fp = fopen(main_pro.tar_file_name, "w"); 597 | // main_pro.fp = fopen(main_pro.tar_file_name, "a+b"); 598 | } 599 | else 600 | { 601 | strcpy(&main_pro.tar_file_name[strlen(main_pro.tar_file_name)], ".h265"); 602 | main_pro.fp = fopen(main_pro.tar_file_name, "w"); 603 | // main_pro.fp = fopen(main_pro.tar_file_name, "a+b"); 604 | } 605 | } 606 | //不再进入该段内容 607 | main_pro.stepCount += 1; 608 | } 609 | 610 | //是否要补上头4字节?是则设置偏移量为4 611 | main_pro.frameType = 0; 612 | if(*((int*)fReceiveBuffer) == 0x1000000) // head == 00,00,00,01 ? 
    // does the frame already carry the 4-byte start code? If so, offset the
    // NAL-header read below by 4
    main_pro.frameType = 0;
    if(*((int*)fReceiveBuffer) == 0x1000000) // head == 00,00,00,01 ?
      main_pro.frameType = 4;

    // write to file
    if(main_pro.fp)
    {
      if(main_pro.frameType == 0)
        fwrite(main_pro.head, 4, 1, main_pro.fp); // prepend the missing start code
      fwrite(fReceiveBuffer, frameSize, 1, main_pro.fp);
    }

    // catch the SPS NAL and parse the video width/height out of it
    if(main_pro.stepCount == 1)
    {
      int offset = main_pro.frameType; // 0, or 4 when a start code is present
      if(main_pro.isH264)
      {
        main_pro.frameType = fReceiveBuffer[offset]&0x1F;
        if(main_pro.frameType == 7) // SPS
        {
          int width = 0, height = 0, fps = 0;
          if(h264_decode_sps(fReceiveBuffer+offset,frameSize-offset,&width,&height,&fps))
          {
            if(main_pro.shm_dat)
            {
              main_pro.shm_dat->type = 1;
              main_pro.shm_dat->width[0] = width&0xFF;
              main_pro.shm_dat->width[1] = (width>>8)&0xFF;
              main_pro.shm_dat->height[0] = height&0xFF;
              main_pro.shm_dat->height[1] = (height>>8)&0xFF;
              main_pro.shm_dat->fps = fps;
            }
            envir() << "--> hit SPS frame: w/" << width
                    << " h/" << height
                    << " fps/" << fps
                    << " " << fSubsession.mediumName()
                    << "/" << fSubsession.codecName()
                    << " I-frame/" << main_pro.cI
                    << " P-frame/" << main_pro.cP
                    << " B-frame/" << main_pro.cB
                    << "\n";
            // don't enter this block again (unless debugging)
            if(!main_pro.debug)
              main_pro.stepCount += 1;
          }
        }
        else if(main_pro.frameType == 5) // IDR frame
        {
          main_pro.cI += 1;
          main_pro.cP = 0;
          main_pro.cB = 0;
        }
        else if(main_pro.frameType == 1) // non-IDR slice
          main_pro.cP += 1;
      }
      else
      {
        main_pro.frameType = (fReceiveBuffer[offset]&0x7E)>>1;
        if(main_pro.frameType == 33) // SPS
        {
          int width = 0, height = 0, fps = 0;
          if(h265_decode_sps(fReceiveBuffer+offset,frameSize-offset,&width,&height,&fps))
          {
            if(main_pro.shm_dat)
            {
              main_pro.shm_dat->type = 2;
              main_pro.shm_dat->width[0] = width&0xFF;
              main_pro.shm_dat->width[1] = (width>>8)&0xFF;
              main_pro.shm_dat->height[0] = height&0xFF;
              main_pro.shm_dat->height[1] = (height>>8)&0xFF;
              main_pro.shm_dat->fps = fps;
            }
            envir() << "--> hit SPS frame: w/" << width
                    << " h/" << height
                    << " fps/" << fps
                    << " " << fSubsession.mediumName()
                    << "/" << fSubsession.codecName()
                    << " I-frame/" << main_pro.cI
                    << " P-frame/" << main_pro.cP
                    << " B-frame/" << main_pro.cB
                    << "\n";
            // don't enter this block again (unless debugging)
            if(!main_pro.debug)
              main_pro.stepCount += 1;
          }
        }
        else if(main_pro.frameType == 19) // IDR frame
        {
          main_pro.cI += 1;
          main_pro.cP = 0;
          main_pro.cB = 0;
        }
        else if(main_pro.frameType == 1)
          main_pro.cP += 1;
      }
    }
    else if(main_pro.shm_dat)
    {
      if(main_pro.isH264)
        main_pro.shm_dat->type = 1;
      else
        main_pro.shm_dat->type = 2;
    }

  }
  // Then continue, to request the next frame of data:
  continuePlaying();
}

char eventLoopWatchVariable = 0;

void usage(UsageEnvironment& env, char const* progName)
{
  env << "\n";
  env << "Usage:\n";
  env << " " << progName << "