├── .gitignore ├── AzureTemplateFile.json ├── AzureTemplateParams.json ├── Dockerfile_cef ├── Dockerfile_cef_binary ├── Dockerfile_cef_create_from_binaries ├── Dockerfile_cefsharp ├── Dockerfile_vs ├── README.md ├── automate-git-shallow-support.py ├── az_create.ps1 ├── build.ps1 ├── cef_build.ps1 ├── cef_patch.ps1 ├── cef_patch_placeholder.diff ├── cefsharp_patch.ps1 ├── cefsharp_patch_placeholder.diff ├── cefsharp_set_versions_and_restore.ps1 ├── daemon.json ├── functions.ps1 ├── mem_log.ps1 ├── sample_patches ├── cef_patch_67_3396_pdfcrash_fix.diff └── cef_patch_find_vs2019_tools.diff └── versions_src.ps1 /.gitignore: -------------------------------------------------------------------------------- 1 | versions.ps1 2 | .dockerignore 3 | cefsharp_patch_*.diff 4 | cef_patch_*.diff 5 | *.zip 6 | *.pfx -------------------------------------------------------------------------------- /AzureTemplateFile.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", 3 | "contentVersion": "1.0.0.0", 4 | "parameters": { 5 | "location": { 6 | "type": "string" 7 | }, 8 | "networkInterfaceName": { 9 | "type": "string" 10 | }, 11 | "enableAcceleratedNetworking": { 12 | "type": "bool" 13 | }, 14 | "networkSecurityGroupName": { 15 | "type": "string" 16 | }, 17 | "virtualNetworkName": { 18 | "type": "string" 19 | }, 20 | "addressPrefix": { 21 | "type": "string" 22 | }, 23 | "subnetName": { 24 | "type": "string" 25 | }, 26 | "subnetPrefix": { 27 | "type": "string" 28 | }, 29 | "PsRemoteSecretVaultID": { 30 | "type": "string" 31 | }, 32 | "PsRemoteSecretUrl": { 33 | "type": "string" 34 | }, 35 | "publicIpAddressName": { 36 | "type": "string" 37 | }, 38 | "publicIpAddressType": { 39 | "type": "string" 40 | }, 41 | "publicIpAddressSku": { 42 | "type": "string" 43 | }, 44 | "virtualMachineName": { 45 | "type": "string" 46 | }, 47 | "virtualMachineRG": { 48 | 
"type": "string" 49 | }, 50 | "osDiskType": { 51 | "type": "string" 52 | }, 53 | "virtualMachineSize": { 54 | "type": "string" 55 | }, 56 | "adminUsername": { 57 | "type": "string" 58 | }, 59 | "adminPassword": { 60 | "type": "secureString" 61 | }, 62 | "diagnosticsStorageAccountName": { 63 | "type": "string" 64 | }, 65 | "diagnosticsStorageAccountId": { 66 | "type": "string" 67 | }, 68 | "diagnosticsStorageAccountType": { 69 | "type": "string" 70 | }, 71 | "diagnosticsStorageAccountKind": { 72 | "type": "string" 73 | }, 74 | "autoShutdownStatus": { 75 | "type": "string" 76 | }, 77 | "autoShutdownTime": { 78 | "type": "string" 79 | }, 80 | "autoShutdownTimeZone": { 81 | "type": "string" 82 | }, 83 | "autoShutdownNotificationStatus": { 84 | "type": "string" 85 | }, 86 | "autoShutdownNotificationLocale": { 87 | "type": "string" 88 | }, 89 | "autoShutdownNotificationEmail": { 90 | "type": "string" 91 | } 92 | }, 93 | "variables": { 94 | "nsgId": "[resourceId(resourceGroup().name, 'Microsoft.Network/networkSecurityGroups', parameters('networkSecurityGroupName'))]", 95 | "vnetId": "[resourceId(resourceGroup().name,'Microsoft.Network/virtualNetworks', parameters('virtualNetworkName'))]", 96 | "subnetRef": "[concat(variables('vnetId'), '/subnets/', parameters('subnetName'))]", 97 | "diagnosticsExtensionName": "Microsoft.Insights.VMDiagnosticsSettings" 98 | }, 99 | "resources": [ 100 | { 101 | "name": "[parameters('networkInterfaceName')]", 102 | "type": "Microsoft.Network/networkInterfaces", 103 | "apiVersion": "2018-04-01", 104 | "location": "[parameters('location')]", 105 | "dependsOn": [ 106 | "[concat('Microsoft.Network/networkSecurityGroups/', parameters('networkSecurityGroupName'))]", 107 | "[concat('Microsoft.Network/virtualNetworks/', parameters('virtualNetworkName'))]", 108 | "[concat('Microsoft.Network/publicIpAddresses/', parameters('publicIpAddressName'))]" 109 | ], 110 | "properties": { 111 | "ipConfigurations": [ 112 | { 113 | "name": "ipconfig1", 114 | 
"properties": { 115 | "subnet": { 116 | "id": "[variables('subnetRef')]" 117 | }, 118 | "privateIPAllocationMethod": "Dynamic", 119 | "publicIpAddress": { 120 | "id": "[resourceId(resourceGroup().name, 'Microsoft.Network/publicIpAddresses', parameters('publicIpAddressName'))]" 121 | } 122 | } 123 | } 124 | ], 125 | "enableAcceleratedNetworking": "[parameters('enableAcceleratedNetworking')]", 126 | "networkSecurityGroup": { 127 | "id": "[variables('nsgId')]" 128 | } 129 | }, 130 | "tags": {} 131 | }, 132 | { 133 | "name": "[parameters('networkSecurityGroupName')]", 134 | "type": "Microsoft.Network/networkSecurityGroups", 135 | "apiVersion": "2018-08-01", 136 | "location": "[parameters('location')]", 137 | "properties": { 138 | "securityRules": [ 139 | { 140 | "name": "RDP", 141 | "properties": { 142 | "priority": 300, 143 | "protocol": "TCP", 144 | "access": "Allow", 145 | "direction": "Inbound", 146 | "sourceAddressPrefix": "*", 147 | "sourcePortRange": "*", 148 | "destinationAddressPrefix": "*", 149 | "destinationPortRange": "3389" 150 | } 151 | }, 152 | { 153 | "name": "PSRemote", 154 | "properties": { 155 | "priority": 301, 156 | "protocol": "TCP", 157 | "access": "Allow", 158 | "direction": "Inbound", 159 | "sourceAddressPrefix": "*", 160 | "sourcePortRange": "*", 161 | "destinationAddressPrefix": "*", 162 | "destinationPortRange": "5986" 163 | } 164 | } 165 | ] 166 | }, 167 | "tags": {} 168 | }, 169 | { 170 | "name": "[parameters('virtualNetworkName')]", 171 | "type": "Microsoft.Network/virtualNetworks", 172 | "apiVersion": "2018-08-01", 173 | "location": "[parameters('location')]", 174 | "properties": { 175 | "addressSpace": { 176 | "addressPrefixes": [ 177 | "[parameters('addressPrefix')]" 178 | ] 179 | }, 180 | "subnets": [ 181 | { 182 | "name": "[parameters('subnetName')]", 183 | "properties": { 184 | "addressPrefix": "[parameters('subnetPrefix')]" 185 | } 186 | } 187 | ] 188 | }, 189 | "tags": {} 190 | }, 191 | { 192 | "name": 
"[parameters('publicIpAddressName')]", 193 | "type": "Microsoft.Network/publicIpAddresses", 194 | "apiVersion": "2018-08-01", 195 | "location": "[parameters('location')]", 196 | "properties": { 197 | "publicIpAllocationMethod": "[parameters('publicIpAddressType')]" 198 | }, 199 | "sku": { 200 | "name": "[parameters('publicIpAddressSku')]" 201 | }, 202 | "tags": {} 203 | }, 204 | { 205 | "name": "[parameters('virtualMachineName')]", 206 | "type": "Microsoft.Compute/virtualMachines", 207 | "apiVersion": "2018-04-01", 208 | "location": "[parameters('location')]", 209 | "dependsOn": [ 210 | "[concat('Microsoft.Network/networkInterfaces/', parameters('networkInterfaceName'))]", 211 | "[concat('Microsoft.Storage/storageAccounts/', parameters('diagnosticsStorageAccountName'))]" 212 | ], 213 | "properties": { 214 | "hardwareProfile": { 215 | "vmSize": "[parameters('virtualMachineSize')]" 216 | }, 217 | "storageProfile": { 218 | "osDisk": { 219 | "createOption": "fromImage", 220 | "managedDisk": { 221 | "storageAccountType": "[parameters('osDiskType')]" 222 | } 223 | }, 224 | "imageReference": { 225 | "publisher": "MicrosoftWindowsServer", 226 | "offer": "WindowsServer", 227 | "sku": "Datacenter-Core-1903-with-Containers-smalldisk", 228 | "version": "latest" 229 | } 230 | }, 231 | "networkProfile": { 232 | "networkInterfaces": [ 233 | { 234 | "id": "[resourceId('Microsoft.Network/networkInterfaces', parameters('networkInterfaceName'))]" 235 | } 236 | ] 237 | }, 238 | "osProfile": { 239 | "computerName": "[parameters('virtualMachineName')]", 240 | "adminUsername": "[parameters('adminUsername')]", 241 | "adminPassword": "[parameters('adminPassword')]", 242 | "secrets": [ 243 | { 244 | "sourceVault": { 245 | "id": "[parameters('PsRemoteSecretVaultID')]" 246 | }, 247 | "vaultCertificates": [ 248 | { 249 | "certificateUrl": "[parameters('PsRemoteSecretUrl')]", 250 | "certificateStore": "My" 251 | } 252 | ] 253 | } 254 | ], 255 | "windowsConfiguration": { 256 | 
"enableAutomaticUpdates": true, 257 | "provisionVmAgent": true, 258 | "winRM": { 259 | "listeners": [ 260 | { 261 | "protocol": "https", 262 | "certificateUrl": "[parameters('PsRemoteSecretUrl')]" 263 | } 264 | ] 265 | } 266 | } 267 | }, 268 | "licenseType": "Windows_Server", 269 | "diagnosticsProfile": { 270 | "bootDiagnostics": { 271 | "enabled": true, 272 | "storageUri": "[concat('https://', parameters('diagnosticsStorageAccountName'), '.blob.core.windows.net/')]" 273 | } 274 | } 275 | }, 276 | "tags": {} 277 | }, 278 | { 279 | "name": "[parameters('diagnosticsStorageAccountName')]", 280 | "type": "Microsoft.Storage/storageAccounts", 281 | "apiVersion": "2018-02-01", 282 | "location": "[parameters('location')]", 283 | "properties": {}, 284 | "kind": "[parameters('diagnosticsStorageAccountKind')]", 285 | "sku": { 286 | "name": "[parameters('diagnosticsStorageAccountType')]" 287 | }, 288 | "tags": {} 289 | }, 290 | { 291 | "name": "[concat('shutdown-computevm-', parameters('virtualMachineName'))]", 292 | "type": "Microsoft.DevTestLab/schedules", 293 | "apiVersion": "2017-04-26-preview", 294 | "location": "[parameters('location')]", 295 | "dependsOn": [ 296 | "[concat('Microsoft.Compute/virtualMachines/', parameters('virtualMachineName'))]" 297 | ], 298 | "properties": { 299 | "status": "[parameters('autoShutdownStatus')]", 300 | "taskType": "ComputeVmShutdownTask", 301 | "dailyRecurrence": { 302 | "time": "[parameters('autoShutdownTime')]" 303 | }, 304 | "timeZoneId": "[parameters('autoShutdownTimeZone')]", 305 | "targetResourceId": "[resourceId('Microsoft.Compute/virtualMachines', parameters('virtualMachineName'))]", 306 | "notificationSettings": { 307 | "status": "[parameters('autoShutdownNotificationStatus')]", 308 | "notificationLocale": "[parameters('autoShutdownNotificationLocale')]", 309 | "timeInMinutes": "30", 310 | "emailRecipient": "[parameters('autoShutdownNotificationEmail')]" 311 | } 312 | }, 313 | "tags": {} 314 | }, 315 | { 316 | "name": 
"[concat(parameters('virtualMachineName'),'/', variables('diagnosticsExtensionName'))]", 317 | "type": "Microsoft.Compute/virtualMachines/extensions", 318 | "apiVersion": "2017-12-01", 319 | "location": "[parameters('location')]", 320 | "properties": { 321 | "publisher": "Microsoft.Azure.Diagnostics", 322 | "type": "IaaSDiagnostics", 323 | "typeHandlerVersion": "1.5", 324 | "autoUpgradeMinorVersion": true, 325 | "settings": { 326 | "StorageAccount": "[parameters('diagnosticsStorageAccountName')]", 327 | "WadCfg": { 328 | "DiagnosticMonitorConfiguration": { 329 | "overallQuotaInMB": 5120, 330 | "Metrics": { 331 | "resourceId": "[concat('/subscriptions/', subscription().subscriptionId, '/resourceGroups/', resourceGroup().name, '/providers/', 'Microsoft.Compute/virtualMachines/', parameters('virtualMachineName'))]", 332 | "MetricAggregation": [ 333 | { 334 | "scheduledTransferPeriod": "PT1H" 335 | }, 336 | { 337 | "scheduledTransferPeriod": "PT1M" 338 | } 339 | ] 340 | }, 341 | "DiagnosticInfrastructureLogs": { 342 | "scheduledTransferLogLevelFilter": "Error" 343 | }, 344 | "PerformanceCounters": { 345 | "scheduledTransferPeriod": "PT1M", 346 | "PerformanceCounterConfiguration": [ 347 | { 348 | "counterSpecifier": "\\Processor Information(_Total)\\% Processor Time", 349 | "sampleRate": "PT1M" 350 | }, 351 | { 352 | "counterSpecifier": "\\Processor Information(_Total)\\% Privileged Time", 353 | "sampleRate": "PT1M" 354 | }, 355 | { 356 | "counterSpecifier": "\\Processor Information(_Total)\\% User Time", 357 | "sampleRate": "PT1M" 358 | }, 359 | { 360 | "counterSpecifier": "\\Processor Information(_Total)\\Processor Frequency", 361 | "sampleRate": "PT1M" 362 | }, 363 | { 364 | "counterSpecifier": "\\System\\Processes", 365 | "sampleRate": "PT1M" 366 | }, 367 | { 368 | "counterSpecifier": "\\Process(_Total)\\Thread Count", 369 | "sampleRate": "PT1M" 370 | }, 371 | { 372 | "counterSpecifier": "\\Process(_Total)\\Handle Count", 373 | "sampleRate": "PT1M" 374 | }, 375 | { 
376 | "counterSpecifier": "\\System\\System Up Time", 377 | "sampleRate": "PT1M" 378 | }, 379 | { 380 | "counterSpecifier": "\\System\\Context Switches/sec", 381 | "sampleRate": "PT1M" 382 | }, 383 | { 384 | "counterSpecifier": "\\System\\Processor Queue Length", 385 | "sampleRate": "PT1M" 386 | }, 387 | { 388 | "counterSpecifier": "\\Memory\\% Committed Bytes In Use", 389 | "sampleRate": "PT1M" 390 | }, 391 | { 392 | "counterSpecifier": "\\Memory\\Available Bytes", 393 | "sampleRate": "PT1M" 394 | }, 395 | { 396 | "counterSpecifier": "\\Memory\\Committed Bytes", 397 | "sampleRate": "PT1M" 398 | }, 399 | { 400 | "counterSpecifier": "\\Memory\\Cache Bytes", 401 | "sampleRate": "PT1M" 402 | }, 403 | { 404 | "counterSpecifier": "\\Memory\\Pool Paged Bytes", 405 | "sampleRate": "PT1M" 406 | }, 407 | { 408 | "counterSpecifier": "\\Memory\\Pool Nonpaged Bytes", 409 | "sampleRate": "PT1M" 410 | }, 411 | { 412 | "counterSpecifier": "\\Memory\\Pages/sec", 413 | "sampleRate": "PT1M" 414 | }, 415 | { 416 | "counterSpecifier": "\\Memory\\Page Faults/sec", 417 | "sampleRate": "PT1M" 418 | }, 419 | { 420 | "counterSpecifier": "\\Process(_Total)\\Working Set", 421 | "sampleRate": "PT1M" 422 | }, 423 | { 424 | "counterSpecifier": "\\Process(_Total)\\Working Set - Private", 425 | "sampleRate": "PT1M" 426 | }, 427 | { 428 | "counterSpecifier": "\\LogicalDisk(_Total)\\% Disk Time", 429 | "sampleRate": "PT1M" 430 | }, 431 | { 432 | "counterSpecifier": "\\LogicalDisk(_Total)\\% Disk Read Time", 433 | "sampleRate": "PT1M" 434 | }, 435 | { 436 | "counterSpecifier": "\\LogicalDisk(_Total)\\% Disk Write Time", 437 | "sampleRate": "PT1M" 438 | }, 439 | { 440 | "counterSpecifier": "\\LogicalDisk(_Total)\\% Idle Time", 441 | "sampleRate": "PT1M" 442 | }, 443 | { 444 | "counterSpecifier": "\\LogicalDisk(_Total)\\Disk Bytes/sec", 445 | "sampleRate": "PT1M" 446 | }, 447 | { 448 | "counterSpecifier": "\\LogicalDisk(_Total)\\Disk Read Bytes/sec", 449 | "sampleRate": "PT1M" 450 | }, 451 | { 452 | 
"counterSpecifier": "\\LogicalDisk(_Total)\\Disk Write Bytes/sec", 453 | "sampleRate": "PT1M" 454 | }, 455 | { 456 | "counterSpecifier": "\\LogicalDisk(_Total)\\Disk Transfers/sec", 457 | "sampleRate": "PT1M" 458 | }, 459 | { 460 | "counterSpecifier": "\\LogicalDisk(_Total)\\Disk Reads/sec", 461 | "sampleRate": "PT1M" 462 | }, 463 | { 464 | "counterSpecifier": "\\LogicalDisk(_Total)\\Disk Writes/sec", 465 | "sampleRate": "PT1M" 466 | }, 467 | { 468 | "counterSpecifier": "\\LogicalDisk(_Total)\\Avg. Disk sec/Transfer", 469 | "sampleRate": "PT1M" 470 | }, 471 | { 472 | "counterSpecifier": "\\LogicalDisk(_Total)\\Avg. Disk sec/Read", 473 | "sampleRate": "PT1M" 474 | }, 475 | { 476 | "counterSpecifier": "\\LogicalDisk(_Total)\\Avg. Disk sec/Write", 477 | "sampleRate": "PT1M" 478 | }, 479 | { 480 | "counterSpecifier": "\\LogicalDisk(_Total)\\Avg. Disk Queue Length", 481 | "sampleRate": "PT1M" 482 | }, 483 | { 484 | "counterSpecifier": "\\LogicalDisk(_Total)\\Avg. Disk Read Queue Length", 485 | "sampleRate": "PT1M" 486 | }, 487 | { 488 | "counterSpecifier": "\\LogicalDisk(_Total)\\Avg. 
Disk Write Queue Length", 489 | "sampleRate": "PT1M" 490 | }, 491 | { 492 | "counterSpecifier": "\\LogicalDisk(_Total)\\% Free Space", 493 | "sampleRate": "PT1M" 494 | }, 495 | { 496 | "counterSpecifier": "\\LogicalDisk(_Total)\\Free Megabytes", 497 | "sampleRate": "PT1M" 498 | }, 499 | { 500 | "counterSpecifier": "\\Network Interface(*)\\Bytes Total/sec", 501 | "sampleRate": "PT1M" 502 | }, 503 | { 504 | "counterSpecifier": "\\Network Interface(*)\\Bytes Sent/sec", 505 | "sampleRate": "PT1M" 506 | }, 507 | { 508 | "counterSpecifier": "\\Network Interface(*)\\Bytes Received/sec", 509 | "sampleRate": "PT1M" 510 | }, 511 | { 512 | "counterSpecifier": "\\Network Interface(*)\\Packets/sec", 513 | "sampleRate": "PT1M" 514 | }, 515 | { 516 | "counterSpecifier": "\\Network Interface(*)\\Packets Sent/sec", 517 | "sampleRate": "PT1M" 518 | }, 519 | { 520 | "counterSpecifier": "\\Network Interface(*)\\Packets Received/sec", 521 | "sampleRate": "PT1M" 522 | }, 523 | { 524 | "counterSpecifier": "\\Network Interface(*)\\Packets Outbound Errors", 525 | "sampleRate": "PT1M" 526 | }, 527 | { 528 | "counterSpecifier": "\\Network Interface(*)\\Packets Received Errors", 529 | "sampleRate": "PT1M" 530 | } 531 | ] 532 | }, 533 | "WindowsEventLog": { 534 | "scheduledTransferPeriod": "PT1M", 535 | "DataSource": [ 536 | { 537 | "name": "Application!*[System[(Level = 1 or Level = 2 or Level = 3)]]" 538 | }, 539 | { 540 | "name": "Security!*[System[band(Keywords,4503599627370496)]]" 541 | }, 542 | { 543 | "name": "System!*[System[(Level = 1 or Level = 2 or Level = 3)]]" 544 | } 545 | ] 546 | } 547 | } 548 | } 549 | }, 550 | "protectedSettings": { 551 | "storageAccountName": "[parameters('diagnosticsStorageAccountName')]", 552 | "storageAccountKey": "[listKeys(parameters('diagnosticsStorageAccountId'),'2015-06-15').key1]", 553 | "storageAccountEndPoint": "https://core.windows.net/" 554 | } 555 | }, 556 | "dependsOn": [ 557 | "[concat('Microsoft.Compute/virtualMachines/', 
parameters('virtualMachineName'))]" 558 | ] 559 | } 560 | ], 561 | "outputs": { 562 | "adminUsername": { 563 | "type": "string", 564 | "value": "[parameters('adminUsername')]" 565 | } 566 | } 567 | } -------------------------------------------------------------------------------- /AzureTemplateParams.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#", 3 | "contentVersion": "1.0.0.0", 4 | "parameters": { 5 | "networkInterfaceName": { 6 | "value": "ceftestvm984" 7 | }, 8 | "enableAcceleratedNetworking": { 9 | "value": false 10 | }, 11 | "networkSecurityGroupName": { 12 | "value": "CefTestVM-nsg" 13 | }, 14 | "virtualNetworkName": { 15 | "value": "CEFTest-vnet" 16 | }, 17 | "addressPrefix": { 18 | "value": "10.0.0.0/24" 19 | }, 20 | "subnetName": { 21 | "value": "default" 22 | }, 23 | "subnetPrefix": { 24 | "value": "10.0.0.0/24" 25 | }, 26 | "publicIpAddressName": { 27 | "value": "CefTestVM-ip" 28 | }, 29 | "publicIpAddressType": { 30 | "value": "Dynamic" 31 | }, 32 | "publicIpAddressSku": { 33 | "value": "Basic" 34 | }, 35 | "virtualMachineName": { 36 | "value": "CefTestVM" 37 | }, 38 | "virtualMachineRG": { 39 | "value": "CEFTest" 40 | }, 41 | "osDiskType": { 42 | "value": "Premium_LRS" 43 | }, 44 | "diagnosticsStorageAccountType": { 45 | "value": "Standard_LRS" 46 | }, 47 | "diagnosticsStorageAccountKind": { 48 | "value": "Storage" 49 | }, 50 | "autoShutdownStatus": { 51 | "value": "Enabled" 52 | }, 53 | "autoShutdownTime": { 54 | "value": "19:00" 55 | }, 56 | "autoShutdownTimeZone": { 57 | "value": "Pacific Standard Time" 58 | }, 59 | "autoShutdownNotificationStatus": { 60 | "value": "Enabled" 61 | }, 62 | "autoShutdownNotificationLocale": { 63 | "value": "en" 64 | } 65 | } 66 | } -------------------------------------------------------------------------------- /Dockerfile_cef: 
-------------------------------------------------------------------------------- 1 | FROM vs 2 | 3 | RUN $env:Path = 'c:/Program Files/Git/bin/;' + $env:Path;setx /M PATH $env:Path; 4 | RUN Invoke-WebRequest 'https://storage.googleapis.com/chrome-infra/depot_tools.zip' -OutFile 'C:/code/depot_tools.zip'; 5 | RUN Expand-Archive C:/code/depot_tools.zip -DestinationPath "c:/code/depot_tools"; 6 | ARG GN_DEFINES="is_debug=false" 7 | ARG GN_ARGUMENTS="--ide=vs2019 --sln=cef --filters=//cef/*" 8 | ENV DEPOT_TOOLS_WIN_TOOLCHAIN 0 9 | ENV CEF_USE_GN 1 10 | ENV GYP_MSVS_VERSION 2019 11 | ARG DUAL_BUILD="0" 12 | ARG BINARY_EXT="zip" 13 | ARG GYP_DEFINES="target_arch=x64 " 14 | ARG CEF_SAVE_SOURCES 15 | ARG ARCHES 16 | ARG CHROME_BRANCH=3239 17 | RUN setX /M DEPOT_TOOLS_WIN_TOOLCHAIN 0;setX /M GYP_MSVS_VERSION "$env:GN_DEFINES";setx /M GN_DEFINES "$env:GN_DEFINES";setx /M GN_ARGUMENTS "$env:GN_ARGUMENTS";setx /M GYP_DEFINES "$env:GYP_DEFINES";setx /M DUAL_BUILD "$env:DUAL_BUILD";setx /M BINARY_EXT "$env:BINARY_EXT";setx /M ARCHES "$env:ARCHES";setx /M CEF_SAVE_SOURCES "$env:CEF_SAVE_SOURCES";setx /M CHROME_BRANCH "$env:CHROME_BRANCH"; 18 | 19 | ADD https://bitbucket.org/chromiumembedded/cef/raw/master/tools/automate/automate-git.py c:/code/automate/ 20 | #ADD automate-git-shallow-support.py c:/code/automate/automate-git.py 21 | RUN cd c:/code/depot_tools/;./update_depot_tools.bat;./update_depot_tools.bat; 22 | RUN $env:Path = 'c:/code/depot_tools/;' + $env:Path;setx /M PATH $env:Path; 23 | ADD functions.ps1 cef_build.ps1 cef_patch.ps1 cef_patch_*.diff c:/code/ 24 | 25 | 26 | 27 | CMD ["powershell", "c:/code/cef_build.ps1"] -------------------------------------------------------------------------------- /Dockerfile_cef_binary: -------------------------------------------------------------------------------- 1 | FROM cef as builder 2 | 3 | FROM vs 4 | COPY --from=builder c:/code/binaries c:/code/binaries 5 | 6 | SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop'; 
$ProgressPreference = 'SilentlyContinue';"] 7 | 8 | RUN [Net.ServicePointManager]::SecurityProtocol=[Net.SecurityProtocolType]::Tls12;Invoke-WebRequest 'https://github.com/cefsharp/cef-binary/archive/master.zip' -OutFile '/code/master.zip'; 9 | RUN Expand-Archive c:\code\master.zip -DestinationPath "C:\code\cef-binary"; 10 | WORKDIR /code/cef-binary/cef-binary-master/ 11 | RUN $env:Path = 'c:\Program Files (x86)\Microsoft Visual Studio\2019\Community\Common7\IDE\CommonExtensions\Microsoft\CMake\CMake\bin\;' + $env:Path;setx /M PATH $env:Path; 12 | RUN 'New-ItemProperty -Path Registry::HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\MSBuild\ToolsVersions\4.0 -Name VCTargetsPath -PropertyType String -Value "`$`(MSBuildExtensionsPath32)\Microsoft.Cpp\v4.0\"' 13 | ARG BINARY_EXT="zip" 14 | ARG ARCHES 15 | ARG CEFSHARP_VERSION 16 | 17 | ##Next lines incase trying custom build items before in master 18 | #ADD cef-binary-build.ps1 /code/cef-binary/cef-binary-master/build.ps1 19 | #Add chromiumembeddedframework.runtime.nuspec.template /code/cef-binary/cef-binary-master/NuGet/ 20 | #Add chromiumembeddedframework.runtime.win.nuspec /code/cef-binary/cef-binary-master/NuGet/ 21 | 22 | RUN ./build.ps1 -Verbose -DownloadBinary local -CefBinaryDir /code/binaries/ -CefVersion auto -NoDebugBuild -Extension $env:BINARY_EXT -BuildArches $env:ARCHES -Target vs2019; #here we use auto to pickup the actual CEF packages 23 | RUN ./build.ps1 -DownloadBinary local -CefBinaryDir /code/binaries/ -CefVersion $env:CEFSHARP_VERSION+fakebldv -NoDebugBuild -Extension $env:BINARY_EXT -BuildArches $env:ARCHES -Target nupkg-only; #next we set the cefversion for nuget to the desired cefsharp version rather than the true one from auto, this way all are aligned. 
24 | RUN Compress-Archive -Path NuGet -CompressionLevel Fastest -DestinationPath C:\packages -------------------------------------------------------------------------------- /Dockerfile_cef_create_from_binaries: -------------------------------------------------------------------------------- 1 | FROM vs 2 | ARG BINARY_EXT="zip" 3 | COPY *.$BINARY_EXT c:/code/binaries/ 4 | -------------------------------------------------------------------------------- /Dockerfile_cefsharp: -------------------------------------------------------------------------------- 1 | FROM cef_binary 2 | SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop'; $ProgressPreference = 'SilentlyContinue';"] 3 | RUN MKDIR c:/code/cefsharp 4 | WORKDIR c:/code/cefsharp 5 | RUN $env:Path = 'c:/Program Files/Git/bin/;' + $env:Path;setx /M PATH $env:Path; 6 | ARG CEFSHARP_VERSION=63.0.90 7 | RUN git clone https://github.com/cefsharp/CefSharp.git . 8 | ARG CEFSHARP_BRANCH="cefsharp/63" 9 | #This line and the one two below are due to waiting to use the new vs2017 compat build script 10 | RUN cp build.ps1 ../ 11 | RUN git checkout $env:CEFSHARP_BRANCH; 12 | RUN cp ../build.ps1 . 
13 | ARG CEF_VERSION_STR=auto 14 | ARG CHROME_BRANCH=3239 15 | ENV PACKAGE_SOURCE C:/code/cef-binary/cef-binary-master/NuGet 16 | ARG ARCHES 17 | ADD cefsharp_patch_*.diff cefsharp_set_versions_and_restore.ps1 functions.ps1 cefsharp_patch.ps1 c:/code/cefsharp/ 18 | RUN ./cefsharp_patch.ps1 19 | RUN ./cefsharp_set_versions_and_restore.ps1 20 | 21 | ##Next line for manually testing a replacement cefsharp build script before in master 22 | #ADD cefsharp_build.ps1 /code/cefsharp/build.ps1 23 | 24 | RUN [Net.ServicePointManager]::SecurityProtocol=[Net.SecurityProtocolType]::Tls12;./build.ps1 -BuildArches $env:ARCHES -Verbose -Target "update-build-version" -Version $env:CEFSHARP_VERSION -AssemblyVersion $env:CEFSHARP_VERSION -TargetFramework "NetFramework"; 25 | RUN [Net.ServicePointManager]::SecurityProtocol=[Net.SecurityProtocolType]::Tls12;./build.ps1 -BuildArches $env:ARCHES -Verbose -Target "vs2019" -Version $env:CEFSHARP_VERSION -AssemblyVersion $env:CEFSHARP_VERSION -TargetFramework "NetFramework"; 26 | RUN cp $env:PACKAGE_SOURCE/*.nupkg C:/code/cefsharp/nuget/ 27 | RUN Compress-Archive -Path C:/code/cefsharp/nuget/*.nupkg -CompressionLevel Fastest -DestinationPath C:\packages_cefsharp -------------------------------------------------------------------------------- /Dockerfile_vs: -------------------------------------------------------------------------------- 1 | ARG BASE_DOCKER_FILE="mcr.microsoft.com/windows/servercore:1903-amd64" 2 | FROM $BASE_DOCKER_FILE 3 | SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop'; $ProgressPreference = 'SilentlyContinue';"] 4 | 5 | ENV COMPLUS_NGenProtectedProcess_FeatureEnabled 0 6 | RUN mkdir c:\code\automate;mkdir c:\temp\;mkdir c:\code\chromium_git;;mkdir c:\code\chromium_git\depot_tools 7 | RUN Invoke-WebRequest 'https://aka.ms/vscollect.exe' -OutFile 'C:\TEMP\collect.exe'; 8 | RUN Invoke-WebRequest 'https://dist.nuget.org/win-x86-commandline/v4.1.0/nuget.exe' -OutFile 'c:\code\chromium_git\depot_tools\;'; 9 | 
10 | RUN Invoke-WebRequest 'https://www.7-zip.org/a/7z1801-x64.msi' -OutFile '/code/7zip.msi'; 11 | RUN Start-Process msiexec.exe -Wait -ArgumentList '/i c:\code\7zip.msi /quiet /norestart /L*v install.log' 12 | 13 | 14 | WORKDIR "Program Files" 15 | #Powershell 50 char limit fix 16 | RUN Remove-Item -Recurse -Force '.\WindowsPowerShell\Modules\PSReadLine' 17 | WORKDIR c:\\code\\chromium_git\\depot_tools 18 | 19 | #Not sure why the community installer doesn't instal the debugger with --all but all the same this is a quick fix. 20 | RUN Invoke-WebRequest https://go.microsoft.com/fwlink/?linkid=2120843 -OutFile 'C:\code\winsdksetup.exe'; 21 | RUN $p = Start-Process -Wait -PassThru -FilePath C:\code\winsdksetup.exe -ArgumentList ' /features OptionId.WindowsDesktopDebuggers /quiet /Log #"%TEMP%\\winsdksetup.log /norestart'; if (($ret = $p.ExitCode) -and ($ret -ne 3010)) { $log_path=$env:Temp; $err = 'Install ' + 'failed with exit code 0x{0:x} error logs in: ' + $log_path + '\winsdksetup.log'; throw ($err -f $ret) } 22 | 23 | 24 | RUN Invoke-WebRequest 'https://aka.ms/vs/16/release/vs_community.exe' -OutFile 'C:\code\vs_community.exe'; 25 | 26 | #3010 is fine, as that is just restart requested. 
27 | # https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-community 28 | # Some of these are more needed for cefsharp like 4.5.2 and 4.6.2 (it uses both targets depending on project) 29 | RUN $p = Start-Process -Wait -PassThru -FilePath C:\code\vs_community.exe -ArgumentList ' --quiet --wait --norestart --nocache --add Microsoft.Net.Component.4.7.2.SDK --add Microsoft.NetCore.Component.SDK --add Microsoft.NetCore.Component.Runtime.5.0 --add Microsoft.VisualStudio.Component.Windows10SDK --remove Microsoft.Net.Component.4.6.1.SDK --remove Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Win81 --remove Microsoft.VisualStudio.Component.Windows81SDK --add Microsoft.Net.Component.4.5.2.TargetingPack --add Microsoft.Net.Component.4.6.2.TargetingPack --add Microsoft.Net.Component.4.7.2.TargetingPack --add Microsoft.Net.Component.4.6.2.SDK --add Microsoft.VisualStudio.Component.Windows10SDK.18362 --add Microsoft.VisualStudio.Component.Windows10SDK.19041 --remove Microsoft.VisualStudio.Component.Windows10SDK.16299.Desktop --remove Microsoft.VisualStudio.Component.Windows10SDK.17134 --add Microsoft.VisualStudio.Component.Git --remove Microsoft.VisualStudio.Component.Windows10SDK.15063.Desktop --add Microsoft.VisualStudio.Workload.NativeDesktop --add Microsoft.VisualStudio.Component.VC.ATLMFC --includeRecommended --remove Microsoft.VisualStudio.Component.Windows10SDK.10240 --remove Microsoft.VisualStudio.Component.Windows10SDK.10586 --remove Component.Android.NDK.R12B --remove Component.Android.SDK23.Private --remove Component.MDD.Android --remove Component.Unreal.Android --remove Component.Android.NDK.R15C --remove Component.Android.SDK19 --remove Component.Android.SDK22 --remove Component.Android.SDK23 --remove Component.Android.SDK25 --remove Component.MDD.Android --remove Component.Android.NDK.R12B --remove Component.Android.NDK.R12B_3264 --remove Component.Android.NDK.R13B --remove Component.Android.NDK.R13B_3264 --remove 
Component.Android.NDK.R15C_3264 --remove Component.Google.Android.Emulator.API23.V2 --remove Component.Android.SDK25 --remove Component.Google.Android.Emulator.API25 --remove Component.Android.SDK23.Private --remove Component.Google.Android.Emulator.API23.Private --remove Component.Android.Emulator --remove Component.Android.NDK.R11C --remove Component.Android.NDK.R11C_3264 --remove Microsoft.VisualStudio.Component.Windows10SDK.14393 --remove Microsoft.VisualStudio.Component.Phone.Emulator.15254 --remove Microsoft.VisualStudio.Component.Phone.Emulator.15254 --remove Microsoft.VisualStudio.Component.Phone.Emulator --remove Microsoft.VisualStudio.Component.Phone.Emulator.15063 --remove Component.Anaconda3.x64 --remove Component.Anaconda2.x64 --remove Component.Anaconda2.x86 --remove Component.Anaconda3.x86 --remove Microsoft.VisualStudio.Component.Unity --remove Component.UnityEngine.x64 --remove Component.UnityEngine.x86 --remove Component.Incredibuild --remove Component.IncredibuildMenu --remove Microsoft.VisualStudio.Component.Sharepoint.Tools --remove Microsoft.VisualStudio.Component.TeamOffice --remove Component.Cocos;'; if (($ret = $p.ExitCode) -and ($ret -ne 3010)) { C:\TEMP\collect.exe;$log_path = $env:Temp;$err='Install failed with exit code 0x{0:x} error logs in: ' + $log_path + '\vslogs.zip'; throw ($err -f $ret) }; Remove-Item -Recurse -Force 'c:\ProgramData\Package Cache\' 30 | RUN $env:Path = 'c:/program files/dotnet/;' + $env:Path;setx /M PATH $env:Path; 31 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # CefSharp Dockerfiles 2 | 3 | 4 | 5 | - [Summary](#summary) 6 | - [Thanks](#thanks) 7 | - [Quick Start](#quick-start) 8 | - [Caveats](#caveats) 9 | - [Requirements](#requirements) 10 | - [New Volume Mode](#new-volume-mode) 11 | - [Server Setup](#server-setup) 12 | - [Docker For Windows Config 
File](#docker-for-windows-config-file) 13 | - [Azure Specifics](#azure-specifics) 14 | - [Azure Auto Create Scripts](#azure-auto-create-scripts) 15 | - [Estimated time requirements](#estimated-time-requirements) 16 | - [HyperV Isolation \(for server or Windows 10 client\) Mode](#hyperv-isolation-for-server-or-windows-10-client-mode) 17 | - [Build Process](#build-process) 18 | - [Dual Build Flag](#dual-build-flag) 19 | - [Docker for Windows Caveats](#docker-for-windows-caveats) 20 | - [How requirements were determined](#how-requirements-were-determined) 21 | - [Patching CEF / CEFSharp](#patching-cef--cefsharp) 22 | - [Building only CEF or CEFSharp](#building-only-cef-or-cefsharp) 23 | - [Sample of building only CEFSharp](#sample-of-building-only-cefsharp) 24 | - [General Warnings for build flags:](#general-warnings-for-build-flags) 25 | - [Additional Resources](#additional-resources) 26 | 27 | 28 | 29 | ## Summary 30 | 31 | Automated chrome cef building and/or cefsharp building dockerfiles and scripts. 32 | 33 | While the processes of building CEF and CEFSHARP are not hard they require a very exacting environment and build steps can take a _long_ time so are annoying to repeat. The goal if this repo is a collection of scripts to automate everything to make it easy for anyone to do. We are using Docker to run everything in a container as it makes it much easier to reproduce and won't pollute your dev environment with all the pre-reqs. You can easily tweak the exact versions you want to build, and the build flags. From creating a VM on your cloud provider of choice (or your own machine) it is about 20 minutes of setup, starting a build script, and just waiting a few hours for it to spit out the compiled binaries. It has been tested with chrome 63->100 but would likely work for any modern chrome build without changes (in most cases). 
34 | 35 | ## Thanks 36 | 37 | Thanks to the fantastic CEFSharp team, especially @amaitland who works insanely hard on the open source project. @perlun provided some great direction on the Windows building and was also a huge help. Please support CEFSharp if you use it, even if you do a small monthly donation of $10 or $25 it can be a big help: https://github.com/sponsors/amaitland (primary CEFSharp developer, and full time parent). 38 | 39 | ## Quick Start 40 | 41 | If you are building in process isolation mode (recommended) make sure the base image file is the same build as your actual OS (or the VM's os). IE if you are on windows Fall 2018 release 1803 (10.0.17134) change VAR_BASE_DOCKER_FILE to the 1803 image. Run ./build.ps1 and it should build the packages. **Warning**: The Windows 10 Client May 2021/Nov 2021 DOES NOT support the more performant process isolation mode. See [HyperV Isolation (for server or Windows 10 client) Mode](#hyperv-isolation-for-server-or-windows-10-client-mode) below for details. 42 | 43 | For a super fast start look at the [azure auto provision option below](#azure-auto-create-scripts). As long as you have an azure account created it can create the entire setup and build in a few commands. 44 | If using Azure create a F32_v2 VM with the image "Datacenter Core 1903 with Containers", if using another machine just install docker for windows (make sure you have 20GB (40GB for chrome < 65) of ram between actual ram + page file). Set the [Docker For Windows Config File](#docker-for-windows-config-file) changing the path to the folder to store data on (suggested local temp drive) and restart docker service. Copy the items from this repo into a folder. Copy the versions_src.ps1 to versions.ps1 and change the variables to what you want: for example ```$VAR_GN_DEFINES="is_official_build=true proprietary_codecs=true ffmpeg_branding=Chrome";$VAR_DUAL_BUILD="1";```. 
Only use DUAL_BUILD if you have 30 gigs of ram or more, otherwise leave it at 0 and the build will take an extra 20-40 minutes. 45 | 46 | ## Caveats 47 | 48 | Beware if using the exact same version string as an official CEF Build as it will mean you need to make sure your nuget source is always used before the master source. If you use a slightly different minor build you will not have that problem. For CefSharp you can use a manual higher fake minor version number(ie .99) to not get confused with the official builds (but the CEF build note above still applies). 49 | 50 | In part we use the latest version of several installers/build tools if they changed so might the success of these dockerfiles. It does not build the debug versions of CEF or CEFSharp. This could be added as an option pretty easily (but would probably at-least double build times). For some reason I had issues getting the automated build script for CEF to work doing the calls by hand is pretty basic however. 51 | 52 | Window 10 Client (Pro) by default with docker uses HyperV isolation, this mode is very non performant vs process isolation mode. Make sure to change it (and see the note below, but beware does not work on 21H1/21H2 builds). 53 | 54 | ## Requirements 55 | 56 | The following requirements are for chrome and the current vs_2019 installer, they may change over time. Compiling is largely CPU bound but linking is largely IO bound. 57 | 58 | - At least 20GB of ram dedicated to this would recommend 30GB total with page file to make sure you don't run out (older builds like 63 were 32GB with 40GB total). You can have any amount of that 20/30GB as a page file, just beware the less actual ram the much slower linking will be. 59 | - At least 250GB of space. 60 | 61 | ## New Volume Mode 62 | 63 | Due to a bug in 1903 (https://github.com/microsoft/hcsshim/issues/708) a new method had to be found rather than building all the cef source in a normal docker build step. 
The solution was to move the source building to store data in a volume. This has several benefits now including: builds can be resumed (if you force the same volume to be used), source code can be left behind on the volume as docker will not manually copy this with the storage driver (previously very non-performant). Downsides include the fact it will leave volumes behind that need to be removed from unsuccessful builds, there is a bit of a hacked step to support volumes. To support volumes we have to use `docker run` for the cef build step (as docker build does not support volumes during build). This means we needed a way to detect IF we need to run the build step or if it was already completed successfully. To do this we rely on some tag naming tests (see build script). 64 | 65 | ## Server Setup 66 | 67 | There is not much in terms of a software requirements other than docker. You can run it on Windows Server or Windows 10 Client. 68 | For Windows 10 Client Install it from https://store.docker.com/editions/community/docker-ce-desktop-windows. For server Docker EE from: https://docs.docker.com/install/windows/docker-ee/#docker-universal-control-plane-and-windows (or standard docker for windows for desktops) if docker is not auto installed. If installing on Windows 10 Client make sure to see the Hyper V Notes below. 69 | 70 | ### Docker For Windows Config File 71 | 72 | You will want a docker configuration with options similar to this. On windows client you can use the docker settings (Right-click on the Docker whale icon in the on the task bar, then click "Settings..." then click to advanced mode). 
For server the file is edited directly (or created if it didn't exist) at C:\ProgramData\docker\config\daemon.json 73 | 74 | ``` 75 | { 76 | "registry-mirrors": [], 77 | "insecure-registries": [], 78 | "debug": true, 79 | "experimental": false, 80 | "exec-opts": [ 81 | "isolation=process" 82 | ], 83 | "data-root": "d:/docker_data", 84 | "storage-opts": [ 85 | "size=400G" 86 | ] 87 | } 88 | ``` 89 | 90 | ### Azure Specifics 91 | 92 | If you are new to Azure it is pretty easy to get started and they will give you $200 for your first month free so there will be no costs. Below we even have an auto deploy script if you prefer not to do it by hand. 93 | An Azure F32 v2 is pretty good, its only 256 gigs of space but that should be ok. ~$2.72 an hour in WestUS2 running the latest windows image. You can use the prebuilt image "Windows Server 2016 Datacenter - with Containers" or a newer one if it exists. You can either use one with a full shell (pre 1709) or one of the newer builds like "Windows Server 2016 Datacenter - with Containers 1803". Without a full shell you won't have explorer and remote desktop will just open a command prompt. You can launch notepad and manage it all through there (or use remote PS) but a full shell is easier for some people. Use the local SSD as the docker storage folder (note this will likely get wiped if you de-allocate the machine so do the entire build at once). You could potentially hook up a huge number of disks in raid 0 configuration to get somewhat decent speed that way. 94 | Create a new resource, search for the prebuilt image noted above. You do not need managed disks, assign a random user/password, new network/storage/etc is all fine. For the size make sure you select one of the F series (F32 recommended). It won't show by default, leave HD type set to SSD put Min CPU's at 32 and Ram at 64 then hit "View all". 95 | 96 | I suggest auto-shutdown to make sure you don't leave it running. 
97 | 98 | #### Azure Auto Create Scripts 99 | 100 | If you have an azure account already created you can use the az_create.ps1 script to automatically setup the VM for you. It will create a Standard_F32s_v2 VM by default but this is 32 cpu's. New Azure accounts need to file a support request to increase CPU quota beyond 10. These are often approved within minutes but if you don't want to do so you can edit az_create.ps1 and change the size to something like a Standard_F8s_v2. It will take longer to build but your cost will be roughly the same total cost. 101 | 102 | It will create everything under a new "CEFTest" resource group to make cleanup at the end easy. You can adjust the settings at the top if desired but really the only important options you pass as options to it. It will setup the VM and enable remote powershell to make the process very easy. Just launch powershell (or type powershell into the run box in windows). If the first time using powershell with azure you will need to install the tools for azure: ```Install-Module -Name AzureRM -Scope CurrentUser```. Next change to the folder with all the CefSharpDockerfiles (cd c:\downloads\CefSharpDockerfiles for example). 103 | 104 | Login with your azure credentials first: 105 | ```Connect-AzureRmAccount``` 106 | 107 | Then if you have multiple subscriptions set the one you want with: 108 | ```Set-AzureRmContext -SubscriptionName "My Subscription"``` 109 | 110 | Next run this and enter a new username and password to configure the new VM with: 111 | ```$cred = Get-Credential -Message "Enter user and password for remote machine admin"``` 112 | 113 | Next we will run the deploy script, by default it can configure the machine to automatically shutdown at 11:30pm PDT if you provide an email it will do this and notify you. If you do not you need to manually turn the machine off when done. 
You can adjust time and such in the az_create.ps1 at the top (along with some other items but likely you do not need to adjust them): 114 | ```./az_create.ps1 -admin_creds $cred -shutdown_email "john@gmail.com"``` 115 | 116 | It should print out when done Public IP: 123.123.123.123 117 | 118 | Next set this in a variable $IP_ADDY like: 119 | ```$IP_ADDY = "123.123.123.123"``` 120 | 121 | **Note we are disabling the security checks in the remote powershell session. This could make you vulnerable to MITM attacks if on an unsafe network.** 122 | 123 | Next create the remote powershell session and copy the files over, and disable anti virus real time scanning (slows down compiling significantly): 124 | 125 | ``` 126 | $so = New-PsSessionOption -SkipCACheck -SkipCNCheck -SkipRevocationCheck; 127 | $remote = New-PSSession -ComputerName $IP_ADDY -UseSSL -SessionOption $so -Credential $cred; 128 | 129 | Get-ChildItem -Path "./" | Copy-Item -ToSession $remote -Destination "C:/CefSharpDockerfiles/" 130 | Copy-Item -ToSession $remote daemon.json -Destination "c:/ProgramData/docker/config/daemon.json"; 131 | Invoke-Command -Session $remote -ScriptBlock { Restart-Service Docker;Set-MpPreference -DisableRealtimeMonitoring $true; } 132 | ``` 133 | 134 | Next we will "enter" the remote machine via powershell: 135 | 136 | ``` 137 | Enter-PSSession $remote 138 | ``` 139 | 140 | Then you should see your terminal is like ```[123.123.123.123]: PS c:\>``` showing you are on the remote machine. 141 | 142 | Finally we start the build: 143 | 144 | ``` 145 | cd C:/CefSharpDockerfiles 146 | ./build.ps1 147 | ``` 148 | 149 | When done exit out the remote session by typing: ```exit``` 150 | 151 | Finally we copy the resulting files locally: 152 | ```Copy-Item -FromSession $remote "c:/CefSharpDockerfiles/packages_cefsharp.zip" -Destination ".";``` 153 | 154 | You could also expose the docker server to the internet and use remote docker commands rather than running the powershell remotely. 
When done you can delete the entire CEFTest resource group from the azure portal as well to clean everything up. 155 | 156 | ### Estimated time requirements 157 | 158 | With the Azure F32 v2 host above the total estimated build time is about 2-3 hours depending on CEF version (~$6 on azure). Machines are nice 600MB/sec read/write to the local disk. There are larger VM's as well up to 72+ CPU's however some of the build stages are very linear. Note it can vary somewhat dramatically for the not cef build steps based on the luck of the draw (but the cef build is most of the build time). It seems local IO depending on what physical host it is spun up on can cause 30-50% performance fluxes. Most of the build steps make efficient use of the machine however: The git cloning is not very efficient. It is 30 minutes of the cef build time below. It doesn't quite max out network or IO. The linking stage is also not super efficient see the DUAL_BUILD flag below to help with that. Linking will take 20+ minutes per platform (40 total unless run concurrently). Here are the individual build/commit times: 159 | 160 | - pull source image: 3 minutes 161 | - vs: 8 minutes 162 | - cef: 1.8->3.2 hours (with DUAL_BUILD) 163 | - cef-binary: 3 minutes 164 | - cefsharp: 4 minutes 165 | 166 | ### HyperV Isolation (for server or Windows 10 client) Mode 167 | 168 | HyperV isolation mode should be avoided if possible. It is slower, and more prone to fail. For Windows 10 clients below 1809 (October 2018 edition) there is not a **legal** alternative. You must also use Docker Desktop build newer than October of 2018. NOTE: If you are not using process isolation mode you WILL need to set ```$VAR_HYPERV_MEMORY_ADD``` and make sure your page file is properly sized (recommend a page file at least a few gigs bigger as it needs that amount of FREE page file space). It will set the memory on every docker build step to up the default memory limit. Technically this is primarily needed in the CEF build step. 
NOTE if you stop docker during a build with HyperV it does not properly kill off the hyperV container restart docker to fix this. **Warning**: The Windows 10 May 2021/Nov 2021 (21H1/21H2 10.0.19043/10.0.19044) DOES NOT support process isolation mode. This is due to an MS failure per [micorosft/windows-containers#163](https://github.com/microsoft/Windows-Containers/issues/163). If you want isolation support you must use builds 20H2 (10.0.19042) or lower OR Windows 11. Note Hyper-V isolation can be over 3 times as slow as process isolation mode for Windows 10 clients. 169 | 170 | ## Build Process 171 | 172 | Once docker is setup and running copy this repo to a local folder. Copy versions_src.ps1 to versions.ps1 and change the version strings to match what you want. NOTE BASE_DOCKER_FILE must match the same kernel as the host machine IF you are using process isolation mode. This means you cannot use the 1709 image on an older host and you can use and older image on a 1709 host. Either base file is fine however to use just match it to the host. 173 | 174 | Next run build.ps1 and if you are lucky you will end up with a cefsharp_packages.zip file with all the nupkg files you need:) Beware that as docker might be flaky(especially in hyperV mode) you may need to call build.ps1 a few times. It should largely just resume. Once it is done building you will have the cefsharp_packages.zip file. If you want any of the CEF binaries, or symbol files, you can copy them from the CEF image like: ```docker cp cef:c:/code/binaries/*.zip .``` 175 | 176 | To be safer you can run the biggest build command by hand. The hardest (longest) build step if the CEF build at the start. You can comment out the last step in the dockerfile and manually do that step and commit it. Infact you can just docker run the build image from before than manually call cef_build.ps1 one or more times (it should do a decent job at auto-resuming) until success. 
If you are using a proper host with enough ram it should be able to automatically build 9 times out of 10 (if not higher) with its current redundant tries. Of course if you prefer to manually run the commands from it you can do that too. To do so comment out the final build step in Dockerfile_cef then run the following: 177 | 178 | ``` 179 | #So if the autmate-git.py doesn't work (if something errors out it doesn't always stop at the right point) try running the build steps manually that are there. 180 | # From the c:/code/chromium_git/chromium/src folder run the build hooks to download tools: gclient runhooks 181 | # From the c:/code/chromium_git/chromium/src/cef folder run the following to make the projects: ./cef_create_projects.bat 182 | # From the c:/code/chromium_git/chromium/src 183 | # ninja -C out/Release_GN_x64 cefclient 184 | # ninja -C out/Release_GN_x86 cefclient 185 | # cd C:/code/chromium_git/chromium/src/cef/tools/ 186 | # C:/code/chromium_git/chromium/src/cef/tools/make_distrib.bat --ninja-build --allow-partial; 187 | # c:/code/chromium_git/chromium/src/cef/tools/make_distrib.bat --ninja-build --allow-partial --x64-build; 188 | # Allow partial needed if not building debug builds, make sure to run it when done or run the cef_build last few commands to create the archive with the result and to clean up the workspace of the source files. 189 | ``` 190 | 191 | ### Dual Build Flag 192 | 193 | Note the DUAL_BUILD may speed up builds by running x86 and x64 builds concurrently (each with 1/2 as many threads). This is primarily useful during linking. Linking is largely single threaded and takes awhile and is single thread CPU bound (given enough IO). The main issue is memory usage. If both linking steps run at once you may need nearly 30GB of memory at once (in worst case older builds would use up to 50GB). 
It would be better if they linked at slightly different times, but as every compute system is different there did not seem to be a good way to determine how long to sleep to make it most efficient.
First you work in the vhd file, so all changes made while it is building or you are running it happen in the VHD. Second after commit / build step finishes the container will exit. Docker will not return until it fully commits this build step (but the container will NOT show running). Docker starts the diff with the VHD and copies all the files for that layer to docker_data\tmp\random_id\. Oddly it actually seems to create one random tmp random id folders with duplicate data from the VM, then it reads each file in this tmp folder writing it to another docker-data\tmp\random_id\ folder. It slowly deletes from one of them once it finishes writing the second. Then it makes another copy to the docker_data\windowsfilter\final_id permanent folder then removes the temp folder and the original VHD. I am not sure why all the copying. This can take A LONG time (hours on a 7200 rpm drive), the only way to know if this is going on is watch your storage. If docker is writing then its doing it. Use procmon.exe if it is reading from a VHD writing to a tmp folder then its step 1. If it is reading from one tmp folder and writing to another tmp folder that is step 2. If it is reading from a tmp folder and writing to a windowsfilter sub folder then it is on the final step 3. 202 | - Sometimes docker may start to mis-behave often restarting docker may fix the problem. Sometimes a full reboot is needed. 203 | 204 | ### How requirements were determined 205 | 206 | - Space: windows base ~6 gigs, ~9 gigs for the finished visual studio build image. Another 20 or so when done with cefsharp. Chrome will take 200 gigs or so during build for the VHD, we remove the bulk of this before it finishes though. So for docker storage I would recommend 16 + 200 = ~ 220 gigs of space + some buffer so maybe 250GB. 207 | - Memory: For Chrome 63 bare minimum memory requirements (actual + page file) for JUST the linker is x86: 24.2 GB x64: 25.7 GB. 
For chrome 67 however the memory requirements are much lower, only 13GB for linking! I would make sure you have at least 24 gigs of ram to be safe with OS and other overhead, for older versions at least 32GB. 208 | 209 | ## Patching CEF / CEFSharp 210 | 211 | - If so desired you can patch CEF or CEFSharp relatively easily. Place a file named cef_patch_XXXX.diff or cefsharp_patch_XXXX.diff to the build folder. You can change XXX to whatever you want, and even have multiple if desired. It will automatically be applied with git apply. This works for several different patch formats (anything git apply will take will work). 212 | 213 | ## Building only CEF or CEFSharp 214 | 215 | - You can build just CEF and not cefsharp by setting $VAR_CEF_BUILD_ONLY to $true in the versions.ps1. 216 | - If you want to only build CEFSharp you will need to provide the CEF binaries (either you built or official ones from: http://opensource.spotify.com/cefbuilds/index.html). You should download both 32 bit and 64 bit standard distribution versions and put them in a local folder. Then edit versions.ps1 and set $VAR_CEF_USE_BINARY_PATH to the local folders. You should then set $VAR_CEF_BINARY_EXT to the extension of them (ie zip or tar.bz2 for example). 
217 | 218 | ### Sample of building only CEFSharp 219 | 220 | - Download the binary files to a local folder like: http://opensource.spotify.com/cefbuilds/cef_binary_3.3396.1785.ga27bbfa_windows64.tar.bz2 and http://opensource.spotify.com/cefbuilds/cef_binary_3.3396.1785.ga27bbfa_windows32.tar.bz2 221 | - copy the version_src.ps1 to version.ps1 222 | - Edit version.ps1 set the folder the binaries are in for example: `$VAR_CEF_USE_BINARY_PATH=".\..\CEFBinaries\";` and set extension to the downloaded extension: `$VAR_CEF_BINARY_EXT="tar.bz2";` 223 | - Run build.ps1 224 | 225 | ## General Warnings for build flags: 226 | 227 | - Cannot do component builds as it will not work for other items 228 | - Remove_webcore_debug_symbols seemed to also cause issues 229 | - DON'T USE is_win_fastlink as it is only for debug builds not for release 230 | - YOU MUST DO A --quiet VS install for headless, otherwise it will just hang forever. 231 | - use_jumbo_build see http://magpcss.org/ceforum/viewtopic.php?p=37293 about enabling this if you are doing proprietary_codecs as well, note this does not seem to actually cause a problem however in the builds we tested. 232 | 233 | ## Additional Resources 234 | 235 | The following were helpful: 236 | 237 | - https://perlun.eu.org/en/2017/11/30/building-chromium-and-cef-from-source 238 | - https://bitbucket.org/chromiumembedded/cef/wiki/MasterBuildQuickStart.md 239 | - https://docs.microsoft.com/en-us/visualstudio/install/advanced-build-tools-container 240 | - https://docs.microsoft.com/en-us/visualstudio/install/build-tools-container 241 | - https://chromium.googlesource.com/chromium/src/+/lkcr/docs/windows_build_instructions.md 242 | -------------------------------------------------------------------------------- /automate-git-shallow-support.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014 The Chromium Embedded Framework Authors. All rights 2 | # reserved. 
Use of this source code is governed by a BSD-style license that 3 | # can be found in the LICENSE file. 4 | 5 | from __future__ import absolute_import 6 | from __future__ import print_function 7 | from datetime import datetime 8 | import json 9 | from io import open 10 | from optparse import OptionParser 11 | import os 12 | import re 13 | import shlex 14 | import shutil 15 | import subprocess 16 | import sys 17 | import tempfile 18 | import zipfile 19 | 20 | is_python2 = sys.version_info.major == 2 21 | 22 | if is_python2: 23 | from urllib import FancyURLopener 24 | from urllib2 import urlopen 25 | else: 26 | from urllib.request import FancyURLopener, urlopen 27 | 28 | ## 29 | # Default URLs. 30 | ## 31 | 32 | depot_tools_url = 'https://chromium.googlesource.com/chromium/tools/depot_tools.git' 33 | depot_tools_archive_url = 'https://storage.googleapis.com/chrome-infra/depot_tools.zip' 34 | 35 | cef_git_url = 'https://bitbucket.org/chromiumembedded/cef.git' 36 | 37 | chromium_channel_json_url = 'https://omahaproxy.appspot.com/all.json' 38 | 39 | shallow_git_depth_limit = '' 40 | shallow_gclient_no_history = '' 41 | 42 | if os.environ.get('SHALLOW') == '1': 43 | shallow_gclient_no_history = '--no-history' 44 | shallow_git_depth_limit = '--depth 1' 45 | 46 | 47 | ## 48 | # Global system variables. 49 | ## 50 | 51 | # Script directory. 52 | script_dir = os.path.dirname(__file__) 53 | 54 | ## 55 | # Helper functions. 56 | ## 57 | 58 | 59 | def msg(message): 60 | """ Output a message. """ 61 | sys.stdout.write('--> ' + message + "\n") 62 | 63 | 64 | def run(command_line, working_dir, depot_tools_dir=None, output_file=None): 65 | """ Runs the specified command. 
""" 66 | # add depot_tools to the path 67 | env = os.environ 68 | if not depot_tools_dir is None: 69 | env['PATH'] = depot_tools_dir + os.pathsep + env['PATH'] 70 | 71 | sys.stdout.write('-------- Running "'+command_line+'" in "'+\ 72 | working_dir+'"...'+"\n") 73 | if not options.dryrun: 74 | args = shlex.split(command_line.replace('\\', '\\\\')) 75 | 76 | if not output_file: 77 | return subprocess.check_call( 78 | args, cwd=working_dir, env=env, shell=(sys.platform == 'win32')) 79 | try: 80 | msg('Writing %s' % output_file) 81 | with open(output_file, 'w', encoding='utf-8') as fp: 82 | return subprocess.check_call( 83 | args, 84 | cwd=working_dir, 85 | env=env, 86 | shell=(sys.platform == 'win32'), 87 | stderr=subprocess.STDOUT, 88 | stdout=fp) 89 | except subprocess.CalledProcessError: 90 | msg('ERROR Run failed. See %s for output.' % output_file) 91 | raise 92 | 93 | 94 | def create_directory(path): 95 | """ Creates a directory if it doesn't already exist. """ 96 | if not os.path.exists(path): 97 | msg("Creating directory %s" % (path)) 98 | if not options.dryrun: 99 | os.makedirs(path) 100 | 101 | 102 | def delete_directory(path): 103 | """ Removes an existing directory. """ 104 | if os.path.exists(path): 105 | msg("Removing directory %s" % (path)) 106 | if not options.dryrun: 107 | shutil.rmtree(path, onerror=onerror) 108 | 109 | 110 | def copy_directory(source, target, allow_overwrite=False): 111 | """ Copies a directory from source to target. """ 112 | if not options.dryrun and os.path.exists(target): 113 | if not allow_overwrite: 114 | raise Exception("Directory %s already exists" % (target)) 115 | remove_directory(target) 116 | if os.path.exists(source): 117 | msg("Copying directory %s to %s" % (source, target)) 118 | if not options.dryrun: 119 | shutil.copytree(source, target) 120 | 121 | 122 | def move_directory(source, target, allow_overwrite=False): 123 | """ Copies a directory from source to target. 
""" 124 | if not options.dryrun and os.path.exists(target): 125 | if not allow_overwrite: 126 | raise Exception("Directory %s already exists" % (target)) 127 | remove_directory(target) 128 | if os.path.exists(source): 129 | msg("Moving directory %s to %s" % (source, target)) 130 | if not options.dryrun: 131 | shutil.move(source, target) 132 | 133 | 134 | def is_git_checkout(path): 135 | """ Returns true if the path represents a git checkout. """ 136 | return os.path.exists(os.path.join(path, '.git')) 137 | 138 | 139 | def exec_cmd(cmd, path): 140 | """ Execute the specified command and return the result. """ 141 | out = '' 142 | err = '' 143 | sys.stdout.write("-------- Running \"%s\" in \"%s\"...\n" % (cmd, path)) 144 | parts = cmd.split() 145 | try: 146 | process = subprocess.Popen( 147 | parts, 148 | cwd=path, 149 | stdout=subprocess.PIPE, 150 | stderr=subprocess.PIPE, 151 | shell=(sys.platform == 'win32')) 152 | out, err = process.communicate() 153 | except IOError as e: 154 | (errno, strerror) = e.args 155 | raise 156 | except: 157 | raise 158 | return {'out': out.decode('utf-8'), 'err': err.decode('utf-8')} 159 | 160 | 161 | def get_git_hash(path, branch): 162 | """ Returns the git hash for the specified branch/tag/hash. """ 163 | cmd = "%s rev-parse %s" % (git_exe, branch) 164 | result = exec_cmd(cmd, path) 165 | if result['out'] != '': 166 | return result['out'].strip() 167 | return 'Unknown' 168 | 169 | 170 | def get_git_date(path, branch): 171 | """ Returns the date for the specified branch/tag/hash. """ 172 | cmd = "%s show -s --format=%%ct %s" % (git_exe, branch) 173 | result = exec_cmd(cmd, path) 174 | if result['out'] != '': 175 | return datetime.utcfromtimestamp( 176 | int(result['out'].strip())).strftime('%Y-%m-%d %H:%M:%S UTC') 177 | return 'Unknown' 178 | 179 | 180 | def get_git_url(path): 181 | """ Returns the origin url for the specified path. 
""" 182 | cmd = "%s config --get remote.origin.url" % (git_exe) 183 | result = exec_cmd(cmd, path) 184 | if result['out'] != '': 185 | return result['out'].strip() 186 | return 'Unknown' 187 | 188 | 189 | def download_and_extract(src, target): 190 | """ Extracts the contents of src, which may be a URL or local file, to the 191 | target directory. """ 192 | temporary = False 193 | sys.stdout.write("-------- downloading and extracting \"%s\" to \"%s\"...\n" % (src, target)) 194 | if src[:4] == 'http': 195 | # Attempt to download a URL. 196 | opener = FancyURLopener({}) 197 | response = opener.open(src) 198 | 199 | temporary = True 200 | handle, archive_path = tempfile.mkstemp(suffix='.zip') 201 | os.write(handle, response.read()) 202 | os.close(handle) 203 | elif os.path.exists(src): 204 | # Use a local file. 205 | archive_path = src 206 | else: 207 | raise Exception('Path type is unsupported or does not exist: ' + src) 208 | 209 | if not zipfile.is_zipfile(archive_path): 210 | raise Exception('Not a valid zip archive: ' + src) 211 | 212 | # Attempt to extract the archive file. 213 | try: 214 | os.makedirs(target) 215 | zf = zipfile.ZipFile(archive_path, 'r') 216 | zf.extractall(target) 217 | except: 218 | shutil.rmtree(target, onerror=onerror) 219 | raise 220 | zf.close() 221 | 222 | # Delete the archive file if temporary. 223 | if temporary and os.path.exists(archive_path): 224 | os.remove(archive_path) 225 | 226 | 227 | def read_file(path): 228 | """ Read a file. """ 229 | if os.path.exists(path): 230 | with open(path, 'r', encoding='utf-8') as fp: 231 | return fp.read() 232 | else: 233 | raise Exception("Path does not exist: %s" % (path)) 234 | 235 | 236 | def write_fp(fp, data): 237 | if is_python2: 238 | fp.write(data.decode('utf-8')) 239 | else: 240 | fp.write(data) 241 | 242 | 243 | def write_file(path, data): 244 | """ Write a file. 
""" 245 | msg('Writing %s' % path) 246 | if not options.dryrun: 247 | with open(path, 'w', encoding='utf-8') as fp: 248 | write_fp(fp, data) 249 | 250 | 251 | def read_config_file(path): 252 | """ Read a configuration file. """ 253 | # Parse the contents. 254 | return eval(read_file(path), {'__builtins__': None}, None) 255 | 256 | 257 | def write_config_file(path, contents): 258 | """ Write a configuration file. """ 259 | data = "{\n" 260 | for key in sorted(contents.keys()): 261 | data += " '%s': '%s',\n" % (key, contents[key]) 262 | data += "}\n" 263 | write_file(path, data) 264 | 265 | 266 | def read_branch_config_file(path): 267 | """ Read the CEF branch from the specified path. """ 268 | config_file = os.path.join(path, 'cef.branch') 269 | if os.path.isfile(config_file): 270 | contents = read_config_file(config_file) 271 | if 'branch' in contents: 272 | return contents['branch'] 273 | return '' 274 | 275 | 276 | def write_branch_config_file(path, branch): 277 | """ Write the CEF branch to the specified path. """ 278 | config_file = os.path.join(path, 'cef.branch') 279 | if not os.path.isfile(config_file): 280 | write_config_file(config_file, {'branch': branch}) 281 | 282 | 283 | def apply_patch(name): 284 | patch_file = os.path.join(cef_dir, 'patch', 'patches', name) 285 | if os.path.exists(patch_file + ".patch"): 286 | # Attempt to apply the patch file. 287 | patch_tool = os.path.join(cef_dir, 'tools', 'patcher.py') 288 | run('%s %s --patch-file "%s" --patch-dir "%s"' % 289 | (python_exe, patch_tool, patch_file, 290 | chromium_src_dir), chromium_src_dir, depot_tools_dir) 291 | 292 | 293 | def apply_deps_patch(): 294 | """ Patch the Chromium DEPS file before `gclient sync` if necessary. 
""" 295 | deps_path = os.path.join(chromium_src_dir, deps_file) 296 | if os.path.isfile(deps_path): 297 | msg("Chromium DEPS file: %s" % (deps_path)) 298 | apply_patch(deps_file) 299 | else: 300 | raise Exception("Path does not exist: %s" % (deps_path)) 301 | 302 | 303 | def apply_runhooks_patch(): 304 | """ Patch the Chromium runhooks files before `gclient runhooks` if necessary. """ 305 | apply_patch('runhooks') 306 | 307 | 308 | def run_patch_updater(args='', output_file=None): 309 | """ Run the patch updater script. """ 310 | tool = os.path.join(cef_src_dir, 'tools', 'patch_updater.py') 311 | if len(args) > 0: 312 | args = ' ' + args 313 | run('%s %s%s' % (python_exe, tool, args), cef_src_dir, depot_tools_dir, 314 | output_file) 315 | 316 | 317 | def onerror(func, path, exc_info): 318 | """ 319 | Error handler for ``shutil.rmtree``. 320 | 321 | If the error is due to an access error (read only file) 322 | it attempts to add write permission and then retries. 323 | 324 | If the error is for another reason it re-raises the error. 325 | 326 | Usage : ``shutil.rmtree(path, onerror=onerror)`` 327 | """ 328 | import stat 329 | if not os.access(path, os.W_OK): 330 | # Is the error an access error ? 331 | os.chmod(path, stat.S_IWUSR) 332 | func(path) 333 | else: 334 | raise 335 | 336 | 337 | def read_json_url(url): 338 | """ Read a JSON URL. """ 339 | msg('Downloading %s' % url) 340 | return json.loads(urlopen(url).read()) 341 | 342 | 343 | g_channel_data = None 344 | 345 | 346 | def get_chromium_channel_data(os, channel, param=None): 347 | """ Returns all data for the specified Chromium channel. 
""" 348 | global g_channel_data 349 | 350 | if g_channel_data is None: 351 | g_channel_data = read_json_url(chromium_channel_json_url) 352 | assert len(g_channel_data) > 0, 'Failed to load Chromium channel data' 353 | 354 | for oses in g_channel_data: 355 | if oses['os'] == os: 356 | for version in oses['versions']: 357 | if version['channel'] == channel: 358 | assert version['os'] == os 359 | assert version['channel'] == channel 360 | if param is None: 361 | return version 362 | else: 363 | assert param in version, 'Missing parameter %s for Chromium channel %s %s' % ( 364 | param, os, channel) 365 | return version[param] 366 | raise Exception("Invalid Chromium channel value: %s" % channel) 367 | raise Exception("Invalid Chromium os value: %s" % os) 368 | 369 | 370 | def get_chromium_channel_commit(os, channel): 371 | """ Returns the current branch commit for the specified Chromium channel. """ 372 | return get_chromium_channel_data(os, channel, 'branch_commit') 373 | 374 | 375 | def get_chromium_channel_version(os, channel): 376 | """ Returns the current version for the specified Chromium channel. """ 377 | return get_chromium_channel_data(os, channel, 'current_version') 378 | 379 | 380 | def get_chromium_main_position(commit): 381 | """ Returns the closest main position for the specified Chromium commit. """ 382 | # Using -2 because a "Publish DEPS" commit which does not have a master 383 | # position may be first. 384 | cmd = "%s log -2 %s" % (git_exe, commit) 385 | result = exec_cmd(cmd, chromium_src_dir) 386 | if result['out'] != '': 387 | match = re.search(r'refs/heads/(?:master|main)@{#([\d]+)}', result['out']) 388 | assert match != None, 'Failed to find position' 389 | return int(match.groups()[0]) 390 | return None 391 | 392 | 393 | def get_chromium_main_commit(position): 394 | """ Returns the main commit for the specified Chromium commit position. 
""" 395 | cmd = '%s log -1 --grep=refs/heads/master@{#%s} --grep=refs/heads/main@{#%s} origin/main' % ( 396 | git_exe, str(position), str(position)) 397 | result = exec_cmd(cmd, chromium_src_dir) 398 | if result['out'] != '': 399 | match = re.search(r'^commit ([a-f0-9]+)', result['out']) 400 | assert match != None, 'Failed to find commit' 401 | return match.groups()[0] 402 | return None 403 | 404 | 405 | def get_chromium_versions(commit): 406 | """ Returns the list of Chromium versions that contain the specified commit. 407 | Versions are listed oldest to newest. """ 408 | cmd = '%s tag --contains %s' % (git_exe, commit) 409 | result = exec_cmd(cmd, chromium_src_dir) 410 | if result['out'] != '': 411 | return [line.strip() for line in result['out'].strip().split('\n')] 412 | return None 413 | 414 | 415 | def get_build_compat_versions(): 416 | """ Returns the compatible Chromium and (optionally) depot_tools versions 417 | specified by the CEF checkout. """ 418 | compat_path = os.path.join(cef_dir, 'CHROMIUM_BUILD_COMPATIBILITY.txt') 419 | msg("Reading %s" % compat_path) 420 | config = read_config_file(compat_path) 421 | 422 | if not 'chromium_checkout' in config: 423 | raise Exception("Missing chromium_checkout value in %s" % (compat_path)) 424 | return config 425 | 426 | 427 | def get_chromium_target_version(os='win', channel='canary', target_distance=0): 428 | """ Returns the target Chromium version based on a heuristic. """ 429 | # The current compatible version from CEF. 430 | compat_version = chromium_compat_version 431 | compat_commit = get_git_hash(chromium_src_dir, compat_version) 432 | if compat_version == compat_commit: 433 | versions = get_chromium_versions(compat_commit) 434 | if len(versions) > 0: 435 | compat_version = 'refs/tags/' + versions[0] 436 | # Closest version may not align with the compat position, so adjust the 437 | # commit to match. 
438 | compat_commit = get_git_hash(chromium_src_dir, compat_version) 439 | compat_position = get_chromium_main_position(compat_commit) 440 | compat_date = get_git_date(chromium_src_dir, compat_commit) 441 | 442 | # The most recent channel version from the Chromium website. 443 | channel_version = 'refs/tags/' + get_chromium_channel_version(os, channel) 444 | channel_commit = get_chromium_channel_commit(os, channel) 445 | channel_position = get_chromium_main_position(channel_commit) 446 | channel_date = get_git_date(chromium_src_dir, channel_commit) 447 | 448 | if compat_position >= channel_position: 449 | # Already compatible with the channel version or newer. 450 | target_version = compat_version 451 | target_commit = compat_commit 452 | target_position = compat_position 453 | target_date = compat_date 454 | elif target_distance <= 0 or compat_position + target_distance >= channel_position: 455 | # Channel version is within the target distance. 456 | target_version = channel_version 457 | target_commit = channel_commit 458 | target_position = channel_position 459 | target_date = channel_date 460 | else: 461 | # Find an intermediary version that's within the target distance. 462 | target_position = compat_position + target_distance 463 | target_commit = get_chromium_main_commit(target_position) 464 | versions = get_chromium_versions(target_commit) 465 | if len(versions) > 0: 466 | target_version = 'refs/tags/' + versions[0] 467 | # Closest version may not align with the target position, so adjust the 468 | # commit and position to match. 
469 | target_commit = get_git_hash(chromium_src_dir, target_version) 470 | target_position = get_chromium_main_position(target_commit) 471 | else: 472 | target_version = target_commit 473 | target_date = get_git_date(chromium_src_dir, target_commit) 474 | 475 | msg("") 476 | msg("Computed Chromium update for %s %s at distance %d" % (os, channel, 477 | target_distance)) 478 | msg("Compat: %s %s %s (#%d)" % (compat_date, compat_version, compat_commit, 479 | compat_position)) 480 | msg("Target: %s %s %s (#%d)" % (target_date, target_version, target_commit, 481 | target_position)) 482 | msg("Channel: %s %s %s (#%d)" % (channel_date, channel_version, 483 | channel_commit, channel_position)) 484 | msg("") 485 | 486 | return target_version 487 | 488 | 489 | def get_build_directory_name(is_debug): 490 | build_dir = ('Debug' if is_debug else 'Release') + '_' 491 | 492 | # CEF uses a consistent directory naming scheme for GN via 493 | # GetAllPlatformConfigs in tools/gn_args.py. 494 | if options.x64build: 495 | build_dir += 'GN_x64' 496 | elif options.armbuild: 497 | build_dir += 'GN_arm' 498 | elif options.arm64build: 499 | build_dir += 'GN_arm64' 500 | else: 501 | build_dir += 'GN_x86' 502 | return build_dir 503 | 504 | 505 | def read_update_file(): 506 | update_path = os.path.join(cef_src_dir, 'CHROMIUM_UPDATE.txt') 507 | if not os.path.exists(update_path): 508 | msg("Missing file: %s" % update_path) 509 | return None 510 | 511 | msg("Reading %s" % update_path) 512 | return read_config_file(update_path) 513 | 514 | 515 | def log_chromium_changes(): 516 | """ Evaluate the Chromium checkout for changes. 
""" 517 | config = read_update_file() 518 | if config is None: 519 | msg("Skipping Chromium changes log.") 520 | return 521 | 522 | if 'files' in config: 523 | out_file = os.path.join(download_dir, 'chromium_update_changes.diff') 524 | if os.path.exists(out_file): 525 | os.remove(out_file) 526 | 527 | old_commit = get_chromium_main_commit( 528 | get_chromium_main_position(chromium_compat_version)) 529 | new_commit = get_chromium_main_commit( 530 | get_chromium_main_position(chromium_checkout)) 531 | 532 | cmd = '%s diff --relative --no-prefix %s..%s -- %s' % ( 533 | git_exe, old_commit, new_commit, ' '.join(config['files'])) 534 | result = exec_cmd(cmd, chromium_src_dir) 535 | if result['out'] != '': 536 | write_file(out_file, result['out']) 537 | 538 | 539 | def check_pattern_matches(output_file=None): 540 | """ Evaluate the Chromium checkout for pattern matches. """ 541 | config = read_update_file() 542 | if config is None: 543 | msg("Skipping Chromium pattern matching.") 544 | return 545 | 546 | if 'patterns' in config: 547 | if output_file is None: 548 | fp = sys.stdout 549 | else: 550 | msg('Writing %s' % output_file) 551 | fp = open(output_file, 'w', encoding='utf-8') 552 | 553 | has_output = False 554 | for entry in config['patterns']: 555 | msg("Evaluating pattern: %s" % entry['pattern']) 556 | 557 | # Read patterns from a file to avoid formatting problems. 
558 | pattern_handle, pattern_file = tempfile.mkstemp() 559 | os.write(pattern_handle, entry['pattern']) 560 | os.close(pattern_handle) 561 | 562 | cmd = '%s grep -n -f %s' % (git_exe, pattern_file) 563 | result = exec_cmd(cmd, chromium_src_dir) 564 | os.remove(pattern_file) 565 | 566 | if result['out'] != '': 567 | write_msg = True 568 | re_exclude = re.compile( 569 | entry['exclude_matches']) if 'exclude_matches' in entry else None 570 | 571 | for line in result['out'].split('\n'): 572 | line = line.strip() 573 | if len(line) == 0: 574 | continue 575 | skip = not re_exclude is None and re_exclude.match(line) != None 576 | if not skip: 577 | if write_msg: 578 | if has_output: 579 | write_fp(fp, '\n') 580 | write_fp(fp, 581 | '!!!! WARNING: FOUND PATTERN: %s\n' % entry['pattern']) 582 | if 'message' in entry: 583 | write_fp(fp, entry['message'] + '\n') 584 | write_fp(fp, '\n') 585 | write_msg = False 586 | write_fp(fp, line + '\n') 587 | has_output = True 588 | 589 | if not output_file is None: 590 | if has_output: 591 | msg('ERROR Matches found. See %s for output.' % out_file) 592 | else: 593 | write_fp(fp, 'Good news! No matches.\n') 594 | fp.close() 595 | 596 | if has_output: 597 | # Don't continue when we know the build will be wrong. 598 | sys.exit(1) 599 | 600 | 601 | ## 602 | # Program entry point. 603 | ## 604 | 605 | # Cannot be loaded as a module. 606 | if __name__ != "__main__": 607 | sys.stderr.write('This file cannot be loaded as a module!') 608 | sys.exit() 609 | 610 | # Parse command-line options. 611 | disc = """ 612 | This utility implements automation for the download, update, build and 613 | distribution of CEF. 614 | """ 615 | 616 | parser = OptionParser(description=disc) 617 | 618 | # Setup options. 
# Checkout location and version-selection options.
parser.add_option(
    '--download-dir',
    dest='downloaddir',
    metavar='DIR',
    help='Download directory with no spaces [required].')
parser.add_option(
    '--depot-tools-dir',
    dest='depottoolsdir',
    metavar='DIR',
    help='Download directory for depot_tools.',
    default='')
parser.add_option('--depot-tools-archive', dest='depottoolsarchive',
                  help='Zip archive file that contains a single top-level '+\
                       'depot_tools directory.', default='')
parser.add_option('--branch', dest='branch',
                  help='Branch of CEF to build (master, 3987, ...). This '+\
                       'will be used to name the CEF download directory and '+\
                       'to identify the correct URL if --url is not '+\
                       'specified. The default value is master.',
                  default='master')
parser.add_option('--url', dest='url',
                  help='CEF download URL. If not specified the default URL '+\
                       'will be used.',
                  default='')
parser.add_option('--chromium-url', dest='chromiumurl',
                  help='Chromium download URL. If not specified the default '+\
                       'URL will be used.',
                  default='')
parser.add_option('--checkout', dest='checkout',
                  help='Version of CEF to checkout. If not specified the '+\
                       'most recent remote version of the branch will be used.',
                  default='')
parser.add_option('--chromium-checkout', dest='chromiumcheckout',
                  help='Version of Chromium to checkout (Git '+\
                       'branch/hash/tag). This overrides the value specified '+\
                       'by CEF in CHROMIUM_BUILD_COMPATIBILITY.txt.',
                  default='')
parser.add_option('--chromium-channel', dest='chromiumchannel',
                  help='Chromium channel to check out (canary, dev, beta or '+\
                       'stable). This overrides the value specified by CEF '+\
                       'in CHROMIUM_BUILD_COMPATIBILITY.txt.',
                  default='')
parser.add_option('--chromium-channel-distance', dest='chromiumchanneldistance',
                  help='The target number of commits to step in the '+\
                       'channel, or 0 to use the newest channel version. '+\
                       'Used in combination with --chromium-channel.',
                  default='')

# Miscellaneous options.
parser.add_option(
    '--force-config',
    action='store_true',
    dest='forceconfig',
    default=False,
    help='Force creation of a new gclient config file.')
parser.add_option('--force-clean',
                  action='store_true', dest='forceclean', default=False,
                  help='Force a clean checkout of Chromium and CEF. This will'+\
                       ' trigger a new update, build and distribution.')
parser.add_option('--force-clean-deps',
                  action='store_true', dest='forcecleandeps', default=False,
                  help='Force a clean checkout of Chromium dependencies. Used'+\
                       ' in combination with --force-clean.')
parser.add_option(
    '--dry-run',
    action='store_true',
    dest='dryrun',
    default=False,
    help="Output commands without executing them.")
parser.add_option('--dry-run-platform', dest='dryrunplatform', default=None,
                  help='Simulate a dry run on the specified platform '+\
                       '(windows, mac, linux). Must be used in combination'+\
                       ' with the --dry-run flag.')

# Update-related options.
parser.add_option('--force-update',
                  action='store_true', dest='forceupdate', default=False,
                  help='Force a Chromium and CEF update. This will trigger a '+\
                       'new build and distribution.')
parser.add_option('--no-update',
                  action='store_true', dest='noupdate', default=False,
                  help='Do not update Chromium or CEF. Pass --force-build or '+\
                       '--force-distrib if you desire a new build or '+\
                       'distribution.')
parser.add_option('--no-cef-update',
                  action='store_true', dest='nocefupdate', default=False,
                  help='Do not update CEF. Pass --force-build or '+\
                       '--force-distrib if you desire a new build or '+\
                       'distribution.')
parser.add_option('--force-cef-update',
                  action='store_true', dest='forcecefupdate', default=False,
                  help='Force a CEF update. This will cause local changes in '+\
                       'the CEF checkout to be discarded and patch files to '+\
                       'be reapplied.')
parser.add_option(
    '--no-chromium-update',
    action='store_true',
    dest='nochromiumupdate',
    default=False,
    help='Do not update Chromium.')
parser.add_option(
    '--no-depot-tools-update',
    action='store_true',
    dest='nodepottoolsupdate',
    default=False,
    help='Do not update depot_tools.')
parser.add_option('--fast-update',
                  action='store_true', dest='fastupdate', default=False,
                  help='Update existing Chromium/CEF checkouts for fast incremental '+\
                       'builds by attempting to minimize the number of modified files. '+\
                       'The update will fail if there are unstaged CEF changes or if '+\
                       'Chromium changes are not included in a patch file.')
parser.add_option(
    '--force-patch-update',
    action='store_true',
    dest='forcepatchupdate',
    default=False,
    help='Force update of patch files.')
parser.add_option(
    '--resave',
    action='store_true',
    dest='resave',
    default=False,
    help='Resave patch files.')
parser.add_option(
    '--log-chromium-changes',
    action='store_true',
    dest='logchromiumchanges',
    default=False,
    help='Create a log of the Chromium changes.')

# Build-related options.
parser.add_option('--force-build',
                  action='store_true', dest='forcebuild', default=False,
                  help='Force CEF debug and release builds. This builds '+\
                       '[build-target] on all platforms and chrome_sandbox '+\
                       'on Linux.')
parser.add_option(
    '--no-build',
    action='store_true',
    dest='nobuild',
    default=False,
    help='Do not build CEF.')
parser.add_option(
    '--build-target',
    dest='buildtarget',
    default='cefclient',
    help='Target name(s) to build (defaults to "cefclient").')
parser.add_option(
    '--build-tests',
    action='store_true',
    dest='buildtests',
    default=False,
    help='Also build the test target specified via --test-target.')
parser.add_option(
    '--no-debug-build',
    action='store_true',
    dest='nodebugbuild',
    default=False,
    help="Don't perform the CEF debug build.")
parser.add_option(
    '--no-release-build',
    action='store_true',
    dest='noreleasebuild',
    default=False,
    help="Don't perform the CEF release build.")
parser.add_option(
    '--verbose-build',
    action='store_true',
    dest='verbosebuild',
    default=False,
    help='Show all command lines while building.')
parser.add_option(
    '--build-failure-limit',
    dest='buildfailurelimit',
    default=1,
    type="int",
    help='Keep going until N jobs fail.')
parser.add_option('--build-log-file',
                  action='store_true', dest='buildlogfile', default=False,
                  help='Write build logs to file. The file will be named '+\
                       '"build-[branch]-[debug|release].log" in the download '+\
                       'directory.')
# Architecture selection (mutually exclusive; validated after parse_args).
parser.add_option(
    '--x64-build',
    action='store_true',
    dest='x64build',
    default=False,
    help='Create a 64-bit build.')
parser.add_option(
    '--arm-build',
    action='store_true',
    dest='armbuild',
    default=False,
    help='Create an ARM build.')
parser.add_option(
    '--arm64-build',
    action='store_true',
    dest='arm64build',
    default=False,
    help='Create an ARM64 build.')

# Test-related options.
parser.add_option(
    '--run-tests',
    action='store_true',
    dest='runtests',
    default=False,
    help='Run the ceftests target.')
parser.add_option(
    '--no-debug-tests',
    action='store_true',
    dest='nodebugtests',
    default=False,
    help="Don't run debug build tests.")
parser.add_option(
    '--no-release-tests',
    action='store_true',
    dest='noreleasetests',
    default=False,
    help="Don't run release build tests.")
parser.add_option(
    '--test-target',
    dest='testtarget',
    default='ceftests',
    help='Test target name to build (defaults to "ceftests").')
parser.add_option(
    '--test-prefix',
    dest='testprefix',
    default='',
    help='Prefix for running the test executable (e.g. `xvfb-run` on Linux).')
parser.add_option(
    '--test-args',
    dest='testargs',
    default='',
    help='Arguments that will be passed to the test executable.')

# Distribution-related options.
parser.add_option(
    '--force-distrib',
    action='store_true',
    dest='forcedistrib',
    default=False,
    help='Force creation of a CEF binary distribution.')
parser.add_option(
    '--no-distrib',
    action='store_true',
    dest='nodistrib',
    default=False,
    help="Don't create a CEF binary distribution.")
parser.add_option(
    '--minimal-distrib',
    action='store_true',
    dest='minimaldistrib',
    default=False,
    help='Create a minimal CEF binary distribution.')
parser.add_option(
    '--minimal-distrib-only',
    action='store_true',
    dest='minimaldistribonly',
    default=False,
    help='Create a minimal CEF binary distribution only.')
parser.add_option(
    '--client-distrib',
    action='store_true',
    dest='clientdistrib',
    default=False,
    help='Create a client CEF binary distribution.')
parser.add_option(
    '--client-distrib-only',
    action='store_true',
    dest='clientdistribonly',
    default=False,
    help='Create a client CEF binary distribution only.')
parser.add_option(
    '--sandbox-distrib',
    action='store_true',
    dest='sandboxdistrib',
    default=False,
    help='Create a cef_sandbox static library distribution.')
parser.add_option(
    '--sandbox-distrib-only',
    action='store_true',
    dest='sandboxdistribonly',
    default=False,
    help='Create a cef_sandbox static library distribution only.')
parser.add_option(
    '--no-distrib-docs',
    action='store_true',
    dest='nodistribdocs',
    default=False,
    help="Don't create CEF documentation.")
parser.add_option(
    '--no-distrib-archive',
    action='store_true',
    dest='nodistribarchive',
    default=False,
    help="Don't create archives for output directories.")
parser.add_option(
    '--clean-artifacts',
    action='store_true',
    dest='cleanartifacts',
    default=False,
    help='Clean the artifacts output directory.')
parser.add_option(
    '--distrib-subdir',
    dest='distribsubdir',
    default='',
    help='CEF distrib dir name, child of chromium/src/cef/binary_distrib')
parser.add_option(
    '--distrib-subdir-suffix',
    dest='distribsubdirsuffix',
    default='',
    help='CEF distrib dir name suffix, child of chromium/src/cef/binary_distrib'
)

(options, args) = parser.parse_args()

if options.downloaddir is None:
  print("The --download-dir option is required.")
  parser.print_help(sys.stderr)
  sys.exit()

# Opt into component-specific flags for later use.
if options.noupdate:
  options.nocefupdate = True
  options.nochromiumupdate = True
  options.nodepottoolsupdate = True

if options.runtests:
  options.buildtests = True

# Reject contradictory option combinations.
if (options.nochromiumupdate and options.forceupdate) or \
   (options.nocefupdate and options.forceupdate) or \
   (options.nobuild and options.forcebuild) or \
   (options.nodistrib and options.forcedistrib) or \
   ((options.forceclean or options.forcecleandeps) and options.fastupdate) or \
   (options.chromiumcheckout and options.chromiumchannel):
  print("Invalid combination of options.")
  parser.print_help(sys.stderr)
  sys.exit()

if (options.noreleasebuild and \
     (options.minimaldistrib or options.minimaldistribonly or \
      options.clientdistrib or options.clientdistribonly)) or \
    (options.minimaldistribonly + options.clientdistribonly + options.sandboxdistribonly > 1):
  print('Invalid combination of options.')
  parser.print_help(sys.stderr)
  sys.exit()

if options.x64build + options.armbuild + options.arm64build > 1:
  print('Invalid combination of options.')
  parser.print_help(sys.stderr)
  sys.exit()

if (options.buildtests or options.runtests) and len(options.testtarget) == 0:
  print("A test target must be specified via --test-target.")
  parser.print_help(sys.stderr)
  sys.exit()

# Operating system.
if options.dryrun and options.dryrunplatform is not None:
  platform = options.dryrunplatform
  if not platform in ['windows', 'mac', 'linux']:
    print('Invalid dry-run-platform value: %s' % (platform))
    sys.exit()
elif sys.platform == 'win32':
  platform = 'windows'
elif sys.platform == 'darwin':
  platform = 'mac'
elif sys.platform.startswith('linux'):
  platform = 'linux'
else:
  print('Unknown operating system platform')
  sys.exit()

if options.clientdistrib or options.clientdistribonly:
  if platform == 'linux' or (platform == 'windows' and options.arm64build):
    client_app = 'cefsimple'
  else:
    client_app = 'cefclient'
  if options.buildtarget.find(client_app) == -1:
    print('A client distribution cannot be generated if --build-target ' +
          'excludes %s.' % client_app)
    parser.print_help(sys.stderr)
    sys.exit()

# CEF branch.
cef_branch = options.branch

branch_is_master = (cef_branch == 'master' or cef_branch == 'trunk')
if not branch_is_master:
  # Verify that the branch value is numeric.
  if not cef_branch.isdigit():
    print('Invalid branch value: %s' % cef_branch)
    sys.exit()

  # Verify the minimum supported branch number.
  if int(cef_branch) < 3071:
    # Fixed operator precedence: '%' previously bound only to the second
    # string literal (which has no conversion specifier), raising TypeError
    # instead of printing the message. Parenthesize the concatenation so the
    # branch number is substituted into the full message.
    print(('The requested branch (%s) is too old to build using this tool. ' +
           'The minimum supported branch is 3071.') % cef_branch)
    sys.exit()

# True if the requested branch is 3538 or newer.
branch_is_3538_or_newer = (branch_is_master or int(cef_branch) >= 3538)

# True if the requested branch is 3945 or newer.
branch_is_3945_or_newer = (branch_is_master or int(cef_branch) >= 3945)

# Enable Python 3 usage in Chromium for branches 3945 and newer.
if branch_is_3945_or_newer and not is_python2 and \
    not 'GCLIENT_PY3' in os.environ.keys():
  os.environ['GCLIENT_PY3'] = '1'

if not branch_is_3945_or_newer and \
    (not is_python2 or bool(int(os.environ.get('GCLIENT_PY3', '0')))):
  # NOTE(review): the message says "branch 3904 and older" while the guard
  # uses branch_is_3945_or_newer -- confirm which branch cutoff is intended.
  print('Python 3 is not supported with branch 3904 and older ' +
        '(set GCLIENT_PY3=0 and run with Python 2 executable).')
  sys.exit()

# ARM (32-bit) builds are Linux-only.
if options.armbuild:
  if platform != 'linux':
    print('The ARM build option is only supported on Linux.')
    sys.exit()

deps_file = 'DEPS'

if platform == 'mac' and not (options.x64build or options.arm64build):
  print('32-bit MacOS builds are not supported. ' +
        'Add --x64-build or --arm64-build flag to generate a 64-bit build.')
  sys.exit()

# Platforms that build a cef_sandbox library.
sandbox_lib_platforms = ['windows']
if branch_is_3538_or_newer:
  sandbox_lib_platforms.append('mac')

if not platform in sandbox_lib_platforms and (options.sandboxdistrib or
                                              options.sandboxdistribonly):
  print('The sandbox distribution is not supported on this platform.')
  sys.exit()

# Options that force the sources to change.
force_change = options.forceclean or options.forceupdate

# Options that cause local changes to be discarded.
discard_local_changes = force_change or options.forcecefupdate

if options.resave and (options.forcepatchupdate or discard_local_changes):
  print('--resave cannot be combined with options that modify or discard ' +
        'patches.')
  parser.print_help(sys.stderr)
  sys.exit()

if platform == 'windows':
  # Avoid errors when the "vs_toolchain.py update" Chromium hook runs.
  os.environ['DEPOT_TOOLS_WIN_TOOLCHAIN'] = '0'

# Directory layout rooted at --download-dir:
#   <download_dir>/chromium/src      - Chromium checkout
#   <download_dir>/chromium/src/cef  - CEF inside Chromium (fast-update mode)
#   <download_dir>/cef               - standalone CEF checkout
download_dir = os.path.abspath(options.downloaddir)
chromium_dir = os.path.join(download_dir, 'chromium')
chromium_src_dir = os.path.join(chromium_dir, 'src')
out_src_dir = os.path.join(chromium_src_dir, 'out')
cef_src_dir = os.path.join(chromium_src_dir, 'cef')

if options.fastupdate and os.path.exists(cef_src_dir):
  cef_dir = cef_src_dir
else:
  cef_dir = os.path.join(download_dir, 'cef')

##
# Manage the download directory.
##

# Create the download directory if necessary.
create_directory(download_dir)

msg("Download Directory: %s" % (download_dir))

##
# Manage the depot_tools directory.
##

# Check if the depot_tools directory exists.
if options.depottoolsdir != '':
  depot_tools_dir = os.path.abspath(options.depottoolsdir)
else:
  depot_tools_dir = os.path.join(download_dir, 'depot_tools')

msg("Depot Tools Directory: %s" % (depot_tools_dir))

if not os.path.exists(depot_tools_dir):
  if platform == 'windows' and options.depottoolsarchive == '':
    # On Windows download depot_tools as an archive file since we can't assume
    # that git is already installed.
    options.depottoolsarchive = depot_tools_archive_url

  if options.depottoolsarchive != '':
    # Extract depot_tools from an archive file.
    msg('Extracting %s to %s.' % \
        (options.depottoolsarchive, depot_tools_dir))
    if not options.dryrun:
      download_and_extract(options.depottoolsarchive, depot_tools_dir)
  else:
    # On Linux and OS X check out depot_tools using Git.
    run('git clone ' + depot_tools_url + ' ' + depot_tools_dir, download_dir)

if not options.nodepottoolsupdate:
  # Update depot_tools.
  # On Windows this will download required python and git binaries.
  msg('Updating depot_tools')
  if platform == 'windows':
    run('update_depot_tools.bat', depot_tools_dir, depot_tools_dir)
  else:
    run('update_depot_tools', depot_tools_dir, depot_tools_dir)

# Determine the executables to use.
if platform == 'windows':
  # Force use of the version bundled with depot_tools.
  git_exe = os.path.join(depot_tools_dir, 'git.bat')
  python_bat = 'python.bat' if is_python2 else 'python3.bat'
  python_exe = os.path.join(depot_tools_dir, python_bat)
  if options.dryrun and not os.path.exists(git_exe):
    sys.stdout.write("WARNING: --dry-run assumes that depot_tools" \
                     " is already in your PATH. If it isn't\nplease" \
                     " specify a --depot-tools-dir value.\n")
    git_exe = 'git.bat'
    python_exe = python_bat
else:
  git_exe = 'git'
  python_exe = sys.executable

##
# Manage the cef directory.
##

# Delete the existing CEF directory if requested.
if options.forceclean and os.path.exists(cef_dir):
  delete_directory(cef_dir)

# Determine the type of CEF checkout to use.
if os.path.exists(cef_dir) and not is_git_checkout(cef_dir):
  raise Exception("Not a valid CEF Git checkout: %s" % (cef_dir))

# Determine the CEF download URL to use.
cef_url = options.url.strip()
if cef_url == '':
  cef_url = cef_git_url

# Verify that the requested CEF URL matches the existing checkout.
if not options.nocefupdate and os.path.exists(cef_dir):
  cef_existing_url = get_git_url(cef_dir)
  if cef_url != cef_existing_url:
    raise Exception(
        'Requested CEF checkout URL %s does not match existing URL %s' %
        (cef_url, cef_existing_url))

msg("CEF Branch: %s" % (cef_branch))
msg("CEF URL: %s" % (cef_url))
msg("CEF Source Directory: %s" % (cef_dir))

# Determine the CEF Git branch to use.
if options.checkout == '':
  # Target the most recent branch commit from the remote repo.
  if branch_is_master:
    cef_checkout = 'origin/master'
  else:
    cef_checkout = 'origin/' + cef_branch
else:
  cef_checkout = options.checkout

# Create the CEF checkout if necessary.
if not options.nocefupdate and not os.path.exists(cef_dir):
  cef_checkout_new = True
  # Shallow clone: shallow_git_depth_limit is presumably set earlier in this
  # file's shallow-support preamble -- confirm it is defined before this point.
  run('%s clone %s %s %s' % (git_exe, shallow_git_depth_limit, cef_url, cef_dir), download_dir,
      depot_tools_dir)
else:
  cef_checkout_new = False

# Determine if the CEF checkout needs to change.
if not options.nocefupdate and os.path.exists(cef_dir):
  cef_current_hash = get_git_hash(cef_dir, 'HEAD')

  if not cef_checkout_new:
    # fetch --depth 1 updated sources.
1203 | run('%s fetch %s' % (git_exe, shallow_git_depth_limit), cef_dir, depot_tools_dir) 1204 | 1205 | cef_desired_hash = get_git_hash(cef_dir, cef_checkout) 1206 | cef_checkout_changed = cef_checkout_new or force_change or \ 1207 | options.forcecefupdate or \ 1208 | cef_current_hash != cef_desired_hash 1209 | 1210 | msg("CEF Current Checkout: %s" % (cef_current_hash)) 1211 | msg("CEF Desired Checkout: %s (%s)" % (cef_desired_hash, cef_checkout)) 1212 | 1213 | if cef_checkout_changed: 1214 | if cef_dir == cef_src_dir: 1215 | # Running in fast update mode. Backup and revert the patched files before 1216 | # changing the CEF checkout. 1217 | run_patch_updater("--backup --revert") 1218 | 1219 | # Update the CEF checkout. 1220 | run('%s checkout %s%s' % 1221 | (git_exe, '--force ' if discard_local_changes else '', cef_checkout), \ 1222 | cef_dir, depot_tools_dir) 1223 | else: 1224 | cef_checkout_changed = False 1225 | 1226 | build_compat_versions = get_build_compat_versions() 1227 | 1228 | if not options.nodepottoolsupdate and \ 1229 | 'depot_tools_checkout' in build_compat_versions: 1230 | # Update the depot_tools checkout. 1231 | depot_tools_compat_version = build_compat_versions['depot_tools_checkout'] 1232 | run('%s checkout %s%s' % 1233 | (git_exe, '--force ' if discard_local_changes else '', depot_tools_compat_version), \ 1234 | depot_tools_dir, depot_tools_dir) 1235 | 1236 | # Disable further depot_tools updates. 1237 | os.environ['DEPOT_TOOLS_UPDATE'] = '0' 1238 | 1239 | ## 1240 | # Manage the out directory. 1241 | ## 1242 | 1243 | out_dir = os.path.join(download_dir, 'out_' + cef_branch) 1244 | 1245 | # Delete the existing out directory if requested. 1246 | if options.forceclean and os.path.exists(out_dir): 1247 | delete_directory(out_dir) 1248 | 1249 | msg("CEF Output Directory: %s" % (out_dir)) 1250 | 1251 | ## 1252 | # Manage the chromium directory. 1253 | ## 1254 | 1255 | # Create the chromium directory if necessary. 
1256 | create_directory(chromium_dir) 1257 | 1258 | if options.chromiumurl != '': 1259 | chromium_url = options.chromiumurl 1260 | else: 1261 | chromium_url = 'https://chromium.googlesource.com/chromium/src.git' 1262 | 1263 | # Create gclient configuration file. 1264 | gclient_file = os.path.join(chromium_dir, '.gclient') 1265 | if not os.path.exists(gclient_file) or options.forceconfig: 1266 | # Exclude unnecessary directories. Intentionally written without newlines. 1267 | gclient_spec = \ 1268 | "solutions = [{"+\ 1269 | "'managed': False,"+\ 1270 | "'name': 'src', "+\ 1271 | "'url': '" + chromium_url + "', "+\ 1272 | "'custom_deps': {"+\ 1273 | "'build': None, "+\ 1274 | "'build/scripts/command_wrapper/bin': None, "+\ 1275 | "'build/scripts/gsd_generate_index': None, "+\ 1276 | "'build/scripts/private/data/reliability': None, "+\ 1277 | "'build/scripts/tools/deps2git': None, "+\ 1278 | "'build/third_party/lighttpd': None, "+\ 1279 | "'commit-queue': None, "+\ 1280 | "'depot_tools': None, "+\ 1281 | "'src/chrome_frame/tools/test/reference_build/chrome': None, "+\ 1282 | "'src/chrome/tools/test/reference_build/chrome_linux': None, "+\ 1283 | "'src/chrome/tools/test/reference_build/chrome_mac': None, "+\ 1284 | "'src/chrome/tools/test/reference_build/chrome_win': None, "+\ 1285 | "}, "+\ 1286 | "'deps_file': '" + deps_file + "', "+\ 1287 | "'safesync_url': ''"+\ 1288 | "}]" 1289 | 1290 | msg('Writing %s' % gclient_file) 1291 | if not options.dryrun: 1292 | with open(gclient_file, 'w', encoding='utf-8') as fp: 1293 | write_fp(fp, gclient_spec) 1294 | 1295 | # Initial Chromium checkout. 1296 | if not options.nochromiumupdate and not os.path.exists(chromium_src_dir): 1297 | chromium_checkout_new = True 1298 | run("gclient sync " + shallow_gclient_no_history + " --nohooks --with_branch_heads --jobs 16", \ 1299 | chromium_dir, depot_tools_dir) 1300 | else: 1301 | chromium_checkout_new = False 1302 | 1303 | # Verify the Chromium checkout. 
1304 | if not options.dryrun and not is_git_checkout(chromium_src_dir): 1305 | raise Exception('Not a valid git checkout: %s' % (chromium_src_dir)) 1306 | 1307 | if os.path.exists(chromium_src_dir): 1308 | msg("Chromium URL: %s" % (get_git_url(chromium_src_dir))) 1309 | 1310 | # fetch Chromium changes so that we can perform the necessary calculations using 1311 | # local history. 1312 | if not options.nochromiumupdate and os.path.exists(chromium_src_dir): 1313 | # fetch --depth 1 updated sources. 1314 | run("%s fetch %s" % (git_exe,shallow_git_depth_limit), chromium_src_dir, depot_tools_dir) 1315 | # Also fetch tags, which are required for release branch builds. 1316 | run("%s fetch %s --tags" % (git_exe,shallow_git_depth_limit), chromium_src_dir, depot_tools_dir) 1317 | 1318 | # Determine the Chromium checkout options required by CEF. 1319 | chromium_compat_version = build_compat_versions['chromium_checkout'] 1320 | if len(options.chromiumcheckout) > 0: 1321 | chromium_checkout = options.chromiumcheckout 1322 | elif len(options.chromiumchannel) > 0: 1323 | target_distance = int(options.chromiumchanneldistance 1324 | ) if len(options.chromiumchanneldistance) > 0 else 0 1325 | chromium_checkout = get_chromium_target_version( 1326 | channel=options.chromiumchannel, target_distance=target_distance) 1327 | else: 1328 | chromium_checkout = chromium_compat_version 1329 | 1330 | # Determine if the Chromium checkout needs to change. 
1331 | if not options.nochromiumupdate and os.path.exists(chromium_src_dir): 1332 | chromium_current_hash = get_git_hash(chromium_src_dir, 'HEAD') 1333 | chromium_desired_hash = get_git_hash(chromium_src_dir, chromium_checkout) 1334 | chromium_checkout_changed = chromium_checkout_new or force_change or \ 1335 | chromium_current_hash != chromium_desired_hash 1336 | 1337 | msg("Chromium Current Checkout: %s" % (chromium_current_hash)) 1338 | msg("Chromium Desired Checkout: %s (%s)" % \ 1339 | (chromium_desired_hash, chromium_checkout)) 1340 | else: 1341 | chromium_checkout_changed = options.dryrun 1342 | 1343 | if cef_checkout_changed: 1344 | if cef_dir != cef_src_dir and os.path.exists(cef_src_dir): 1345 | # Delete the existing src/cef directory. It will be re-copied from the 1346 | # download directory later. 1347 | delete_directory(cef_src_dir) 1348 | elif chromium_checkout_changed and cef_dir == cef_src_dir: 1349 | # Running in fast update mode. Backup and revert the patched files before 1350 | # changing the Chromium checkout. 1351 | run_patch_updater("--backup --revert") 1352 | 1353 | # Delete the existing src/out directory if requested. 1354 | if options.forceclean and os.path.exists(out_src_dir): 1355 | delete_directory(out_src_dir) 1356 | 1357 | # Move the existing src/out directory to the correct location in the download 1358 | # directory. It will be moved back from the download directory later. 1359 | if os.path.exists(out_src_dir): 1360 | old_branch = read_branch_config_file(out_src_dir) 1361 | if old_branch != '' and (chromium_checkout_changed or 1362 | old_branch != cef_branch): 1363 | old_out_dir = os.path.join(download_dir, 'out_' + old_branch) 1364 | move_directory(out_src_dir, old_out_dir) 1365 | 1366 | # Update the Chromium checkout. 
1367 | if chromium_checkout_changed: 1368 | if not chromium_checkout_new and not options.fastupdate: 1369 | if options.forceclean and options.forcecleandeps: 1370 | # Remove all local changes including third-party git checkouts managed by 1371 | # gclient. 1372 | run("%s clean -dffx" % (git_exe), chromium_src_dir, depot_tools_dir) 1373 | else: 1374 | # Revert all changes in the Chromium checkout. 1375 | run("gclient revert --nohooks", chromium_dir, depot_tools_dir) 1376 | 1377 | # Checkout the requested branch. 1378 | run("%s checkout %s%s" % \ 1379 | (git_exe, '--force ' if discard_local_changes else '', chromium_checkout), \ 1380 | chromium_src_dir, depot_tools_dir) 1381 | 1382 | # Patch the Chromium DEPS file if necessary. 1383 | apply_deps_patch() 1384 | 1385 | # Update third-party dependencies including branch/tag information. 1386 | run("gclient sync " + shallow_gclient_no_history + " %s--nohooks --with_branch_heads --jobs 16" % \ 1387 | ('--reset ' if discard_local_changes else ''), chromium_dir, depot_tools_dir) 1388 | 1389 | # Patch the Chromium runhooks scripts if necessary. 1390 | apply_runhooks_patch() 1391 | 1392 | # Runs hooks for files that have been modified in the local working copy. 1393 | run("gclient runhooks --jobs 16", chromium_dir, depot_tools_dir) 1394 | 1395 | # Delete the src/out directory created by `gclient sync`. 1396 | delete_directory(out_src_dir) 1397 | 1398 | if cef_dir == cef_src_dir: 1399 | # Running in fast update mode. 1400 | if cef_checkout_changed or chromium_checkout_changed: 1401 | # Check and restore the patched files. 1402 | run_patch_updater("--reapply --restore") 1403 | elif os.path.exists(cef_dir) and not os.path.exists(cef_src_dir): 1404 | # Restore the src/cef directory. 1405 | copy_directory(cef_dir, cef_src_dir) 1406 | 1407 | # Restore the src/out directory. 
1408 | out_src_dir_exists = os.path.exists(out_src_dir) 1409 | if os.path.exists(out_dir) and not out_src_dir_exists: 1410 | move_directory(out_dir, out_src_dir) 1411 | out_src_dir_exists = True 1412 | elif not out_src_dir_exists: 1413 | create_directory(out_src_dir) 1414 | 1415 | # Write the config file for identifying the branch. 1416 | write_branch_config_file(out_src_dir, cef_branch) 1417 | 1418 | if options.logchromiumchanges and chromium_checkout != chromium_compat_version: 1419 | log_chromium_changes() 1420 | 1421 | if options.forcepatchupdate or ((chromium_checkout_new or not options.fastupdate) and \ 1422 | chromium_checkout_changed and \ 1423 | chromium_checkout != chromium_compat_version): 1424 | # Not using the known-compatible Chromium version. Try to update patch files. 1425 | if options.logchromiumchanges: 1426 | out_file = os.path.join(download_dir, 'chromium_update_patches.txt') 1427 | if os.path.exists(out_file): 1428 | os.remove(out_file) 1429 | else: 1430 | out_file = None 1431 | run_patch_updater(output_file=out_file) 1432 | elif options.resave: 1433 | # Resave patch files. 1434 | run_patch_updater("--resave") 1435 | 1436 | if chromium_checkout != chromium_compat_version: 1437 | if options.logchromiumchanges: 1438 | out_file = os.path.join(download_dir, 'chromium_update_patterns.txt') 1439 | if os.path.exists(out_file): 1440 | os.remove(out_file) 1441 | else: 1442 | out_file = None 1443 | check_pattern_matches(output_file=out_file) 1444 | 1445 | ## 1446 | # Build CEF. 1447 | ## 1448 | 1449 | if not options.nobuild and (chromium_checkout_changed or \ 1450 | cef_checkout_changed or options.forcebuild or \ 1451 | not out_src_dir_exists): 1452 | # Building should also force a distribution. 1453 | options.forcedistrib = True 1454 | 1455 | # Make sure the GN configuration exists. 
1456 | if not options.dryrun and \ 1457 | not os.path.exists(os.path.join(cef_src_dir, 'BUILD.gn')): 1458 | raise Exception('GN configuration does not exist.') 1459 | 1460 | # Print all build-related environment variables including any that were set 1461 | # previously. 1462 | for key in os.environ.keys(): 1463 | if key.startswith('CEF_') or key.startswith('GCLIENT_') or \ 1464 | key.startswith('GN_') or key.startswith('GYP_') or \ 1465 | key.startswith('DEPOT_TOOLS_'): 1466 | msg('%s=%s' % (key, os.environ[key])) 1467 | 1468 | # Generate project files. 1469 | tool = os.path.join(cef_src_dir, 'tools', 'gclient_hook.py') 1470 | run('%s %s' % (python_exe, tool), cef_src_dir, depot_tools_dir) 1471 | 1472 | # Build using Ninja. 1473 | command = 'ninja ' 1474 | if options.verbosebuild: 1475 | command += '-v ' 1476 | if options.buildfailurelimit != 1: 1477 | command += '-k %d ' % options.buildfailurelimit 1478 | command += '-C ' 1479 | target = ' ' + options.buildtarget 1480 | if options.buildtests: 1481 | target += ' ' + options.testtarget 1482 | if platform == 'linux': 1483 | target += ' chrome_sandbox' 1484 | 1485 | # Make a CEF Debug build. 1486 | if not options.nodebugbuild: 1487 | build_path = os.path.join('out', get_build_directory_name(True)) 1488 | args_path = os.path.join(chromium_src_dir, build_path, 'args.gn') 1489 | msg(args_path + ' contents:\n' + read_file(args_path)) 1490 | 1491 | run(command + build_path + target, chromium_src_dir, depot_tools_dir, 1492 | os.path.join(download_dir, 'build-%s-debug.log' % (cef_branch)) \ 1493 | if options.buildlogfile else None) 1494 | 1495 | if platform in sandbox_lib_platforms: 1496 | # Make the separate cef_sandbox build when GN is_official_build=true. 
1497 | build_path += '_sandbox' 1498 | if os.path.exists(os.path.join(chromium_src_dir, build_path)): 1499 | args_path = os.path.join(chromium_src_dir, build_path, 'args.gn') 1500 | msg(args_path + ' contents:\n' + read_file(args_path)) 1501 | 1502 | run(command + build_path + ' cef_sandbox', chromium_src_dir, depot_tools_dir, 1503 | os.path.join(download_dir, 'build-%s-debug-sandbox.log' % (cef_branch)) \ 1504 | if options.buildlogfile else None) 1505 | 1506 | # Make a CEF Release build. 1507 | if not options.noreleasebuild: 1508 | build_path = os.path.join('out', get_build_directory_name(False)) 1509 | args_path = os.path.join(chromium_src_dir, build_path, 'args.gn') 1510 | msg(args_path + ' contents:\n' + read_file(args_path)) 1511 | 1512 | run(command + build_path + target, chromium_src_dir, depot_tools_dir, 1513 | os.path.join(download_dir, 'build-%s-release.log' % (cef_branch)) \ 1514 | if options.buildlogfile else None) 1515 | 1516 | if platform in sandbox_lib_platforms: 1517 | # Make the separate cef_sandbox build when GN is_official_build=true. 1518 | build_path += '_sandbox' 1519 | if os.path.exists(os.path.join(chromium_src_dir, build_path)): 1520 | args_path = os.path.join(chromium_src_dir, build_path, 'args.gn') 1521 | msg(args_path + ' contents:\n' + read_file(args_path)) 1522 | 1523 | run(command + build_path + ' cef_sandbox', chromium_src_dir, depot_tools_dir, 1524 | os.path.join(download_dir, 'build-%s-release-sandbox.log' % (cef_branch)) \ 1525 | if options.buildlogfile else None) 1526 | 1527 | elif not options.nobuild: 1528 | msg('Not building. The source hashes have not changed and ' + 1529 | 'the output folder "%s" already exists' % (out_src_dir)) 1530 | 1531 | ## 1532 | # Run CEF tests. 
1533 | ## 1534 | 1535 | if options.runtests: 1536 | if platform == 'windows': 1537 | test_exe = '%s.exe' % options.testtarget 1538 | elif platform == 'mac': 1539 | test_exe = '%s.app/Contents/MacOS/%s' % (options.testtarget, 1540 | options.testtarget) 1541 | elif platform == 'linux': 1542 | test_exe = options.testtarget 1543 | 1544 | test_prefix = options.testprefix 1545 | if len(test_prefix) > 0: 1546 | test_prefix += ' ' 1547 | 1548 | test_args = options.testargs 1549 | if len(test_args) > 0: 1550 | test_args = ' ' + test_args 1551 | 1552 | if not options.nodebugtests: 1553 | build_path = os.path.join(out_src_dir, get_build_directory_name(True)) 1554 | test_path = os.path.join(build_path, test_exe) 1555 | if os.path.exists(test_path): 1556 | run(test_prefix + test_path + test_args, build_path, depot_tools_dir) 1557 | else: 1558 | msg('Not running debug tests. Missing executable: %s' % test_path) 1559 | 1560 | if not options.noreleasetests: 1561 | build_path = os.path.join(out_src_dir, get_build_directory_name(False)) 1562 | test_path = os.path.join(build_path, test_exe) 1563 | if os.path.exists(test_path): 1564 | run(test_prefix + test_path + test_args, build_path, depot_tools_dir) 1565 | else: 1566 | msg('Not running release tests. Missing executable: %s' % test_path) 1567 | 1568 | ## 1569 | # Create the CEF binary distribution. 1570 | ## 1571 | 1572 | if not options.nodistrib and (chromium_checkout_changed or \ 1573 | cef_checkout_changed or options.forcedistrib): 1574 | if not options.forceclean and options.cleanartifacts: 1575 | # Clean the artifacts output directory. 1576 | artifacts_path = os.path.join(cef_src_dir, 'binary_distrib') 1577 | delete_directory(artifacts_path) 1578 | 1579 | # Determine the requested distribution types. 
1580 | distrib_types = [] 1581 | if options.minimaldistribonly: 1582 | distrib_types.append('minimal') 1583 | elif options.clientdistribonly: 1584 | distrib_types.append('client') 1585 | elif options.sandboxdistribonly: 1586 | distrib_types.append('sandbox') 1587 | else: 1588 | distrib_types.append('standard') 1589 | if options.minimaldistrib: 1590 | distrib_types.append('minimal') 1591 | if options.clientdistrib: 1592 | distrib_types.append('client') 1593 | if options.sandboxdistrib: 1594 | distrib_types.append('sandbox') 1595 | 1596 | cef_tools_dir = os.path.join(cef_src_dir, 'tools') 1597 | 1598 | # Create the requested distribution types. 1599 | first_type = True 1600 | for type in distrib_types: 1601 | path = '%s make_distrib.py --output-dir=../binary_distrib/' % python_exe 1602 | 1603 | if options.nodebugbuild or options.noreleasebuild or type != 'standard': 1604 | path += ' --allow-partial' 1605 | path = path + ' --ninja-build' 1606 | if options.x64build: 1607 | path += ' --x64-build' 1608 | elif options.armbuild: 1609 | path += ' --arm-build' 1610 | elif options.arm64build: 1611 | path += ' --arm64-build' 1612 | 1613 | if type == 'minimal': 1614 | path += ' --minimal' 1615 | elif type == 'client': 1616 | path += ' --client' 1617 | elif type == 'sandbox': 1618 | path += ' --sandbox' 1619 | 1620 | if first_type: 1621 | if options.nodistribdocs: 1622 | path += ' --no-docs' 1623 | if options.nodistribarchive: 1624 | path += ' --no-archive' 1625 | first_type = False 1626 | else: 1627 | # Don't create the symbol archives or documentation more than once. 1628 | path += ' --no-symbols --no-docs' 1629 | 1630 | # Override the subdirectory name of binary_distrib if the caller requested. 1631 | if options.distribsubdir != '': 1632 | path += ' --distrib-subdir=' + options.distribsubdir 1633 | if options.distribsubdirsuffix != '': 1634 | path += ' --distrib-subdir-suffix=' + options.distribsubdirsuffix 1635 | 1636 | # Create the distribution. 
1637 | run(path, cef_tools_dir, depot_tools_dir) 1638 | -------------------------------------------------------------------------------- /az_create.ps1: -------------------------------------------------------------------------------- 1 | [CmdletBinding()] 2 | Param( 3 | [Parameter(Mandatory=$true)] 4 | [PSCredential] $admin_creds, 5 | [String] $shutdown_email, 6 | [String] $RESOURCE_GROUP="CEFTest", 7 | [String] $LOCATION="West US 2", 8 | [String] $MACHINE_SIZE="Standard_F32s_v2", 9 | [String] $SHUTDOWN_TIME="23:30", 10 | [String] $RANDOM_STR="" 11 | 12 | ) 13 | $WorkingDir = split-path -parent $MyInvocation.MyCommand.Definition; 14 | . (Join-Path $WorkingDir 'functions.ps1') 15 | 16 | Set-StrictMode -version latest 17 | $ErrorActionPreference = "Stop"; 18 | $rand_str = $RANDOM_STR; 19 | if (! $rand_str){ 20 | $rand_str = -join ((97..122) | Get-Random -Count 5 | % {[char]$_}); 21 | } 22 | $VAULT_NAME = "CEFVault-" + $rand_str; 23 | $SECRET_NAME="CEFPSCertSecret" 24 | $CERT_PASS="dummy" 25 | $SHUTDOWN_TIMEZONE="Pacific Standard Time"; 26 | $DIAG_STORAGE_ACT="estdiag86" + $rand_str; 27 | 28 | 29 | try{ 30 | 31 | Write-Host "RANDOM_STR FOR THIS SESSION: $rand_str" 32 | $CERT_PASS_SEC=ConvertTo-SecureString -AsPlainText -Force $CERT_PASS 33 | $cred = $admin_creds 34 | #Connect-AzureRmAccount 35 | #Set-AzureRmContext -SubscriptionName $SUBSCRIPTION 36 | 37 | #Create or check for existing resource group 38 | $resourceGroup = Get-AzureRmResourceGroup -Name $RESOURCE_GROUP -ErrorAction SilentlyContinue 39 | if(!$resourceGroup) 40 | { 41 | Write-Host "Resource group '$RESOURCE_GROUP' does not exist. 
To create a new resource group, please enter a location."; 42 | if(!$LOCATION) { 43 | $LOCATION = Read-Host "resourceGroupLocation"; 44 | } 45 | Write-Host "Creating resource group '$RESOURCE_GROUP' in location '$LOCATION'"; 46 | New-AzureRmResourceGroup -Name $RESOURCE_GROUP -Location $LOCATION | Out-Null; 47 | } 48 | else{ 49 | Write-Host "Using existing resource group '$RESOURCE_GROUP'"; 50 | } 51 | 52 | 53 | $vault = Get-AzureRmKeyVault -VaultName $VAULT_NAME -ErrorAction SilentlyContinue 54 | if (! $vault){ 55 | Write-Host "Creating key vault to store remote powershell certificate in: $VAULT_NAME" 56 | New-AzureRmKeyVault -VaultName $VAULT_NAME -ResourceGroupName $RESOURCE_GROUP -Location $LOCATION -EnabledForDeployment -EnabledForTemplateDeployment | Out-Null 57 | $vault = Get-AzureRmKeyVault -VaultName $VAULT_NAME 58 | }else{ 59 | Write-Host "Vault already exists not re-creating" 60 | } 61 | $certificateName = "CEFRemoteCert" 62 | $secretURL = (Get-AzureKeyVaultSecret -VaultName $VAULT_NAME -Name $SECRET_NAME -ErrorAction SilentlyContinue) 63 | if (! 
$secretURL){ 64 | Write-Host "Creating remote PS certificate" 65 | $thumbprint = (New-SelfSignedCertificate -DnsName $certificateName -CertStoreLocation Cert:\CurrentUser\My -KeySpec KeyExchange).Thumbprint 66 | $cert = (Get-ChildItem -Path cert:\CurrentUser\My\$thumbprint) 67 | $fileName = ".\$certificateName.pfx" 68 | 69 | Export-PfxCertificate -Cert $cert -FilePath $fileName -Password $CERT_PASS_SEC 70 | #not sure why we can't just call Remove-Item on $cert but does not work 71 | Get-ChildItem -Path cert:\CurrentUser\My\$thumbprint | Remove-Item 72 | $fileContentBytes = Get-Content $fileName -Encoding Byte 73 | $fileContentEncoded = [System.Convert]::ToBase64String($fileContentBytes) 74 | 75 | $jsonObject = @" 76 | { 77 | "data": "$filecontentencoded", 78 | "dataType" :"pfx", 79 | "password": "$CERT_PASS" 80 | } 81 | "@ 82 | 83 | $jsonObjectBytes = [System.Text.Encoding]::UTF8.GetBytes($jsonObject) 84 | $jsonEncoded = [System.Convert]::ToBase64String($jsonObjectBytes) 85 | 86 | $secret = ConvertTo-SecureString -String $jsonEncoded -AsPlainText -Force 87 | Write-Host "Going to store certificate in vault" 88 | Set-AzureKeyVaultSecret -VaultName $VAULT_NAME -Name $SECRET_NAME -SecretValue $secret | Out-Null 89 | $secretURL = (Get-AzureKeyVaultSecret -VaultName $VAULT_NAME -Name $SECRET_NAME).Id 90 | 91 | }else{ 92 | Write-Host "Secure storage for cert already exists reusing" 93 | $secretURL = $secretURL.Id; 94 | } 95 | 96 | $json = Get-Content 'AzureTemplateParams.json' | Out-String | ConvertFrom-Json 97 | 98 | $hashtable = @{} 99 | $json.parameters.PSObject.Properties | Foreach { $hashtable[$_.Name] = $_.Value.value } 100 | if (! 
$shutdown_email){ 101 | $hashtable.autoShutdownStatus = $hashtable.autoShutdownNotificationStatus = "Disabled"; 102 | } 103 | $hashtable.autoShutdownNotificationEmail = $shutdown_email; 104 | $hashtable.PsRemoteSecretVaultID = $vault.ResourceID; 105 | $hashtable.PsRemoteSecretUrl = $secretURL; 106 | $hashtable.adminUsername = $cred.UserName; 107 | $hashtable.adminPassword = $cred.Password; 108 | $hashtable.location = $LOCATION; 109 | $hashtable.diagnosticsStorageAccountName = $DIAG_STORAGE_ACT; 110 | $hashtable.diagnosticsStorageAccountId = "Microsoft.Storage/storageAccounts/" + $DIAG_STORAGE_ACT; 111 | $hashtable.virtualMachineSize = $MACHINE_SIZE; 112 | $hashtable.autoShutdownTimeZone = $SHUTDOWN_TIMEZONE; 113 | $hashtable.autoShutdownTime = $SHUTDOWN_TIME; 114 | $resourceProviders = @("microsoft.network","microsoft.compute","microsoft.storage","microsoft.devtestlab"); 115 | Function RegisterRP { 116 | Param( 117 | [string]$ResourceProviderNamespace 118 | ) 119 | 120 | Write-Host "Registering resource provider '$ResourceProviderNamespace'"; 121 | Register-AzureRmResourceProvider -ProviderNamespace $ResourceProviderNamespace | Out-Null; 122 | } 123 | 124 | if($resourceProviders.length) { 125 | Write-Host "Registering resource providers"; 126 | foreach($resourceProvider in $resourceProviders) { 127 | RegisterRP($resourceProvider); 128 | } 129 | } 130 | 131 | 132 | # Start the deployment 133 | Write-Host "Starting deployment..."; 134 | New-AzureRmResourceGroupDeployment -ResourceGroupName $RESOURCE_GROUP -TemplateParameterObject $hashtable -TemplateFile 'AzureTemplateFile.json' | Out-Null; 135 | $vm = Get-AzureRmVM -Name "CefTestVM" -ResourceGroupName $RESOURCE_GROUP 136 | $ip =Get-AzureRmPublicIpAddress -Name "CefTestVM-ip" -ResourceGroupName $RESOURCE_GROUP 137 | Write-Host "Public IP: " $ip.IpAddress 138 | 139 | }catch{ 140 | Write-Host -Foreground Yellow "If you get an exception about invalid template make sure you have your quotas high enough to support 
whatever size machine you are creating. Otherwise use Get-AzureRMLog -DetailedOutput -CorrelationId xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx with the id from the exception to see the issue"; 141 | WriteException $_; 142 | } -------------------------------------------------------------------------------- /build.ps1: -------------------------------------------------------------------------------- 1 | [CmdletBinding()] 2 | Param( 3 | [Switch] $NoSkip, 4 | [Switch] $NoMemoryWarn, 5 | [Switch] $NoVS2019PatchCopy 6 | ) 7 | $WorkingDir = split-path -parent $MyInvocation.MyCommand.Definition; 8 | . (Join-Path $WorkingDir 'functions.ps1') 9 | #Always read the source file first incase of a new variable. 10 | . (Join-Path $WorkingDir "versions_src.ps1") 11 | #user overrides 12 | if (Test-Path ./versions.ps1 -PathType Leaf){ 13 | . (Join-Path $WorkingDir "versions.ps1") 14 | } 15 | Set-StrictMode -version latest; 16 | $ErrorActionPreference = "Stop"; 17 | $ORIGINAL_WORKING_DIR = Get-Location; 18 | try{ 19 | if (-not $VAR_CEF_BUILD_MOUNT_VOL_NAME){ 20 | $VAR_CEF_BUILD_MOUNT_VOL_NAME = "cefbuild_" + -join ((97..122) | Get-Random -Count 5 | % {[char]$_}); 21 | } 22 | 23 | Write-Host -Foreground Green "Will use local volume/build name: '$VAR_CEF_BUILD_MOUNT_VOL_NAME' if not empty will resume cef build in there set `$VAR_CEF_BUILD_MOUNT_VOL_NAME in versions.ps1 to this value to resume" 24 | 25 | $redirect_output = $false; 26 | $PSSenderInfo = Get-Variable -name "PSSenderInfo" -ErrorAction SilentlyContinue; 27 | if ($PSSenderInfo){ 28 | $redirect_output = $true; 29 | Write-Host -Foreground Yellow "Warning when running this build command in a remote powershell session you will not see realtime output generated by commands run. This is due to a limitation in remote powershell. You can work around this by running the build.ps1 using remote desktop instead. In general it is only helpful to see the output if there is an error. 
The stdout and stderr will be captured and printed for remote sessions but only after a command finishes." 30 | } 31 | $global:PERF_FILE = Join-Path $WorkingDir "perf.log"; 32 | if ((Get-MpPreference).DisableRealtimeMonitoring -eq $false){ #as admin you can disable with: Set-MpPreference -DisableRealtimeMonitoring $true 33 | Write-Host Warning, windows defender is enabled it will slow things down. -Foreground Red 34 | } 35 | if (! $NoMemoryWarn){ 36 | $page_files = Get-CimInstance Win32_PageFileSetting; 37 | $os = Get-Ciminstance Win32_OperatingSystem; 38 | $min_gigs = 27; 39 | $warning = "linking may take around $min_gigs during linking"; 40 | if ($VAR_DUAL_BUILD -eq "1"){ 41 | $warning="dual build mode is enabled and may use 50+ GB if both releases link at once."; 42 | $min_gigs = 50; 43 | } 44 | if (($os.FreePhysicalMemory/1mb + $os.FreeSpaceInPagingFiles/1mb) -lt $min_gigs) { #if the memory isn't yet avail with the page files and they have a set size lets try to compute it that way 45 | $total_memory_gb = $os.FreePhysicalMemory/1mb; 46 | foreach ($file in $page_files){ 47 | $total_memory_gb += $file.MaximumSize/1kb; #is zero if system managed, then we really don't know how big it could be. 48 | } 49 | if ($total_memory_gb -lt $min_gigs){ 50 | if (! (confirm("Warning $warning. Your machine may not have enough memory, make sure your page files are working and can grow to allow it. (Disable this warning with -NoMemoryWarn flag). Do you want to proceed?"))){ 51 | exit 1; 52 | } 53 | 54 | } 55 | } 56 | } 57 | if (! $NoVS2019PatchCopy -and $VAR_CHROME_BRANCH -lt 4103 ){ 58 | if ( (Test-Path "cef_patch_find_vs2019_tools.diff") -eq $false){ 59 | Copy-Item sample_patches/cef_patch_find_vs2019_tools.diff -Destination . 
60 | } 61 | } 62 | 63 | echo *.zip | out .dockerignore 64 | TimerNow("Starting"); 65 | RunProc -proc "docker" -redirect_output:$redirect_output -opts "pull $VAR_BASE_DOCKER_FILE"; 66 | TimerNow("Pull base file"); 67 | RunProc -proc "docker" -redirect_output:$redirect_output -opts "build $VAR_HYPERV_MEMORY_ADD --build-arg BASE_DOCKER_FILE=`"$VAR_BASE_DOCKER_FILE`" -f Dockerfile_vs -t vs ." 68 | TimerNow("VSBuild"); 69 | 70 | if ($VAR_CEF_USE_BINARY_PATH -and $VAR_CEF_USE_BINARY_PATH -ne ""){ 71 | $docker_file_name="Dockerfile_cef_create_from_binaries"; 72 | 73 | $good_hash = Get-FileHash $docker_file_name; 74 | $new_path = Join-Path $VAR_CEF_USE_BINARY_PATH $docker_file_name; 75 | if ( (Test-Path $new_path -PathType Leaf) -eq $false -or (Get-FileHash $new_path).Hash -ne $good_hash.Hash){ 76 | Copy $docker_file_name $VAR_CEF_USE_BINARY_PATH; 77 | } 78 | Set-Location -Path $VAR_CEF_USE_BINARY_PATH; 79 | RunProc -proc "docker" -redirect_output:$redirect_output -opts "build $VAR_HYPERV_MEMORY_ADD --build-arg BINARY_EXT=`"$VAR_CEF_BINARY_EXT`" -f $docker_file_name -t cef ." 80 | Set-Location $ORIGINAL_WORKING_DIR; 81 | } else { 82 | RunProc -proc "docker" -redirect_output:$redirect_output -opts "build $VAR_HYPERV_MEMORY_ADD --build-arg CEF_SAVE_SOURCES=`"$VAR_CEF_SAVE_SOURCES`" --build-arg ARCHES=`"$VAR_BUILD_ARCHES`" --build-arg BINARY_EXT=`"$VAR_CEF_BINARY_EXT`" --build-arg GN_ARGUMENTS=`"$VAR_GN_ARGUMENTS`" --build-arg DUAL_BUILD=`"$VAR_DUAL_BUILD`" --build-arg GN_DEFINES=`"$VAR_GN_DEFINES`" --build-arg GYP_DEFINES=`"$VAR_GYP_DEFINES`" --build-arg CHROME_BRANCH=`"$VAR_CHROME_BRANCH`" -f Dockerfile_cef -t cef_build_env ." 
83 | $exit_code = RunProc -errok -proc "docker" -opts "tag i_$($VAR_CEF_BUILD_MOUNT_VOL_NAME) cef"; #if this fails we know it didn't build correctly and to continue 84 | if ($exit_code -ne 0){ 85 | RunProc -errok -proc "docker" -opts "rm c_$($VAR_CEF_BUILD_MOUNT_VOL_NAME)_tmp" 86 | RunProc -proc "docker" -redirect_output:$redirect_output -opts "run $VAR_HYPERV_MEMORY_ADD -v $($VAR_CEF_BUILD_MOUNT_VOL_NAME):C:/code/chromium_git --name c_$($VAR_CEF_BUILD_MOUNT_VOL_NAME)_tmp cef_build_env" 87 | 88 | $exit_code = RunProc -errok -proc "docker" -opts "commit c_$($VAR_CEF_BUILD_MOUNT_VOL_NAME)_tmp i_$($VAR_CEF_BUILD_MOUNT_VOL_NAME)"; 89 | $exit_code = RunProc -errok -proc "docker" -opts "tag i_$($VAR_CEF_BUILD_MOUNT_VOL_NAME) cef"; 90 | } 91 | } 92 | TimerNow("CEF Build"); 93 | if (! $VAR_CEF_BUILD_ONLY){ 94 | RunProc -proc "docker" -redirect_output:$redirect_output -opts "build $VAR_HYPERV_MEMORY_ADD --build-arg ARCHES=`"$VAR_BUILD_ARCHES`" --build-arg BINARY_EXT=`"$VAR_CEF_BINARY_EXT`" --build-arg CEFSHARP_VERSION=`"$VAR_CEFSHARP_VERSION`" -f Dockerfile_cef_binary -t cef_binary ." 95 | TimerNow("CEF Binary compile"); 96 | RunProc -proc "docker" -redirect_output:$redirect_output -opts "build $VAR_HYPERV_MEMORY_ADD --build-arg CEFSHARP_BRANCH=`"$VAR_CEFSHARP_BRANCH`" --build-arg CEFSHARP_VERSION=`"$VAR_CEFSHARP_VERSION`" --build-arg CEF_VERSION_STR=`"$VAR_CEF_VERSION_STR`" --build-arg ARCHES=`"$VAR_BUILD_ARCHES`" --build-arg CHROME_BRANCH=`"$VAR_CHROME_BRANCH`" -f Dockerfile_cefsharp -t cefsharp ." 97 | TimerNow("CEFSharp compile"); 98 | RunProc -proc "docker" -opts "rm cefsharp" -errok; 99 | Start-Sleep -s 6; #sometimes we are too fast, file in use error 100 | RunProc -proc "docker" -redirect_output:$redirect_output -opts "run --name cefsharp cefsharp cmd /C echo CopyVer" 101 | RunProc -proc "docker" -redirect_output:$redirect_output -opts "cp cefsharp:/packages_cefsharp.zip ." 
102 | TimerNow("CEFSharp copy files locally"); 103 | }else{ 104 | docker rm cef; 105 | Start-Sleep -s 3; #sometimes we are too fast, file in use error 106 | RunProc -proc "docker" -redirect_output:$redirect_output -opts "run --name cef cef powershell Compress-Archive -Path C:/code/binaries/*.zip -CompressionLevel Fastest -DestinationPath /packages_cef" 107 | RunProc -proc "docker" -redirect_output:$redirect_output -opts "cp cef:/packages_cef.zip ." 108 | 109 | TimerNow("CEF copy files locally"); 110 | } 111 | if ($VAR_REMOVE_VOLUME_ON_SUCCESSFUL_BUILD){ 112 | RunProc --errok -proc "docker" -opts "volumes rm $VAR_CEF_BUILD_MOUNT_VOL_NAME"; 113 | } 114 | Write-Host -ForegroundColor Green Build completed successfully! See $global:PERF_FILE for timing for each step. 115 | }catch{ 116 | WriteException $_; 117 | } 118 | Set-Location $ORIGINAL_WORKING_DIR; -------------------------------------------------------------------------------- /cef_build.ps1: -------------------------------------------------------------------------------- 1 | Set-StrictMode -version latest; 2 | $ErrorActionPreference = "Stop"; 3 | $WorkingDir = split-path -parent $MyInvocation.MyCommand.Definition; 4 | . (Join-Path $WorkingDir 'functions.ps1') 5 | 6 | if (! 
$env:ARCHES){ 7 | $env:ARCHES = "x86 x64 amd64"; 8 | } 9 | $ARCHES = $env:ARCHES.Split(" "); 10 | $ARCHES_TO_BITKEY = @{}; 11 | foreach ($arch in $ARCHES) { 12 | $arch_bit = $arch; 13 | if ($arch_bit.StartsWith("x")) { 14 | $arch_bit = $arch.Substring(1); 15 | if ($arch_bit -eq "86"){ 16 | $arch_bit = "32"; 17 | } 18 | $ARCHES_TO_BITKEY[$arch] = $arch_bit; 19 | } 20 | } 21 | 22 | 23 | Function CopyBinaries{ 24 | foreach ($arch in $ARCHES) { 25 | $arch_bit = $ARCHES_TO_BITKEY[$arch]; 26 | if (@(dir -Filter "cef_binary_*_windows$($ARCHES_TO_BITKEY[$arch]).$env:BINARY_EXT" "c:/code/chromium_git/chromium/src/cef/binary_distrib/").Count -ne 1){ 27 | throw "Not able to find win$($ARCHES_TO_BITKEY[$arch]) file as expected"; #fixed: wrapped the lookup in $() — without it PowerShell interpolates the whole hashtable and appends a literal [x64]; the -Filter on the line above already uses the correct subexpression form 28 | } 29 | } 30 | 31 | mkdir c:/code/binaries -Force; 32 | copy-item ("c:/code/chromium_git/chromium/src/cef/binary_distrib/*." + $env:BINARY_EXT) -destination C:/code/binaries; 33 | Set-Location -Path /; 34 | if ($env:CEF_SAVE_SOURCES -eq "1"){ 35 | RunProc -errok -proc ($env:ProgramFiles + "\\7-Zip\\7z.exe") -opts "a -aoa -y -mx=1 -r -tzip c:\code\sources.zip c:/code/chromium_git/chromium"; 36 | } 37 | echo $null >> c:/code/chromium_git/done 38 | } 39 | 40 | $build_args_add = ""; 41 | if (!
$env:BINARY_EXT){ 42 | $env:BINARY_EXT="zip"; 43 | } 44 | if ($env:BINARY_EXT -eq "7z"){ 45 | $env:CEF_COMMAND_7ZIP="C:/Program Files/7-Zip/7z.exe"; 46 | } 47 | 48 | 49 | $env:CEF_ARCHIVE_FORMAT = $env:BINARY_EXT; 50 | if ($env:DUAL_BUILD -eq "1" -and $env:CHROME_BRANCH -lt 3396){ #newer builds can take a good bit more time linking just let run with double the proc count 51 | $cores = ([int]$env:NUMBER_OF_PROCESSORS) + 2; #ninja defaults to number of procs + 2 52 | if ($cores % 2 -eq 1){ 53 | $cores +=1; 54 | } 55 | $build_args_add = "-j " + ($cores/2); 56 | } 57 | if (Test-Path c:/code/chromium_git/done -PathType Leaf){ 58 | Write-Host "Already Done just copying binaries"; 59 | CopyBinaries; 60 | exit 0; 61 | } 62 | 63 | 64 | Function RunBuild{ 65 | [CmdletBinding()] 66 | Param($build_args_add,$version) 67 | return RunProc -verbose_mode "host" -proc "c:/code/depot_tools/ninja.exe" -opts "$build_args_add -C out/Release_GN_$version cefclient" -no_wait; 68 | } 69 | 70 | if ($Env:SHALLOW -eq "1"){ 71 | $chrome_data = Invoke-RestMethod -Uri 'https://omahaproxy.appspot.com/all.json' 72 | $win_data = $chrome_data | Where { $_.os -eq "win64"} | Select -First 1 73 | $branch_data = $win_data.versions | Where {$_.true_branch -eq $env:CHROME_BRANCH} | Select -First 1 74 | $latest_tag = $branch_data.version 75 | 76 | if (! (Test-Path /code/chromium_git/cef/.git)){ #we will manually clone this out first time or wont be on right branch 77 | Runproc -proc "c:/code/depot_tools/git.bat" -opts "clone --depth 1 --branch $env:CHROME_BRANCH https://bitbucket.org/chromiumembedded/cef.git c:/code/chromium_git/cef"; #as shallow fails if they don't speify the branch so we will do it first for them 78 | } 79 | if (! 
(Test-Path /code/chromium_git/chromium/src/.git)){ #as we now use no update for source we need to check it out 80 | Runproc -proc "c:/code/depot_tools/git.bat" -opts "-c core.deltaBaseCacheLimit=2g clone --depth=1 --branch $latest_tag --progress https://chromium.googlesource.com/chromium/src.git c:/code/chromium_git/chromium/src"; 81 | } 82 | } 83 | 84 | # --no-update can't do no update for first time 85 | RunProc -proc "c:/code/depot_tools/python.bat" -opts "c:/code/automate/automate-git.py --download-dir=c:/code/chromium_git --branch=$env:CHROME_BRANCH --no-build --depot-tools-dir=c:/code/depot_tools --no-debug-build --no-distrib --no-depot-tools-update"; #not sure why allowed errok before 86 | Set-Location -Path c:/code/chromium_git/cef; 87 | if (! (Test-Path /code/chromium_git/already_patched -PathType Leaf)){ 88 | copy c:/code/*.ps1 . 89 | copy c:/code/*.diff . 90 | ./cef_patch.ps1 91 | if ($env:GN_DEFINES -like "*proprietary_codecs*" -and $env:CHROME_BRANCH -lt 3396){ #fixed: GN_DEFINES is a space-separated string; -contains treats a scalar as a one-element collection so the patch only applied when GN_DEFINES was exactly "proprietary_codecs" 92 | #I was unable to generate a patch that worked across branches so manually patching the file per: https://bitbucket.org/chromiumembedded/cef/issues/2352/windows-3239-build-fails-due-to-missing 93 | #this is only needed for versions < 3396 94 | $str = [system.io.file]::ReadAllText("c:/code/chromium_git/cef/BUILD.gn"); 95 | $str = $str -replace "deps = \[\s+`"//components/crash/core/common`",", "deps = [`n  `"//components/crash/core/common`",`n  `"//media:media_features`","; 96 | $str | Out-File "c:/code/chromium_git//cef/BUILD.gn" -Encoding ASCII; 97 | } 98 | Set-Location -Path c:/code/chromium_git/chromium/src/cef 99 | RunProc -proc "c:/code/chromium_git/cef/cef_create_projects.bat" -errok -opts ""; 100 | "1" > /code/chromium_git/already_patched 101 | } 102 | Set-Location -Path c:/code/chromium_git/chromium/src; 103 | 104 | # track the build procs and build failures per arch 105 | $build_procs = @{} 106 | $build_fails = @{} 107 | foreach ($arch in $ARCHES) { 108 | $build_fails[$arch] = -1;
109 | } 110 | 111 | $MAX_FAILURES=20; 112 | 113 | while ($true){ 114 | $retry=$false; 115 | foreach ($arch in $ARCHES) { 116 | if ( ! $build_procs.ContainsKey($arch) -or ($build_procs[$arch].HasExited -and $build_procs[$arch].ExitCode -ne 0 -and $build_fails[$arch] -lt $MAX_FAILURES)){ 117 | $build_fails[$arch]++;#starts at -1 so ok to increment first no matter the result;) 118 | $build_procs[$arch] = RunBuild -build_args_add $build_args_add -version $arch; 119 | if ($env:DUAL_BUILD -ne "1"){ 120 | $build_procs[$arch].WaitForExit(); 121 | } 122 | $retry=$true; 123 | } 124 | } 125 | 126 | $all_exited=$true; 127 | foreach ($arch in $ARCHES){ 128 | if (! $build_procs[$arch].HasExited){ 129 | $all_exited=$false; 130 | } 131 | } 132 | if ($all_exited -and ! $retry){ 133 | break; 134 | } 135 | Start-Sleep -s 15 136 | } 137 | foreach ($arch in $ARCHES){ 138 | $build_procs[$arch].WaitForExit(); 139 | if ($build_procs[$arch].ExitCode -ne 0){ 140 | throw "$arch build failed with $($build_procs[$arch].ExitCode)"; 141 | } 142 | } 143 | if ($env:CHROME_BRANCH -ge 4406) { #need to manually build sandbox lib now 144 | Set-Location -Path C:/code/chromium_git/chromium/src/cef; 145 | RunProc -proc "c:/code/depot_tools/python.bat" -opts "tools\gn_args.py"; 146 | 147 | Set-Location -Path C:/code/chromium_git/chromium/src; 148 | foreach ($arch in $ARCHES){ 149 | 150 | RunProc -proc "c:/code/depot_tools/gn.bat" -opts "gen out/Release_GN_$($arch)_sandbox"; 151 | RunProc -verbose_mode "host" -proc "c:/code/depot_tools/ninja.exe" -opts "-C out/Release_GN_$($arch)_sandbox cef_sandbox"; 152 | } 153 | } 154 | 155 | Set-Location -Path C:/code/chromium_git/chromium/src/cef/tools/; 156 | foreach ($arch in $ARCHES){ 157 | $distrib_add = "--$($arch)-build"; 158 | if ($arch -eq "x86"){ 159 | $distrib_add = ""; 160 | } 161 | RunProc -proc "C:/code/chromium_git/chromium/src/cef/tools/make_distrib.bat" -opts "--ninja-build --allow-partial $distrib_add"; 162 | } 163 | 164 | CopyBinaries; 165 | 
#Remove-Item -Recurse -Force c:/code/chromium_git/chromium; #no longer removing source by default as stored in a volume now -------------------------------------------------------------------------------- /cef_patch.ps1: -------------------------------------------------------------------------------- 1 | Set-StrictMode -version latest; 2 | $ErrorActionPreference = "Stop"; 3 | $WorkingDir = split-path -parent $MyInvocation.MyCommand.Definition; 4 | . (Join-Path $WorkingDir 'functions.ps1') 5 | 6 | $patches = dir . -Filter cef_patch_*.diff; 7 | foreach($patch in $patches){ 8 | $name = $patch.Name; 9 | if ($name -ne "cef_patch_placeholder.diff"){ 10 | RunProc -proc "git" -opts "apply $name"; 11 | Write-Host "Applied patch $name" -Foreground Magenta; 12 | } 13 | } -------------------------------------------------------------------------------- /cef_patch_placeholder.diff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mitchcapper/CefSharpDockerfiles/47a9b61de232ba2465c3b739ae71b4110282462c/cef_patch_placeholder.diff -------------------------------------------------------------------------------- /cefsharp_patch.ps1: -------------------------------------------------------------------------------- 1 | Set-StrictMode -version latest; 2 | $ErrorActionPreference = "Stop"; 3 | $WorkingDir = split-path -parent $MyInvocation.MyCommand.Definition; 4 | . (Join-Path $WorkingDir 'functions.ps1') 5 | 6 | $patches = dir . 
-Filter cefsharp_patch_*.diff; 7 | foreach($patch in $patches){ 8 | $name = $patch.Name; 9 | if ($name -ne "cefsharp_patch_placeholder.diff"){ 10 | RunProc -proc "git" -opts "apply $name"; 11 | Write-Host "Applied patch $name" -Foreground Magenta; 12 | } 13 | } -------------------------------------------------------------------------------- /cefsharp_patch_placeholder.diff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mitchcapper/CefSharpDockerfiles/47a9b61de232ba2465c3b739ae71b4110282462c/cefsharp_patch_placeholder.diff -------------------------------------------------------------------------------- /cefsharp_set_versions_and_restore.ps1: -------------------------------------------------------------------------------- 1 | Set-StrictMode -version latest; 2 | $ErrorActionPreference = "Stop"; 3 | 4 | Function UpdateProject($path,$original_version,$new_version){ 5 | $xml = [xml](Get-Content ($path)); 6 | Write-Host doing $path; 7 | $node = $xml.SelectSingleNode("//Project/Import/@Project"); 8 | $ns = new-object Xml.XmlNamespaceManager $xml.NameTable 9 | $ns.AddNamespace("msb", "http://schemas.microsoft.com/developer/msbuild/2003") 10 | $nodes = $xml.SelectNodes("//msb:Import",$ns); 11 | $changed = $false; 12 | foreach ($node in $nodes){ 13 | if ($node.HasAttribute("Project") -and $node.Project -like "*" + $original_version + "*"){ 14 | $node.Project = $node.Project -replace $original_version, $new_version; 15 | $changed=$true; 16 | } 17 | if ($node.HasAttribute("Condition") -and $node.Condition -like "*" + $original_version + "*"){ 18 | $node.Condition = $node.Condition -replace $original_version, $new_version; 19 | $changed=$true; 20 | } 21 | } 22 | if ($changed){ 23 | $xml.Save($path); 24 | Write-Host Updated $path; 25 | } 26 | } 27 | Function UpdatePackageConfig($path, [string[]]$CHECK_IDS){ 28 | 29 | 30 | if(-not (Test-Path $path)) { 31 | return; 32 | } 33 | Write-Host doing $path 34 | $xml = 
[xml](Get-Content ($path)); 35 | $changed=$false; 36 | foreach ($node_name in $CHECK_IDS){ 37 | $node = $xml.SelectSingleNode("//packages/package[@id='" + $node_name +"']/@version"); 38 | if ($node -and $node.value -ne $env:CEF_VERSION_STR){ 39 | $changed=$true; 40 | $node.value = $env:CEF_VERSION_STR; 41 | } 42 | } 43 | if ($changed){ 44 | $xml.Save($path); 45 | Write-Host Updated $path; 46 | } 47 | } 48 | 49 | 50 | $WorkingDir = split-path -parent $MyInvocation.MyCommand.Definition; 51 | if ($env:CEF_VERSION_STR -eq "auto"){ 52 | $name = (dir -Filter cef.redist.*.*.nupkg $env:PACKAGE_SOURCE)[0].Name; 53 | $name = ((($name -replace "cef.redist.x64.", "") -replace ".nupkg", "") -replace "cef.redist.x86.", "") -replace "cef.redist.arm64.", ""; 54 | $base_check = $env:CEFSHARP_VERSION.SubString(0, $env:CEFSHARP_VERSION.IndexOf('.')); 55 | if ($name -and $name.StartsWith($base_check + ".") ) { #with new version string format we will just make sure they are both starting with the same master version 56 | $env:CEF_VERSION_STR = $name; 57 | setx /M CEF_VERSION_STR $env:CEF_VERSION_STR; 58 | } 59 | } 60 | 61 | $CefSharpCorePackagesXml = [xml](Get-Content (Join-Path $WorkingDir 'CefSharp.BrowserSubprocess.Core\packages.CefSharp.BrowserSubprocess.Core.config')) 62 | $original_version = $CefSharpCorePackagesXml.SelectSingleNode("//packages/package[@id='cef.sdk']/@version").value; 63 | 64 | 65 | 66 | $nuget = Join-Path $WorkingDir ".\nuget\NuGet.exe" 67 | if(-not (Test-Path $nuget)) { 68 | Invoke-WebRequest 'https://dist.nuget.org/win-x86-commandline/latest/nuget.exe' -OutFile $nuget; 69 | } 70 | 71 | 72 | $sdk_path = [io.path]::combine($env:PACKAGE_SOURCE,"cef.sdk." 
+ $env:CEF_VERSION_STR + ".nupkg"); 73 | 74 | if(-not (Test-Path $sdk_path)) { 75 | throw "The sdk and redist packages should be in the $env:PACKAGE_SOURCE folder but $sdk_path is missing"; 76 | } 77 | #Check each subfolder for a packages.config, then check each for any of the 3 values 78 | $CHECK_IDS = @("cef.sdk","cef.redist.x64","cef.redist.x86","cef.redist.arm64"); 79 | $folders = dir -Directory; 80 | foreach ($folder in $folders){ 81 | $package_config_path = [io.path]::combine($WorkingDir,$folder,'Packages.config'); 82 | UpdatePackageConfig $package_config_path $CHECK_IDS; 83 | $package_config_path = [io.path]::combine($WorkingDir,$folder,"packages.${folder}.config"); 84 | UpdatePackageConfig $package_config_path $CHECK_IDS; 85 | $package_config_path = [io.path]::combine($WorkingDir,$folder,"packages.${folder}.netcore.config"); 86 | UpdatePackageConfig $package_config_path $CHECK_IDS; 87 | 88 | $projects = dir $folder -Filter *.*proj; 89 | foreach($project in $projects){ 90 | $project_path = [io.path]::combine($WorkingDir,$folder,$project ); 91 | UpdateProject $project_path $original_version $env:CEF_VERSION_STR; 92 | } 93 | } 94 | #Previously there was a bug in 63 where vs2017 was flagged as 2015 for the bin packages. 
95 | #$props_path = "CefSharp.props"; 96 | #$content = Get-Content $props_path; 97 | #$bad_str = "'16.0'`">2017"; 98 | #$good_str = "'16.0'`">2019"; 99 | #if ($content -like "*" + $bad_str + "*"){ 100 | #$content = $content -replace $bad_str , $good_str; 101 | #$content > $props_path; 102 | #Write-Host Updated $props_path; 103 | #} 104 | 105 | $args = "restore -source `"$env:PACKAGE_SOURCE`" -FallbackSource https://api.nuget.org/v3/index.json CefSharp3.sln"; 106 | $p = Start-Process -Wait -PassThru -FilePath $nuget -ArgumentList $args; 107 | if (($ret = $p.ExitCode) ) { 108 | $rethex = '{0:x}' -f $ret 109 | throw ("restore failed running '$nuget $args' with exit code 0x$rethex") 110 | }; 111 | -------------------------------------------------------------------------------- /daemon.json: -------------------------------------------------------------------------------- 1 | { 2 | "registry-mirrors": [], 3 | "insecure-registries": [], 4 | "debug": true, 5 | "experimental": false, 6 | "exec-opts": [ 7 | "isolation=process" 8 | ], 9 | "data-root": "d:/docker_data" 10 | } -------------------------------------------------------------------------------- /functions.ps1: -------------------------------------------------------------------------------- 1 | Set-StrictMode -version latest; 2 | $ErrorActionPreference = "Stop"; 3 | Function RunProc{ 4 | [CmdletBinding()] 5 | Param($proc,$opts, 6 | [switch] $errok, 7 | [switch] $dry_run, 8 | [switch] $redirect_output, 9 | [switch] $no_wait, 10 | [ValidateSet("verbose","host","none")] 11 | [String] $verbose_mode="host" 12 | ) 13 | if ($no_wait -and $redirect_output){ 14 | throw "Cannot do no_wait and redirect_output"; 15 | } 16 | if ($proc.ToUpper().EndsWith(".PL")){ 17 | $path = (C:\Windows\system32\where.exe $proc | Out-String).Trim() 18 | $opts = "$path $opts"; 19 | $proc="perl"; 20 | } 21 | $pinfo = New-Object System.Diagnostics.ProcessStartInfo; 22 | $pinfo.FileName = $proc; 23 | $working = Convert-Path .; 24 | $verbose_str = 
"Running: $proc $opts in $working"; 25 | if ($verbose_mode -eq "host"){ 26 | Write-Host $verbose_str -Foreground Green; 27 | }elseif($verbose_mode -eq "verbose"){ 28 | Write-Verbose $verbose_str; 29 | } 30 | $pinfo.WorkingDirectory = $working; 31 | $pinfo.UseShellExecute = $false; 32 | 33 | $pinfo.Arguments = $opts; 34 | $p = New-Object System.Diagnostics.Process; 35 | $p.StartInfo = $pinfo; 36 | $oStdOutEvent = $null; 37 | $oStdErrEvent = $null; 38 | $oStdOutBuilder = $null; 39 | $oStdErrBuilder = $null; 40 | if ($dry_run){ 41 | return $null; 42 | } 43 | 44 | if ($redirect_output){ 45 | $pinfo.RedirectStandardError = $true; 46 | $pinfo.RedirectStandardOutput = $true; 47 | $oStdOutBuilder = New-Object -TypeName System.Text.StringBuilder; 48 | $oStdErrBuilder = New-Object -TypeName System.Text.StringBuilder; 49 | $sScripBlock = { 50 | if (! [String]::IsNullOrWhitespace($EventArgs.Data)) { 51 | $Event.MessageData.AppendLine($EventArgs.Data); 52 | #Write-Host $EventArgs.Data; #not sure how big the buffer is for this 53 | #[console]::WriteLine($EventArgs.Data); #console.WriteLine does not work for remote powershell, but does display instant output 54 | } 55 | }; 56 | $oStdOutEvent = Register-ObjectEvent -InputObject $p -Action $sScripBlock -EventName 'OutputDataReceived' -MessageData $oStdOutBuilder; 57 | $oStdErrEvent = Register-ObjectEvent -InputObject $p -Action $sScripBlock -EventName 'ErrorDataReceived' -MessageData $oStdErrBuilder; 58 | } 59 | $p.Start() | Out-Null; 60 | if ($no_wait){ 61 | return $p; 62 | } 63 | if ($redirect_output){ 64 | $p.BeginOutputReadLine(); 65 | $p.BeginErrorReadLine(); 66 | } 67 | $p.WaitForExit(); 68 | 69 | if ($redirect_output){ 70 | Unregister-Event -SourceIdentifier $oStdOutEvent.Name; 71 | Unregister-Event -SourceIdentifier $oStdErrEvent.Name; 72 | $stdout = $oStdOutBuilder.ToString().Trim(); 73 | $stderr = $oStdErrBuilder.ToString().Trim(); 74 | if (! 
[String]::IsNullOrWhitespace($stdout)){ 75 | Write-Host $stdout; 76 | } 77 | if (! [String]::IsNullOrWhitespace($stderr)){ 78 | Write-Host $stderr; 79 | } 80 | } 81 | if ($p.ExitCode -ne 0 -and -not $errok){ 82 | throw "Process $proc $opts exited with non zero code: $($p.ExitCode) aborting!" ; 83 | } 84 | if ($errok){ 85 | return $p.ExitCode; 86 | } 87 | } 88 | function out { 89 | Param( 90 | [Parameter(Mandatory = $true, ParameterSetName = "ByPath", Position = 0)] 91 | $FilePath, 92 | [Parameter(Mandatory = $true,ValueFromPipeline=$true,ValueFromPipelineByPropertyName=$true, ParameterSetName = "ByPath", Position = 1)] 93 | $InputObject, 94 | [switch] $Append 95 | ) 96 | Begin { 97 | if (! $Append){ 98 | Out-File -FilePath $FilePath -Encoding ASCII; 99 | } 100 | } 101 | Process { 102 | Out-File -Append -InputObject $InputObject -FilePath $FilePath -Encoding ASCII 103 | } 104 | } 105 | Function confirm($str){ 106 | return $PSCmdlet.ShouldContinue($str, ""); 107 | } 108 | $global:last_time = Get-Date; 109 | $global:PERF_FILE=""; 110 | Function TimerNow($name){ 111 | $now = Get-Date; 112 | $diff = ($now - $global:last_time).TotalSeconds.ToString("0.0"); 113 | $str = "$(Get-Date) $name took $diff secs"; 114 | Write-Host $str -ForegroundColor Green; 115 | if ($PERF_FILE -ne ""){ 116 | $str | out -FilePath $PERF_FILE -Append; 117 | } 118 | $global:last_time = $now; 119 | } 120 | Function WriteException($exp){ 121 | write-host "Caught an exception:" -ForegroundColor Yellow -NoNewline 122 | write-host " $($exp.Exception.Message)" -ForegroundColor Red 123 | write-host "`tException Type: $($exp.Exception.GetType().FullName)" 124 | $stack = $exp.ScriptStackTrace; 125 | $stack = $stack.replace("`n","`n`t") 126 | write-host "`tStack Trace: $stack" 127 | if ($exp.Exception.InnerException){ 128 | write-host "`tInnerException:" -ForegroundColor Yellow -NoNewline 129 | write-host " $($exp.Exception.InnerException.Message)" -ForegroundColor Red 130 | } 131 | } 
-------------------------------------------------------------------------------- /mem_log.ps1: -------------------------------------------------------------------------------- 1 | $max = 0; 2 | $max_ws = 0; 3 | while($true){ 4 | $proc = Get-Process lld-Link -ErrorAction SilentlyContinue; 5 | if (! $proc){ #fixed: was "! proc" (missing $ sigil) — PowerShell parsed the bareword as a command invocation instead of testing the variable, breaking the Link fallback 6 | $proc = Get-Process Link -ErrorAction SilentlyContinue; 7 | } 8 | if ($proc){ 9 | $proc = $proc[0]; 10 | $memory = $proc.PM; 11 | $memory_ws = $proc.WS; 12 | if (! $memory){ 13 | $memory=0; 14 | } 15 | if (! $memory_ws){ 16 | $memory_ws=0; 17 | } 18 | $memory/=1024*1024*1024; 19 | if ($memory -gt $max){ 20 | $max = $memory; 21 | } 22 | $memory_ws/=1024*1024*1024; 23 | if ($memory_ws -gt $max_ws){ 24 | $max_ws = $memory_ws; 25 | } 26 | Write-Host $(Get-Date -Format u) $memory.ToString("0.00")G Max: $max.ToString("0.00")G WS: $memory_ws.ToString("0.00")G Max: $max_ws.ToString("0.00")G; 27 | Start-Sleep -Seconds 60; 28 | } 29 | } -------------------------------------------------------------------------------- /sample_patches/cef_patch_67_3396_pdfcrash_fix.diff: -------------------------------------------------------------------------------- 1 | diff --git a/libcef/browser/osr/render_widget_host_view_osr.cc b/libcef/browser/osr/render_widget_host_view_osr.cc 2 | index 167ebf0b..8d0a3bfc 100644 3 | --- a/libcef/browser/osr/render_widget_host_view_osr.cc 4 | +++ b/libcef/browser/osr/render_widget_host_view_osr.cc 5 | @@ -27,6 +27,7 @@ 6 | #include "content/browser/bad_message.h" 7 | #include "content/browser/compositor/image_transport_factory.h" 8 | #include "content/browser/frame_host/render_widget_host_view_guest.h" 9 | +#include "content/browser/renderer_host/cursor_manager.h" 10 | #include "content/browser/renderer_host/dip_util.h" 11 | #include "content/browser/renderer_host/render_widget_host_delegate.h" 12 | #include "content/browser/renderer_host/render_widget_host_impl.h" 13 | @@ -281,6 +282,8 @@ CefRenderWidgetHostViewOSR::CefRenderWidgetHostViewOSR( 14 | 
if (browser_impl_.get()) 15 | ResizeRootLayer(); 16 | 17 | + cursor_manager_.reset(new content::CursorManager(this)); 18 | + 19 | // Do this last because it may result in a call to SetNeedsBeginFrames. 20 | render_widget_host_->SetView(this); 21 | } 22 | @@ -627,6 +630,10 @@ void CefRenderWidgetHostViewOSR::UpdateCursor( 23 | #endif 24 | } 25 | 26 | +content::CursorManager* CefRenderWidgetHostViewOSR::GetCursorManager() { 27 | + return cursor_manager_.get(); 28 | +} 29 | + 30 | void CefRenderWidgetHostViewOSR::SetIsLoading(bool is_loading) {} 31 | 32 | void CefRenderWidgetHostViewOSR::RenderProcessGone( 33 | diff --git a/libcef/browser/osr/render_widget_host_view_osr.h b/libcef/browser/osr/render_widget_host_view_osr.h 34 | index 1f3062b2..3f76034b 100644 35 | --- a/libcef/browser/osr/render_widget_host_view_osr.h 36 | +++ b/libcef/browser/osr/render_widget_host_view_osr.h 37 | @@ -39,6 +39,7 @@ class RenderWidgetHost; 38 | class RenderWidgetHostImpl; 39 | class RenderWidgetHostViewGuest; 40 | class BackingStore; 41 | +class CursorManager; 42 | } // namespace content 43 | 44 | class CefBeginFrameTimer; 45 | @@ -145,6 +146,7 @@ class CefRenderWidgetHostViewOSR : public content::RenderWidgetHostViewBase, 46 | int error_code) override; 47 | void Destroy() override; 48 | void SetTooltipText(const base::string16& tooltip_text) override; 49 | + content::CursorManager* GetCursorManager() override; 50 | 51 | gfx::Size GetRequestedRendererSize() const override; 52 | gfx::Size GetCompositorViewportPixelSize() const override; 53 | @@ -336,6 +338,8 @@ class CefRenderWidgetHostViewOSR : public content::RenderWidgetHostViewBase, 54 | std::unique_ptr invisible_cursor_; 55 | #endif 56 | 57 | + std::unique_ptr cursor_manager_; 58 | + 59 | // Used to control the VSync rate in subprocesses when BeginFrame scheduling 60 | // is enabled. 
61 | std::unique_ptr begin_frame_timer_; 62 | -------------------------------------------------------------------------------- /sample_patches/cef_patch_find_vs2019_tools.diff: -------------------------------------------------------------------------------- 1 | diff --git a/tools/msvs_env.bat b/tools/msvs_env.bat 2 | index ac44e293..e2f0b1c4 100644 3 | --- a/tools/msvs_env.bat 4 | +++ b/tools/msvs_env.bat 5 | @@ -37,6 +37,14 @@ for %%x in ("%PROGRAMFILES(X86)%" "%PROGRAMFILES%") do ( 6 | ) 7 | ) 8 | 9 | +:: Search for the default VS2019 installation path. 10 | +for %%x in ("%PROGRAMFILES(X86)%" "%PROGRAMFILES%") do ( 11 | + for %%y in (Professional Enterprise Community BuildTools) do ( 12 | + set vcvars="%%~x\Microsoft Visual Studio\2019\%%y\VC\Auxiliary\Build\%vcvarsbat%" 13 | + if exist !vcvars! goto found_vcvars 14 | + ) 15 | +) 16 | + 17 | echo ERROR: Failed to find vcvars 18 | set ERRORLEVEL=1 19 | goto end 20 | -------------------------------------------------------------------------------- /versions_src.ps1: -------------------------------------------------------------------------------- 1 | $VAR_CHROME_BRANCH="3370"; 2 | $VAR_CEFSHARP_VERSION="75.0.90"; 3 | $VAR_CEFSHARP_BRANCH="cefsharp/75"; 4 | $VAR_BASE_DOCKER_FILE="mcr.microsoft.com/windows/servercore:1809-amd64";#mcr.microsoft.com/windows/servercore:1903-amd64 5 | $VAR_DUAL_BUILD="0"; #set to 1 to build all arches together, mainly to speed up linking which is single threaded, note may need excess ram. 6 | $VAR_BUILD_ARCHES="x86 x64 arm64"; 7 | $VAR_GN_DEFINES=""; 8 | $VAR_CEF_BUILD_MOUNT_VOL_NAME=""; #force using this volume for building, allows resuming MUST BE LOWER CASE 9 | $VAR_GN_ARGUMENTS="--ide=vs2019 --sln=cef --filters=//cef/*"; 10 | $VAR_GYP_DEFINES=""; 11 | $VAR_CEF_BUILD_ONLY=$false;#Only build CEF do not build cefsharp or the cef-binary. 12 | $VAR_CEF_USE_BINARY_PATH=""; #If you want to use existing CEF binaries point this to a local folder where the cef_binary*.zip files are. 
It will skip the long CEF build step then but still must make the VS container for the cefsharp building. Note will copy a dockerfile into this folder. 13 | $VAR_REMOVE_VOLUME_ON_SUCCESSFUL_BUILD=$true; 14 | $VAR_CEF_BINARY_EXT="zip"; #Can be zip,tar.bz2, 7z Generally do not change this off of Zip unless you are supplying your own binaries using $VAR_CEF_USE_BINARY_PATH above, and they have a different extension, will try to work with the other formats however 15 | $VAR_CEF_SAVE_SOURCES="0"; #normally sources are deleted before finishing the CEF build step. Set to 1 to create a /code/sources.zip archive that has them (note it is left in docker image, must use docker cp to copy it out, it is also around 30GB). 16 | $VAR_CEF_VERSION_STR="auto"; #can set to "3.3239.1723" or similar if you have multiple binaries that Docker_cefsharp might find 17 | $VAR_HYPERV_MEMORY_ADD="--memory=30g"; #only matters if using HyperV, Note your swap file alone must be this big or able to grow to be this big, 30G is fairly safe for single build will need 60G for dual build. 18 | if ($false){ #Sample 65 overrides 19 | $VAR_CHROME_BRANCH="3325"; 20 | $VAR_CEFSHARP_VERSION="65.0.90"; 21 | $VAR_CEFSHARP_BRANCH="master"; 22 | } --------------------------------------------------------------------------------