(truncated minified JavaScript bundle and its source map omitted -- build artifacts of the docs search page)
--------------------------------------------------------------------------------
/docs/search/static/js/runtime~main.e5de1b36.js:
--------------------------------------------------------------------------------
(minified webpack runtime chunk omitted)
--------------------------------------------------------------------------------
/download_ffhq.py:
--------------------------------------------------------------------------------
100 |             if data_size > 0 and data_size < 8192:
101 |                 with open(tmp_path, 'rb') as f:
102 |                     data = f.read()
103 |                 data_str = data.decode('utf-8')
104 | 
105 |                 # Google Drive virus checker nag.
106 |                 links = [html.unescape(link) for link in data_str.split('"') if 'export=download' in link]
107 |                 if len(links) == 1:
108 |                     if attempts_left:
109 |                         file_url = requests.compat.urljoin(file_url, links[0])
110 |                         continue
111 | 
112 |                 # Google Drive quota exceeded.
113 |                 if 'Google Drive - Quota exceeded' in data_str:
114 |                     if not attempts_left:
115 |                         raise IOError("Google Drive download quota exceeded -- please try again later")
116 | 
117 |             # Last attempt => raise error.
118 |             if not attempts_left:
119 |                 raise
120 | 
121 |     # Rename temp file to the correct name.
122 |     os.replace(tmp_path, file_path) # atomic
123 |     with stats['lock']:
124 |         stats['files_done'] += 1
125 | 
126 |     # Attempt to clean up any leftover temps.
127 |     for filename in glob.glob(file_path + '.tmp.*'):
128 |         try:
129 |             os.remove(filename)
130 |         except:
131 |             pass
132 |
133 | #----------------------------------------------------------------------------
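
A minimal sketch of the write-temp-then-rename pattern that download_file() relies on above (illustrative only; atomic_write is a hypothetical helper, not part of download_ffhq.py):

    import os, uuid

    def atomic_write(path, data):
        # Write to a uniquely named temp file next to the target...
        tmp = path + '.tmp.' + uuid.uuid4().hex
        with open(tmp, 'wb') as f:
            f.write(data)
        # ...then move it into place. os.replace() is atomic, so readers never
        # observe a partially written file, and a crash leaves only a stray
        # '.tmp.*' file that the cleanup loop above removes.
        os.replace(tmp, path)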
134 |
135 | def choose_bytes_unit(num_bytes):
136 |     b = int(np.rint(num_bytes))
137 |     if b < (100 << 0): return 'B', (1 << 0)
138 |     if b < (100 << 10): return 'kB', (1 << 10)
139 |     if b < (100 << 20): return 'MB', (1 << 20)
140 |     if b < (100 << 30): return 'GB', (1 << 30)
141 |     return 'TB', (1 << 40)
142 |
143 | #----------------------------------------------------------------------------
144 |
145 | def format_time(seconds):
146 |     s = int(np.rint(seconds))
147 |     if s < 60: return '%ds' % s
148 |     if s < 60 * 60: return '%dm %02ds' % (s // 60, s % 60)
149 |     if s < 24 * 60 * 60: return '%dh %02dm' % (s // (60 * 60), (s // 60) % 60)
150 |     if s < 100 * 24 * 60 * 60: return '%dd %02dh' % (s // (24 * 60 * 60), (s // (60 * 60)) % 24)
151 |     return '>100d'
152 |
153 | #----------------------------------------------------------------------------
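
A few concrete values for the two formatting helpers above (illustrative, not part of the file):

    choose_bytes_unit(89.1 * 2**30)   # -> ('GB', 1 << 30), so 89.1 GB prints as '89.10 GB'
    choose_bytes_unit(50 * 1024)      # -> ('kB', 1 << 10)
    format_time(75)                   # -> '1m 15s'
    format_time(90000)                # -> '1d 01h' (90000 s = 25 h)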
154 |
155 | def download_files(file_specs, num_threads=32, status_delay=0.2, timing_window=50, **download_kwargs):
156 |
157 |     # Determine which files to download.
158 |     done_specs = {spec['file_path']: spec for spec in file_specs if os.path.isfile(spec['file_path'])}
159 |     missing_specs = [spec for spec in file_specs if spec['file_path'] not in done_specs]
160 |     files_total = len(file_specs)
161 |     bytes_total = sum(spec['file_size'] for spec in file_specs)
162 |     stats = dict(files_done=len(done_specs), bytes_done=sum(spec['file_size'] for spec in done_specs.values()), lock=threading.Lock())
163 |     if len(done_specs) == files_total:
164 |         print('All files already downloaded -- skipping.')
165 |         return
166 | 
167 |     # Launch worker threads.
168 |     spec_queue = queue.Queue()
169 |     exception_queue = queue.Queue()
170 |     for spec in missing_specs:
171 |         spec_queue.put(spec)
172 |     thread_kwargs = dict(spec_queue=spec_queue, exception_queue=exception_queue, stats=stats, download_kwargs=download_kwargs)
173 |     for _thread_idx in range(min(num_threads, len(missing_specs))):
174 |         threading.Thread(target=_download_thread, kwargs=thread_kwargs, daemon=True).start()
175 |
176 |     # Monitor status until done.
177 |     bytes_unit, bytes_div = choose_bytes_unit(bytes_total)
178 |     spinner = '/-\\|'
179 |     timing = []
180 |     while True:
181 |         with stats['lock']:
182 |             files_done = stats['files_done']
183 |             bytes_done = stats['bytes_done']
184 |         spinner = spinner[1:] + spinner[:1]
185 |         timing = timing[max(len(timing) - timing_window + 1, 0):] + [(time.time(), bytes_done)]
186 |         bandwidth = max((timing[-1][1] - timing[0][1]) / max(timing[-1][0] - timing[0][0], 1e-8), 0)
187 |         bandwidth_unit, bandwidth_div = choose_bytes_unit(bandwidth)
188 |         eta = format_time((bytes_total - bytes_done) / max(bandwidth, 1))
189 | 
190 |         print('\r%s %6.2f%% done  %d/%d files  %-13s  %-10s  ETA: %-7s ' % (
191 |             spinner[0],
192 |             bytes_done / bytes_total * 100,
193 |             files_done, files_total,
194 |             '%.2f/%.2f %s' % (bytes_done / bytes_div, bytes_total / bytes_div, bytes_unit),
195 |             '%.2f %s/s' % (bandwidth / bandwidth_div, bandwidth_unit),
196 |             'done' if bytes_total == bytes_done else '...' if len(timing) < timing_window or bandwidth == 0 else eta,
197 |         ), end='', flush=True)
198 | 
199 |         if files_done == files_total:
200 |             print()
201 |             break
202 | 
203 |         try:
204 |             exc_info = exception_queue.get(timeout=status_delay)
205 |             raise exc_info[1].with_traceback(exc_info[2])
206 |         except queue.Empty:
207 |             pass
208 |
209 | def _download_thread(spec_queue, exception_queue, stats, download_kwargs):
210 |     with requests.Session() as session:
211 |         while not spec_queue.empty():
212 |             spec = spec_queue.get()
213 |             try:
214 |                 download_file(session, spec, stats, **download_kwargs)
215 |             except:
216 |                 exception_queue.put(sys.exc_info())
217 |
218 | #----------------------------------------------------------------------------
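
The worker/monitor split above propagates failures across threads by shipping sys.exc_info() tuples through a queue; the monitor loop then re-raises them with the original traceback. A stripped-down sketch of the same pattern (illustrative, not part of download_ffhq.py):

    import queue, sys, threading

    def worker(jobs, errors):
        # Drain the job queue; forward any exception (with traceback) to the main thread.
        while not jobs.empty():
            try:
                jobs.get()()
            except:
                errors.put(sys.exc_info())   # (type, value, traceback)

    jobs, errors = queue.Queue(), queue.Queue()
    jobs.put(lambda: 1 / 0)                  # a job that fails
    threading.Thread(target=worker, args=(jobs, errors), daemon=True).start()
    try:
        exc = errors.get(timeout=1.0)
        raise exc[1].with_traceback(exc[2])  # re-raises ZeroDivisionError in the main thread
    except queue.Empty:
        pass                                 # no error arrived within the timeout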
219 |
220 | def print_statistics(json_data):
221 |     categories = defaultdict(int)
222 |     licenses = defaultdict(int)
223 |     countries = defaultdict(int)
224 |     for item in json_data.values():
225 |         categories[item['category']] += 1
226 |         licenses[item['metadata']['license']] += 1
227 |         country = item['metadata']['country']
228 |         countries[country if country else '<Unknown>'] += 1
229 | 
230 |     for name in [name for name, num in countries.items() if num / len(json_data) < 1e-3]:
231 |         countries['<Other>'] += countries.pop(name)
232 | 
233 |     rows = [[]] * 2
234 |     rows += [['Category', 'Images', '% of all']]
235 |     rows += [['---'] * 3]
236 |     for name, num in sorted(categories.items(), key=lambda x: -x[1]):
237 |         rows += [[name, '%d' % num, '%.2f' % (100.0 * num / len(json_data))]]
238 | 
239 |     rows += [[]] * 2
240 |     rows += [['License', 'Images', '% of all']]
241 |     rows += [['---'] * 3]
242 |     for name, num in sorted(licenses.items(), key=lambda x: -x[1]):
243 |         rows += [[name, '%d' % num, '%.2f' % (100.0 * num / len(json_data))]]
244 | 
245 |     rows += [[]] * 2
246 |     rows += [['Country', 'Images', '% of all', '% of known']]
247 |     rows += [['---'] * 4]
248 |     for name, num in sorted(countries.items(), key=lambda x: -x[1] if x[0] != '<Other>' else 0):
249 |         rows += [[name, '%d' % num, '%.2f' % (100.0 * num / len(json_data)),
250 |                   '%.2f' % (0 if name == '<Unknown>' else 100.0 * num / (len(json_data) - countries['<Unknown>']))]]
251 | 
252 |     rows += [[]] * 2
253 |     widths = [max(len(cell) for cell in column if cell is not None) for column in itertools.zip_longest(*rows)]
254 |     for row in rows:
255 |         print("  ".join(cell + " " * (width - len(cell)) for cell, width in zip(row, widths)))
256 |
257 | #----------------------------------------------------------------------------
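
The column-width logic above transposes the ragged rows list with itertools.zip_longest (padding short rows with None) and takes the longest cell per column. In isolation (illustrative rows):

    import itertools

    rows = [['Category', 'Images'], ['---', '---'], ['training', '60000']]
    widths = [max(len(cell) for cell in column if cell is not None)
              for column in itertools.zip_longest(*rows)]
    # widths == [8, 6]; each cell is then right-padded to its column width
    # and the cells are joined with two spaces.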
258 |
259 | def recreate_aligned_images(json_data, source_dir, dst_dir='realign1024x1024', output_size=1024, transform_size=4096, enable_padding=True, rotate_level=True, random_shift=0.0, retry_crops=False):
260 |     print('Recreating aligned images...')
261 | 
262 |     # Fix random seed for reproducibility.
263 |     np.random.seed(12345)
264 |     # The following random numbers are unused in the present implementation, but we consume them for reproducibility.
265 |     _ = np.random.normal(0, 1, (len(json_data.values()), 2))
266 | 
267 |     if dst_dir:
268 |         os.makedirs(dst_dir, exist_ok=True)
269 |         shutil.copyfile('LICENSE.txt', os.path.join(dst_dir, 'LICENSE.txt'))
270 | 
271 |     for item_idx, item in enumerate(json_data.values()):
272 |         print('\r%d / %d ... ' % (item_idx, len(json_data)), end='', flush=True)
273 | 
274 |         # Parse landmarks.
275 |         # pylint: disable=unused-variable
276 |         lm = np.array(item['in_the_wild']['face_landmarks'])
277 |         lm_chin          = lm[0  : 17]  # left-right
278 |         lm_eyebrow_left  = lm[17 : 22]  # left-right
279 |         lm_eyebrow_right = lm[22 : 27]  # left-right
280 |         lm_nose          = lm[27 : 31]  # top-down
281 |         lm_nostrils      = lm[31 : 36]  # top-down
282 |         lm_eye_left      = lm[36 : 42]  # left-clockwise
283 |         lm_eye_right     = lm[42 : 48]  # left-clockwise
284 |         lm_mouth_outer   = lm[48 : 60]  # left-clockwise
285 |         lm_mouth_inner   = lm[60 : 68]  # left-clockwise
286 | 
287 |         # Calculate auxiliary vectors.
288 |         eye_left     = np.mean(lm_eye_left, axis=0)
289 |         eye_right    = np.mean(lm_eye_right, axis=0)
290 |         eye_avg      = (eye_left + eye_right) * 0.5
291 |         eye_to_eye   = eye_right - eye_left
292 |         mouth_left   = lm_mouth_outer[0]
293 |         mouth_right  = lm_mouth_outer[6]
294 |         mouth_avg    = (mouth_left + mouth_right) * 0.5
295 |         eye_to_mouth = mouth_avg - eye_avg
296 | 
297 |         # Choose oriented crop rectangle.
298 |         if rotate_level:
299 |             x = eye_to_eye - np.flipud(eye_to_mouth) * [-1, 1]
300 |             x /= np.hypot(*x)
301 |             x *= max(np.hypot(*eye_to_eye) * 2.0, np.hypot(*eye_to_mouth) * 1.8)
302 |             y = np.flipud(x) * [-1, 1]
303 |             c0 = eye_avg + eye_to_mouth * 0.1
304 |         else:
305 |             x = np.array([1, 0], dtype=np.float64)
306 |             x *= max(np.hypot(*eye_to_eye) * 2.0, np.hypot(*eye_to_mouth) * 1.8)
307 |             y = np.flipud(x) * [-1, 1]
308 |             c0 = eye_avg + eye_to_mouth * 0.1
309 | 
310 |         # Load in-the-wild image.
311 |         src_file = os.path.join(source_dir, item['in_the_wild']['file_path'])
312 |         if not os.path.isfile(src_file):
313 |             print('\nCannot find source image. Please run "--wilds" before "--align".')
314 |             return
315 |         img = PIL.Image.open(src_file)
316 | 
317 |         quad = np.stack([c0 - x - y, c0 - x + y, c0 + x + y, c0 + x - y])
318 |         qsize = np.hypot(*x) * 2
319 | 
320 |         # Keep drawing new random crop offsets until we find one that is contained in the image
321 |         # and does not require padding.
322 |         if random_shift != 0:
323 |             for _ in range(1000):
324 |                 # Offset the crop rectangle center by a random shift proportional to image dimension
325 |                 # and the requested standard deviation.
326 |                 c = (c0 + np.hypot(*x)*2 * random_shift * np.random.normal(0, 1, c0.shape))
327 |                 quad = np.stack([c - x - y, c - x + y, c + x + y, c + x - y])
328 |                 crop = (int(np.floor(min(quad[:,0]))), int(np.floor(min(quad[:,1]))), int(np.ceil(max(quad[:,0]))), int(np.ceil(max(quad[:,1]))))
329 |                 if not retry_crops or not (crop[0] < 0 or crop[1] < 0 or crop[2] >= img.width or crop[3] >= img.height):
330 |                     # We're happy with this crop (either it fits within the image, or retries are disabled).
331 |                     break
332 |             else:
333 |                 # Rejected N times; give up and move on to the next image.
334 |                 # (Does not happen in practice with the FFHQ data.)
335 |                 print('rejected image')
336 |                 return
337 | 
338 |         # Shrink.
339 |         shrink = int(np.floor(qsize / output_size * 0.5))
340 |         if shrink > 1:
341 |             rsize = (int(np.rint(float(img.size[0]) / shrink)), int(np.rint(float(img.size[1]) / shrink)))
342 |             img = img.resize(rsize, PIL.Image.ANTIALIAS)
343 |             quad /= shrink
344 |             qsize /= shrink
345 | 
346 |         # Crop.
347 |         border = max(int(np.rint(qsize * 0.1)), 3)
348 |         crop = (int(np.floor(min(quad[:,0]))), int(np.floor(min(quad[:,1]))), int(np.ceil(max(quad[:,0]))), int(np.ceil(max(quad[:,1]))))
349 |         crop = (max(crop[0] - border, 0), max(crop[1] - border, 0), min(crop[2] + border, img.size[0]), min(crop[3] + border, img.size[1]))
350 |         if crop[2] - crop[0] < img.size[0] or crop[3] - crop[1] < img.size[1]:
351 |             img = img.crop(crop)
352 |             quad -= crop[0:2]
353 | 
354 |         # Pad.
355 |         pad = (int(np.floor(min(quad[:,0]))), int(np.floor(min(quad[:,1]))), int(np.ceil(max(quad[:,0]))), int(np.ceil(max(quad[:,1]))))
356 |         pad = (max(-pad[0] + border, 0), max(-pad[1] + border, 0), max(pad[2] - img.size[0] + border, 0), max(pad[3] - img.size[1] + border, 0))
357 |         if enable_padding and max(pad) > border - 4:
358 |             pad = np.maximum(pad, int(np.rint(qsize * 0.3)))
359 |             img = np.pad(np.float32(img), ((pad[1], pad[3]), (pad[0], pad[2]), (0, 0)), 'reflect')
360 |             h, w, _ = img.shape
361 |             y, x, _ = np.ogrid[:h, :w, :1]
362 |             mask = np.maximum(1.0 - np.minimum(np.float32(x) / pad[0], np.float32(w-1-x) / pad[2]), 1.0 - np.minimum(np.float32(y) / pad[1], np.float32(h-1-y) / pad[3]))
363 |             blur = qsize * 0.02
364 |             img += (scipy.ndimage.gaussian_filter(img, [blur, blur, 0]) - img) * np.clip(mask * 3.0 + 1.0, 0.0, 1.0)
365 |             img += (np.median(img, axis=(0,1)) - img) * np.clip(mask, 0.0, 1.0)
366 |             img = PIL.Image.fromarray(np.uint8(np.clip(np.rint(img), 0, 255)), 'RGB')
367 |             quad += pad[:2]
368 | 
369 |         # Transform.
370 |         img = img.transform((transform_size, transform_size), PIL.Image.QUAD, (quad + 0.5).flatten(), PIL.Image.BILINEAR)
371 |         if output_size < transform_size:
372 |             img = img.resize((output_size, output_size), PIL.Image.ANTIALIAS)
373 | 
374 |         # Save aligned image.
375 |         dst_subdir = os.path.join(dst_dir, '%05d' % (item_idx - item_idx % 1000))
376 |         os.makedirs(dst_subdir, exist_ok=True)
377 |         img.save(os.path.join(dst_subdir, '%05d.png' % item_idx))
378 | 
379 |     # All done.
380 |     print('\r%d / %d ... done' % (len(json_data), len(json_data)))
381 |
382 | #----------------------------------------------------------------------------
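
To make the crop geometry in recreate_aligned_images() concrete: x is the (scaled) horizontal axis of the face, y its 90-degree rotation, c0 the crop center, and quad the four corners. A worked example with hand-picked landmark vectors (illustrative numbers, not from the dataset):

    import numpy as np

    eye_avg      = np.array([0.0, 0.0])
    eye_to_eye   = np.array([60.0, 0.0])     # eyes 60 px apart, on a horizontal line
    eye_to_mouth = np.array([0.0, 80.0])     # mouth 80 px straight below the eyes

    x = eye_to_eye - np.flipud(eye_to_mouth) * [-1, 1]   # -> [140, 0]
    x /= np.hypot(*x)                                    # -> [1, 0]
    x *= max(np.hypot(*eye_to_eye) * 2.0, np.hypot(*eye_to_mouth) * 1.8)  # -> [144, 0]
    y = np.flipud(x) * [-1, 1]                           # 90-degree rotation -> [0, 144]
    c0 = eye_avg + eye_to_mouth * 0.1                    # crop center, slightly below the eyes
    quad = np.stack([c0 - x - y, c0 - x + y, c0 + x + y, c0 + x - y])
    # quad spans a 288x288 square centered at (0, 8); qsize = np.hypot(*x) * 2 = 288.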
383 |
384 | def run(tasks, **download_kwargs):
385 |     if not os.path.isfile(json_spec['file_path']) or not os.path.isfile('LICENSE.txt'):
386 |         print('Downloading JSON metadata...')
387 |         download_files([json_spec, license_specs['json']], **download_kwargs)
388 | 
389 |     print('Parsing JSON metadata...')
390 |     with open(json_spec['file_path'], 'rb') as f:
391 |         json_data = json.load(f, object_pairs_hook=OrderedDict)
392 | 
393 |     if 'stats' in tasks:
394 |         print_statistics(json_data)
395 | 
396 |     specs = []
397 |     if 'images' in tasks:
398 |         specs += [item['image'] for item in json_data.values()] + [license_specs['images']]
399 |     if 'thumbs' in tasks:
400 |         specs += [item['thumbnail'] for item in json_data.values()] + [license_specs['thumbs']]
401 |     if 'wilds' in tasks:
402 |         specs += [item['in_the_wild'] for item in json_data.values()] + [license_specs['wilds']]
403 |     if 'tfrecords' in tasks:
404 |         specs += tfrecords_specs + [license_specs['tfrecords']]
405 | 
406 |     if len(specs):
407 |         print('Downloading %d files...' % len(specs))
408 |         np.random.shuffle(specs) # to make the workload more homogeneous
409 |         download_files(specs, **download_kwargs)
410 | 
411 |     if 'align' in tasks:
412 |         recreate_aligned_images(json_data, source_dir=download_kwargs['source_dir'], rotate_level=not download_kwargs['no_rotation'], random_shift=download_kwargs['random_shift'], enable_padding=not download_kwargs['no_padding'], retry_crops=download_kwargs['retry_crops'])
413 |
414 | #----------------------------------------------------------------------------
415 |
416 | def run_cmdline(argv):
417 |     parser = argparse.ArgumentParser(prog=argv[0], description='Download Flickr-Face-HQ (FFHQ) dataset to current working directory.')
418 |     parser.add_argument('-j', '--json', help='download metadata as JSON (254 MB)', dest='tasks', action='append_const', const='json')
419 |     parser.add_argument('-s', '--stats', help='print statistics about the dataset', dest='tasks', action='append_const', const='stats')
420 |     parser.add_argument('-i', '--images', help='download 1024x1024 images as PNG (89.1 GB)', dest='tasks', action='append_const', const='images')
421 |     parser.add_argument('-t', '--thumbs', help='download 128x128 thumbnails as PNG (1.95 GB)', dest='tasks', action='append_const', const='thumbs')
422 |     parser.add_argument('-w', '--wilds', help='download in-the-wild images as PNG (955 GB)', dest='tasks', action='append_const', const='wilds')
423 |     parser.add_argument('-r', '--tfrecords', help='download multi-resolution TFRecords (273 GB)', dest='tasks', action='append_const', const='tfrecords')
424 |     parser.add_argument('-a', '--align', help='recreate 1024x1024 images from in-the-wild images', dest='tasks', action='append_const', const='align')
425 |     parser.add_argument('--num_threads', help='number of concurrent download threads (default: 32)', type=int, default=32, metavar='NUM')
426 |     parser.add_argument('--status_delay', help='time between download status prints (default: 0.2)', type=float, default=0.2, metavar='SEC')
427 |     parser.add_argument('--timing_window', help='samples for estimating download eta (default: 50)', type=int, default=50, metavar='LEN')
428 |     parser.add_argument('--chunk_size', help='chunk size for each download thread (default: 128)', type=int, default=128, metavar='KB')
429 |     parser.add_argument('--num_attempts', help='number of download attempts per file (default: 10)', type=int, default=10, metavar='NUM')
430 |     parser.add_argument('--random-shift', help='standard deviation of random crop rectangle jitter', type=float, default=0.0, metavar='SHIFT')
431 |     parser.add_argument('--retry-crops', help='retry random shift if crop rectangle falls outside image (up to 1000 times)', dest='retry_crops', default=False, action='store_true')
432 |     parser.add_argument('--no-rotation', help='keep the original orientation of images', dest='no_rotation', default=False, action='store_true')
433 |     parser.add_argument('--no-padding', help='do not apply blur-padding outside and near the image borders', dest='no_padding', default=False, action='store_true')
434 |     parser.add_argument('--source-dir', help='where to find already downloaded FFHQ source data', default='', metavar='DIR')
435 | 
436 |     args = parser.parse_args()
437 |     if not args.tasks:
438 |         print('No tasks specified. Please see "-h" for help.')
439 |         exit(1)
440 |     run(**vars(args))
441 |
442 | #----------------------------------------------------------------------------
443 |
444 | if __name__ == "__main__":
445 |     run_cmdline(sys.argv)
446 |
447 | #----------------------------------------------------------------------------
448 |
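
Typical invocations of the command-line interface defined above (illustrative command lines):

    python download_ffhq.py --json --stats    # fetch metadata (254 MB), print dataset statistics
    python download_ffhq.py --thumbs          # 128x128 thumbnails (1.95 GB)
    python download_ffhq.py --wilds           # in-the-wild images (955 GB), needed for --align
    python download_ffhq.py --align           # recreate aligned 1024x1024 images from the wilds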
--------------------------------------------------------------------------------
/ffhq-piecharts.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NVlabs/ffhq-dataset/4826aa6ea77aa7f1a7802b938ed7c40afb985cda/ffhq-piecharts.png
--------------------------------------------------------------------------------
/ffhq-teaser.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NVlabs/ffhq-dataset/4826aa6ea77aa7f1a7802b938ed7c40afb985cda/ffhq-teaser.png
--------------------------------------------------------------------------------