├── GOES16.sci.py ├── README.md ├── crontab ├── favicon.ico ├── goesServer.py ├── index.html ├── static.html ├── status.sh └── videogen.py /GOES16.sci.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 -u 2 | """Create true-color imagery from GOES-R using the Google Compute data NOAA manages.""" 3 | import gc 4 | import os 5 | 6 | from time import sleep 7 | from tempfile import NamedTemporaryFile 8 | from urllib.request import urlopen 9 | import json 10 | import datetime 11 | 12 | import subprocess 13 | import numpy as np 14 | #from scipy.io import netcdf 15 | from scipy.signal import decimate 16 | from scipy.ndimage import zoom 17 | import netCDF4 18 | 19 | from PIL import Image, ImageOps, ImageChops 20 | 21 | # Format for URLs (spaces added by yours truly) 22 | # --////OR_---M-C-G-s _e _c.nc 23 | #https://storage.cloud.google.com/gcp-public-data-goes-19/ABI -L2 -CMIPF /2018 /070 /20 /OR_ABI -L2 -CMIPF -M3 C02 _G19 _s20180702000416_e20180702011183_c20180702011253.nc 24 | 25 | # Path to keep images and thumbnails long-term 26 | STORAGE = '/home/bnitkin/goesr' 27 | 28 | # URL to fetch directory listings from 29 | # https://www.googleapis.com/storage/v1/b/gcp-public-data-goes-19/o?prefix=ABI-L2-CMIPF/ 2018/070/21/OR_ABI-L2-CMIPF-M3C01 30 | DIR_LIST = 'https://www.googleapis.com/storage/v1/b/gcp-public-data-goes-19/o?prefix=ABI-L2-CMIPF/{date:%Y/%j/%H}/OR_ABI-L2-CMIPF-M6C{channel:02}' 31 | 32 | # Size to chunk downloads into, bytes 33 | CHUNK_SIZE = 5000000 # 5MB 34 | 35 | # Final size of the generated images. Refer to the "Channel 2 is X by Y" 36 | # message for the full size. 37 | # This must be a common denominator to all layers. (10848, 5424, 2712, 1356, ...) 38 | #FINAL_SIZE = (10848, 10848) 39 | #FINAL_SIZE = (5424, 5424) 40 | FINAL_SIZE = (2712, 2712) 41 | 42 | # Thumbnail size. Thumbs are generated from the larger downscaled images. 
43 | THUMB_SIZE = (1000, 1000) 44 | 45 | # Polling time - how often to check the API for new images (seconds) 46 | # Full-disk scans come every 10 minutes. 47 | POLL_TIME = 5*60 48 | 49 | # How much timestamps can differ while being considered identical (seconds) 50 | # Images are timestamped with a lot of precision; layers can come in a few seconds 51 | # from each other, even as part of the same scan 52 | TIME_FUZZ = 60 53 | 54 | class Timer(): 55 | """A simple lap timer. On each call of lap(), it 56 | returns the elapsed time since the last call.""" 57 | def __init__(self): 58 | """Setup the timer.""" 59 | self.last = datetime.datetime.now() 60 | self.start = self.last 61 | def lap(self): 62 | """Drop a marker. Return the time since lap() was last called.""" 63 | old = self.last 64 | self.last = datetime.datetime.now() 65 | return (self.last - old).total_seconds() 66 | def total(self): 67 | """Get time since timer was started.""" 68 | self.lap() 69 | return (self.last - self.start).total_seconds() 70 | def delay(self, seconds): 71 | """Delays for the number of seconds since the last lap. 72 | Reset the lap counter on exit.""" 73 | sleep_time = seconds - self.lap() 74 | if sleep_time > 0: 75 | print('Sleeping for {} seconds'.format(sleep_time)) 76 | sleep(sleep_time) 77 | else: 78 | print('Period already expired ({}s ago)'.format(-sleep_time)) 79 | self.lap() 80 | 81 | def get_image_list(url): 82 | """Given a URL to a google bucket, return a list of items inside. 83 | Or an empty list, if it's empty. This compensates for the 'items' 84 | key missing in empty folders.""" 85 | response = json.loads(urlopen(url, timeout=15).read().decode('utf-8')) 86 | try: 87 | return response['items'] 88 | except KeyError: 89 | return [] 90 | 91 | def get_next_url(channel, timestamp): 92 | """Gets the URL to the image immediately after the given timestamp. 93 | If the provided timestamp is the latest, it's returned.""" 94 | # Query for current and prior hour. 
Should give 6-12 image results. 95 | prior = DIR_LIST.format( 96 | date=datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(seconds=3600), channel=channel) 97 | current = DIR_LIST.format( 98 | date=datetime.datetime.now(datetime.timezone.utc), channel=channel) 99 | #print('Fetching file list:', current) 100 | image_list = get_image_list(prior) + get_image_list(current) 101 | # Generate a timestamp for each image. Return the first image whose 102 | # timestamp is greater than the provided one. 103 | image = {} 104 | for image in image_list: 105 | # Internal timestamp is tenths of second. 106 | if get_time(image) > timestamp + datetime.timedelta(seconds=TIME_FUZZ): 107 | return image 108 | return image 109 | 110 | def download_file(src, dest, size=0): 111 | """Downloads a file, given a source and destination. 112 | Basically wget, but native.""" 113 | handle = urlopen(src, timeout=30) 114 | chunk = 'not an empty string' 115 | 116 | print(' - Downloading{: 0.1f}MB: {}'.format(int(size)/1E6, src)) 117 | # Download the file! 118 | while chunk: 119 | chunk = handle.read(CHUNK_SIZE) 120 | dest.write(chunk) 121 | dest.flush() 122 | 123 | def process_layer(obj): 124 | """Process a single layer into something human eyes can appreciate. 125 | Handles netcdf downscaling and gamma correction, then returns a 126 | monochrome image layer.""" 127 | timer = Timer() 128 | 129 | # Download the netcdf4; convert to netcdf3, and extract the reflectance channel. 130 | # Then delete the sources. 
131 | with NamedTemporaryFile() as download3, NamedTemporaryFile() as download4: 132 | download_file(obj['mediaLink'], download4, obj['size']) 133 | print(' - Reading netCDF', timer.lap()) 134 | with netCDF4.Dataset(download4.name, 'r', format="NETCDF4") as g19nc: 135 | print(' - Extracting reflectance', timer.lap()) 136 | reflectance = g19nc.variables['CMI'] # Extract the reflectance 137 | 138 | decimate_factor = int(reflectance.shape[0]/FINAL_SIZE[0] + 0.5) 139 | print(' - Channel is {} by {}; resizing by 1/{}'.format( 140 | g19nc.variables['CMI'].shape[0], 141 | g19nc.variables['CMI'].shape[1], 142 | decimate_factor), 143 | timer.lap()) 144 | 145 | reflectance = decimate(reflectance, decimate_factor, n=0, ftype='fir', zero_phase=False) 146 | #reflectance = reflectance.reshape(-1, decimate_factor).max(1) 147 | 148 | print(' - Ensuring all values are positive', timer.lap()) 149 | np.maximum(reflectance, 0, reflectance) 150 | 151 | print(' - Applying gamma correction', timer.lap()) 152 | reflectance = reflectance ** 0.55 153 | 154 | print(' - Scaling for improved contrast', timer.lap()) 155 | reflectance *= 5 156 | 157 | print(' - Converting to image', timer.lap()) 158 | image = Image.fromarray(reflectance).convert(mode='L') 159 | 160 | gc.collect() 161 | 162 | print(' - Layer time:', timer.total()) 163 | exit() 164 | return image 165 | 166 | def get_time(handle): 167 | """Convert a JSON data descriptor to a datestamp. 168 | 'name' is of the form: 169 | ABI-L2-CMIPF/2020/358/00/OR_ABI-L2-CMIPF-M6C03_G19_s20203580040209_e20203580049517_c20203580049598.nc 170 | So splitting on underscores and taking the last gets: 171 | c20203580049598.nc 172 | Then we strip off the 'c' and '.nc'. The final timestamp is: 173 | 20203580049598 174 | YYYYDDDHHMMSSmmm 175 | That's converted into a Datetime object. 
176 | """ 177 | text = handle['name'].split('_')[-1][1:-3] 178 | stamp = datetime.datetime.strptime(text, '%Y%j%H%M%S%f').replace(tzinfo=datetime.timezone.utc) 179 | # One digit of microsecond (tenths of second) isn't very useful. 180 | stamp -= datetime.timedelta(microseconds=stamp.microsecond) 181 | return stamp 182 | 183 | def make_image(last_time=0): 184 | """Put it all together. Download three images, compose them into color, and 185 | save them to sensibly-named files""" 186 | timer = Timer() 187 | print('Downloading latest images') 188 | # Decide which file to download (obj includes filesize, a link, and some other stuff) 189 | obj = {} # Obj is a dictionary of file attributes. Next image availiable for each channel. 190 | for channel in [1, 2, 3]: 191 | obj[channel] = get_next_url(channel, last_time) 192 | obj[channel]['time'] = get_time(obj[channel]) 193 | 194 | # Pick out a timestamp to use elsewhere. 195 | timestamp = obj[1]['time'] 196 | 197 | # Check that all timestamps are "close" 198 | if ((-TIME_FUZZ <= (obj[1]['time'] - obj[2]['time']).total_seconds() <= TIME_FUZZ) 199 | and (-TIME_FUZZ <= (obj[1]['time'] - obj[3]['time']).total_seconds() <= TIME_FUZZ) 200 | and (-TIME_FUZZ <= (obj[2]['time'] - obj[3]['time']).total_seconds() <= TIME_FUZZ)): 201 | print('Images are time-synchronous ({}, {}, and {})'.format( 202 | obj[1]['time'], 203 | obj[2]['time'], 204 | obj[3]['time'])) 205 | else: 206 | # If not, try again later. 207 | print('Images are not time-synchronous ({}, {}, and {})'.format( 208 | obj[1]['time'], 209 | obj[2]['time'], 210 | obj[3]['time'])) 211 | return last_time 212 | 213 | # Check that the image has updated (no sense making duplicates) 214 | if timestamp == last_time: 215 | print('Images have not changed since last check ({})'.format(obj[1]['time'])) 216 | return last_time 217 | 218 | # Getting to work - insert a break. 
219 | print() 220 | print('Layers were captured {} ago.'.format(datetime.datetime.now(datetime.timezone.utc) - last_time)) 221 | 222 | print('Processing blue layer') 223 | blue = process_layer(obj[1]) # Load Channel 1 - Blue Visible 224 | print('Processing red layer') 225 | red = process_layer(obj[2]) # Load channel 2 - Red visible 226 | print('Processing veggie layer') 227 | veggie = process_layer(obj[3]) # Load Channel 3 - Veggie Near IR 228 | 229 | # Clean up the NC files before continuing. 230 | gc.collect() 231 | 232 | print('Making a pseudo-green channel', timer.lap()) 233 | # Derived from Planet Labs data, CC > 0.9 234 | # true_green = 0.48358168 * ch_2 + 0.45706946 * ch_1 + 0.06038137 * ch_3 235 | green = ImageChops.add(Image.eval(blue, lambda x: x*0.45706946), 236 | ImageChops.add(Image.eval(red, lambda x: x*0.48358168), 237 | Image.eval(veggie, lambda x: x*0.06038137))) 238 | 239 | print('Colorizing channels', timer.lap()) 240 | red = ImageOps.colorize(red, (0, 0, 0), (255, 0, 0)) 241 | green = ImageOps.colorize(green, (0, 0, 0), (0, 255, 0)) 242 | blue = ImageOps.colorize(blue, (0, 0, 0), (0, 0, 255)) 243 | #veggie = ImageOps.colorize(veggie, (0, 0, 0), (0, 255, 0)) 244 | 245 | print('Generating color outputs', timer.lap()) 246 | # Uncomment this and veggie for 'geocolor', using the veggie layer as green. 247 | # It's not true color and looks a little funny. 248 | #geocolor = ImageChops.add(ImageChops.add(red, veggie), blue) 249 | #geocolor.save(STORAGE+'/geocolor-{}.png'.format(timestamp)) 250 | 251 | truecolor = ImageChops.add(ImageChops.add(red, green), blue) 252 | truecolor.save(STORAGE+'/truecolor-{}.jpg'.format(timestamp.isoformat())) 253 | truecolor.resize(THUMB_SIZE).save(STORAGE+'/truecolor-thumb-{}.jpg'.format(timestamp.isoformat())) 254 | 255 | # Make a symlink pointing to the latest for javascript to point at. 256 | # Symlink + move is atomic. 
257 | os.symlink(STORAGE+'/truecolor-{}.jpg'.format(timestamp.isoformat()), 258 | STORAGE+'/truecolor-latest.jpg.tmp') 259 | os.symlink(STORAGE+'/truecolor-thumb-{}.jpg'.format(timestamp.isoformat()), 260 | STORAGE+'/truecolor-thumb-latest.jpg.tmp') 261 | os.rename(STORAGE+'/truecolor-latest.jpg.tmp', STORAGE+'/truecolor-latest.jpg') 262 | os.rename(STORAGE+'/truecolor-thumb-latest.jpg.tmp', STORAGE+'/truecolor-thumb-latest.jpg') 263 | 264 | print('Finished writing', timer.lap()) 265 | print('Total time:', timer.total()) 266 | print() 267 | 268 | return timestamp 269 | 270 | def main(): 271 | """Simple mainloop to call the image generator every 5 mins.""" 272 | # Bogus but correctly-sized timestamp 273 | # (First day of 2000; stroke of midnight) 274 | last_time = datetime.datetime(2000, 1, 1, tzinfo=datetime.timezone.utc) 275 | while True: 276 | # Every five minutes, try to build a new image set. 277 | # make_image will return early if there's no new data. 278 | timer = Timer() 279 | last_time = make_image(last_time) 280 | timer.delay(5*60) 281 | if __name__ == '__main__': 282 | main() 283 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # GOES-R Video Generator 2 | This is a simple video-generator leveraging the wonders of free online stuff. 3 | It uses the GOES-R red, blue, and veggie bands to generate a near-realtime, true-color 4 | video of the past few days of weather. 5 | 6 | With the most recent GOES satellites, NOAA has been posting near-realtime imagery to this 7 | [Google Cloud bucket](https://console.cloud.google.com/storage/browser/gcp-public-data-goes-16) for public use. 8 | 9 | Google also offers a very small VPS as a free demo, which I used to set the project up. 10 | See it live at [bluemarble.nitk.in](http://bluemarble.nitk.in)! 11 | 12 | People usually discuss the frameworks they used, but this is pretty basic. 
13 | It uses the latest in Javascript, HTML, and Python, 14 | with a bit of shell and make thrown in for good measure. 15 | 16 | This code should be fairly easy to use - you'll need to modify paths to remove my username, 17 | but everything works! 18 | 19 | # Details 20 | The project consists of three largely independent components: 21 | - Image processing 22 | - Video generation 23 | - HTTP server 24 | Cron acts as a job supervisor to hold everything together. (Again, quick and dirty). 25 | 26 | ## Image processing 27 | GOES16.sci.py implements the image processing. When started, it goes into an infinite loop: 28 | - Poll the Cloud Bucket API 29 | - Check that all images are time-synchronized 30 | - Check if images have changed 31 | - If so, process into a JPG file 32 | - If not, wait 5 minutes and try again. 33 | 34 | Image processing is a fairly simple pipeline. For each layer (red, blue, veggie): 35 | - Download the layer 36 | - Convert from netCDF4 to netCDF3 37 | - netCDF4 is newer, but the Python bindings for it are 5x slower than the netCDF3 libraries. 38 | - nccopy is used for the conversion 39 | - Scale to a manageable size 40 | - layers start as 10848 pixels square, which swamps the VPS 41 | - Ensure all values are positive 42 | - Negatives show up sometimes and mess up gamma correction 43 | - Apply gamma correction 44 | - Fancy word for taking x^0.55 for each pixel value 45 | - Convert from numpy array to a PIL image 46 | - Saves space and prepares for final formatting 47 | 48 | Finally, the images are combined 49 | - Create a green channel using red, blue, and veggie 50 | - Stack the layers to make a composite 51 | - Scale down to a thumbnail size for video-ing 52 | 53 | ## Video generation 54 | The video generation is as dumb as a rock. `cron` periodically calls the video generator 55 | with `day/two-day/week/month/year` as an argument. 56 | 57 | The generator uses `ffmpeg` to build a video in `/tmp`; it errors out if the video 58 | exists. 
(This prevents the generator from stepping on itself under high system loads) 59 | Once generated, the video is moved from `/tmp` to `DEST`. Moves are atomic, which 60 | ensures the file in `DEST` is always a valid video. 61 | 62 | The length of the video and output filename are configured programmatically by the FRAMES 63 | dictionary. `day`, `two-day`, `week`, `month`, and `year` all do about what you'd expect. 64 | 65 | Notably, `month` and `year` find the image nearest noon for each day. They 66 | create a video with one frame per day, rather than six frames per hour. 67 | 68 | ## HTTP Server 69 | The final component of the whole setup is a simple Python HTTP server. You could use Apache 70 | (or Nginx, or anything else), but on a disk-constrained server, I chose to use what was installed. 71 | 72 | Compared to the standard Python server, it provides better support for `If-Modified-Since` 73 | and threading. 74 | 75 | It just hosts `index.html`, `static.html` and the videos. 76 | 77 | The HTML file allows a user to select between the different generated videos, and automatically 78 | refreshes the videos to keep them near realtime. 79 | 80 | ## Glue 81 | `Cron` provides the glue for this project. The rules included periodically run the video generator, 82 | and will automatically restart both the HTTP server and the image processor if either dies. 83 | 84 | -------------------------------------------------------------------------------- /crontab: -------------------------------------------------------------------------------- 1 | # /etc/crontab: system-wide crontab 2 | # Unlike any other crontab you don't have to run the `crontab' 3 | # command to install the new version when you edit this file 4 | # and files in /etc/cron.d. These files also have username fields, 5 | # that none of the other crontabs do. 
6 | 7 | SHELL=/bin/sh 8 | PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin 9 | 10 | # m h dom mon dow user command 11 | 17 * * * * root cd / && run-parts --report /etc/cron.hourly 12 | 25 6 * * * root test -x /usr/sbin/anacron || ( cd / && run-parts --report /etc/cron.daily ) 13 | 47 6 * * 7 root test -x /usr/sbin/anacron || ( cd / && run-parts --report /etc/cron.weekly ) 14 | 52 6 1 * * root test -x /usr/sbin/anacron || ( cd / && run-parts --report /etc/cron.monthly ) 15 | 16 | # AUTOMATIC RESTART OF SERVICES 17 | # GOES Compositor (layers R/G/B and creates a large and a small JPEG 18 | */5 * * * * bnitkin pidof -x GOES16.sci.py >/dev/null || /home/bnitkin/GOES16.sci.py >> /home/bnitkin/logs/log.txt 2>&1 19 | # HTTP Server: Provides both video and still versions of the page with automatic refresh 20 | */5 * * * * root pidof -x goesServer.py >/dev/null || (cd /home/bnitkin; /home/bnitkin/goesServer.py >> /home/bnitkin/logs/server.txt 2>&1) 21 | # Ensure that the compositor is running at highest priority (if it misses an interval, we're not getting it back). 22 | 0 0 * * * root renice -n -20 -p `pidof -x GOES16.sci.py` 23 | 24 | # VIDEO GENERATION 25 | # Hourly updates reduce CPU usage, and live video is less important than keeping the compositor happy. 26 | # These run near midnight to minimize the visible seam. 27 | 0 4 * * * bnitkin ionice -c3 nice -n20 /home/bnitkin/videogen.py day >> /home/bnitkin/logs/video.txt 2>&1 28 | 0 5 * * * bnitkin ionice -c3 nice -n20 /home/bnitkin/videogen.py two-day >> /home/bnitkin/logs/video.txt 2>&1 29 | # 16:40 UTC is local noon for GOES-east. I've added 20 minutes to compensate for processing delays. 
30 | 0 17 * * * bnitkin ionice -c3 nice -n20 /home/bnitkin/videogen.py week >> /home/bnitkin/logs/video.txt 2>&1 31 | 0 19 * * * bnitkin ionice -c3 nice -n20 /home/bnitkin/videogen.py month >> /home/bnitkin/logs/video.txt 2>&1 32 | 0 21 * * thu bnitkin ionice -c3 nice -n20 /home/bnitkin/videogen.py year >> /home/bnitkin/logs/video.txt 2>&1 33 | 34 | # DISK SPACE CLEANUP 35 | # Keep high-res images around for a month. Could be fun for something. 36 | # This will break in the year 3000, but neither I nor GOES-R are going to make it that long. 37 | 0 2 * * * bnitkin find /home/bnitkin/goesr/ -iname truecolor-2\* -mtime +30 -delete 38 | # Delete thumbnails older than 3 years. This may be too long, but we'll see. 39 | 0 3 * * * bnitkin find /home/bnitkin/goesr/ -iname truecolor -mtime +1000 -delete 40 | # Delete old logs monthly. 41 | 0 0 1 * * bnitkin truncate -s0 /home/bnitkin/logs/* 42 | -------------------------------------------------------------------------------- /favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bnitkin/goesr-video/afd1c6e8a006ac9ae5ed503e4f5f971a30ea3f3b/favicon.ico -------------------------------------------------------------------------------- /goesServer.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 -u 2 | # Simple HTTP server brings up the video and not much else. 3 | from http.server import HTTPServer, SimpleHTTPRequestHandler 4 | from socketserver import ThreadingMixIn 5 | 6 | import os, time, urllib 7 | from http import HTTPStatus 8 | from email.utils import parsedate 9 | 10 | class ThreadingServer(ThreadingMixIn, HTTPServer): 11 | pass 12 | 13 | class SimpleCacheHTTPRequestHandler(SimpleHTTPRequestHandler): 14 | def send_head(self): 15 | """Common code for GET and HEAD commands. 16 | This sends the response code and MIME headers. 
17 | Return value is either a file object (which has to be copied 18 | to the outputfile by the caller unless the command was HEAD, 19 | and must be closed by the caller under all circumstances), or 20 | None, in which case the caller has nothing further to do. 21 | """ 22 | path = self.translate_path(self.path) 23 | f = None 24 | if os.path.isdir(path): 25 | parts = urllib.parse.urlsplit(self.path) 26 | if not parts.path.endswith('/'): 27 | # redirect browser - doing basically what apache does 28 | self.send_response(HTTPStatus.MOVED_PERMANENTLY) 29 | new_parts = (parts[0], parts[1], parts[2] + '/', 30 | parts[3], parts[4]) 31 | new_url = urllib.parse.urlunsplit(new_parts) 32 | self.send_header("Location", new_url) 33 | self.end_headers() 34 | return None 35 | for index in "index.html", "index.htm": 36 | index = os.path.join(path, index) 37 | if os.path.exists(index): 38 | path = index 39 | break 40 | else: 41 | return self.list_directory(path) 42 | ctype = self.guess_type(path) 43 | try: 44 | f = open(path, 'rb') 45 | except OSError: 46 | self.send_error(HTTPStatus.NOT_FOUND, "File not found") 47 | return None 48 | try: 49 | # Support modified-since header (if provided) 50 | fs = os.fstat(f.fileno()) 51 | modified = True 52 | if None != self.headers['If-Modified-Since']: 53 | modified = (int(fs.st_mtime) > time.mktime(parsedate(self.headers['If-Modified-Since']))) 54 | print('If-Modified-Since:', time.mktime(parsedate(self.headers['If-Modified-Since'])), int(fs.st_mtime), '(modified:', modified, ')' ) 55 | self.send_response(HTTPStatus.OK if modified else HTTPStatus.NOT_MODIFIED) 56 | self.send_header("Content-type", ctype) 57 | self.send_header("Content-Length", str(fs[6])) 58 | self.send_header("Last-Modified", self.date_time_string(fs.st_mtime)) 59 | self.send_header("Cache-Control", "max-age=0, must-revalidate") 60 | self.end_headers() 61 | return f if modified else None 62 | except: 63 | f.close() 64 | raise 65 | 66 | server_address = ('', 80) 67 | 
SimpleCacheHTTPRequestHandler.extensions_map['webm'] = 'video/webm' 68 | httpd = ThreadingServer(server_address, SimpleCacheHTTPRequestHandler) 69 | print('Server is running') 70 | httpd.serve_forever() 71 | -------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Blue Marble (GOES-R imagery) 4 | 8 | 61 | 100 | 101 | 102 |
103 | duration: 104 | latest 105 | day 106 | 48 hours 107 | week 108 | month 109 | year 110 |
111 |
brought to you by Ben Nitkin and hosted by Google
112 |
data via GOES-R/Google
113 |
114 | Loading... 115 | hide 116 | source
117 | 125 | 126 | 127 | -------------------------------------------------------------------------------- /static.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Blue Marble (GOES-R imagery) 4 | 7 | 17 | 55 | 56 | 57 |
58 | resolution: 59 | animated 60 | normal 61 | hi-res 62 |
63 |
brought to you by Ben Nitkin and hosted by Google
64 |
data via GOES-R/Google
65 |
66 | Loading... 67 | hide 68 | source
69 | 70 | 71 | -------------------------------------------------------------------------------- /status.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | [ 'a'$1 == 'awatch' ] && (watch -n15 'bash -c "~bnitkin/status.sh | cut -c -$COLUMNS"'; exit) 3 | 4 | # PID of each process and the last line of each logfile 5 | echo -e "videogen: $(pidof -x videogen.py || echo -e \\t) \t| $(echo /tmp/*webm)" 6 | echo -e "compositor:$(pidof -x GOES16.sci.py ) \t| $(tac ~bnitkin/logs/log.txt | grep -m1 '^Total')\t| 600s max" 7 | echo -e "server: $(pidof -x goesServer.py ) \t| $(tail -n1 ~bnitkin/logs/server.txt)" 8 | echo '' 9 | # List of processes that these guys spawn 10 | echo '%CPU %MEM CMD' 11 | ps -p "`pidof -x GOES16.sci.py nccopy videogen.py ffmpeg goesServer.py`" -o %cpu=,%mem=,cmd= | sort -k3 -t' ' 12 | echo '' 13 | echo '' 14 | free -t --mega 15 | echo '' 16 | df -h ~bnitkin 17 | -------------------------------------------------------------------------------- /videogen.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # Simple script to turn a bunch of photos into a video 3 | import os, sys, glob, subprocess 4 | from datetime import datetime 5 | 6 | #sys.argv[2] 7 | 8 | FRAMES = 15*24*2 # 2 days of data 9 | SRC = '/home/bnitkin/goesr/truecolor-thumb-*.jpg' 10 | DEST = '/home/bnitkin/goesr/video-{}.webm' 11 | FRAMES = {'day': 24*6-1, 12 | 'two-day': 24*6*2-1, 13 | 'week': 24*6*7-1, 14 | 'month': 31, 15 | 'year': 365} 16 | 17 | # frames per second (real time is four frames per hour) 18 | RATE = {'day': '12', 19 | 'two-day': '12', 20 | 'week': '24', 21 | 'month': '4', 22 | 'year': '8'} 23 | 24 | 25 | def main(): 26 | files = sorted(glob.glob(SRC)) 27 | crf = '31' 28 | if sys.argv[1] == 'week': crf = '45' 29 | if sys.argv[1] == 'year': crf = '37' 30 | 31 | # for month and year settings, use daily pictures instead of 15-minutes. 
32 | # This uses some cleverness to find the file closest to noon for each day. 33 | if sys.argv[1] in ['month', 'year']: 34 | # Build a sorted list of file mtime and path 35 | files = [(os.path.getmtime(f), f) for f in files] 36 | files_daily = [] 37 | # 1640Z is local noon for GOES-East. 38 | noon = datetime.utcnow().replace(hour=16, minute=40, second=0).timestamp() 39 | for index in range(FRAMES[sys.argv[1]]): 40 | ideal_time = noon - 3600*24*index 41 | file = min(files, key=lambda f: abs(f[0] - ideal_time))[1] 42 | # Insist that "noon" imagery be +/- an hour of noon. 43 | if abs(os.path.getmtime(file) - ideal_time) > 3600: continue 44 | files_daily.insert(0, file) 45 | files = files_daily 46 | else: 47 | files = files[-FRAMES[sys.argv[1]]:] 48 | 49 | print('Encoding the following', len(files), 'files:', files) 50 | 51 | tmpfile = '/tmp/video-{}.webm'.format(sys.argv[1]) 52 | if os.path.isfile(tmpfile): 53 | print("ERROR: output file {} already exists. Aborting.".format(tmpfile)) 54 | exit(1) 55 | open(tmpfile, 'w').close() # Touch tmpfile (ffmpeg takes a while to flush its output) 56 | 57 | ffmpeg = subprocess.Popen(('ffmpeg', 58 | '-framerate', RATE[sys.argv[1]], # Framerate from RATE 59 | '-y', # Overwrite output path 60 | '-f', 'image2pipe', '-i', '-', # Read images from stdin 61 | '-pix_fmt', 'yuv420p', # Force older pixel format to make Firefox happy. 62 | '-c:v', 'libvpx-vp9', '-an', 63 | # Settings to make it go a litte faster, but at a quality tradeoff. 64 | # '-cpu-used', '2', 65 | # '-speed', '3', 66 | '-crf', crf, '-b:v', '0', # Constant quality (31 is suggested for 1080p) 67 | tmpfile, 68 | # Uncomment the below to add a mp4 output. Should work on Safari, but doesn't... 
69 | # '-c:v', 'h264', '-c:a', 'aac', 70 | # '-pix_fmt', 'yuv420p', 71 | # '-preset', 'veryfast', 72 | # '-crf', '30', '-b:v', '0', # Constant quality (18-24 is suggested for mp4) 73 | # tmpfile.replace('webm', 'mp4') 74 | ), stdin=subprocess.PIPE) # Write to a tempfile 75 | 76 | for path in files: 77 | with open(path, 'rb') as image: 78 | ffmpeg.stdin.write(image.read()) 79 | ffmpeg.stdin.close() 80 | ffmpeg.wait() 81 | 82 | # Finally, move to the output area. 83 | # This ensures that the final move is atomic, and reduces web-frontend screwups. 84 | os.rename('/tmp/video-{}.webm'.format(sys.argv[1]), DEST.format(sys.argv[1])) 85 | # os.rename('/tmp/video-{}.mp4'.format(sys.argv[1]), DEST.format(sys.argv[1]).replace('webm', 'mp4')) 86 | 87 | main() 88 | --------------------------------------------------------------------------------